Message ID | 1372375867-1003-5-git-send-email-ben@bwidawsk.net (mailing list archive) |
---|---|
State | New, archived |
On Thu, Jun 27, 2013 at 04:30:05PM -0700, Ben Widawsky wrote:
> From: Chris Wilson <chris@chris-wilson.co.uk>
>
> Clients like i915 need to segregate cache domains within the GTT which
> can lead to small amounts of fragmentation. By allocating the uncached
> buffers from the bottom and the cacheable buffers from the top, we can
> reduce the amount of wasted space and also optimize allocation of the
> mappable portion of the GTT to only those buffers that require CPU
> access through the GTT.
>
> v2 by Ben:
> Update callers in i915_gem_object_bind_to_gtt()
> Turn search flags and allocation flags into separate enums
> Make checkpatch happy where logical/easy
>
> Signed-off-by: Chris Wilson <chris@chris-wilson.co.uk>
> Signed-off-by: Ben Widawsky <ben@bwidawsk.net>

Since this is a core drm patch it must be cc'ed to dri-devel (and acked
by Dave) before I can merge it. Can you please resend?
-Daniel

> ---
>  drivers/gpu/drm/drm_mm.c        | 122 ++++++++++++++++++---------------
>  drivers/gpu/drm/i915/i915_gem.c |   4 +-
>  include/drm/drm_mm.h            | 148 ++++++++++++++++++++++++----------------
>  3 files changed, 161 insertions(+), 113 deletions(-)
On Sun, Jun 30, 2013 at 2:30 PM, Daniel Vetter <daniel@ffwll.ch> wrote:
> On Thu, Jun 27, 2013 at 04:30:05PM -0700, Ben Widawsky wrote:
>> From: Chris Wilson <chris@chris-wilson.co.uk>
>>
>> Clients like i915 need to segregate cache domains within the GTT which
>> can lead to small amounts of fragmentation. By allocating the uncached
>> buffers from the bottom and the cacheable buffers from the top, we can
>> reduce the amount of wasted space and also optimize allocation of the
>> mappable portion of the GTT to only those buffers that require CPU
>> access through the GTT.
>>
>> v2 by Ben:
>> Update callers in i915_gem_object_bind_to_gtt()
>> Turn search flags and allocation flags into separate enums
>> Make checkpatch happy where logical/easy
>>
>> Signed-off-by: Chris Wilson <chris@chris-wilson.co.uk>
>> Signed-off-by: Ben Widawsky <ben@bwidawsk.net>
>
> Since this is a core drm patch it must be cc'ed to dri-devel (and acked by
> Dave) before I can merge it. Can you please resend?

And the same review as for Chris' original patch still applies: best_match
is unused (and it's better that way, really), so it can be garbage
collected.
-Daniel
--
Daniel Vetter
Software Engineer, Intel Corporation
+41 (0) 79 365 57 48 - http://blog.ffwll.ch
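As a rough illustration of the API split discussed above, the following sketch shows how a driver could place cacheable buffers top-down and uncached, CPU-mapped buffers bottom-up, which is the usage the commit message describes. It follows the signatures and flag names exactly as posted in this patch (not necessarily what finally lands upstream); the wrapper function, its arguments and the cacheable switch are invented for the example.

/* Illustrative sketch only; follows the API as posted in this patch. */
#include <drm/drm_mm.h>

static int example_bind(struct drm_mm *mm, struct drm_mm_node *node,
                        unsigned long size, unsigned alignment,
                        unsigned long color, unsigned long gtt_end,
                        bool cacheable)
{
        if (cacheable)
                /* Cacheable buffers: walk the hole list backwards and
                 * place the node at the top of the hole it finds. */
                return drm_mm_insert_node_in_range_generic(mm, node,
                                size, alignment, color, 0, gtt_end,
                                DRM_MM_CREATE_TOP, DRM_MM_SEARCH_BELOW);

        /* Uncached, GTT-mapped buffers: keep them low in the range. */
        return drm_mm_insert_node_in_range_generic(mm, node,
                        size, alignment, color, 0, gtt_end,
                        DRM_MM_CREATE_DEFAULT, DRM_MM_SEARCH_DEFAULT);
}

The DRM_MM_TOPDOWN and DRM_MM_BOTTOMUP macros from the header expand to exactly these flag pairs, so each call could also pass a single macro in place of the two trailing arguments.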
diff --git a/drivers/gpu/drm/drm_mm.c b/drivers/gpu/drm/drm_mm.c
index 07cf99c..7095328 100644
--- a/drivers/gpu/drm/drm_mm.c
+++ b/drivers/gpu/drm/drm_mm.c
@@ -49,7 +49,7 @@

 #define MM_UNUSED_TARGET 4

-static struct drm_mm_node *drm_mm_kmalloc(struct drm_mm *mm, int atomic)
+static struct drm_mm_node *drm_mm_kmalloc(struct drm_mm *mm, bool atomic)
 {
         struct drm_mm_node *child;

@@ -105,7 +105,8 @@ EXPORT_SYMBOL(drm_mm_pre_get);
 static void drm_mm_insert_helper(struct drm_mm_node *hole_node,
                                  struct drm_mm_node *node,
                                  unsigned long size, unsigned alignment,
-                                 unsigned long color)
+                                 unsigned long color,
+                                 enum drm_mm_allocator_flags flags)
 {
         struct drm_mm *mm = hole_node->mm;
         unsigned long hole_start = drm_mm_hole_node_start(hole_node);
@@ -118,12 +119,22 @@ static void drm_mm_insert_helper(struct drm_mm_node *hole_node,
         if (mm->color_adjust)
                 mm->color_adjust(hole_node, color, &adj_start, &adj_end);

+        if (flags & DRM_MM_CREATE_TOP)
+                adj_start = adj_end - size;
+
         if (alignment) {
                 unsigned tmp = adj_start % alignment;
-                if (tmp)
-                        adj_start += alignment - tmp;
+                if (tmp) {
+                        if (flags & DRM_MM_CREATE_TOP)
+                                adj_start -= tmp;
+                        else
+                                adj_start += alignment - tmp;
+                }
         }

+        BUG_ON(adj_start < hole_start);
+        BUG_ON(adj_end > hole_end);
+
         if (adj_start == hole_start) {
                 hole_node->hole_follows = 0;
                 list_del(&hole_node->hole_stack);
@@ -150,7 +161,7 @@ static void drm_mm_insert_helper(struct drm_mm_node *hole_node,
 struct drm_mm_node *drm_mm_create_block(struct drm_mm *mm,
                                         unsigned long start,
                                         unsigned long size,
-                                        bool atomic)
+                                        enum drm_mm_allocator_flags flags)
 {
         struct drm_mm_node *hole, *node;
         unsigned long end = start + size;
@@ -161,7 +172,7 @@ struct drm_mm_node *drm_mm_create_block(struct drm_mm *mm,
                 if (hole_start > start || hole_end < end)
                         continue;

-                node = drm_mm_kmalloc(mm, atomic);
+                node = drm_mm_kmalloc(mm, flags & DRM_MM_CREATE_ATOMIC);
                 if (unlikely(node == NULL))
                         return NULL;

@@ -196,15 +207,15 @@ struct drm_mm_node *drm_mm_get_block_generic(struct drm_mm_node *hole_node,
                                              unsigned long size,
                                              unsigned alignment,
                                              unsigned long color,
-                                             int atomic)
+                                             enum drm_mm_allocator_flags flags)
 {
         struct drm_mm_node *node;

-        node = drm_mm_kmalloc(hole_node->mm, atomic);
+        node = drm_mm_kmalloc(hole_node->mm, flags & DRM_MM_CREATE_ATOMIC);
         if (unlikely(node == NULL))
                 return NULL;

-        drm_mm_insert_helper(hole_node, node, size, alignment, color);
+        drm_mm_insert_helper(hole_node, node, size, alignment, color, flags);

         return node;
 }
@@ -217,32 +228,28 @@ EXPORT_SYMBOL(drm_mm_get_block_generic);
  */
 int drm_mm_insert_node_generic(struct drm_mm *mm, struct drm_mm_node *node,
                                unsigned long size, unsigned alignment,
-                               unsigned long color)
+                               unsigned long color,
+                               enum drm_mm_allocator_flags aflags,
+                               enum drm_mm_search_flags sflags)
 {
         struct drm_mm_node *hole_node;

         hole_node = drm_mm_search_free_generic(mm, size, alignment,
-                                               color, 0);
+                                               color, sflags);
         if (!hole_node)
                 return -ENOSPC;

-        drm_mm_insert_helper(hole_node, node, size, alignment, color);
+        drm_mm_insert_helper(hole_node, node, size, alignment, color, aflags);
         return 0;
 }
 EXPORT_SYMBOL(drm_mm_insert_node_generic);

-int drm_mm_insert_node(struct drm_mm *mm, struct drm_mm_node *node,
-                       unsigned long size, unsigned alignment)
-{
-        return drm_mm_insert_node_generic(mm, node, size, alignment, 0);
-}
-EXPORT_SYMBOL(drm_mm_insert_node);
-
 static void drm_mm_insert_helper_range(struct drm_mm_node *hole_node,
                                        struct drm_mm_node *node,
                                        unsigned long size, unsigned alignment,
                                        unsigned long color,
-                                       unsigned long start, unsigned long end)
+                                       unsigned long start, unsigned long end,
+                                       enum drm_mm_search_flags flags)
 {
         struct drm_mm *mm = hole_node->mm;
         unsigned long hole_start = drm_mm_hole_node_start(hole_node);
@@ -257,13 +264,20 @@ static void drm_mm_insert_helper_range(struct drm_mm_node *hole_node,
         if (adj_end > end)
                 adj_end = end;

+        if (flags & DRM_MM_CREATE_TOP)
+                adj_start = adj_end - size;
+
         if (mm->color_adjust)
                 mm->color_adjust(hole_node, color, &adj_start, &adj_end);

         if (alignment) {
                 unsigned tmp = adj_start % alignment;
-                if (tmp)
-                        adj_start += alignment - tmp;
+                if (tmp) {
+                        if (flags & DRM_MM_CREATE_TOP)
+                                adj_start -= tmp;
+                        else
+                                adj_start += alignment - tmp;
+                }
         }

         if (adj_start == hole_start) {
@@ -280,6 +294,8 @@ static void drm_mm_insert_helper_range(struct drm_mm_node *hole_node,
         INIT_LIST_HEAD(&node->hole_stack);
         list_add(&node->node_list, &hole_node->node_list);

+        BUG_ON(node->start < start);
+        BUG_ON(node->start < adj_start);
         BUG_ON(node->start + node->size > adj_end);
         BUG_ON(node->start + node->size > end);

@@ -290,22 +306,23 @@ static void drm_mm_insert_helper_range(struct drm_mm_node *hole_node,
         }
 }

-struct drm_mm_node *drm_mm_get_block_range_generic(struct drm_mm_node *hole_node,
-                                                   unsigned long size,
-                                                   unsigned alignment,
-                                                   unsigned long color,
-                                                   unsigned long start,
-                                                   unsigned long end,
-                                                   int atomic)
+struct drm_mm_node *
+drm_mm_get_block_range_generic(struct drm_mm_node *hole_node,
+                               unsigned long size,
+                               unsigned alignment,
+                               unsigned long color,
+                               unsigned long start,
+                               unsigned long end,
+                               enum drm_mm_allocator_flags flags)
 {
         struct drm_mm_node *node;

-        node = drm_mm_kmalloc(hole_node->mm, atomic);
+        node = drm_mm_kmalloc(hole_node->mm, flags & DRM_MM_CREATE_ATOMIC);
         if (unlikely(node == NULL))
                 return NULL;

         drm_mm_insert_helper_range(hole_node, node, size, alignment, color,
-                                   start, end);
+                                   start, end, flags);

         return node;
 }
@@ -318,31 +335,25 @@ EXPORT_SYMBOL(drm_mm_get_block_range_generic);
  */
 int drm_mm_insert_node_in_range_generic(struct drm_mm *mm, struct drm_mm_node *node,
                                         unsigned long size, unsigned alignment, unsigned long color,
-                                        unsigned long start, unsigned long end)
+                                        unsigned long start, unsigned long end,
+                                        enum drm_mm_allocator_flags aflags,
+                                        enum drm_mm_search_flags sflags)
 {
         struct drm_mm_node *hole_node;

         hole_node = drm_mm_search_free_in_range_generic(mm,
                                                         size, alignment, color,
-                                                        start, end, 0);
+                                                        start, end, sflags);
         if (!hole_node)
                 return -ENOSPC;

         drm_mm_insert_helper_range(hole_node, node,
                                    size, alignment, color,
-                                   start, end);
+                                   start, end, aflags);
         return 0;
 }
 EXPORT_SYMBOL(drm_mm_insert_node_in_range_generic);

-int drm_mm_insert_node_in_range(struct drm_mm *mm, struct drm_mm_node *node,
-                                unsigned long size, unsigned alignment,
-                                unsigned long start, unsigned long end)
-{
-        return drm_mm_insert_node_in_range_generic(mm, node, size, alignment, 0, start, end);
-}
-EXPORT_SYMBOL(drm_mm_insert_node_in_range);
-
 /**
  * Remove a memory node from the allocator.
  */
@@ -418,7 +429,7 @@ struct drm_mm_node *drm_mm_search_free_generic(const struct drm_mm *mm,
                                                unsigned long size,
                                                unsigned alignment,
                                                unsigned long color,
-                                               bool best_match)
+                                               enum drm_mm_search_flags flags)
 {
         struct drm_mm_node *entry;
         struct drm_mm_node *best;
@@ -431,7 +442,8 @@ struct drm_mm_node *drm_mm_search_free_generic(const struct drm_mm *mm,
         best = NULL;
         best_size = ~0UL;

-        drm_mm_for_each_hole(entry, mm, adj_start, adj_end) {
+        __drm_mm_for_each_hole(entry, mm, adj_start, adj_end,
+                               flags & DRM_MM_SEARCH_BELOW) {
                 if (mm->color_adjust) {
                         mm->color_adjust(entry, color, &adj_start, &adj_end);
                         if (adj_end <= adj_start)
@@ -441,7 +453,7 @@ struct drm_mm_node *drm_mm_search_free_generic(const struct drm_mm *mm,
                 if (!check_free_hole(adj_start, adj_end, size, alignment))
                         continue;

-                if (!best_match)
+                if ((flags & DRM_MM_SEARCH_BEST) == 0)
                         return entry;

                 if (entry->size < best_size) {
@@ -454,13 +466,14 @@ struct drm_mm_node *drm_mm_search_free_generic(const struct drm_mm *mm,
 }
 EXPORT_SYMBOL(drm_mm_search_free_generic);

-struct drm_mm_node *drm_mm_search_free_in_range_generic(const struct drm_mm *mm,
-                                                        unsigned long size,
-                                                        unsigned alignment,
-                                                        unsigned long color,
-                                                        unsigned long start,
-                                                        unsigned long end,
-                                                        bool best_match)
+struct drm_mm_node *
+drm_mm_search_free_in_range_generic(const struct drm_mm *mm,
+                                    unsigned long size,
+                                    unsigned alignment,
+                                    unsigned long color,
+                                    unsigned long start,
+                                    unsigned long end,
+                                    enum drm_mm_search_flags flags)
 {
         struct drm_mm_node *entry;
         struct drm_mm_node *best;
@@ -473,7 +486,8 @@ struct drm_mm_node *drm_mm_search_free_in_range_generic(const struct drm_mm *mm,
         best = NULL;
         best_size = ~0UL;

-        drm_mm_for_each_hole(entry, mm, adj_start, adj_end) {
+        __drm_mm_for_each_hole(entry, mm, adj_start, adj_end,
+                               flags & DRM_MM_SEARCH_BELOW) {
                 if (adj_start < start)
                         adj_start = start;
                 if (adj_end > end)
@@ -488,7 +502,7 @@ struct drm_mm_node *drm_mm_search_free_in_range_generic(const struct drm_mm *mm,
                 if (!check_free_hole(adj_start, adj_end, size, alignment))
                         continue;

-                if (!best_match)
+                if ((flags & DRM_MM_SEARCH_BEST) == 0)
                         return entry;

                 if (entry->size < best_size) {
diff --git a/drivers/gpu/drm/i915/i915_gem.c b/drivers/gpu/drm/i915/i915_gem.c
index bbc3beb..6806bb9 100644
--- a/drivers/gpu/drm/i915/i915_gem.c
+++ b/drivers/gpu/drm/i915/i915_gem.c
@@ -3131,7 +3131,9 @@ i915_gem_object_bind_to_gtt(struct drm_i915_gem_object *obj,
 search_free:
         ret = drm_mm_insert_node_in_range_generic(&dev_priv->mm.gtt_space, node,
                                                   size, alignment,
-                                                  obj->cache_level, 0, gtt_max);
+                                                  obj->cache_level, 0, gtt_max,
+                                                  DRM_MM_CREATE_DEFAULT,
+                                                  DRM_MM_SEARCH_DEFAULT);
         if (ret) {
                 ret = i915_gem_evict_something(dev, size, alignment,
                                                obj->cache_level,
diff --git a/include/drm/drm_mm.h b/include/drm/drm_mm.h
index 88591ef..8935710 100644
--- a/include/drm/drm_mm.h
+++ b/include/drm/drm_mm.h
@@ -41,6 +41,21 @@
 #include <linux/seq_file.h>
 #endif

+enum drm_mm_allocator_flags {
+        DRM_MM_CREATE_DEFAULT = 0,
+        DRM_MM_CREATE_ATOMIC = 1<<0,
+        DRM_MM_CREATE_TOP = 1<<1,
+};
+
+enum drm_mm_search_flags {
+        DRM_MM_SEARCH_DEFAULT = 0,
+        DRM_MM_SEARCH_BEST = 1<<0,
+        DRM_MM_SEARCH_BELOW = 1<<1,
+};
+
+#define DRM_MM_BOTTOMUP DRM_MM_CREATE_DEFAULT, DRM_MM_SEARCH_DEFAULT
+#define DRM_MM_TOPDOWN DRM_MM_CREATE_TOP, DRM_MM_SEARCH_BELOW
+
 struct drm_mm_node {
         struct list_head node_list;
         struct list_head hole_stack;
@@ -135,26 +150,37 @@ static inline unsigned long drm_mm_hole_node_end(struct drm_mm_node *hole_node)
              1 : 0; \
              entry = list_entry(entry->hole_stack.next, struct drm_mm_node, hole_stack))

+#define __drm_mm_for_each_hole(entry, mm, hole_start, hole_end, backwards) \
+        for (entry = list_entry((backwards) ? (mm)->hole_stack.prev : (mm)->hole_stack.next, struct drm_mm_node, hole_stack); \
+             &entry->hole_stack != &(mm)->hole_stack ? \
+             hole_start = drm_mm_hole_node_start(entry), \
+             hole_end = drm_mm_hole_node_end(entry), \
+             1 : 0; \
+             entry = list_entry((backwards) ? entry->hole_stack.prev : entry->hole_stack.next, struct drm_mm_node, hole_stack))
+
 /*
  * Basic range manager support (drm_mm.c)
  */
-extern struct drm_mm_node *drm_mm_create_block(struct drm_mm *mm,
-                                               unsigned long start,
-                                               unsigned long size,
-                                               bool atomic);
-extern struct drm_mm_node *drm_mm_get_block_generic(struct drm_mm_node *node,
-                                                    unsigned long size,
-                                                    unsigned alignment,
-                                                    unsigned long color,
-                                                    int atomic);
-extern struct drm_mm_node *drm_mm_get_block_range_generic(
-                                                struct drm_mm_node *node,
-                                                unsigned long size,
-                                                unsigned alignment,
-                                                unsigned long color,
-                                                unsigned long start,
-                                                unsigned long end,
-                                                int atomic);
+extern struct drm_mm_node *
+drm_mm_create_block(struct drm_mm *mm,
+                    unsigned long start,
+                    unsigned long size,
+                    enum drm_mm_allocator_flags flags);
+extern struct drm_mm_node *
+drm_mm_get_block_generic(struct drm_mm_node *node,
+                         unsigned long size,
+                         unsigned alignment,
+                         unsigned long color,
+                         enum drm_mm_allocator_flags flags);
+extern struct drm_mm_node *
+drm_mm_get_block_range_generic(struct drm_mm_node *node,
+                               unsigned long size,
+                               unsigned alignment,
+                               unsigned long color,
+                               unsigned long start,
+                               unsigned long end,
+                               enum drm_mm_allocator_flags flags);
+
 static inline struct drm_mm_node *drm_mm_get_block(struct drm_mm_node *parent,
                                                    unsigned long size,
                                                    unsigned alignment)
@@ -165,7 +191,8 @@ static inline struct drm_mm_node *drm_mm_get_block_atomic(struct drm_mm_node *pa
                                                           unsigned long size,
                                                           unsigned alignment)
 {
-        return drm_mm_get_block_generic(parent, size, alignment, 0, 1);
+        return drm_mm_get_block_generic(parent, size, alignment, 0,
+                                        DRM_MM_CREATE_ATOMIC);
 }
 static inline struct drm_mm_node *drm_mm_get_block_range(
                                                 struct drm_mm_node *parent,
@@ -196,39 +223,41 @@ static inline struct drm_mm_node *drm_mm_get_block_atomic_range(
                                                 unsigned long end)
 {
         return drm_mm_get_block_range_generic(parent, size, alignment, 0,
-                                              start, end, 1);
+                                              start, end,
+                                              DRM_MM_CREATE_ATOMIC);
 }

-extern int drm_mm_insert_node(struct drm_mm *mm,
-                              struct drm_mm_node *node,
-                              unsigned long size,
-                              unsigned alignment);
-extern int drm_mm_insert_node_in_range(struct drm_mm *mm,
-                                       struct drm_mm_node *node,
-                                       unsigned long size,
-                                       unsigned alignment,
-                                       unsigned long start,
-                                       unsigned long end);
 extern int drm_mm_insert_node_generic(struct drm_mm *mm,
                                       struct drm_mm_node *node,
                                       unsigned long size,
                                       unsigned alignment,
-                                      unsigned long color);
-extern int drm_mm_insert_node_in_range_generic(struct drm_mm *mm,
-                                               struct drm_mm_node *node,
-                                               unsigned long size,
-                                               unsigned alignment,
-                                               unsigned long color,
-                                               unsigned long start,
-                                               unsigned long end);
+                                      unsigned long color,
+                                      enum drm_mm_allocator_flags aflags,
+                                      enum drm_mm_search_flags sflags);
+#define drm_mm_insert_node(mm, node, size, alignment) \
+        drm_mm_insert_node_generic(mm, node, size, alignment, 0, 0)
+extern int
+drm_mm_insert_node_in_range_generic(struct drm_mm *mm,
+                                    struct drm_mm_node *node,
+                                    unsigned long size,
+                                    unsigned alignment,
+                                    unsigned long color,
+                                    unsigned long start,
+                                    unsigned long end,
+                                    enum drm_mm_allocator_flags aflags,
+                                    enum drm_mm_search_flags sflags);
+#define drm_mm_insert_node_in_range(mm, node, size, alignment, start, end) \
+        drm_mm_insert_node_in_range_generic(mm, node, size, alignment, 0, start, end, 0)
 extern void drm_mm_put_block(struct drm_mm_node *cur);
 extern void drm_mm_remove_node(struct drm_mm_node *node);
 extern void drm_mm_replace_node(struct drm_mm_node *old, struct drm_mm_node *new);
-extern struct drm_mm_node *drm_mm_search_free_generic(const struct drm_mm *mm,
-                                                      unsigned long size,
-                                                      unsigned alignment,
-                                                      unsigned long color,
-                                                      bool best_match);
+
+extern struct drm_mm_node *
+drm_mm_search_free_generic(const struct drm_mm *mm,
+                           unsigned long size,
+                           unsigned alignment,
+                           unsigned long color,
+                           enum drm_mm_search_flags flags);
 extern struct drm_mm_node *drm_mm_search_free_in_range_generic(
                                                 const struct drm_mm *mm,
                                                 unsigned long size,
@@ -236,13 +265,15 @@ extern struct drm_mm_node *drm_mm_search_free_in_range_generic(
                                                 unsigned long color,
                                                 unsigned long start,
                                                 unsigned long end,
-                                                bool best_match);
-static inline struct drm_mm_node *drm_mm_search_free(const struct drm_mm *mm,
-                                                     unsigned long size,
-                                                     unsigned alignment,
-                                                     bool best_match)
+                                                enum drm_mm_search_flags flags);
+
+static inline struct drm_mm_node *
+drm_mm_search_free(const struct drm_mm *mm,
+                   unsigned long size,
+                   unsigned alignment,
+                   enum drm_mm_search_flags flags)
 {
-        return drm_mm_search_free_generic(mm,size, alignment, 0, best_match);
+        return drm_mm_search_free_generic(mm, size, alignment, 0, flags);
 }
 static inline struct drm_mm_node *drm_mm_search_free_in_range(
                                                 const struct drm_mm *mm,
@@ -250,18 +281,19 @@ static inline struct drm_mm_node *drm_mm_search_free_in_range(
                                                 unsigned alignment,
                                                 unsigned long start,
                                                 unsigned long end,
-                                                bool best_match)
+                                                enum drm_mm_search_flags flags)
 {
         return drm_mm_search_free_in_range_generic(mm, size, alignment, 0,
-                                                   start, end, best_match);
+                                                   start, end, flags);
 }
-static inline struct drm_mm_node *drm_mm_search_free_color(const struct drm_mm *mm,
-                                                           unsigned long size,
-                                                           unsigned alignment,
-                                                           unsigned long color,
-                                                           bool best_match)
+static inline struct drm_mm_node *
+drm_mm_search_free_color(const struct drm_mm *mm,
+                         unsigned long size,
+                         unsigned alignment,
+                         unsigned long color,
+                         enum drm_mm_search_flags flags)
 {
-        return drm_mm_search_free_generic(mm,size, alignment, color, best_match);
+        return drm_mm_search_free_generic(mm, size, alignment, color, flags);
 }
 static inline struct drm_mm_node *drm_mm_search_free_in_range_color(
                                                 const struct drm_mm *mm,
@@ -270,10 +302,10 @@ static inline struct drm_mm_node *drm_mm_search_free_in_range_color(
                                                 unsigned long color,
                                                 unsigned long start,
                                                 unsigned long end,
-                                                bool best_match)
+                                                enum drm_mm_search_flags flags)
 {
         return drm_mm_search_free_in_range_generic(mm, size, alignment, color,
-                                                   start, end, best_match);
+                                                   start, end, flags);
 }
 extern int drm_mm_init(struct drm_mm *mm,
                        unsigned long start,
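For reference, the placement arithmetic that drm_mm_insert_helper() gains here reduces to the following standalone sketch (plain userspace C rather than kernel code; the hole boundaries, size and alignment are invented). Bottom-up placement rounds the start of the hole up to the alignment, while DRM_MM_CREATE_TOP starts at adj_end - size and rounds down, so the node ends up flush against the top of the hole.

#include <stdio.h>

int main(void)
{
        /* A free hole [adj_start, adj_end) and an allocation request. */
        unsigned long adj_start = 0x1003, adj_end = 0x8f00;
        unsigned long size = 0x2000, alignment = 0x1000;
        unsigned long bottom = adj_start, top = adj_end - size, tmp;

        tmp = bottom % alignment;
        if (tmp)
                bottom += alignment - tmp;      /* round up (default) */

        tmp = top % alignment;
        if (tmp)
                top -= tmp;                     /* round down (CREATE_TOP) */

        printf("bottom-up: [0x%lx, 0x%lx)\n", bottom, bottom + size);
        printf("top-down:  [0x%lx, 0x%lx)\n", top, top + size);
        return 0;
}

With these numbers the node lands at [0x2000, 0x4000) bottom-up and at [0x6000, 0x8000) top-down, leaving the low part of the hole free for allocations that do need to sit at the bottom of the range.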