[0/2] some cleanups related to RB_DECLARE_CALLBACKS_MAX

Message ID: 20190811184613.20463-1-urezki@gmail.com

Message

Uladzislau Rezki Aug. 11, 2019, 6:46 p.m. UTC
We recently got the RB_DECLARE_CALLBACKS_MAX template, which is meant to be
used when the augmented value is a scalar. The first patch simplifies the
*_compute_max() callback by using the max3() macro, which I think makes the
code more transparent. No functional changes.
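
Just to illustrate the idea (this is not the patch itself): the open-coded
"take the largest of the node's own value, the left subtree max and the right
subtree max" logic collapses into a single max3() call. The get_subtree_max()
helper below is hypothetical shorthand for "the child's cached RBAUGMENTED
value, or 0 if the child is absent", which works for the current users since
their augmented values are unsigned:

	max = max3(RBCOMPUTE(node),
		   get_subtree_max(node->RBFIELD.rb_left),
		   get_subtree_max(node->RBFIELD.rb_right));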

The second patch reuses the functionality generated by the
RB_DECLARE_CALLBACKS_MAX template to manage the augmented red-black tree in
vmalloc, instead of keeping our own copy of the same logic there. It just
gets rid of the duplication. No functional changes.

I also have an open question about validating the augmented tree, i.e.
checking under debug that the nodes' augmented values are maintained
correctly. Please have a look here: https://lkml.org/lkml/2019/7/29/304

Basically, we could add one more public function to the
RB_DECLARE_CALLBACKS_MAX template that checks a tree and its augmented nodes.
I see at least two users for it: vmalloc and lib/rbtree_test.c.
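
As an illustration only (not part of this series), such a template-generated
checker could look roughly like the sketch below. The _check_max name is made
up; RBNAME, RBSTRUCT and RBFIELD are the template's own parameters, and inside
the macro every line would of course need a '\' continuation:

	static inline bool RBNAME ## _check_max(struct rb_root *root)
	{
		struct rb_node *nd;

		for (nd = rb_first(root); nd; nd = rb_next(nd)) {
			RBSTRUCT *node = rb_entry(nd, RBSTRUCT, RBFIELD);

			/* Recompute the max and compare it with the cached value. */
			if (!RBNAME ## _compute_max(node, true))
				return false;
		}

		return true;
	}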

Any comments are appreciated.

Uladzislau Rezki (Sony) (2):
  augmented rbtree: use max3() in the *_compute_max() function
  mm/vmalloc: use generated callback to populate subtree_max_size

 include/linux/rbtree_augmented.h       | 40 +++++++++++++++++-----------------
 mm/vmalloc.c                           | 31 +-------------------------
 tools/include/linux/rbtree_augmented.h | 40 +++++++++++++++++-----------------
 3 files changed, 41 insertions(+), 70 deletions(-)

Comments

Michel Lespinasse Aug. 12, 2019, 7:12 a.m. UTC | #1
On Sun, Aug 11, 2019 at 11:46 AM Uladzislau Rezki (Sony) <urezki@gmail.com> wrote:
> I also have an open question about validating the augmented tree, i.e.
> checking under debug that the nodes' augmented values are maintained
> correctly. Please have a look here: https://lkml.org/lkml/2019/7/29/304
>
> Basically, we could add one more public function to the
> RB_DECLARE_CALLBACKS_MAX template that checks a tree and its augmented nodes.
> I see at least two users for it: vmalloc and lib/rbtree_test.c.

I think it would be sufficient to call RBCOMPUTE(node, true) on every
node and check the return value?

Something like the following (probably applicable in other files too):

---------------------------------- 8< ------------------------------------

augmented rbtree: use generated compute_max function for debug checks

In debug code, use the generated compute_max function instead of
reimplementing similar functionality in multiple places.

Signed-off-by: Michel Lespinasse <walken@google.com>
---
 lib/rbtree_test.c | 15 +-------------
 mm/mmap.c         | 26 +++--------------------
 mm/vmalloc.c      | 53 +++++++----------------------------------------
 3 files changed, 12 insertions(+), 82 deletions(-)

diff --git a/lib/rbtree_test.c b/lib/rbtree_test.c
index 41ae3c7570d3..a5a04e820f77 100644
--- a/lib/rbtree_test.c
+++ b/lib/rbtree_test.c
@@ -222,20 +222,7 @@ static void check_augmented(int nr_nodes)
 	check(nr_nodes);
 	for (rb = rb_first(&root.rb_root); rb; rb = rb_next(rb)) {
 		struct test_node *node = rb_entry(rb, struct test_node, rb);
-		u32 subtree, max = node->val;
-		if (node->rb.rb_left) {
-			subtree = rb_entry(node->rb.rb_left, struct test_node,
-					   rb)->augmented;
-			if (max < subtree)
-				max = subtree;
-		}
-		if (node->rb.rb_right) {
-			subtree = rb_entry(node->rb.rb_right, struct test_node,
-					   rb)->augmented;
-			if (max < subtree)
-				max = subtree;
-		}
-		WARN_ON_ONCE(node->augmented != max);
+		WARN_ON_ONCE(!augment_callbacks_compute_max(node, true));
 	}
 }
 
diff --git a/mm/mmap.c b/mm/mmap.c
index 24f0772d6afd..d6d23e6c2d10 100644
--- a/mm/mmap.c
+++ b/mm/mmap.c
@@ -311,24 +311,6 @@ static inline unsigned long vma_compute_gap(struct vm_area_struct *vma)
 }
 
 #ifdef CONFIG_DEBUG_VM_RB
-static unsigned long vma_compute_subtree_gap(struct vm_area_struct *vma)
-{
-	unsigned long max = vma_compute_gap(vma), subtree_gap;
-	if (vma->vm_rb.rb_left) {
-		subtree_gap = rb_entry(vma->vm_rb.rb_left,
-				struct vm_area_struct, vm_rb)->rb_subtree_gap;
-		if (subtree_gap > max)
-			max = subtree_gap;
-	}
-	if (vma->vm_rb.rb_right) {
-		subtree_gap = rb_entry(vma->vm_rb.rb_right,
-				struct vm_area_struct, vm_rb)->rb_subtree_gap;
-		if (subtree_gap > max)
-			max = subtree_gap;
-	}
-	return max;
-}
-
 static int browse_rb(struct mm_struct *mm)
 {
 	struct rb_root *root = &mm->mm_rb;
@@ -355,10 +337,8 @@ static int browse_rb(struct mm_struct *mm)
 			bug = 1;
 		}
 		spin_lock(&mm->page_table_lock);
-		if (vma->rb_subtree_gap != vma_compute_subtree_gap(vma)) {
-			pr_emerg("free gap %lx, correct %lx\n",
-			       vma->rb_subtree_gap,
-			       vma_compute_subtree_gap(vma));
+		if (!vma_gap_callbacks_compute_max(vma, true)) {
+			pr_emerg("wrong subtree gap in vma %p\n", vma);
 			bug = 1;
 		}
 		spin_unlock(&mm->page_table_lock);
@@ -385,7 +365,7 @@ static void validate_mm_rb(struct rb_root *root, struct vm_area_struct *ignore)
 		struct vm_area_struct *vma;
 		vma = rb_entry(nd, struct vm_area_struct, vm_rb);
 		VM_BUG_ON_VMA(vma != ignore &&
-			vma->rb_subtree_gap != vma_compute_subtree_gap(vma),
+			!vma_gap_callbacks_compute_max(vma, true),
 			vma);
 	}
 }
diff --git a/mm/vmalloc.c b/mm/vmalloc.c
index f7c61accb0e2..ea23ccaf70fc 100644
--- a/mm/vmalloc.c
+++ b/mm/vmalloc.c
@@ -553,48 +553,6 @@ unlink_va(struct vmap_area *va, struct rb_root *root)
 	RB_CLEAR_NODE(&va->rb_node);
 }
 
-#if DEBUG_AUGMENT_PROPAGATE_CHECK
-static void
-augment_tree_propagate_check(struct rb_node *n)
-{
-	struct vmap_area *va;
-	struct rb_node *node;
-	unsigned long size;
-	bool found = false;
-
-	if (n == NULL)
-		return;
-
-	va = rb_entry(n, struct vmap_area, rb_node);
-	size = va->subtree_max_size;
-	node = n;
-
-	while (node) {
-		va = rb_entry(node, struct vmap_area, rb_node);
-
-		if (get_subtree_max_size(node->rb_left) == size) {
-			node = node->rb_left;
-		} else {
-			if (va_size(va) == size) {
-				found = true;
-				break;
-			}
-
-			node = node->rb_right;
-		}
-	}
-
-	if (!found) {
-		va = rb_entry(n, struct vmap_area, rb_node);
-		pr_emerg("tree is corrupted: %lu, %lu\n",
-			va_size(va), va->subtree_max_size);
-	}
-
-	augment_tree_propagate_check(n->rb_left);
-	augment_tree_propagate_check(n->rb_right);
-}
-#endif
-
 /*
  * This function populates subtree_max_size from bottom to upper
  * levels starting from VA point. The propagation must be done
@@ -645,9 +603,14 @@ augment_tree_propagate_from(struct vmap_area *va)
 		node = rb_parent(&va->rb_node);
 	}
 
-#if DEBUG_AUGMENT_PROPAGATE_CHECK
-	augment_tree_propagate_check(free_vmap_area_root.rb_node);
-#endif
+	if (DEBUG_AUGMENT_PROPAGATE_CHECK) {
+		struct vmap_area *va;
+
+		list_for_each_entry(va, &free_vmap_area_list, list) {
+			WARN_ON(!free_vmap_area_rb_augment_cb_compute_max(
+					va, true));
+		}
+	}
 }
 
 static void
Uladzislau Rezki Aug. 13, 2019, 9:29 a.m. UTC | #2
> 
> I think it would be sufficient to call RBCOMPUTE(node, true) on every
> node and check the return value?
>
Yes, that is certainly enough. I was only thinking about making it public
because checking the cached maximums is generic for every user of the
RB_DECLARE_CALLBACKS_MAX template. Something like:

validate_rb_max_tree(root) {
    for (nd = rb_first(root); nd; nd = rb_next(nd)) {
        foo = rb_entry(nd, struct something, rb_field);
        WARN_ON(!*_compute_max(foo, true));
    }
}

and call this public function from debug code. But I do not have a strong
opinion here, and it is probably odd anyway. I am fine with your change.

There is a small comment below:

> 
> Something like the following (probably applicable in other files too):
> 
> ---------------------------------- 8< ------------------------------------
> 
> augmented rbtree: use generated compute_max function for debug checks
> 
> In debug code, use the generated compute_max function instead of
> reimplementing similar functionality in multiple places.
> 
> Signed-off-by: Michel Lespinasse <walken@google.com>
> ---
>  lib/rbtree_test.c | 15 +-------------
>  mm/mmap.c         | 26 +++--------------------
>  mm/vmalloc.c      | 53 +++++++----------------------------------------
>  3 files changed, 12 insertions(+), 82 deletions(-)
> 
> diff --git a/lib/rbtree_test.c b/lib/rbtree_test.c
> index 41ae3c7570d3..a5a04e820f77 100644
> --- a/lib/rbtree_test.c
> +++ b/lib/rbtree_test.c
> @@ -222,20 +222,7 @@ static void check_augmented(int nr_nodes)
>  	check(nr_nodes);
>  	for (rb = rb_first(&root.rb_root); rb; rb = rb_next(rb)) {
>  		struct test_node *node = rb_entry(rb, struct test_node, rb);
> -		u32 subtree, max = node->val;
> -		if (node->rb.rb_left) {
> -			subtree = rb_entry(node->rb.rb_left, struct test_node,
> -					   rb)->augmented;
> -			if (max < subtree)
> -				max = subtree;
> -		}
> -		if (node->rb.rb_right) {
> -			subtree = rb_entry(node->rb.rb_right, struct test_node,
> -					   rb)->augmented;
> -			if (max < subtree)
> -				max = subtree;
> -		}
> -		WARN_ON_ONCE(node->augmented != max);
> +		WARN_ON_ONCE(!augment_callbacks_compute_max(node, true));
>  	}
>  }
>  
> diff --git a/mm/mmap.c b/mm/mmap.c
> index 24f0772d6afd..d6d23e6c2d10 100644
> --- a/mm/mmap.c
> +++ b/mm/mmap.c
> @@ -311,24 +311,6 @@ static inline unsigned long vma_compute_gap(struct vm_area_struct *vma)
>  }
>  
>  #ifdef CONFIG_DEBUG_VM_RB
> -static unsigned long vma_compute_subtree_gap(struct vm_area_struct *vma)
> -{
> -	unsigned long max = vma_compute_gap(vma), subtree_gap;
> -	if (vma->vm_rb.rb_left) {
> -		subtree_gap = rb_entry(vma->vm_rb.rb_left,
> -				struct vm_area_struct, vm_rb)->rb_subtree_gap;
> -		if (subtree_gap > max)
> -			max = subtree_gap;
> -	}
> -	if (vma->vm_rb.rb_right) {
> -		subtree_gap = rb_entry(vma->vm_rb.rb_right,
> -				struct vm_area_struct, vm_rb)->rb_subtree_gap;
> -		if (subtree_gap > max)
> -			max = subtree_gap;
> -	}
> -	return max;
> -}
> -
>  static int browse_rb(struct mm_struct *mm)
>  {
>  	struct rb_root *root = &mm->mm_rb;
> @@ -355,10 +337,8 @@ static int browse_rb(struct mm_struct *mm)
>  			bug = 1;
>  		}
>  		spin_lock(&mm->page_table_lock);
> -		if (vma->rb_subtree_gap != vma_compute_subtree_gap(vma)) {
> -			pr_emerg("free gap %lx, correct %lx\n",
> -			       vma->rb_subtree_gap,
> -			       vma_compute_subtree_gap(vma));
> +		if (!vma_gap_callbacks_compute_max(vma, true)) {
> +			pr_emerg("wrong subtree gap in vma %p\n", vma);
>  			bug = 1;
>  		}
>  		spin_unlock(&mm->page_table_lock);
> @@ -385,7 +365,7 @@ static void validate_mm_rb(struct rb_root *root, struct vm_area_struct *ignore)
>  		struct vm_area_struct *vma;
>  		vma = rb_entry(nd, struct vm_area_struct, vm_rb);
>  		VM_BUG_ON_VMA(vma != ignore &&
> -			vma->rb_subtree_gap != vma_compute_subtree_gap(vma),
> +			!vma_gap_callbacks_compute_max(vma, true),
>  			vma);
>  	}
>  }
> diff --git a/mm/vmalloc.c b/mm/vmalloc.c
> index f7c61accb0e2..ea23ccaf70fc 100644
> --- a/mm/vmalloc.c
> +++ b/mm/vmalloc.c
> @@ -553,48 +553,6 @@ unlink_va(struct vmap_area *va, struct rb_root *root)
>  	RB_CLEAR_NODE(&va->rb_node);
>  }
>  
> -#if DEBUG_AUGMENT_PROPAGATE_CHECK
> -static void
> -augment_tree_propagate_check(struct rb_node *n)
> -{
> -	struct vmap_area *va;
> -	struct rb_node *node;
> -	unsigned long size;
> -	bool found = false;
> -
> -	if (n == NULL)
> -		return;
> -
> -	va = rb_entry(n, struct vmap_area, rb_node);
> -	size = va->subtree_max_size;
> -	node = n;
> -
> -	while (node) {
> -		va = rb_entry(node, struct vmap_area, rb_node);
> -
> -		if (get_subtree_max_size(node->rb_left) == size) {
> -			node = node->rb_left;
> -		} else {
> -			if (va_size(va) == size) {
> -				found = true;
> -				break;
> -			}
> -
> -			node = node->rb_right;
> -		}
> -	}
> -
> -	if (!found) {
> -		va = rb_entry(n, struct vmap_area, rb_node);
> -		pr_emerg("tree is corrupted: %lu, %lu\n",
> -			va_size(va), va->subtree_max_size);
> -	}
> -
> -	augment_tree_propagate_check(n->rb_left);
> -	augment_tree_propagate_check(n->rb_right);
> -}
> -#endif
> -
>  /*
>   * This function populates subtree_max_size from bottom to upper
>   * levels starting from VA point. The propagation must be done
> @@ -645,9 +603,14 @@ augment_tree_propagate_from(struct vmap_area *va)
>  		node = rb_parent(&va->rb_node);
>  	}
>  
> -#if DEBUG_AUGMENT_PROPAGATE_CHECK
> -	augment_tree_propagate_check(free_vmap_area_root.rb_node);
> -#endif
> +	if (DEBUG_AUGMENT_PROPAGATE_CHECK) {
> +		struct vmap_area *va;
> +
> +		list_for_each_entry(va, &free_vmap_area_list, list) {
> +			WARN_ON(!free_vmap_area_rb_augment_cb_compute_max(
> +					va, true));
> +		}
> +	}
>  }
>
The object being validated is the tree, so it makes sense to walk the tree
itself instead of iterating over the list.
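
Something along these lines is what I mean, only a sketch, reusing the names
from your diff (free_vmap_area_root, struct vmap_area and the generated
free_vmap_area_rb_augment_cb_compute_max()):

	if (DEBUG_AUGMENT_PROPAGATE_CHECK) {
		struct rb_node *nd;

		/* Walk the rb tree itself, since the tree is what is validated. */
		for (nd = rb_first(&free_vmap_area_root); nd; nd = rb_next(nd)) {
			struct vmap_area *va =
				rb_entry(nd, struct vmap_area, rb_node);

			WARN_ON(!free_vmap_area_rb_augment_cb_compute_max(va, true));
		}
	}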

Thank you!

--
Vlad Rezki