Commit 9333fe98 authored by Uladzislau Rezki's avatar Uladzislau Rezki Committed by Linus Torvalds

mm/vmalloc: add adjust_search_size parameter

Extend the find_vmap_lowest_match() function with one more parameter:
a boolean "adjust_search_size" variable, making it possible to control
the accuracy of the search for a block when a specific alignment is required.

With this patch, the search size is always adjusted, for performance
reasons, so that a request is served as fast as possible.

There is one exception, though: short ranges where the requested size
exactly matches the passed vstart/vend restriction together with a
specific alignment request.  In such a scenario an adjustment would not
lead to a successful allocation.

Link: https://lkml.kernel.org/r/20220119143540.601149-2-urezki@gmail.com
Signed-off-by: default avatarUladzislau Rezki <uladzislau.rezki@sony.com>
Signed-off-by: default avatarUladzislau Rezki (Sony) <urezki@gmail.com>
Cc: Christoph Hellwig <hch@lst.de>
Cc: Matthew Wilcox <willy@infradead.org>
Cc: Nicholas Piggin <npiggin@gmail.com>
Cc: Oleksiy Avramchenko <oleksiy.avramchenko@sonymobile.com>
Cc: Vasily Averin <vvs@virtuozzo.com>
Signed-off-by: default avatarAndrew Morton <akpm@linux-foundation.org>
Signed-off-by: default avatarLinus Torvalds <torvalds@linux-foundation.org>
parent 690467c8
...@@ -1189,22 +1189,28 @@ is_within_this_va(struct vmap_area *va, unsigned long size, ...@@ -1189,22 +1189,28 @@ is_within_this_va(struct vmap_area *va, unsigned long size,
/* /*
* Find the first free block(lowest start address) in the tree, * Find the first free block(lowest start address) in the tree,
* that will accomplish the request corresponding to passing * that will accomplish the request corresponding to passing
* parameters. * parameters. Please note, with an alignment bigger than PAGE_SIZE,
* a search length is adjusted to account for worst case alignment
* overhead.
*/ */
static __always_inline struct vmap_area * static __always_inline struct vmap_area *
find_vmap_lowest_match(unsigned long size, find_vmap_lowest_match(unsigned long size, unsigned long align,
unsigned long align, unsigned long vstart) unsigned long vstart, bool adjust_search_size)
{ {
struct vmap_area *va; struct vmap_area *va;
struct rb_node *node; struct rb_node *node;
unsigned long length;
/* Start from the root. */ /* Start from the root. */
node = free_vmap_area_root.rb_node; node = free_vmap_area_root.rb_node;
/* Adjust the search size for alignment overhead. */
length = adjust_search_size ? size + align - 1 : size;
while (node) { while (node) {
va = rb_entry(node, struct vmap_area, rb_node); va = rb_entry(node, struct vmap_area, rb_node);
if (get_subtree_max_size(node->rb_left) >= size && if (get_subtree_max_size(node->rb_left) >= length &&
vstart < va->va_start) { vstart < va->va_start) {
node = node->rb_left; node = node->rb_left;
} else { } else {
...@@ -1214,9 +1220,9 @@ find_vmap_lowest_match(unsigned long size, ...@@ -1214,9 +1220,9 @@ find_vmap_lowest_match(unsigned long size,
/* /*
* Does not make sense to go deeper towards the right * Does not make sense to go deeper towards the right
* sub-tree if it does not have a free block that is * sub-tree if it does not have a free block that is
* equal or bigger to the requested search size. * equal or bigger to the requested search length.
*/ */
if (get_subtree_max_size(node->rb_right) >= size) { if (get_subtree_max_size(node->rb_right) >= length) {
node = node->rb_right; node = node->rb_right;
continue; continue;
} }
...@@ -1232,7 +1238,7 @@ find_vmap_lowest_match(unsigned long size, ...@@ -1232,7 +1238,7 @@ find_vmap_lowest_match(unsigned long size,
if (is_within_this_va(va, size, align, vstart)) if (is_within_this_va(va, size, align, vstart))
return va; return va;
if (get_subtree_max_size(node->rb_right) >= size && if (get_subtree_max_size(node->rb_right) >= length &&
vstart <= va->va_start) { vstart <= va->va_start) {
/* /*
* Shift the vstart forward. Please note, we update it with * Shift the vstart forward. Please note, we update it with
...@@ -1280,7 +1286,7 @@ find_vmap_lowest_match_check(unsigned long size, unsigned long align) ...@@ -1280,7 +1286,7 @@ find_vmap_lowest_match_check(unsigned long size, unsigned long align)
get_random_bytes(&rnd, sizeof(rnd)); get_random_bytes(&rnd, sizeof(rnd));
vstart = VMALLOC_START + rnd; vstart = VMALLOC_START + rnd;
va_1 = find_vmap_lowest_match(size, align, vstart); va_1 = find_vmap_lowest_match(size, align, vstart, false);
va_2 = find_vmap_lowest_linear_match(size, align, vstart); va_2 = find_vmap_lowest_linear_match(size, align, vstart);
if (va_1 != va_2) if (va_1 != va_2)
...@@ -1431,12 +1437,25 @@ static __always_inline unsigned long ...@@ -1431,12 +1437,25 @@ static __always_inline unsigned long
__alloc_vmap_area(unsigned long size, unsigned long align, __alloc_vmap_area(unsigned long size, unsigned long align,
unsigned long vstart, unsigned long vend) unsigned long vstart, unsigned long vend)
{ {
bool adjust_search_size = true;
unsigned long nva_start_addr; unsigned long nva_start_addr;
struct vmap_area *va; struct vmap_area *va;
enum fit_type type; enum fit_type type;
int ret; int ret;
va = find_vmap_lowest_match(size, align, vstart); /*
* Do not adjust when:
* a) align <= PAGE_SIZE, because it does not make any sense.
* All blocks(their start addresses) are at least PAGE_SIZE
* aligned anyway;
* b) a short range where a requested size corresponds to exactly
* specified [vstart:vend] interval and an alignment > PAGE_SIZE.
* With adjusted search length an allocation would not succeed.
*/
if (align <= PAGE_SIZE || (align > PAGE_SIZE && (vend - vstart) == size))
adjust_search_size = false;
va = find_vmap_lowest_match(size, align, vstart, adjust_search_size);
if (unlikely(!va)) if (unlikely(!va))
return vend; return vend;
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment