Commit 6846895f authored by Chris Wilson

drm/i915: Replace PIN_NONFAULT with calls to PIN_NOEVICT

When under severe stress for GTT mappable space, the LRU eviction model
falls off a cliff. We spend all our time scanning the much larger
non-mappable area searching for something within the mappable zone we can
evict. Turn this on its head by only using the full vma for the object if
it is already pinned in the mappable zone or there is sufficient *free*
space to accommodate it (prioritizing speedy reuse). If there is not,
immediately fall back to using small chunks (tilerow for GTT mmap, single
pages for pwrite/relocation) and using random eviction before doing a full
search.

Testcase: igt/gem_concurrent_blt
References: https://bugs.freedesktop.org/show_bug.cgi?id=110848
Signed-off-by: Chris Wilson <chris@chris-wilson.co.uk>
Reviewed-by: Matthew Auld <matthew.auld@intel.com>
Link: https://patchwork.freedesktop.org/patch/msgid/20190821123234.19194-1-chris@chris-wilson.co.uk
parent 78387745
...@@ -1035,8 +1035,8 @@ static void *reloc_iomap(struct drm_i915_gem_object *obj, ...@@ -1035,8 +1035,8 @@ static void *reloc_iomap(struct drm_i915_gem_object *obj,
vma = i915_gem_object_ggtt_pin(obj, NULL, 0, 0, vma = i915_gem_object_ggtt_pin(obj, NULL, 0, 0,
PIN_MAPPABLE | PIN_MAPPABLE |
PIN_NONBLOCK | PIN_NONBLOCK /* NOWARN */ |
PIN_NONFAULT); PIN_NOEVICT);
if (IS_ERR(vma)) { if (IS_ERR(vma)) {
memset(&cache->node, 0, sizeof(cache->node)); memset(&cache->node, 0, sizeof(cache->node));
err = drm_mm_insert_node_in_range err = drm_mm_insert_node_in_range
......
...@@ -264,15 +264,15 @@ vm_fault_t i915_gem_fault(struct vm_fault *vmf) ...@@ -264,15 +264,15 @@ vm_fault_t i915_gem_fault(struct vm_fault *vmf)
/* Now pin it into the GTT as needed */ /* Now pin it into the GTT as needed */
vma = i915_gem_object_ggtt_pin(obj, NULL, 0, 0, vma = i915_gem_object_ggtt_pin(obj, NULL, 0, 0,
PIN_MAPPABLE | PIN_MAPPABLE |
PIN_NONBLOCK | PIN_NONBLOCK /* NOWARN */ |
PIN_NONFAULT); PIN_NOSEARCH);
if (IS_ERR(vma)) { if (IS_ERR(vma)) {
/* Use a partial view if it is bigger than available space */ /* Use a partial view if it is bigger than available space */
struct i915_ggtt_view view = struct i915_ggtt_view view =
compute_partial_view(obj, page_offset, MIN_CHUNK_PAGES); compute_partial_view(obj, page_offset, MIN_CHUNK_PAGES);
unsigned int flags; unsigned int flags;
flags = PIN_MAPPABLE; flags = PIN_MAPPABLE | PIN_NOSEARCH;
if (view.type == I915_GGTT_VIEW_NORMAL) if (view.type == I915_GGTT_VIEW_NORMAL)
flags |= PIN_NONBLOCK; /* avoid warnings for pinned */ flags |= PIN_NONBLOCK; /* avoid warnings for pinned */
...@@ -282,7 +282,7 @@ vm_fault_t i915_gem_fault(struct vm_fault *vmf) ...@@ -282,7 +282,7 @@ vm_fault_t i915_gem_fault(struct vm_fault *vmf)
*/ */
vma = i915_gem_object_ggtt_pin(obj, &view, 0, 0, flags); vma = i915_gem_object_ggtt_pin(obj, &view, 0, 0, flags);
if (IS_ERR(vma) && !view.type) { if (IS_ERR(vma)) {
flags = PIN_MAPPABLE; flags = PIN_MAPPABLE;
view.type = I915_GGTT_VIEW_PARTIAL; view.type = I915_GGTT_VIEW_PARTIAL;
vma = i915_gem_object_ggtt_pin(obj, &view, 0, 0, flags); vma = i915_gem_object_ggtt_pin(obj, &view, 0, 0, flags);
......
...@@ -345,8 +345,8 @@ i915_gem_gtt_pread(struct drm_i915_gem_object *obj, ...@@ -345,8 +345,8 @@ i915_gem_gtt_pread(struct drm_i915_gem_object *obj,
wakeref = intel_runtime_pm_get(&i915->runtime_pm); wakeref = intel_runtime_pm_get(&i915->runtime_pm);
vma = i915_gem_object_ggtt_pin(obj, NULL, 0, 0, vma = i915_gem_object_ggtt_pin(obj, NULL, 0, 0,
PIN_MAPPABLE | PIN_MAPPABLE |
PIN_NONFAULT | PIN_NONBLOCK /* NOWARN */ |
PIN_NONBLOCK); PIN_NOEVICT);
if (!IS_ERR(vma)) { if (!IS_ERR(vma)) {
node.start = i915_ggtt_offset(vma); node.start = i915_ggtt_offset(vma);
node.allocated = false; node.allocated = false;
...@@ -559,8 +559,8 @@ i915_gem_gtt_pwrite_fast(struct drm_i915_gem_object *obj, ...@@ -559,8 +559,8 @@ i915_gem_gtt_pwrite_fast(struct drm_i915_gem_object *obj,
vma = i915_gem_object_ggtt_pin(obj, NULL, 0, 0, vma = i915_gem_object_ggtt_pin(obj, NULL, 0, 0,
PIN_MAPPABLE | PIN_MAPPABLE |
PIN_NONFAULT | PIN_NONBLOCK /* NOWARN */ |
PIN_NONBLOCK); PIN_NOEVICT);
if (!IS_ERR(vma)) { if (!IS_ERR(vma)) {
node.start = i915_ggtt_offset(vma); node.start = i915_ggtt_offset(vma);
node.allocated = false; node.allocated = false;
......
...@@ -61,9 +61,6 @@ mark_free(struct drm_mm_scan *scan, ...@@ -61,9 +61,6 @@ mark_free(struct drm_mm_scan *scan,
if (i915_vma_is_pinned(vma)) if (i915_vma_is_pinned(vma))
return false; return false;
if (flags & PIN_NONFAULT && i915_vma_has_userfault(vma))
return false;
list_add(&vma->evict_link, unwind); list_add(&vma->evict_link, unwind);
return drm_mm_scan_add_block(scan, &vma->node); return drm_mm_scan_add_block(scan, &vma->node);
} }
...@@ -330,11 +327,6 @@ int i915_gem_evict_for_node(struct i915_address_space *vm, ...@@ -330,11 +327,6 @@ int i915_gem_evict_for_node(struct i915_address_space *vm,
break; break;
} }
if (flags & PIN_NONFAULT && i915_vma_has_userfault(vma)) {
ret = -ENOSPC;
break;
}
/* Overlap of objects in the same batch? */ /* Overlap of objects in the same batch? */
if (i915_vma_is_pinned(vma)) { if (i915_vma_is_pinned(vma)) {
ret = -ENOSPC; ret = -ENOSPC;
......
...@@ -3769,7 +3769,8 @@ int i915_gem_gtt_insert(struct i915_address_space *vm, ...@@ -3769,7 +3769,8 @@ int i915_gem_gtt_insert(struct i915_address_space *vm,
if (flags & PIN_NOEVICT) if (flags & PIN_NOEVICT)
return -ENOSPC; return -ENOSPC;
/* No free space, pick a slot at random. /*
* No free space, pick a slot at random.
* *
* There is a pathological case here using a GTT shared between * There is a pathological case here using a GTT shared between
* mmap and GPU (i.e. ggtt/aliasing_ppgtt but not full-ppgtt): * mmap and GPU (i.e. ggtt/aliasing_ppgtt but not full-ppgtt):
...@@ -3797,6 +3798,9 @@ int i915_gem_gtt_insert(struct i915_address_space *vm, ...@@ -3797,6 +3798,9 @@ int i915_gem_gtt_insert(struct i915_address_space *vm,
if (err != -ENOSPC) if (err != -ENOSPC)
return err; return err;
if (flags & PIN_NOSEARCH)
return -ENOSPC;
/* Randomly selected placement is pinned, do a search */ /* Randomly selected placement is pinned, do a search */
err = i915_gem_evict_something(vm, size, alignment, color, err = i915_gem_evict_something(vm, size, alignment, color,
start, end, flags); start, end, flags);
......
...@@ -600,9 +600,9 @@ int i915_gem_gtt_insert(struct i915_address_space *vm, ...@@ -600,9 +600,9 @@ int i915_gem_gtt_insert(struct i915_address_space *vm,
u64 start, u64 end, unsigned int flags); u64 start, u64 end, unsigned int flags);
/* Flags used by pin/bind&friends. */ /* Flags used by pin/bind&friends. */
#define PIN_NONBLOCK BIT_ULL(0) #define PIN_NOEVICT BIT_ULL(0)
#define PIN_NONFAULT BIT_ULL(1) #define PIN_NOSEARCH BIT_ULL(1)
#define PIN_NOEVICT BIT_ULL(2) #define PIN_NONBLOCK BIT_ULL(2)
#define PIN_MAPPABLE BIT_ULL(3) #define PIN_MAPPABLE BIT_ULL(3)
#define PIN_ZONE_4G BIT_ULL(4) #define PIN_ZONE_4G BIT_ULL(4)
#define PIN_HIGH BIT_ULL(5) #define PIN_HIGH BIT_ULL(5)
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment