Commit 3089c6f2 authored by Ben Widawsky, committed by Daniel Vetter

drm/i915: make caching operate on all address spaces

For now, objects maintain the same cache level across all address spaces. This
limits the risk of bugs, as playing with cacheability in the different domains
can be very error prone.

In the future, it may be optimal to allow setting domains per VMA (i.e. per
binding of an object into an address space).
Signed-off-by: Ben Widawsky <ben@bwidawsk.net>
Signed-off-by: Daniel Vetter <daniel.vetter@ffwll.ch>
parent 28d6a7bf
@@ -3319,7 +3319,7 @@ int i915_gem_object_set_cache_level(struct drm_i915_gem_object *obj,
 {
 	struct drm_device *dev = obj->base.dev;
 	drm_i915_private_t *dev_priv = dev->dev_private;
-	struct i915_vma *vma = i915_gem_obj_to_vma(obj, &dev_priv->gtt.base);
+	struct i915_vma *vma;
 	int ret;
 
 	if (obj->cache_level == cache_level)
@@ -3330,13 +3330,17 @@ int i915_gem_object_set_cache_level(struct drm_i915_gem_object *obj,
 		return -EBUSY;
 	}
 
-	if (vma && !i915_gem_valid_gtt_space(dev, &vma->node, cache_level)) {
-		ret = i915_gem_object_unbind(obj);
-		if (ret)
-			return ret;
+	list_for_each_entry(vma, &obj->vma_list, vma_link) {
+		if (!i915_gem_valid_gtt_space(dev, &vma->node, cache_level)) {
+			ret = i915_gem_object_unbind(obj);
+			if (ret)
+				return ret;
+
+			break;
+		}
 	}
 
-	if (i915_gem_obj_ggtt_bound(obj)) {
+	if (i915_gem_obj_bound_any(obj)) {
 		ret = i915_gem_object_finish_gpu(obj);
 		if (ret)
 			return ret;
@@ -3358,8 +3362,6 @@ int i915_gem_object_set_cache_level(struct drm_i915_gem_object *obj,
 		if (obj->has_aliasing_ppgtt_mapping)
 			i915_ppgtt_bind_object(dev_priv->mm.aliasing_ppgtt,
 					       obj, cache_level);
-
-		i915_gem_obj_to_vma(obj, &dev_priv->gtt.base)->node.color = cache_level;
 	}
 
 	if (cache_level == I915_CACHE_NONE) {
@@ -3385,6 +3387,8 @@ int i915_gem_object_set_cache_level(struct drm_i915_gem_object *obj,
 						    old_write_domain);
 	}
 
+	list_for_each_entry(vma, &obj->vma_list, vma_link)
+		vma->node.color = cache_level;
 	obj->cache_level = cache_level;
 	i915_gem_verify_gtt(dev);
 	return 0;
...
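
Note: the essence of the change is that the chosen cache level is now applied to every
binding (VMA) of the object, not just its global-GTT binding. Below is a minimal,
self-contained user-space C sketch of that pattern; the types and names used here
(sketch_vma, sketch_object, node_color, sketch_set_cache_level) are simplified,
hypothetical stand-ins for the kernel structures, not the real i915 definitions.

#include <stdio.h>

/* Simplified stand-ins for the kernel structures; illustrative only. */
struct sketch_vma {
	int node_color;			/* mirrors vma->node.color (drm_mm colouring) */
	struct sketch_vma *next;	/* stand-in for the obj->vma_list linkage */
};

struct sketch_object {
	int cache_level;		/* object-wide cache level, as before */
	struct sketch_vma *vma_list;	/* one VMA per address space the object is bound into */
};

/* Mirrors the patched behaviour: one cache level is pushed to every VMA of the
 * object, instead of only the global-GTT one. */
static void sketch_set_cache_level(struct sketch_object *obj, int cache_level)
{
	struct sketch_vma *vma;

	for (vma = obj->vma_list; vma; vma = vma->next)
		vma->node_color = cache_level;

	obj->cache_level = cache_level;
}

int main(void)
{
	struct sketch_vma ppgtt_vma = { 0, NULL };
	struct sketch_vma ggtt_vma = { 0, &ppgtt_vma };
	struct sketch_object obj = { 0, &ggtt_vma };

	sketch_set_cache_level(&obj, 1 /* e.g. a cached level such as I915_CACHE_LLC */);
	printf("ggtt colour %d, ppgtt colour %d, object %d\n",
	       ggtt_vma.node_color, ppgtt_vma.node_color, obj.cache_level);
	return 0;
}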