Commit f7ed28e1 authored by Gerd Hoffmann

drm/qxl: rename qxl_bo_kmap -> qxl_bo_vmap_locked

Append _locked to make clear that these functions should be called with
reserved bos only.  While at it, also rename kmap -> vmap.

No functional change.
Signed-off-by: Gerd Hoffmann <kraxel@redhat.com>
Acked-by: Thomas Zimmermann <tzimmermann@suse.de>
Link: http://patchwork.freedesktop.org/patch/msgid/20210217123213.2199186-6-kraxel@redhat.com
parent 19089b76
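
For context (not part of this patch): a minimal sketch of the calling convention the _locked suffix is meant to signal, assuming the driver's existing qxl_bo_reserve()/qxl_bo_unreserve() wrappers and the struct dma_buf_map type used elsewhere in the series; the helper name qxl_bo_fill_example and its includes are made up for illustration.

	/*
	 * Hedged illustration only: the caller is responsible for holding the
	 * bo reservation around the *_locked map/unmap calls.
	 */
	#include <linux/dma-buf-map.h>
	#include <linux/string.h>
	#include "qxl_object.h"	/* illustrative; declares the qxl_bo helpers */

	static int qxl_bo_fill_example(struct qxl_bo *bo, u8 value, size_t len)
	{
		struct dma_buf_map map;
		int ret;

		ret = qxl_bo_reserve(bo);		/* take the bo reservation */
		if (ret)
			return ret;

		ret = qxl_bo_vmap_locked(bo, &map);	/* valid only while reserved */
		if (ret == 0) {
			/* map.vaddr vs. map.vaddr_iomem depends on map.is_iomem */
			if (!map.is_iomem)
				memset(map.vaddr, value, len);
			qxl_bo_vunmap_locked(bo);
		}

		qxl_bo_unreserve(bo);			/* drop the reservation */
		return ret;
	}
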
@@ -600,7 +600,7 @@ static void qxl_cursor_atomic_update(struct drm_plane *plane,
 		user_bo = gem_to_qxl_bo(obj);
 
 		/* pinning is done in the prepare/cleanup framevbuffer */
-		ret = qxl_bo_kmap(user_bo, &user_map);
+		ret = qxl_bo_vmap_locked(user_bo, &user_map);
 		if (ret)
 			goto out_free_release;
 		user_ptr = user_map.vaddr; /* TODO: Use mapping abstraction properly */
@@ -619,7 +619,7 @@ static void qxl_cursor_atomic_update(struct drm_plane *plane,
 		if (ret)
 			goto out_unpin;
 
-		ret = qxl_bo_kmap(cursor_bo, &cursor_map);
+		ret = qxl_bo_vmap_locked(cursor_bo, &cursor_map);
 		if (ret)
 			goto out_backoff;
 		if (cursor_map.is_iomem) /* TODO: Use mapping abstraction properly */
@@ -638,8 +638,8 @@ static void qxl_cursor_atomic_update(struct drm_plane *plane,
 		cursor->chunk.prev_chunk = 0;
 		cursor->chunk.data_size = size;
 		memcpy(cursor->chunk.data, user_ptr, size);
-		qxl_bo_kunmap(cursor_bo);
-		qxl_bo_kunmap(user_bo);
+		qxl_bo_vunmap_locked(cursor_bo);
+		qxl_bo_vunmap_locked(user_bo);
 
 		cmd = (struct qxl_cursor_cmd *) qxl_release_map(qdev, release);
 		cmd->u.set.visible = 1;
@@ -681,7 +681,7 @@ static void qxl_cursor_atomic_update(struct drm_plane *plane,
 out_free_bo:
 	qxl_bo_unref(&cursor_bo);
 out_kunmap:
-	qxl_bo_kunmap(user_bo);
+	qxl_bo_vunmap_locked(user_bo);
 out_free_release:
 	qxl_release_free(qdev, release);
 	return;
@@ -1163,7 +1163,7 @@ int qxl_create_monitors_object(struct qxl_device *qdev)
 	if (ret)
 		return ret;
 
-	qxl_bo_kmap(qdev->monitors_config_bo, &map);
+	qxl_bo_vmap_locked(qdev->monitors_config_bo, &map);
 
 	qdev->monitors_config = qdev->monitors_config_bo->kptr;
 	qdev->ram_header->monitors_config =
@@ -1189,7 +1189,7 @@ int qxl_destroy_monitors_object(struct qxl_device *qdev)
 	qdev->monitors_config = NULL;
 	qdev->ram_header->monitors_config = 0;
 
-	qxl_bo_kunmap(qdev->monitors_config_bo);
+	qxl_bo_vunmap_locked(qdev->monitors_config_bo);
 	ret = qxl_bo_unpin(qdev->monitors_config_bo);
 	if (ret)
 		return ret;
@@ -48,7 +48,7 @@ static struct qxl_rect *drawable_set_clipping(struct qxl_device *qdev,
 	struct qxl_clip_rects *dev_clips;
 	int ret;
 
-	ret = qxl_bo_kmap(clips_bo, &map);
+	ret = qxl_bo_vmap_locked(clips_bo, &map);
 	if (ret)
 		return NULL;
 	dev_clips = map.vaddr; /* TODO: Use mapping abstraction properly */
@@ -202,7 +202,7 @@ void qxl_draw_dirty_fb(struct qxl_device *qdev,
 	if (ret)
 		goto out_release_backoff;
 
-	ret = qxl_bo_kmap(bo, &surface_map);
+	ret = qxl_bo_vmap_locked(bo, &surface_map);
 	if (ret)
 		goto out_release_backoff;
 	surface_base = surface_map.vaddr; /* TODO: Use mapping abstraction properly */
@@ -210,7 +210,7 @@ void qxl_draw_dirty_fb(struct qxl_device *qdev,
 	ret = qxl_image_init(qdev, release, dimage, surface_base,
 			     left - dumb_shadow_offset,
 			     top, width, height, depth, stride);
-	qxl_bo_kunmap(bo);
+	qxl_bo_vunmap_locked(bo);
 	if (ret)
 		goto out_release_backoff;
@@ -247,7 +247,7 @@ void qxl_draw_dirty_fb(struct qxl_device *qdev,
 		rects[i].top = clips_ptr->y1;
 		rects[i].bottom = clips_ptr->y2;
 	}
-	qxl_bo_kunmap(clips_bo);
+	qxl_bo_vunmap_locked(clips_bo);
 
 	qxl_release_fence_buffer_objects(release);
 	qxl_push_command_ring_release(qdev, release, QXL_CMD_DRAW, false);
@@ -186,7 +186,7 @@ qxl_image_init_helper(struct qxl_device *qdev,
 			}
 		}
 	}
-	qxl_bo_kunmap(chunk_bo);
+	qxl_bo_vunmap_locked(chunk_bo);
 
 	image_bo = dimage->bo;
 	ptr = qxl_bo_kmap_atomic_page(qdev, image_bo, 0);
@@ -155,7 +155,7 @@ int qxl_bo_create(struct qxl_device *qdev, unsigned long size,
 	return 0;
 }
 
-int qxl_bo_kmap(struct qxl_bo *bo, struct dma_buf_map *map)
+int qxl_bo_vmap_locked(struct qxl_bo *bo, struct dma_buf_map *map)
 {
 	int r;
@@ -203,7 +203,7 @@ void *qxl_bo_kmap_atomic_page(struct qxl_device *qdev,
 		return rptr;
 	}
 
-	ret = qxl_bo_kmap(bo, &bo_map);
+	ret = qxl_bo_vmap_locked(bo, &bo_map);
 	if (ret)
 		return NULL;
 	rptr = bo_map.vaddr; /* TODO: Use mapping abstraction properly */
@@ -212,7 +212,7 @@ void *qxl_bo_kmap_atomic_page(struct qxl_device *qdev,
 	return rptr;
 }
 
-void qxl_bo_kunmap(struct qxl_bo *bo)
+void qxl_bo_vunmap_locked(struct qxl_bo *bo)
 {
 	if (bo->kptr == NULL)
 		return;
@@ -233,7 +233,7 @@ void qxl_bo_kunmap_atomic_page(struct qxl_device *qdev,
 	io_mapping_unmap_atomic(pmap);
 	return;
 fallback:
-	qxl_bo_kunmap(bo);
+	qxl_bo_vunmap_locked(bo);
 }
 
 void qxl_bo_unref(struct qxl_bo **bo)
@@ -64,8 +64,8 @@ extern int qxl_bo_create(struct qxl_device *qdev,
 			 u32 priority,
 			 struct qxl_surface *surf,
 			 struct qxl_bo **bo_ptr);
-extern int qxl_bo_kmap(struct qxl_bo *bo, struct dma_buf_map *map);
-extern void qxl_bo_kunmap(struct qxl_bo *bo);
+int qxl_bo_vmap_locked(struct qxl_bo *bo, struct dma_buf_map *map);
+void qxl_bo_vunmap_locked(struct qxl_bo *bo);
 void *qxl_bo_kmap_atomic_page(struct qxl_device *qdev, struct qxl_bo *bo, int page_offset);
 void qxl_bo_kunmap_atomic_page(struct qxl_device *qdev, struct qxl_bo *bo, void *map);
 extern struct qxl_bo *qxl_bo_ref(struct qxl_bo *bo);
@@ -59,7 +59,7 @@ int qxl_gem_prime_vmap(struct drm_gem_object *obj, struct dma_buf_map *map)
 	struct qxl_bo *bo = gem_to_qxl_bo(obj);
 	int ret;
 
-	ret = qxl_bo_kmap(bo, map);
+	ret = qxl_bo_vmap_locked(bo, map);
 	if (ret < 0)
 		return ret;
@@ -71,7 +71,7 @@ void qxl_gem_prime_vunmap(struct drm_gem_object *obj,
 {
 	struct qxl_bo *bo = gem_to_qxl_bo(obj);
 
-	qxl_bo_kunmap(bo);
+	qxl_bo_vunmap_locked(bo);
 }
 
 int qxl_gem_prime_mmap(struct drm_gem_object *obj,