Searched refs:cache_ent (Results 1 – 5 of 5) sorted by relevance

/drivers/gpu/drm/virtio/
virtgpu_vq.c
698 struct virtio_gpu_drv_cap_cache *cache_ent; in virtio_gpu_cmd_capset_cb() local
701 list_for_each_entry(cache_ent, &vgdev->cap_cache, head) { in virtio_gpu_cmd_capset_cb()
702 if (cache_ent->version == le32_to_cpu(cmd->capset_version) && in virtio_gpu_cmd_capset_cb()
703 cache_ent->id == le32_to_cpu(cmd->capset_id)) { in virtio_gpu_cmd_capset_cb()
704 memcpy(cache_ent->caps_cache, resp->capset_data, in virtio_gpu_cmd_capset_cb()
705 cache_ent->size); in virtio_gpu_cmd_capset_cb()
708 atomic_set(&cache_ent->is_valid, 1); in virtio_gpu_cmd_capset_cb()
808 struct virtio_gpu_drv_cap_cache *cache_ent; in virtio_gpu_cmd_get_capset() local
820 cache_ent = kzalloc(sizeof(*cache_ent), GFP_KERNEL); in virtio_gpu_cmd_get_capset()
821 if (!cache_ent) in virtio_gpu_cmd_get_capset()
[all …]
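
Together these two virtgpu_vq.c sites form the producer half of the capset cache: virtio_gpu_cmd_get_capset() (line 808 on) allocates a virtio_gpu_drv_cap_cache entry and queues the query to the host, and virtio_gpu_cmd_capset_cb() (line 698 on) later matches the response against vgdev->cap_cache and publishes the payload. Below is a minimal sketch of that pattern, assuming a reduced entry struct and hypothetical helpers cap_cache_alloc()/cap_cache_fill(); list locking, which the driver does under a spinlock, is elided.

#include <linux/atomic.h>
#include <linux/list.h>
#include <linux/slab.h>
#include <linux/string.h>
#include <linux/types.h>

/* Reduced stand-in for struct virtio_gpu_drv_cap_cache. */
struct cap_ent {
        struct list_head head;
        void *caps_cache;       /* response payload lands here */
        u32 id;
        u32 version;
        u32 size;
        atomic_t is_valid;      /* 0 until the host has answered */
};

/* Request side (cf. virtio_gpu_cmd_get_capset): allocate the entry and
 * its buffer, park it on the cache list, then send the command. */
static struct cap_ent *cap_cache_alloc(struct list_head *cache,
                                       u32 id, u32 version, u32 size)
{
        struct cap_ent *ent = kzalloc(sizeof(*ent), GFP_KERNEL);

        if (!ent)
                return NULL;
        ent->caps_cache = kzalloc(size, GFP_KERNEL);
        if (!ent->caps_cache) {
                kfree(ent);
                return NULL;
        }
        ent->id = id;
        ent->version = version;
        ent->size = size;
        list_add_tail(&ent->head, cache);
        return ent;
}

/* Response side (cf. virtio_gpu_cmd_capset_cb): find the matching
 * entry, copy the payload in, and only then raise is_valid so waiters
 * never observe the flag before the data. */
static void cap_cache_fill(struct list_head *cache, u32 id, u32 version,
                           const void *data)
{
        struct cap_ent *cache_ent;

        list_for_each_entry(cache_ent, cache, head) {
                if (cache_ent->version == version && cache_ent->id == id) {
                        memcpy(cache_ent->caps_cache, data,
                               cache_ent->size);
                        smp_wmb();      /* copy before the flag */
                        atomic_set(&cache_ent->is_valid, 1);
                        break;
                }
        }
}
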
virtgpu_kms.c
266 struct virtio_gpu_drv_cap_cache *cache_ent, *tmp; in virtio_gpu_cleanup_cap_cache() local
268 list_for_each_entry_safe(cache_ent, tmp, &vgdev->cap_cache, head) { in virtio_gpu_cleanup_cap_cache()
269 kfree(cache_ent->caps_cache); in virtio_gpu_cleanup_cap_cache()
270 kfree(cache_ent); in virtio_gpu_cleanup_cap_cache()
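
The virtgpu_kms.c hit is the matching teardown. Each node is freed inside the loop, so the walk must use list_for_each_entry_safe(), which stashes the next node in tmp before the current one is released; the plain iterator would dereference freed memory. Sketched against the reduced struct above:

static void cap_cache_cleanup(struct list_head *cache)
{
        struct cap_ent *cache_ent, *tmp;

        /* _safe variant: 'tmp' keeps the traversal alive while the
         * current node is destroyed. */
        list_for_each_entry_safe(cache_ent, tmp, cache, head) {
                kfree(cache_ent->caps_cache);   /* payload buffer */
                kfree(cache_ent);               /* then the node */
        }
}
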
virtgpu_ioctl.c
555 struct virtio_gpu_drv_cap_cache *cache_ent; in virtio_gpu_get_caps_ioctl() local
584 list_for_each_entry(cache_ent, &vgdev->cap_cache, head) { in virtio_gpu_get_caps_ioctl()
585 if (cache_ent->id == args->cap_set_id && in virtio_gpu_get_caps_ioctl()
586 cache_ent->version == args->cap_set_ver) { in virtio_gpu_get_caps_ioctl()
595 &cache_ent); in virtio_gpu_get_caps_ioctl()
602 atomic_read(&cache_ent->is_valid), 5 * HZ); in virtio_gpu_get_caps_ioctl()
609 ptr = cache_ent->caps_cache; in virtio_gpu_get_caps_ioctl()
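
virtgpu_ioctl.c is the consumer: the ioctl first scans the list for an entry that already matches cap_set_id/cap_set_ver, otherwise issues a fresh query, then blocks until the response callback raises is_valid; the 5 * HZ at line 602 is a five-second timeout. A sketch of that wait, assuming the same reduced structs, an error convention of my choosing, and a wait queue resp_wq that the response path wakes (the real ioctl finishes with copy_to_user() of the cached buffer):

#include <linux/jiffies.h>
#include <linux/wait.h>

static int cap_cache_get(struct list_head *cache,
                         wait_queue_head_t *resp_wq,
                         u32 id, u32 version, u32 size, void **ptr)
{
        struct cap_ent *cache_ent = NULL, *ent;

        list_for_each_entry(ent, cache, head) {
                if (ent->id == id && ent->version == version) {
                        cache_ent = ent;        /* cache hit: reuse */
                        break;
                }
        }
        if (!cache_ent)
                cache_ent = cap_cache_alloc(cache, id, version, size);
        if (!cache_ent)
                return -ENOMEM;

        /* Sleep until the host's answer lands, at most five seconds. */
        if (!wait_event_timeout(*resp_wq,
                                atomic_read(&cache_ent->is_valid),
                                5 * HZ))
                return -EBUSY;

        smp_rmb();      /* pairs with smp_wmb() on the fill side */
        *ptr = cache_ent->caps_cache;
        return 0;
}
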
/drivers/infiniband/hw/mlx5/
mr.c
674 mr->mmkey.cache_ent = ent; in mlx5_mr_cache_alloc()
1366 if (!mr->mmkey.cache_ent) in can_use_umr_rereg_pas()
1375 return (1ULL << mr->mmkey.cache_ent->order) >= in can_use_umr_rereg_pas()
1615 if (mr->mmkey.cache_ent) { in mlx5_ib_dereg_mr()
1616 xa_lock_irq(&mr->mmkey.cache_ent->mkeys); in mlx5_ib_dereg_mr()
1617 mr->mmkey.cache_ent->in_use--; in mlx5_ib_dereg_mr()
1618 xa_unlock_irq(&mr->mmkey.cache_ent->mkeys); in mlx5_ib_dereg_mr()
1621 push_mkey(mr->mmkey.cache_ent, false, in mlx5_ib_dereg_mr()
1623 mr->mmkey.cache_ent = NULL; in mlx5_ib_dereg_mr()
1625 if (!mr->mmkey.cache_ent) { in mlx5_ib_dereg_mr()
[all …]
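
The mr.c hits trace the lifecycle of a cache-backed memory key: mlx5_mr_cache_alloc() (line 674) records which cache bucket the mkey came from, can_use_umr_rereg_pas() (line 1375) checks the bucket's order to see whether a re-registration still fits the entry's size class, and mlx5_ib_dereg_mr() (lines 1615 on) drops the bucket's in_use count under its mkeys xarray lock and tries to push the mkey back, destroying it only when that fails. A condensed sketch of the dereg branch; the structs are reduced, and revoke_mr()/push_mkey()/destroy_mkey() are placeholder signatures, not the driver's exact helpers:

#include <linux/types.h>
#include <linux/xarray.h>

struct cache_bucket {                   /* cf. struct mlx5_cache_ent */
        struct xarray mkeys;            /* parked, reusable mkeys */
        unsigned long in_use;
        unsigned int order;             /* size class of the bucket */
};

struct mr {                             /* cf. mr->mmkey */
        u32 key;
        struct cache_bucket *cache_ent; /* NULL if not cache-backed */
};

int revoke_mr(struct mr *mr);           /* placeholder helpers */
int push_mkey(struct cache_bucket *ent, bool limit_pendings, u32 key);
int destroy_mkey(struct mr *mr);

static int dereg_mr(struct mr *mr)
{
        struct cache_bucket *ent = mr->cache_ent;

        if (ent) {
                xa_lock_irq(&ent->mkeys);
                ent->in_use--;
                xa_unlock_irq(&ent->mkeys);

                /* Try to return the mkey to its bucket; if revoke or
                 * push fails, fall through and destroy it instead. */
                if (revoke_mr(mr) || push_mkey(ent, false, mr->key))
                        mr->cache_ent = NULL;
        }
        if (!mr->cache_ent)
                return destroy_mkey(mr);
        return 0;
}
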
mlx5_ib.h
633 struct mlx5_cache_ent *cache_ent; member
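
The header hit is the field every mr.c path above dereferences: the mkey carries a back-pointer to the bucket it was allocated from. An abridged sketch of the containing struct (member order and surrounding fields simplified; see mlx5_ib.h for the full definition):

struct mlx5_ib_mkey {
        u32 key;
        /* ... */
        struct mlx5_cache_ent *cache_ent;       /* NULL unless the mkey
                                                 * came from the cache */
};

The NULL/non-NULL state of this pointer is exactly the test mlx5_ib_dereg_mr() keys off at lines 1615 and 1625 above.
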