
Searched refs:bo (Results 1 – 25 of 97) sorted by relevance


/drivers/gpu/drm/radeon/
radeon_object.c
   42  static void radeon_bo_clear_surface_reg(struct radeon_bo *bo);
   49  void radeon_bo_clear_va(struct radeon_bo *bo) in radeon_bo_clear_va()
   53  list_for_each_entry_safe(bo_va, tmp, &bo->va, bo_list) { in radeon_bo_clear_va()
   55  radeon_vm_bo_rmv(bo->rdev, bo_va); in radeon_bo_clear_va()
   61  struct radeon_bo *bo; in radeon_ttm_bo_destroy()
   63  bo = container_of(tbo, struct radeon_bo, tbo); in radeon_ttm_bo_destroy()
   64  mutex_lock(&bo->rdev->gem.mutex); in radeon_ttm_bo_destroy()
   65  list_del_init(&bo->list); in radeon_ttm_bo_destroy()
   66  mutex_unlock(&bo->rdev->gem.mutex); in radeon_ttm_bo_destroy()
   67  radeon_bo_clear_surface_reg(bo); in radeon_ttm_bo_destroy()
[all …]
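
Note: the radeon_ttm_bo_destroy() hits above show the core TTM ownership idiom: the driver embeds a struct ttm_buffer_object inside its own struct radeon_bo and recovers the wrapper with container_of() when TTM calls back with only the embedded object. Below is a minimal self-contained C sketch of that recovery; the struct layout and names are illustrative stand-ins, not radeon's real definitions.

    #include <stddef.h>
    #include <stdio.h>

    /* Stand-in for the kernel's container_of(): step back from a pointer
     * to an embedded member to the structure that contains it. */
    #define container_of(ptr, type, member) \
            ((type *)((char *)(ptr) - offsetof(type, member)))

    struct ttm_buffer_object { unsigned long num_pages; };

    /* Illustrative wrapper modeled on struct radeon_bo. */
    struct my_bo {
        int surface_reg;
        struct ttm_buffer_object tbo;   /* embedded, not a pointer */
    };

    /* TTM-style destroy callback: only the embedded object is passed in. */
    static void my_bo_destroy(struct ttm_buffer_object *tbo)
    {
        struct my_bo *bo = container_of(tbo, struct my_bo, tbo);
        printf("destroying bo, surface_reg=%d\n", bo->surface_reg);
    }

    int main(void)
    {
        struct my_bo bo = { .surface_reg = 7, .tbo = { .num_pages = 4 } };
        my_bo_destroy(&bo.tbo);
        return 0;
    }
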
radeon_object.h
   55  int radeon_bo_reserve(struct radeon_bo *bo, bool no_intr);
   57  static inline void radeon_bo_unreserve(struct radeon_bo *bo) in radeon_bo_unreserve()
   59  ttm_bo_unreserve(&bo->tbo); in radeon_bo_unreserve()
   71  static inline u64 radeon_bo_gpu_offset(struct radeon_bo *bo) in radeon_bo_gpu_offset()
   73  return bo->tbo.offset; in radeon_bo_gpu_offset()
   76  static inline unsigned long radeon_bo_size(struct radeon_bo *bo) in radeon_bo_size()
   78  return bo->tbo.num_pages << PAGE_SHIFT; in radeon_bo_size()
   81  static inline bool radeon_bo_is_reserved(struct radeon_bo *bo) in radeon_bo_is_reserved()
   83  return ttm_bo_is_reserved(&bo->tbo); in radeon_bo_is_reserved()
   86  static inline unsigned radeon_bo_ngpu_pages(struct radeon_bo *bo) in radeon_bo_ngpu_pages()
[all …]
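
Note: the radeon_object.h hits are thin inline accessors over the embedded TTM object. radeon_bo_size() turns TTM's page count into bytes with num_pages << PAGE_SHIFT, and radeon_bo_ngpu_pages() presumably rescales those bytes to the GPU's page size. A sketch of the arithmetic, with PAGE_SHIFT and a 4 KiB GPU page size hard-coded here for illustration (the kernel provides PAGE_SHIFT; RADEON_GPU_PAGE_SIZE is assumed to be 4096):

    #include <stdio.h>

    #define PAGE_SHIFT 12           /* 4 KiB CPU pages, illustrative */
    #define GPU_PAGE_SIZE 4096u     /* stand-in for RADEON_GPU_PAGE_SIZE */

    /* pages -> bytes, as in radeon_bo_size() */
    static unsigned long bo_size(unsigned long num_pages)
    {
        return num_pages << PAGE_SHIFT;
    }

    /* bytes -> GPU pages, the likely shape of radeon_bo_ngpu_pages() */
    static unsigned bo_ngpu_pages(unsigned long num_pages)
    {
        return bo_size(num_pages) / GPU_PAGE_SIZE;
    }

    int main(void)
    {
        printf("16 CPU pages = %lu bytes = %u GPU pages\n",
               bo_size(16), bo_ngpu_pages(16));
        return 0;
    }
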
radeon_prime.c
   33  struct radeon_bo *bo = gem_to_radeon_bo(obj); in radeon_gem_prime_get_sg_table()
   34  int npages = bo->tbo.num_pages; in radeon_gem_prime_get_sg_table()
   36  return drm_prime_pages_to_sg(bo->tbo.ttm->pages, npages); in radeon_gem_prime_get_sg_table()
   41  struct radeon_bo *bo = gem_to_radeon_bo(obj); in radeon_gem_prime_vmap()
   44  ret = ttm_bo_kmap(&bo->tbo, 0, bo->tbo.num_pages, in radeon_gem_prime_vmap()
   45  &bo->dma_buf_vmap); in radeon_gem_prime_vmap()
   49  return bo->dma_buf_vmap.virtual; in radeon_gem_prime_vmap()
   54  struct radeon_bo *bo = gem_to_radeon_bo(obj); in radeon_gem_prime_vunmap()
   56  ttm_bo_kunmap(&bo->dma_buf_vmap); in radeon_gem_prime_vunmap()
   64  struct radeon_bo *bo; in radeon_gem_prime_import_sg_table()
[all …]
radeon_uvd.c
  333  static int radeon_uvd_cs_msg(struct radeon_cs_parser *p, struct radeon_bo *bo, in radeon_uvd_cs_msg()
  346  r = radeon_bo_kmap(bo, &ptr); in radeon_uvd_cs_msg()
  363  radeon_bo_kunmap(bo); in radeon_uvd_cs_msg()
  371  radeon_bo_kunmap(bo); in radeon_uvd_cs_msg()
  375  radeon_bo_kunmap(bo); in radeon_uvd_cs_msg()
  541  int ring, struct radeon_bo *bo, in radeon_uvd_send_msg()
  551  tv.bo = &bo->tbo; in radeon_uvd_send_msg()
  560  radeon_ttm_placement_from_domain(bo, RADEON_GEM_DOMAIN_VRAM); in radeon_uvd_send_msg()
  561  radeon_uvd_force_into_uvd_segment(bo); in radeon_uvd_send_msg()
  563  r = ttm_bo_validate(&bo->tbo, &bo->placement, true, false); in radeon_uvd_send_msg()
[all …]
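
Note: the radeon_uvd.c hits kmap the command-stream message once and kunmap it on each of the three return paths (lines 363, 371, 375). A small sketch of that map/parse/unmap shape; bo_kmap()/bo_kunmap() below are hypothetical stubs standing in for radeon_bo_kmap()/radeon_bo_kunmap():

    #include <stdio.h>

    static char backing[16];    /* pretend bo contents */
    static int bo_kmap(void **ptr) { *ptr = backing; return 0; }
    static void bo_kunmap(void) { }

    static int parse_msg(void)
    {
        void *ptr;
        int r = bo_kmap(&ptr);      /* map before touching contents */
        if (r)
            return r;               /* nothing mapped yet, nothing to undo */

        /* "validate" the message; -22 mirrors -EINVAL */
        r = ((char *)ptr)[0] ? -22 : 0;

        bo_kunmap();                /* unmap on every exit path */
        return r;
    }

    int main(void) { printf("parse_msg() = %d\n", parse_msg()); return 0; }
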
radeon_ttm.c
  174  static void radeon_evict_flags(struct ttm_buffer_object *bo, in radeon_evict_flags()
  180  if (!radeon_ttm_bo_is_radeon_bo(bo)) { in radeon_evict_flags()
  189  rbo = container_of(bo, struct radeon_bo, tbo); in radeon_evict_flags()
  190  switch (bo->mem.mem_type) { in radeon_evict_flags()
  204  static int radeon_verify_access(struct ttm_buffer_object *bo, struct file *filp) in radeon_verify_access()
  209  static void radeon_move_null(struct ttm_buffer_object *bo, in radeon_move_null()
  212  struct ttm_mem_reg *old_mem = &bo->mem; in radeon_move_null()
  219  static int radeon_move_blit(struct ttm_buffer_object *bo, in radeon_move_blit()
  229  rdev = radeon_get_rdev(bo->bdev); in radeon_move_blit()
  264  fence = bo->sync_obj; in radeon_move_blit()
[all …]
/drivers/gpu/drm/qxl/
qxl_object.c
   32  struct qxl_bo *bo; in qxl_ttm_bo_destroy()
   35  bo = container_of(tbo, struct qxl_bo, tbo); in qxl_ttm_bo_destroy()
   36  qdev = (struct qxl_device *)bo->gem_base.dev->dev_private; in qxl_ttm_bo_destroy()
   38  qxl_surface_evict(qdev, bo, false); in qxl_ttm_bo_destroy()
   39  qxl_fence_fini(&bo->fence); in qxl_ttm_bo_destroy()
   41  list_del_init(&bo->list); in qxl_ttm_bo_destroy()
   43  drm_gem_object_release(&bo->gem_base); in qxl_ttm_bo_destroy()
   44  kfree(bo); in qxl_ttm_bo_destroy()
   47  bool qxl_ttm_bo_is_qxl_bo(struct ttm_buffer_object *bo) in qxl_ttm_bo_is_qxl_bo()
   49  if (bo->destroy == &qxl_ttm_bo_destroy) in qxl_ttm_bo_is_qxl_bo()
[all …]
qxl_object.h
   30  static inline int qxl_bo_reserve(struct qxl_bo *bo, bool no_wait) in qxl_bo_reserve()
   34  r = ttm_bo_reserve(&bo->tbo, true, no_wait, false, 0); in qxl_bo_reserve()
   37  struct qxl_device *qdev = (struct qxl_device *)bo->gem_base.dev->dev_private; in qxl_bo_reserve()
   38  dev_err(qdev->dev, "%p reserve failed\n", bo); in qxl_bo_reserve()
   45  static inline void qxl_bo_unreserve(struct qxl_bo *bo) in qxl_bo_unreserve()
   47  ttm_bo_unreserve(&bo->tbo); in qxl_bo_unreserve()
   50  static inline u64 qxl_bo_gpu_offset(struct qxl_bo *bo) in qxl_bo_gpu_offset()
   52  return bo->tbo.offset; in qxl_bo_gpu_offset()
   55  static inline unsigned long qxl_bo_size(struct qxl_bo *bo) in qxl_bo_size()
   57  return bo->tbo.num_pages << PAGE_SHIFT; in qxl_bo_size()
[all …]
qxl_release.c
  103  struct qxl_bo *bo) in qxl_release_add_res()
  107  if (release->bos[i] == bo) in qxl_release_add_res()
  114  release->bos[release->bo_count++] = qxl_bo_ref(bo); in qxl_release_add_res()
  118  struct qxl_bo **bo) in qxl_release_bo_alloc()
  122  bo); in qxl_release_bo_alloc()
  154  struct qxl_bo *bo; in qxl_alloc_surface_release_reserved()
  159  bo = qxl_bo_ref(create_rel->bos[0]); in qxl_alloc_surface_release_reserved()
  163  qxl_release_add_res(qdev, *release, bo); in qxl_alloc_surface_release_reserved()
  176  qxl_bo_unref(&bo); in qxl_alloc_surface_release_reserved()
  188  struct qxl_bo *bo; in qxl_alloc_release_reserved()
[all …]
qxl_ttm.c
  111  struct ttm_buffer_object *bo; in qxl_ttm_fault()
  115  bo = (struct ttm_buffer_object *)vma->vm_private_data; in qxl_ttm_fault()
  116  if (bo == NULL) in qxl_ttm_fault()
  118  qdev = qxl_get_qdev(bo->bdev); in qxl_ttm_fault()
  193  static void qxl_evict_flags(struct ttm_buffer_object *bo, in qxl_evict_flags()
  199  if (!qxl_ttm_bo_is_qxl_bo(bo)) { in qxl_evict_flags()
  208  qbo = container_of(bo, struct qxl_bo, tbo); in qxl_evict_flags()
  213  static int qxl_verify_access(struct ttm_buffer_object *bo, struct file *filp) in qxl_verify_access()
  339  static void qxl_move_null(struct ttm_buffer_object *bo, in qxl_move_null()
  342  struct ttm_mem_reg *old_mem = &bo->mem; in qxl_move_null()
[all …]
/drivers/gpu/drm/ttm/
ttm_bo.c
   48  static int ttm_bo_setup_vm(struct ttm_buffer_object *bo);
   84  static void ttm_bo_mem_space_debug(struct ttm_buffer_object *bo, in ttm_bo_mem_space_debug()
   90  bo, bo->mem.num_pages, bo->mem.size >> 10, in ttm_bo_mem_space_debug()
   91  bo->mem.size >> 20); in ttm_bo_mem_space_debug()
   99  ttm_mem_type_debug(bo->bdev, mem_type); in ttm_bo_mem_space_debug()
  137  struct ttm_buffer_object *bo = in ttm_bo_release_list()
  139  struct ttm_bo_device *bdev = bo->bdev; in ttm_bo_release_list()
  140  size_t acc_size = bo->acc_size; in ttm_bo_release_list()
  142  BUG_ON(atomic_read(&bo->list_kref.refcount)); in ttm_bo_release_list()
  143  BUG_ON(atomic_read(&bo->kref.refcount)); in ttm_bo_release_list()
[all …]
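
Note: ttm_bo_release_list() runs once the last list reference is gone, and the BUG_ON() hits assert that both refcounts really are zero before the object is torn down. A userspace sketch of that last-put shape, with a plain int and assert() standing in for the kernel's atomic kref and BUG_ON():

    #include <assert.h>
    #include <stdio.h>
    #include <stdlib.h>

    struct obj { int kref; };       /* the kernel uses struct kref (atomic) */

    static void release(struct obj *o)
    {
        assert(o->kref == 0);       /* mirrors BUG_ON(atomic_read(...)) */
        printf("released\n");
        free(o);
    }

    static void put(struct obj *o)
    {
        if (--o->kref == 0)         /* kref_put() does this atomically */
            release(o);
    }

    int main(void)
    {
        struct obj *o = malloc(sizeof(*o));
        o->kref = 2;
        put(o);                     /* still referenced */
        put(o);                     /* last put triggers release() */
        return 0;
    }
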
ttm_bo_vm.c
   49  struct ttm_buffer_object *bo; in ttm_bo_vm_lookup_rb()
   53  bo = rb_entry(cur, struct ttm_buffer_object, vm_rb); in ttm_bo_vm_lookup_rb()
   54  cur_offset = bo->vm_node->start; in ttm_bo_vm_lookup_rb()
   57  best_bo = bo; in ttm_bo_vm_lookup_rb()
   76  struct ttm_buffer_object *bo = (struct ttm_buffer_object *) in ttm_bo_vm_fault()
   78  struct ttm_bo_device *bdev = bo->bdev; in ttm_bo_vm_fault()
   89  &bdev->man[bo->mem.mem_type]; in ttm_bo_vm_fault()
   97  ret = ttm_bo_reserve(bo, true, true, false, 0); in ttm_bo_vm_fault()
  105  ret = bdev->driver->fault_reserve_notify(bo); in ttm_bo_vm_fault()
  126  if (test_bit(TTM_BO_PRIV_FLAG_MOVING, &bo->priv_flags)) { in ttm_bo_vm_fault()
[all …]
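
Note: ttm_bo_vm_lookup_rb() walks an rb-tree of buffer objects keyed by their VM start offset, remembering the best candidate that could contain the faulting page; the fault handler then reserves the bo and asks the driver to make it fault-ready. A flat-array sketch of the selection rule (the kernel walks the tree instead of scanning, and the struct here is invented):

    #include <stdio.h>

    struct node { unsigned long start, num_pages; const char *name; };

    /* Largest start that does not exceed the faulting offset. */
    static const struct node *lookup(const struct node *v, int n,
                                     unsigned long off)
    {
        const struct node *best = NULL;
        for (int i = 0; i < n; i++)
            if (v[i].start <= off && (!best || v[i].start > best->start))
                best = &v[i];
        /* caller still checks off < best->start + best->num_pages */
        return best;
    }

    int main(void)
    {
        const struct node nodes[] = { {0, 16, "a"}, {16, 8, "b"}, {64, 4, "c"} };
        const struct node *hit = lookup(nodes, 3, 18);
        printf("offset 18 -> %s\n", hit ? hit->name : "none");
        return 0;
    }
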
ttm_bo_util.c
   40  void ttm_bo_free_old_node(struct ttm_buffer_object *bo) in ttm_bo_free_old_node()
   42  ttm_bo_mem_put(bo, &bo->mem); in ttm_bo_free_old_node()
   45  int ttm_bo_move_ttm(struct ttm_buffer_object *bo, in ttm_bo_move_ttm()
   49  struct ttm_tt *ttm = bo->ttm; in ttm_bo_move_ttm()
   50  struct ttm_mem_reg *old_mem = &bo->mem; in ttm_bo_move_ttm()
   55  ttm_bo_free_old_node(bo); in ttm_bo_move_ttm()
  102  struct ttm_buffer_object *bo; in ttm_mem_io_evict()
  107  bo = list_first_entry(&man->io_reserve_lru, in ttm_mem_io_evict()
  110  list_del_init(&bo->io_reserve_lru); in ttm_mem_io_evict()
  111  ttm_bo_unmap_virtual_locked(bo); in ttm_mem_io_evict()
[all …]
ttm_execbuf_util.c
   40  struct ttm_buffer_object *bo = entry->bo; in ttm_eu_backoff_reservation_locked()
   45  ttm_bo_add_to_lru(bo); in ttm_eu_backoff_reservation_locked()
   50  atomic_set(&bo->reserved, 0); in ttm_eu_backoff_reservation_locked()
   51  wake_up_all(&bo->event_queue); in ttm_eu_backoff_reservation_locked()
   60  struct ttm_buffer_object *bo = entry->bo; in ttm_eu_del_from_lru_locked()
   65  entry->put_count = ttm_bo_del_from_lru(bo); in ttm_eu_del_from_lru_locked()
   76  struct ttm_buffer_object *bo = entry->bo; in ttm_eu_list_ref_sub()
   79  ttm_bo_list_ref_sub(bo, entry->put_count, true); in ttm_eu_list_ref_sub()
   94  glob = entry->bo->glob; in ttm_eu_backoff_reservation()
  130  glob = entry->bo->glob; in ttm_eu_reserve_buffers()
[all …]
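
Note: ttm_eu_reserve_buffers() tries to reserve every bo on an execbuf validation list, and ttm_eu_backoff_reservation() is the undo path: each entry goes back on the LRU and its waiters are woken. A sketch of that all-or-nothing shape; the entry struct and the failure rule are invented for the example:

    #include <stdbool.h>
    #include <stdio.h>

    struct entry { const char *name; bool reserved; };

    static bool try_reserve(struct entry *e)
    {
        if (e->name[0] == 'x')          /* arbitrary failure rule */
            return false;
        e->reserved = true;
        return true;
    }

    static bool reserve_all(struct entry *v, int n)
    {
        for (int i = 0; i < n; i++) {
            if (!try_reserve(&v[i])) {
                /* back off: release everything taken so far */
                for (int j = 0; j < i; j++)
                    v[j].reserved = false;
                return false;
            }
        }
        return true;
    }

    int main(void)
    {
        struct entry v[] = { {"a", false}, {"b", false}, {"xc", false} };
        printf("reserve_all: %s\n", reserve_all(v, 3) ? "ok" : "backed off");
        return 0;
    }
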
/drivers/gpu/host1x/drm/
gem.c
   32  static inline struct tegra_bo *host1x_to_drm_bo(struct host1x_bo *bo) in host1x_to_drm_bo()
   34  return container_of(bo, struct tegra_bo, base); in host1x_to_drm_bo()
   37  static void tegra_bo_put(struct host1x_bo *bo) in tegra_bo_put()
   39  struct tegra_bo *obj = host1x_to_drm_bo(bo); in tegra_bo_put()
   47  static dma_addr_t tegra_bo_pin(struct host1x_bo *bo, struct sg_table **sgt) in tegra_bo_pin()
   49  struct tegra_bo *obj = host1x_to_drm_bo(bo); in tegra_bo_pin()
   54  static void tegra_bo_unpin(struct host1x_bo *bo, struct sg_table *sgt) in tegra_bo_unpin()
   58  static void *tegra_bo_mmap(struct host1x_bo *bo) in tegra_bo_mmap()
   60  struct tegra_bo *obj = host1x_to_drm_bo(bo); in tegra_bo_mmap()
   65  static void tegra_bo_munmap(struct host1x_bo *bo, void *addr) in tegra_bo_munmap()
[all …]
/drivers/gpu/host1x/
host1x_bo.h
   25  struct host1x_bo *(*get)(struct host1x_bo *bo);
   26  void (*put)(struct host1x_bo *bo);
   27  dma_addr_t (*pin)(struct host1x_bo *bo, struct sg_table **sgt);
   28  void (*unpin)(struct host1x_bo *bo, struct sg_table *sgt);
   29  void *(*mmap)(struct host1x_bo *bo);
   30  void (*munmap)(struct host1x_bo *bo, void *addr);
   31  void *(*kmap)(struct host1x_bo *bo, unsigned int pagenum);
   32  void (*kunmap)(struct host1x_bo *bo, unsigned int pagenum, void *addr);
   39  static inline void host1x_bo_init(struct host1x_bo *bo, in host1x_bo_init()
   42  bo->ops = ops; in host1x_bo_init()
[all …]
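
Note: host1x_bo is a tiny handle whose behavior lives entirely in the ops table above; host1x_bo_init() just wires the two together, and callers go through bo->ops. A compilable sketch of that vtable pattern with a do-nothing backing implementation (only mmap/munmap are modeled, and the bodies are fakes):

    #include <stdio.h>

    struct host1x_bo;

    struct host1x_bo_ops {
        void *(*mmap)(struct host1x_bo *bo);
        void (*munmap)(struct host1x_bo *bo, void *addr);
    };

    struct host1x_bo { const struct host1x_bo_ops *ops; };

    static void host1x_bo_init(struct host1x_bo *bo,
                               const struct host1x_bo_ops *ops)
    {
        bo->ops = ops;
    }

    /* Trivial backing implementation for the sketch. */
    static char backing[64];
    static void *dummy_mmap(struct host1x_bo *bo) { (void)bo; return backing; }
    static void dummy_munmap(struct host1x_bo *bo, void *addr)
    {
        (void)bo; (void)addr;
    }

    static const struct host1x_bo_ops dummy_ops = { dummy_mmap, dummy_munmap };

    int main(void)
    {
        struct host1x_bo bo;
        host1x_bo_init(&bo, &dummy_ops);
        void *p = bo.ops->mmap(&bo);    /* every access goes via the vtable */
        bo.ops->munmap(&bo, p);
        printf("mapped at %p\n", p);
        return 0;
    }
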
job.c
   97  void host1x_job_add_gather(struct host1x_job *job, struct host1x_bo *bo, in host1x_job_add_gather()
  103  cur_gather->bo = bo; in host1x_job_add_gather()
  151  if (patch != wait->bo) in do_waitchks()
  154  trace_host1x_syncpt_wait_check(wait->bo, wait->offset, in do_waitchks()
  167  wait->bo = NULL; in do_waitchks()
  193  job->unpins[job->num_unpins].bo = reloc->target; in pin_job()
  203  g->bo = host1x_bo_get(g->bo); in pin_job()
  204  if (!g->bo) in pin_job()
  207  phys_addr = host1x_bo_pin(g->bo, &sgt); in pin_job()
  212  job->unpins[job->num_unpins].bo = g->bo; in pin_job()
[all …]
/drivers/gpu/drm/ast/
ast_ttm.c
   97  struct ast_bo *bo; in ast_bo_ttm_destroy()
   99  bo = container_of(tbo, struct ast_bo, bo); in ast_bo_ttm_destroy()
  101  drm_gem_object_release(&bo->gem); in ast_bo_ttm_destroy()
  102  kfree(bo); in ast_bo_ttm_destroy()
  105  bool ast_ttm_bo_is_ast_bo(struct ttm_buffer_object *bo) in ast_ttm_bo_is_ast_bo()
  107  if (bo->destroy == &ast_bo_ttm_destroy) in ast_ttm_bo_is_ast_bo()
  138  ast_bo_evict_flags(struct ttm_buffer_object *bo, struct ttm_placement *pl) in ast_bo_evict_flags()
  140  struct ast_bo *astbo = ast_bo(bo); in ast_bo_evict_flags()
  142  if (!ast_ttm_bo_is_ast_bo(bo)) in ast_bo_evict_flags()
  149  static int ast_bo_verify_access(struct ttm_buffer_object *bo, struct file *filp) in ast_bo_verify_access()
[all …]
/drivers/gpu/drm/mgag200/
mgag200_ttm.c
   97  struct mgag200_bo *bo; in mgag200_bo_ttm_destroy()
   99  bo = container_of(tbo, struct mgag200_bo, bo); in mgag200_bo_ttm_destroy()
  101  drm_gem_object_release(&bo->gem); in mgag200_bo_ttm_destroy()
  102  kfree(bo); in mgag200_bo_ttm_destroy()
  105  bool mgag200_ttm_bo_is_mgag200_bo(struct ttm_buffer_object *bo) in mgag200_ttm_bo_is_mgag200_bo()
  107  if (bo->destroy == &mgag200_bo_ttm_destroy) in mgag200_ttm_bo_is_mgag200_bo()
  138  mgag200_bo_evict_flags(struct ttm_buffer_object *bo, struct ttm_placement *pl) in mgag200_bo_evict_flags()
  140  struct mgag200_bo *mgabo = mgag200_bo(bo); in mgag200_bo_evict_flags()
  142  if (!mgag200_ttm_bo_is_mgag200_bo(bo)) in mgag200_bo_evict_flags()
  149  static int mgag200_bo_verify_access(struct ttm_buffer_object *bo, struct file *filp) in mgag200_bo_verify_access()
[all …]
/drivers/gpu/drm/cirrus/
cirrus_ttm.c
   97  struct cirrus_bo *bo; in cirrus_bo_ttm_destroy()
   99  bo = container_of(tbo, struct cirrus_bo, bo); in cirrus_bo_ttm_destroy()
  101  drm_gem_object_release(&bo->gem); in cirrus_bo_ttm_destroy()
  102  kfree(bo); in cirrus_bo_ttm_destroy()
  105  bool cirrus_ttm_bo_is_cirrus_bo(struct ttm_buffer_object *bo) in cirrus_ttm_bo_is_cirrus_bo()
  107  if (bo->destroy == &cirrus_bo_ttm_destroy) in cirrus_ttm_bo_is_cirrus_bo()
  138  cirrus_bo_evict_flags(struct ttm_buffer_object *bo, struct ttm_placement *pl) in cirrus_bo_evict_flags()
  140  struct cirrus_bo *cirrusbo = cirrus_bo(bo); in cirrus_bo_evict_flags()
  142  if (!cirrus_ttm_bo_is_cirrus_bo(bo)) in cirrus_bo_evict_flags()
  149  static int cirrus_bo_verify_access(struct ttm_buffer_object *bo, struct file *filp) in cirrus_bo_verify_access()
[all …]
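
Note: ast, mgag200, and cirrus carry near-identical helpers, as the three matching hit lists above suggest: destroy releases the GEM object and frees the wrapper, and each *_ttm_bo_is_*_bo() identifies the driver's own bos by comparing the destroy function pointer. A sketch of that identification trick:

    #include <stdbool.h>
    #include <stdio.h>

    struct tbo { void (*destroy)(struct tbo *); };

    static void my_destroy(struct tbo *bo) { (void)bo; }
    static void other_destroy(struct tbo *bo) { (void)bo; }

    /* A bo is ours iff TTM would call our destroy callback on it. */
    static bool is_my_bo(struct tbo *bo)
    {
        return bo->destroy == my_destroy;
    }

    int main(void)
    {
        struct tbo mine = { my_destroy }, theirs = { other_destroy };
        printf("mine: %d, theirs: %d\n", is_my_bo(&mine), is_my_bo(&theirs));
        return 0;
    }
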
/drivers/gpu/drm/vmwgfx/
vmwgfx_dmabuf.c
   56  struct ttm_buffer_object *bo = &buf->base; in vmw_dmabuf_to_placement()
   65  ret = ttm_bo_reserve(bo, interruptible, false, false, 0); in vmw_dmabuf_to_placement()
   69  ret = ttm_bo_validate(bo, placement, interruptible, false); in vmw_dmabuf_to_placement()
   71  ttm_bo_unreserve(bo); in vmw_dmabuf_to_placement()
   99  struct ttm_buffer_object *bo = &buf->base; in vmw_dmabuf_to_vram_or_gmr()
  110  ret = ttm_bo_reserve(bo, interruptible, false, false, 0); in vmw_dmabuf_to_vram_or_gmr()
  126  ret = ttm_bo_validate(bo, placement, interruptible, false); in vmw_dmabuf_to_vram_or_gmr()
  141  ret = ttm_bo_validate(bo, placement, interruptible, false); in vmw_dmabuf_to_vram_or_gmr()
  144  ttm_bo_unreserve(bo); in vmw_dmabuf_to_vram_or_gmr()
  202  struct ttm_buffer_object *bo = &buf->base; in vmw_dmabuf_to_start_of_vram()
[all …]
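
Note: the vmwgfx hits trace the canonical TTM migration sequence: ttm_bo_reserve() to lock out concurrent movers, ttm_bo_validate() against the requested placement to trigger the actual move, then ttm_bo_unreserve() whether validation succeeded or not. A control-flow-only sketch; the three helpers below are fakes, not the TTM API:

    #include <stdio.h>

    static int reserve(void) { return 0; }             /* ttm_bo_reserve() */
    static int validate(const char *placement)         /* ttm_bo_validate() */
    {
        printf("migrating to %s\n", placement);
        return 0;
    }
    static void unreserve(void) { }                    /* ttm_bo_unreserve() */

    static int move_to(const char *placement)
    {
        int ret = reserve();
        if (ret)
            return ret;             /* not reserved, nothing to undo */
        ret = validate(placement);
        unreserve();                /* always drop the reservation */
        return ret;
    }

    int main(void) { return move_to("VRAM"); }
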
/drivers/gpu/drm/nouveau/
nouveau_bo.c
  143  nouveau_bo_del_ttm(struct ttm_buffer_object *bo) in nouveau_bo_del_ttm()
  145  struct nouveau_drm *drm = nouveau_bdev(bo->bdev); in nouveau_bo_del_ttm()
  147  struct nouveau_bo *nvbo = nouveau_bo(bo); in nouveau_bo_del_ttm()
  150  DRM_ERROR("bo %p still attached to GEM object\n", bo); in nouveau_bo_del_ttm()
  159  struct nouveau_drm *drm = nouveau_bdev(nvbo->bo.bdev); in nouveau_bo_fixup_align()
  212  nvbo->bo.bdev = &drm->ttm.bdev; in nouveau_bo_new()
  221  nvbo->bo.mem.num_pages = size >> PAGE_SHIFT; in nouveau_bo_new()
  227  ret = ttm_bo_init(&drm->ttm.bdev, &nvbo->bo, size, in nouveau_bo_new()
  256  struct nouveau_drm *drm = nouveau_bdev(nvbo->bo.bdev); in set_placement_range()
  262  nvbo->bo.mem.num_pages < vram_pages / 4) { in set_placement_range()
[all …]
nouveau_gem.c
   47  struct ttm_buffer_object *bo = &nvbo->bo; in nouveau_gem_object_del()
   59  drm_prime_gem_destroy(gem, nvbo->bo.sg); in nouveau_gem_object_del()
   61  ttm_bo_unref(&bo); in nouveau_gem_object_del()
   78  ret = ttm_bo_reserve(&nvbo->bo, false, false, false, 0); in nouveau_gem_object_open()
  100  ttm_bo_unreserve(&nvbo->bo); in nouveau_gem_object_open()
  115  ret = ttm_bo_reserve(&nvbo->bo, false, false, false, 0); in nouveau_gem_object_close()
  126  ttm_bo_unreserve(&nvbo->bo); in nouveau_gem_object_close()
  161  nvbo->gem = drm_gem_object_alloc(dev, nvbo->bo.mem.size); in nouveau_gem_new()
  167  nvbo->bo.persistent_swap_storage = nvbo->gem->filp; in nouveau_gem_new()
  180  if (nvbo->bo.mem.mem_type == TTM_PL_TT) in nouveau_gem_info()
[all …]
nv50_fence.c
   40  struct ttm_mem_reg *mem = &priv->bo->bo.mem; in nv50_fence_context_new()
   65  struct nouveau_bo *bo = nv50_display_crtc_sema(dev, i); in nv50_fence_context_new()
   72  .start = bo->bo.offset, in nv50_fence_context_new()
   73  .limit = bo->bo.offset + 0xfff, in nv50_fence_context_new()
  100  0, 0x0000, NULL, &priv->bo); in nv50_fence_create()
  102  ret = nouveau_bo_pin(priv->bo, TTM_PL_FLAG_VRAM); in nv50_fence_create()
  104  ret = nouveau_bo_map(priv->bo); in nv50_fence_create()
  106  nouveau_bo_unpin(priv->bo); in nv50_fence_create()
  109  nouveau_bo_ref(NULL, &priv->bo); in nv50_fence_create()
  117  nouveau_bo_wr32(priv->bo, 0x000, 0x00000000); in nv50_fence_create()
nv84_fence.c
  116  return nouveau_bo_rd32(priv->bo, fifo->chid * 16/4); in nv84_fence_read()
  128  struct nouveau_bo *bo = nv50_display_crtc_sema(dev, i); in nv84_fence_context_del()
  129  nouveau_bo_vma_del(bo, &fctx->dispc_vma[i]); in nv84_fence_context_del()
  132  nouveau_bo_vma_del(priv->bo, &fctx->vma_gart); in nv84_fence_context_del()
  133  nouveau_bo_vma_del(priv->bo, &fctx->vma); in nv84_fence_context_del()
  159  ret = nouveau_bo_vma_add(priv->bo, client->vm, &fctx->vma); in nv84_fence_context_new()
  167  struct nouveau_bo *bo = nv50_display_crtc_sema(chan->drm->dev, i); in nv84_fence_context_new()
  168  ret = nouveau_bo_vma_add(bo, client->vm, &fctx->dispc_vma[i]); in nv84_fence_context_new()
  171  nouveau_bo_wr32(priv->bo, fifo->chid * 16/4, 0x00000000); in nv84_fence_context_new()
  188  priv->suspend[i] = nouveau_bo_rd32(priv->bo, i*4); in nv84_fence_suspend()
[all …]
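
Note: nv84_fence keeps one shared bo with a 16-byte slot per channel; the hits read and write a channel's fence value at byte offset chid * 16, expressed as the word index chid * 16/4 because nouveau_bo_rd32()/nouveau_bo_wr32() address 32-bit words. A quick sketch of that slot arithmetic (names invented):

    #include <stdio.h>

    #define SLOT_BYTES 16   /* each channel owns a 16-byte slot */

    /* Word index a nouveau_bo_rd32()-style accessor would take. */
    static unsigned fence_word_index(unsigned chid)
    {
        return chid * SLOT_BYTES / 4;   /* the "chid * 16/4" in the hits */
    }

    int main(void)
    {
        for (unsigned chid = 0; chid < 4; chid++)
            printf("channel %u -> word %u (byte 0x%x)\n",
                   chid, fence_word_index(chid), fence_word_index(chid) * 4);
        return 0;
    }
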
/drivers/staging/rtl8192u/ieee80211/
aes.c
  127  #define f_rn(bo, bi, n, k) \
  128  bo[n] = ft_tab[0][byte(bi[n],0)] ^ \
  133  #define i_rn(bo, bi, n, k) \
  134  bo[n] = it_tab[0][byte(bi[n],0)] ^ \
  145  #define f_rl(bo, bi, n, k) \
  146  bo[n] = fl_tab[0][byte(bi[n],0)] ^ \
  151  #define i_rl(bo, bi, n, k) \
  152  bo[n] = il_tab[0][byte(bi[n],0)] ^ \
  329  #define f_nround(bo, bi, k) \
  330  f_rn(bo, bi, 0, k); \
[all …]
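
Note: this staging driver ships its own AES; the f_rn/i_rn macros compute one output word per round as four table lookups, one per input byte, with the (n + 1) & 3 style indexing rotating across the input words to implement ShiftRows, all XORed with a round-key word. A compilable sketch of that shape; byte() matches the usual kernel definition, but the tables are filled with a trivial placeholder pattern, so this shows the structure of a round, not real AES:

    #include <stdint.h>
    #include <stdio.h>

    /* Select byte n (0 = least significant) of a 32-bit word. */
    #define byte(x, n) (((x) >> (8 * (n))) & 0xff)

    static uint32_t ft_tab[4][256];     /* placeholder round tables */

    /* One forward-round output word, shaped like aes.c's f_rn(bo, bi, n, k). */
    #define f_rn(bo, bi, n, k)                                  \
        ((bo)[n] = ft_tab[0][byte((bi)[(n)], 0)] ^              \
                   ft_tab[1][byte((bi)[((n) + 1) & 3], 1)] ^    \
                   ft_tab[2][byte((bi)[((n) + 2) & 3], 2)] ^    \
                   ft_tab[3][byte((bi)[((n) + 3) & 3], 3)] ^ (k)[n])

    int main(void)
    {
        /* Trivial fill; real AES precomputes S-box/MixColumns tables. */
        for (int t = 0; t < 4; t++)
            for (int j = 0; j < 256; j++)
                ft_tab[t][j] = (uint32_t)j << (8 * t);

        uint32_t in[4] = { 0x01020304, 0, 0, 0 }, out[4], key[4] = { 0 };
        for (int n = 0; n < 4; n++)     /* f_nround applies f_rn for each n */
            f_rn(out, in, n, key);
        printf("%08x %08x %08x %08x\n",
               (unsigned)out[0], (unsigned)out[1],
               (unsigned)out[2], (unsigned)out[3]);
        return 0;
    }
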
