
Searched refs:ttm (Results 1 – 25 of 38) sorted by relevance

/openbsd-src/sys/dev/pci/drm/ttm/
ttm_tt.c
71 if (bo->ttm) in ttm_tt_create()
98 bo->ttm = bdev->funcs->ttm_tt_create(bo, page_flags); in ttm_tt_create()
99 if (unlikely(bo->ttm == NULL)) in ttm_tt_create()
102 WARN_ON(bo->ttm->page_flags & TTM_TT_FLAG_EXTERNAL_MAPPABLE && in ttm_tt_create()
103 !(bo->ttm->page_flags & TTM_TT_FLAG_EXTERNAL)); in ttm_tt_create()
111 static int ttm_tt_alloc_page_directory(struct ttm_tt *ttm) in ttm_tt_alloc_page_directory() argument
113 ttm->pages = kvcalloc(ttm->num_pages, sizeof(void*), GFP_KERNEL); in ttm_tt_alloc_page_directory()
114 if (!ttm->pages) in ttm_tt_alloc_page_directory()
116 ttm->orders = kvmalloc_array(ttm->num_pages, in ttm_tt_alloc_page_directory()
118 if (!ttm->orders) in ttm_tt_alloc_page_directory()
[all …]
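
An aside on the ttm_tt.c hits above: ttm_tt_alloc_page_directory() sizes one zeroed array of page pointers (plus an OpenBSD-specific orders array) by num_pages and unwinds on partial failure. A minimal userspace sketch of that pattern, with calloc()/free() standing in for kvcalloc()/kvfree() and stub types replacing the real TTM structs:

#include <stdlib.h>

struct page_stub;               /* stand-in for struct vm_page */

struct tt_stub {
	unsigned long num_pages;
	struct page_stub **pages;   /* the page directory */
	unsigned int *orders;       /* per-page allocation order (OpenBSD addition) */
};

static int tt_alloc_page_directory(struct tt_stub *tt)
{
	tt->pages = calloc(tt->num_pages, sizeof(*tt->pages));
	if (!tt->pages)
		return -1;          /* the kernel code returns -ENOMEM here */

	tt->orders = calloc(tt->num_pages, sizeof(*tt->orders));
	if (!tt->orders) {
		free(tt->pages);
		tt->pages = NULL;
		return -1;
	}
	return 0;
}
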
ttm_agp_backend.c
45 struct ttm_tt ttm; member
50 int ttm_agp_bind(struct ttm_tt *ttm, struct ttm_resource *bo_mem) in ttm_agp_bind() argument
55 struct ttm_agp_backend *agp_be = container_of(ttm, struct ttm_agp_backend, ttm); in ttm_agp_bind()
58 int ret, cached = ttm->caching == ttm_cached; in ttm_agp_bind()
64 mem = agp_allocate_memory(agp_be->bridge, ttm->num_pages, AGP_USER_MEMORY); in ttm_agp_bind()
69 for (i = 0; i < ttm->num_pages; i++) { in ttm_agp_bind()
70 struct vm_page *page = ttm->pages[i]; in ttm_agp_bind()
91 void ttm_agp_unbind(struct ttm_tt *ttm) in ttm_agp_unbind() argument
95 struct ttm_agp_backend *agp_be = container_of(ttm, struct ttm_agp_backend, ttm); in ttm_agp_unbind()
109 bool ttm_agp_is_bound(struct ttm_tt *ttm) in ttm_agp_is_bound() argument
[all …]
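
The ttm_agp_backend.c hits rely on embedding a struct ttm_tt inside the backend and recovering the wrapper with container_of(). A small self-contained sketch of that idiom, using trimmed stand-in structs rather than the real TTM definitions:

#include <stddef.h>

#define container_of(ptr, type, member) \
	((type *)((char *)(ptr) - offsetof(type, member)))

struct ttm_tt_stub {
	unsigned long num_pages;
};

struct agp_backend_stub {
	struct ttm_tt_stub ttm;     /* embedded member, as in ttm_agp_backend */
	void *bridge;               /* driver-private state */
};

/* Recover the wrapping backend from a pointer to its embedded ttm member. */
static struct agp_backend_stub *to_agp_backend(struct ttm_tt_stub *ttm)
{
	return container_of(ttm, struct agp_backend_stub, ttm);
}
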
ttm_bo_util.c
150 struct ttm_tt *ttm = bo->ttm; in ttm_bo_move_memcpy() local
165 if (ttm && ((ttm->page_flags & TTM_TT_FLAG_SWAPPED) || in ttm_bo_move_memcpy()
167 ret = ttm_tt_populate(bdev, ttm, ctx); in ttm_bo_move_memcpy()
174 dst_iter = ttm_kmap_iter_tt_init(&_dst_iter.tt, bo->ttm); in ttm_bo_move_memcpy()
180 src_iter = ttm_kmap_iter_tt_init(&_src_iter.tt, bo->ttm); in ttm_bo_move_memcpy()
186 clear = src_iter->ops->maps_tt && (!ttm || !ttm_tt_is_populated(ttm)); in ttm_bo_move_memcpy()
187 if (!(clear && ttm && !(ttm->page_flags & TTM_TT_FLAG_ZERO_ALLOC))) in ttm_bo_move_memcpy()
300 caching = bo->ttm->caching; in ttm_io_prot()
301 if (bo->ttm->page_flags & TTM_TT_FLAG_DECRYPTED) in ttm_io_prot()
356 struct ttm_tt *ttm = bo->ttm; in ttm_bo_kmap_ttm() local
[all …]
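
The ttm_bo_move_memcpy() hits at lines 186-187 decide whether the copy step runs at all: if the source maps a TT with no backing pages, the operation degenerates to a clear, and even the clear is skipped unless the TT is flagged for zero-filled allocation. A sketch of just that predicate, with illustrative flag values and stub types:

#include <stdbool.h>

#define TT_FLAG_ZERO_ALLOC (1u << 1)   /* stand-in for TTM_TT_FLAG_ZERO_ALLOC */

struct tt_stub {
	unsigned int page_flags;
	bool populated;
};

/* Line 186: clearing is needed when the source maps a TT with no backing pages. */
static bool needs_clear(const struct tt_stub *ttm, bool src_maps_tt)
{
	return src_maps_tt && (!ttm || !ttm->populated);
}

/*
 * Line 187: the copy-or-clear step is skipped only when it would be a clear of
 * a TT that is not required to be zero-filled.
 */
static bool should_run_copy_or_clear(const struct tt_stub *ttm, bool src_maps_tt)
{
	bool clear = needs_clear(ttm, src_maps_tt);

	return !(clear && ttm && !(ttm->page_flags & TT_FLAG_ZERO_ALLOC));
}
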
ttm_bo_vm.c
34 #include <drm/ttm/ttm_bo.h>
35 #include <drm/ttm/ttm_placement.h>
36 #include <drm/ttm/ttm_tt.h>
154 if (bo->ttm && (bo->ttm->page_flags & TTM_TT_FLAG_EXTERNAL)) { in ttm_bo_vm_reserve()
155 if (!(bo->ttm->page_flags & TTM_TT_FLAG_EXTERNAL_MAPPABLE)) { in ttm_bo_vm_reserve()
193 struct ttm_tt *ttm = NULL; in ttm_bo_vm_fault_reserved() local
228 ttm = bo->ttm; in ttm_bo_vm_fault_reserved()
229 err = ttm_tt_populate(bdev, bo->ttm, in ttm_bo_vm_fault_reserved()
490 struct ttm_tt *ttm = NULL; ttm_bo_vm_fault_reserved() local
[all...]
ttm_bo.c
142 ret = ttm_tt_populate(bo->bdev, bo->ttm, ctx); in ttm_bo_handle_move_mem()
348 (want_init_on_free() && (bo->ttm != NULL)) || in ttm_bo_release()
1109 } else if (bo->ttm) { in ttm_bo_unmap_virtual()
1110 for (i = 0; i < bo->ttm->num_pages; i++) { in ttm_bo_unmap_virtual()
1111 pg = bo->ttm->pages[i]; in ttm_bo_unmap_virtual()
1172 if (!bo->ttm || !ttm_tt_is_populated(bo->ttm) || in ttm_bo_swapout()
1173 bo->ttm->page_flags & TTM_TT_FLAG_EXTERNAL || in ttm_bo_swapout()
1174 bo->ttm->page_flags & TTM_TT_FLAG_SWAPPED || in ttm_bo_swapout()
1227 if (ttm_tt_is_populated(bo->ttm)) in ttm_bo_swapout()
1228 ret = ttm_tt_swapout(bo->bdev, bo->ttm, gfp_flags); in ttm_bo_swapout()
[all …]
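
The ttm_bo_swapout() hits at lines 1172-1174 show the start of the eligibility test: only a populated TT that is neither externally owned nor already swapped can be swapped out. A sketch limited to the conditions visible above (the real check continues past the truncated line), with stand-in flags and types:

#include <stdbool.h>

#define TT_FLAG_SWAPPED  (1u << 0)            /* illustrative flag values */
#define TT_FLAG_EXTERNAL (1u << 2)

struct tt_stub {
	unsigned int page_flags;
	bool populated;
};

/* Mirrors only the visible part of the test at lines 1172-1174. */
static bool swapout_candidate(const struct tt_stub *ttm)
{
	if (!ttm || !ttm->populated ||
	    (ttm->page_flags & TT_FLAG_EXTERNAL) ||
	    (ttm->page_flags & TT_FLAG_SWAPPED))
		return false;
	return true;
}
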
ttm_device.c
269 if (bo->ttm) in ttm_device_clear_lru_dma_mappings()
270 ttm_tt_unpopulate(bo->bdev, bo->ttm); in ttm_device_clear_lru_dma_mappings()
/openbsd-src/sys/dev/pci/drm/radeon/
radeon_ttm.c
44 #include <drm/ttm/ttm_bo.h>
45 #include <drm/ttm/ttm_placement.h>
46 #include <drm/ttm/ttm_range_manager.h>
47 #include <drm/ttm/ttm_tt.h>
63 static int radeon_ttm_tt_bind(struct ttm_device *bdev, struct ttm_tt *ttm,
65 static void radeon_ttm_tt_unbind(struct ttm_device *bdev, struct ttm_tt *ttm);
212 r = radeon_ttm_tt_bind(bo->bdev, bo->ttm, new_mem); in radeon_bo_move()
224 bo->ttm == NULL)) { in radeon_bo_move()
236 radeon_ttm_tt_unbind(bo->bdev, bo->ttm); in radeon_bo_move()
329 struct ttm_tt ttm; member
339 radeon_ttm_tt_pin_userptr(struct ttm_device * bdev,struct ttm_tt * ttm) radeon_ttm_tt_pin_userptr() argument
404 radeon_ttm_tt_unpin_userptr(struct ttm_device * bdev,struct ttm_tt * ttm) radeon_ttm_tt_unpin_userptr() argument
436 radeon_ttm_backend_is_bound(struct ttm_tt * ttm) radeon_ttm_backend_is_bound() argument
444 radeon_ttm_backend_bind(struct ttm_device * bdev,struct ttm_tt * ttm,struct ttm_resource * bo_mem) radeon_ttm_backend_bind() argument
479 radeon_ttm_backend_unbind(struct ttm_device * bdev,struct ttm_tt * ttm) radeon_ttm_backend_unbind() argument
495 radeon_ttm_backend_destroy(struct ttm_device * bdev,struct ttm_tt * ttm) radeon_ttm_backend_destroy() argument
538 radeon_ttm_tt_to_gtt(struct radeon_device * rdev,struct ttm_tt * ttm) radeon_ttm_tt_to_gtt() argument
551 radeon_ttm_tt_populate(struct ttm_device * bdev,struct ttm_tt * ttm,struct ttm_operation_ctx * ctx) radeon_ttm_tt_populate() argument
576 radeon_ttm_tt_unpopulate(struct ttm_device * bdev,struct ttm_tt * ttm) radeon_ttm_tt_unpopulate() argument
597 radeon_ttm_tt_set_userptr(struct radeon_device * rdev,struct ttm_tt * ttm,uint64_t addr,uint32_t flags) radeon_ttm_tt_set_userptr() argument
616 radeon_ttm_tt_is_bound(struct ttm_device * bdev,struct ttm_tt * ttm) radeon_ttm_tt_is_bound() argument
627 radeon_ttm_tt_bind(struct ttm_device * bdev,struct ttm_tt * ttm,struct ttm_resource * bo_mem) radeon_ttm_tt_bind() argument
645 radeon_ttm_tt_unbind(struct ttm_device * bdev,struct ttm_tt * ttm) radeon_ttm_tt_unbind() argument
659 radeon_ttm_tt_destroy(struct ttm_device * bdev,struct ttm_tt * ttm) radeon_ttm_tt_destroy() argument
673 radeon_ttm_tt_has_userptr(struct radeon_device * rdev,struct ttm_tt * ttm) radeon_ttm_tt_has_userptr() argument
684 radeon_ttm_tt_is_readonly(struct radeon_device * rdev,struct ttm_tt * ttm) radeon_ttm_tt_is_readonly() argument
[all...]
radeon_prime.c
41 return drm_prime_pages_to_sg(obj->dev, bo->tbo.ttm->pages, in radeon_gem_prime_get_sg_table()
42 bo->tbo.ttm->num_pages); in radeon_gem_prime_get_sg_table()
111 if (radeon_ttm_tt_has_userptr(bo->rdev, bo->tbo.ttm)) in radeon_gem_prime_export()
radeon_mn.c
57 if (!bo->tbo.ttm || !radeon_ttm_tt_is_bound(bo->tbo.bdev, bo->tbo.ttm)) in radeon_mn_invalidate()
radeon_gem.c
349 if (radeon_ttm_tt_has_userptr(rdev, bo->tbo.ttm)) in radeon_gem_object_mmap()
362 if (radeon_ttm_tt_has_userptr(rdev, bo->tbo.ttm)) in radeon_gem_object_mmap()
484 r = radeon_ttm_tt_set_userptr(rdev, bo->tbo.ttm, args->addr, args->flags); in radeon_gem_userptr_ioctl()
572 if (radeon_ttm_tt_has_userptr(robj->rdev, robj->tbo.ttm)) { in radeon_mode_dumb_mmap()
874 if (radeon_ttm_tt_has_userptr(robj->rdev, robj->tbo.ttm)) in radeon_gem_op_ioctl()
/openbsd-src/sys/dev/pci/drm/include/drm/ttm/
ttm_tt.h
163 int ttm_tt_init(struct ttm_tt *ttm, struct ttm_buffer_object *bo,
176 void ttm_tt_fini(struct ttm_tt *ttm);
186 void ttm_tt_destroy(struct ttm_device *bdev, struct ttm_tt *ttm);
195 int ttm_tt_swapin(struct ttm_tt *ttm);
196 int ttm_tt_swapout(struct ttm_device *bdev, struct ttm_tt *ttm,
208 int ttm_tt_populate(struct ttm_device *bdev, struct ttm_tt *ttm,
219 void ttm_tt_unpopulate(struct ttm_device *bdev, struct ttm_tt *ttm);
229 static inline void ttm_tt_mark_for_clear(struct ttm_tt *ttm) in ttm_tt_mark_for_clear() argument
231 ttm->page_flags |= TTM_TT_FLAG_ZERO_ALLOC; in ttm_tt_mark_for_clear()
257 int ttm_agp_bind(struct ttm_tt *ttm, struct ttm_resource *bo_mem);
[all …]
ttm_device.h
86 struct ttm_tt *ttm,
97 struct ttm_tt *ttm);
108 void (*ttm_tt_destroy)(struct ttm_device *bdev, struct ttm_tt *ttm);
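
ttm_device.h declares the per-driver hook table that ttm_tt.c dispatches through (bdev->funcs->ttm_tt_create(bo, page_flags) in the first result above). A trimmed sketch of that shape, with stand-in types and only two slots; the real struct ttm_device_funcs has more members than shown here:

#include <stddef.h>

struct bo_stub;
struct tt_stub;
struct dev_stub;

struct dev_funcs_stub {
	struct tt_stub *(*tt_create)(struct bo_stub *bo, unsigned int page_flags);
	void (*tt_destroy)(struct dev_stub *bdev, struct tt_stub *tt);
};

/* A driver provides concrete implementations... */
static struct tt_stub *mydrv_tt_create(struct bo_stub *bo, unsigned int page_flags)
{
	(void)bo; (void)page_flags;
	return NULL;                /* allocation elided in the sketch */
}

static void mydrv_tt_destroy(struct dev_stub *bdev, struct tt_stub *tt)
{
	(void)bdev; (void)tt;
}

/* ...and the core dispatches through the table, as ttm_tt_create() does. */
static const struct dev_funcs_stub mydrv_funcs = {
	.tt_create  = mydrv_tt_create,
	.tt_destroy = mydrv_tt_destroy,
};
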
/openbsd-src/sys/dev/pci/drm/i915/gem/
i915_gem_ttm.c
8 #include <drm/ttm/ttm_placement.h>
9 #include <drm/ttm/ttm_tt.h>
37 * @ttm: The base TTM page vector.
50 struct ttm_tt ttm; member
189 struct ttm_tt *ttm, in i915_ttm_tt_shmem_populate() argument
197 struct i915_ttm_tt *i915_tt = container_of(ttm, typeof(*i915_tt), ttm); in i915_ttm_tt_shmem_populate()
199 const size_t size = (size_t)ttm->num_pages << PAGE_SHIFT; in i915_ttm_tt_shmem_populate()
237 ttm->pages[i++] = page; in i915_ttm_tt_shmem_populate()
239 if (ttm in i915_ttm_tt_shmem_populate()
251 i915_ttm_tt_shmem_unpopulate(struct ttm_tt * ttm) i915_ttm_tt_shmem_unpopulate() argument
330 i915_ttm_tt_populate(struct ttm_device * bdev,struct ttm_tt * ttm,struct ttm_operation_ctx * ctx) i915_ttm_tt_populate() argument
341 i915_ttm_tt_unpopulate(struct ttm_device * bdev,struct ttm_tt * ttm) i915_ttm_tt_unpopulate() argument
360 i915_ttm_tt_destroy(struct ttm_device * bdev,struct ttm_tt * ttm) i915_ttm_tt_destroy() argument
547 i915_ttm_tt_get_st(struct ttm_tt * ttm) i915_ttm_tt_get_st() argument
[all...]
i915_gem_ttm_pm.c
24 if (obj->ttm.backup) { in i915_ttm_backup_free()
25 i915_gem_object_put(obj->ttm.backup); in i915_ttm_backup_free()
26 obj->ttm.backup = NULL; in i915_ttm_backup_free()
56 if (!i915_ttm_cpu_maps_iomem(bo->resource) || obj->ttm.backup) in i915_ttm_backup()
93 err = ttm_tt_populate(backup_bo->bdev, backup_bo->ttm, &ctx); in i915_ttm_backup()
106 obj->ttm.backup = backup; in i915_ttm_backup()
173 struct drm_i915_gem_object *backup = obj->ttm.backup; in i915_ttm_restore()
192 err = ttm_tt_populate(backup_bo->bdev, backup_bo->ttm, &ctx); in i915_ttm_restore()
199 obj->ttm.backup = NULL; in i915_ttm_restore()
i915_gem_ttm_move.c
53 struct ttm_tt *ttm) in i915_ttm_cache_level() argument
57 ttm->caching == ttm_cached) ? I915_CACHE_LLC : in i915_ttm_cache_level()
86 if (i915_ttm_cpu_maps_iomem(bo->resource) || bo->ttm->caching != ttm_cached) { in i915_ttm_adjust_domains_after_move()
125 bo->ttm); in i915_ttm_adjust_gem_after_move()
198 struct ttm_tt *src_ttm = bo->ttm; in i915_ttm_accel_move()
331 ttm_kmap_iter_tt_init(&arg->_src_iter.tt, bo->ttm) : in i915_ttm_memcpy_init()
333 &obj->ttm.cached_io_rsgt->table, in i915_ttm_memcpy_init()
582 struct ttm_tt *ttm = bo->ttm; in i915_ttm_move() local
629 if (ttm && (dst_man->use_tt || (ttm->page_flags & TTM_TT_FLAG_SWAPPED))) { in i915_ttm_move()
630 ret = ttm_tt_populate(bo->bdev, ttm, ctx); in i915_ttm_move()
[all …]
/openbsd-src/sys/dev/pci/drm/amd/amdgpu/
amdgpu_ttm.c
46 #include <drm/ttm/ttm_bo.h>
47 #include <drm/ttm/ttm_placement.h>
48 #include <drm/ttm/ttm_range_manager.h>
49 #include <drm/ttm/ttm_tt.h>
69 struct ttm_tt *ttm,
72 struct ttm_tt *ttm);
249 flags = amdgpu_ttm_tt_pte_flags(adev, bo->ttm, mem); in amdgpu_ttm_map_buffer()
258 dma_addr = &bo->ttm->dma_address[mm_cur->start >> PAGE_SHIFT]; in amdgpu_ttm_map_buffer()
479 r = amdgpu_ttm_backend_bind(bo->bdev, bo->ttm, new_mem); in amdgpu_bo_move()
488 bo->ttm in amdgpu_bo_move()
655 struct ttm_tt ttm; global() member
678 struct ttm_tt *ttm = bo->tbo.ttm; amdgpu_ttm_tt_get_user_pages() local
725 amdgpu_ttm_tt_discard_user_pages(struct ttm_tt * ttm,struct hmm_range * range) amdgpu_ttm_tt_discard_user_pages() argument
740 amdgpu_ttm_tt_get_user_pages_done(struct ttm_tt * ttm,struct hmm_range * range) amdgpu_ttm_tt_get_user_pages_done() argument
764 amdgpu_ttm_tt_set_user_pages(struct ttm_tt * ttm,struct vm_page ** pages) amdgpu_ttm_tt_set_user_pages() argument
778 amdgpu_ttm_tt_pin_userptr(struct ttm_device * bdev,struct ttm_tt * ttm) amdgpu_ttm_tt_pin_userptr() argument
819 amdgpu_ttm_tt_unpin_userptr(struct ttm_device * bdev,struct ttm_tt * ttm) amdgpu_ttm_tt_unpin_userptr() argument
847 amdgpu_ttm_gart_bind_gfx9_mqd(struct amdgpu_device * adev,struct ttm_tt * ttm,uint64_t flags) amdgpu_ttm_gart_bind_gfx9_mqd() argument
882 struct ttm_tt *ttm = tbo->ttm; amdgpu_ttm_gart_bind() local
904 amdgpu_ttm_backend_bind(struct ttm_device * bdev,struct ttm_tt * ttm,struct ttm_resource * bo_mem) amdgpu_ttm_backend_bind() argument
1048 amdgpu_ttm_backend_unbind(struct ttm_device * bdev,struct ttm_tt * ttm) amdgpu_ttm_backend_unbind() argument
1080 amdgpu_ttm_backend_destroy(struct ttm_device * bdev,struct ttm_tt * ttm) amdgpu_ttm_backend_destroy() argument
1139 amdgpu_ttm_tt_populate(struct ttm_device * bdev,struct ttm_tt * ttm,struct ttm_operation_ctx * ctx) amdgpu_ttm_tt_populate() argument
1182 amdgpu_ttm_tt_unpopulate(struct ttm_device * bdev,struct ttm_tt * ttm) amdgpu_ttm_tt_unpopulate() argument
1283 amdgpu_ttm_tt_get_usermm(struct ttm_tt * ttm) amdgpu_ttm_tt_get_usermm() argument
1306 amdgpu_ttm_tt_affect_userptr(struct ttm_tt * ttm,unsigned long start,unsigned long end,unsigned long * userptr) amdgpu_ttm_tt_affect_userptr() argument
1330 amdgpu_ttm_tt_is_userptr(struct ttm_tt * ttm) amdgpu_ttm_tt_is_userptr() argument
1343 amdgpu_ttm_tt_is_readonly(struct ttm_tt * ttm) amdgpu_ttm_tt_is_readonly() argument
1361 amdgpu_ttm_tt_pde_flags(struct ttm_tt * ttm,struct ttm_resource * mem) amdgpu_ttm_tt_pde_flags() argument
1393 amdgpu_ttm_tt_pte_flags(struct amdgpu_device * adev,struct ttm_tt * ttm,struct ttm_resource * mem) amdgpu_ttm_tt_pte_flags() argument
[all...]
amdgpu_ttm.h
175 void amdgpu_ttm_tt_discard_user_pages(struct ttm_tt *ttm,
177 bool amdgpu_ttm_tt_get_user_pages_done(struct ttm_tt *ttm,
186 static inline void amdgpu_ttm_tt_discard_user_pages(struct ttm_tt *ttm, in amdgpu_ttm_tt_discard_user_pages() argument
190 static inline bool amdgpu_ttm_tt_get_user_pages_done(struct ttm_tt *ttm, in amdgpu_ttm_tt_get_user_pages_done() argument
197 void amdgpu_ttm_tt_set_user_pages(struct ttm_tt *ttm, struct vm_page **pages);
202 bool amdgpu_ttm_tt_has_userptr(struct ttm_tt *ttm);
203 struct mm_struct *amdgpu_ttm_tt_get_usermm(struct ttm_tt *ttm);
204 bool amdgpu_ttm_tt_affect_userptr(struct ttm_tt *ttm, unsigned long start,
206 bool amdgpu_ttm_tt_userptr_invalidated(struct ttm_tt *ttm,
208 bool amdgpu_ttm_tt_is_userptr(struct ttm_tt *ttm);
[all …]
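
amdgpu_ttm.h pairs real prototypes with static inline stubs for the userptr helpers, so callers compile whether or not the feature is built in. A sketch of that declare-or-stub pattern; the guard macro FEATURE_USERPTR and the stub types are assumptions, since the actual Kconfig symbol is not shown in these results:

#include <stdbool.h>

struct tt_stub;
struct range_stub;

#ifdef FEATURE_USERPTR                        /* stand-in guard; real symbol not shown */
bool tt_get_user_pages_done(struct tt_stub *tt, struct range_stub *range);
#else
static inline bool tt_get_user_pages_done(struct tt_stub *tt,
					  struct range_stub *range)
{
	(void)tt;
	(void)range;
	return false;                         /* feature compiled out: nothing pending */
}
#endif
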
amdgpu_amdkfd_gpuvm.c
28 #include <drm/ttm/ttm_tt.h>
406 if (WARN(amdgpu_ttm_tt_get_usermm(bo->tbo.ttm), in amdgpu_amdkfd_bo_validate()
523 struct ttm_tt *src_ttm = mem->bo->tbo.ttm; in kfd_mem_dmamap_userptr()
524 struct ttm_tt *ttm = bo->tbo.ttm; in kfd_mem_dmamap_userptr()
527 if (WARN_ON(ttm->num_pages != src_ttm->num_pages)) in kfd_mem_dmamap_userptr()
530 ttm->sg = kmalloc(sizeof(*ttm->sg), GFP_KERNEL); in kfd_mem_dmamap_userptr()
531 if (unlikely(!ttm->sg)) in kfd_mem_dmamap_userptr()
535 ret = sg_alloc_table_from_pages(ttm in kfd_mem_dmamap_userptr()
520 struct ttm_tt *ttm = bo->tbo.ttm; kfd_mem_dmamap_userptr() local
609 struct ttm_tt *ttm = bo->tbo.ttm; kfd_mem_dmamap_sg_bo() local
686 struct ttm_tt *ttm = bo->tbo.ttm; kfd_mem_dmaunmap_userptr() local
732 struct ttm_tt *ttm = bo->tbo.ttm; kfd_mem_dmaunmap_sg_bo() local
[all...]
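
The kfd_mem_dmamap_userptr() hits (lines 527-531) check that the destination TT describes the same number of pages as the source before allocating a scatter table for it. A userspace sketch of that guard-then-allocate step, with malloc() standing in for kmalloc() and stub types throughout:

#include <stdlib.h>

struct sg_table_stub { int nents; };          /* stand-in for struct sg_table */

struct tt_stub {
	unsigned long num_pages;
	struct sg_table_stub *sg;
};

/* Mirrors the guard on line 527 and the allocation on lines 530-531. */
static int dmamap_prepare(struct tt_stub *dst, const struct tt_stub *src)
{
	if (dst->num_pages != src->num_pages)
		return -1;                    /* kernel code WARNs and bails out */

	dst->sg = malloc(sizeof(*dst->sg));
	if (!dst->sg)
		return -1;                    /* -ENOMEM in the kernel code */
	return 0;
}
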
amdgpu_gmc.c
38 #include <drm/ttm/ttm_tt.h>
110 *addr = bo->tbo.ttm->dma_address[0]; in amdgpu_gmc_get_pde_for_bo()
119 *flags = amdgpu_ttm_tt_pde_flags(bo->tbo.ttm, bo->tbo.resource); in amdgpu_gmc_get_pde_for_bo()
183 if (bo->ttm->num_pages != 1 || bo->ttm->caching == ttm_cached) in amdgpu_gmc_agp_addr()
186 if (bo->ttm->dma_address[0] + PAGE_SIZE >= adev->gmc.agp_size) in amdgpu_gmc_agp_addr()
189 return adev->gmc.agp_start + bo->ttm->dma_address[0]; in amdgpu_gmc_agp_addr()
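
The amdgpu_gmc_agp_addr() hits (lines 183-189) compute a direct AGP address only for a single-page, non-cached TT whose DMA address fits inside the aperture. A sketch of that computation; the constants and types are stand-ins, and returning 0 as the "no AGP address" sentinel is an assumption of the sketch (the kernel uses its own invalid-offset value):

#include <stdint.h>

#define PAGE_SIZE_STUB 4096u                  /* 4 KiB pages assumed */

struct gmc_stub {
	uint64_t agp_start;
	uint64_t agp_size;
};

struct tt_stub {
	unsigned long num_pages;
	int cached;                           /* nonzero when CPU-cached */
	uint64_t dma_address0;                /* DMA address of the only page */
};

/* Returns the AGP address, or 0 (sketch-only sentinel) when it cannot be used. */
static uint64_t gmc_agp_addr(const struct gmc_stub *gmc, const struct tt_stub *ttm)
{
	if (ttm->num_pages != 1 || ttm->cached)
		return 0;
	if (ttm->dma_address0 + PAGE_SIZE_STUB >= gmc->agp_size)
		return 0;
	return gmc->agp_start + ttm->dma_address0;
}
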
amdgpu_dma_buf.c
176 bo->tbo.ttm->pages, in amdgpu_dma_buf_map()
177 bo->tbo.ttm->num_pages); in amdgpu_dma_buf_map()
304 if (amdgpu_ttm_tt_get_usermm(bo->tbo.ttm) || in amdgpu_gem_prime_export()
amdgpu_gem.c
38 #include <drm/ttm/ttm_tt.h>
262 mm = amdgpu_ttm_tt_get_usermm(abo->tbo.ttm);
339 if (amdgpu_ttm_tt_get_usermm(bo->tbo.ttm))
361 if (amdgpu_ttm_tt_get_usermm(bo->tbo.ttm))
546 r = amdgpu_ttm_tt_get_user_pages(bo, bo->tbo.ttm->pages,
570 amdgpu_ttm_tt_get_user_pages_done(bo->tbo.ttm, range);
591 if (amdgpu_ttm_tt_get_usermm(robj->tbo.ttm) ||
965 if (amdgpu_ttm_tt_get_usermm(robj->tbo.ttm)) {
amdgpu_cs.c
35 #include <drm/ttm/ttm_tt.h>
143 if (amdgpu_ttm_tt_get_usermm(p->uf_bo->tbo.ttm)) in amdgpu_cs_p1_user_fence()
877 e->user_pages = kvmalloc_array(bo->tbo.ttm->num_pages, in amdgpu_cs_parser_bos()
893 for (i = 0; i < bo->tbo.ttm->num_pages; i++) { in amdgpu_cs_parser_bos()
894 if (bo->tbo.ttm->pages[i] != e->user_pages[i]) { in amdgpu_cs_parser_bos()
932 usermm = amdgpu_ttm_tt_get_usermm(e->bo->tbo.ttm); in amdgpu_cs_parser_bos()
939 if (amdgpu_ttm_tt_is_userptr(e->bo->tbo.ttm) && in amdgpu_cs_parser_bos()
948 amdgpu_ttm_tt_set_user_pages(e->bo->tbo.ttm, in amdgpu_cs_parser_bos()
997 amdgpu_ttm_tt_get_user_pages_done(bo->tbo.ttm, e->range); in amdgpu_cs_parser_bos()
1318 r |= !amdgpu_ttm_tt_get_user_pages_done(e->bo->tbo.ttm, in amdgpu_cs_submit()
[all...]
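
The amdgpu_cs_parser_bos() hits (lines 893-894) revalidate a userptr BO by comparing the pages currently backing its TT against a cached snapshot; any mismatch means the user mapping moved and must be re-pinned. A sketch of that comparison loop with stand-in types:

#include <stdbool.h>
#include <stddef.h>

struct page_stub;                             /* stand-in for struct vm_page */

/* Mirrors the loop at lines 893-894: any changed page invalidates the cache. */
static bool user_pages_unchanged(struct page_stub *const *ttm_pages,
				 struct page_stub *const *cached_pages,
				 size_t num_pages)
{
	size_t i;

	for (i = 0; i < num_pages; i++) {
		if (ttm_pages[i] != cached_pages[i])
			return false;
	}
	return true;
}
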
/openbsd-src/sys/dev/pci/drm/ttm/tests/
ttm_kunit_helpers.c
12 struct ttm_device *ttm, in ttm_device_kunit_init() argument
19 err = ttm_device_init(ttm, &ttm_dev_funcs, drm->dev, in ttm_device_kunit_init()
ttm_kunit_helpers.h
25 struct ttm_device *ttm,
/openbsd-src/sys/dev/pci/drm/
files.drm
101 define ttm
102 file dev/pci/drm/drm_gem_ttm_helper.c ttm
103 file dev/pci/drm/ttm/ttm_agp_backend.c ttm & agp
104 file dev/pci/drm/ttm/ttm_bo.c ttm
105 file dev/pci/drm/ttm/ttm_bo_util.c ttm
106 file dev/pci/drm/ttm/ttm_bo_vm.c ttm
107 file dev/pci/drm/ttm/ttm_device.c ttm
108 file dev/pci/drm/ttm/ttm_execbuf_util.c ttm
109 file dev/pci/drm/ttm/ttm_module.c ttm
110 file dev/pci/drm/ttm/ttm_pool.c ttm
[all …]
