Searched refs:gart (Results 1 – 25 of 35) sorted by relevance

/netbsd-src/sys/external/bsd/drm2/dist/drm/radeon/
radeon_gart.c
82 error = bus_dmamem_alloc(rdev->ddev->dmat, rdev->gart.table_size, in radeon_gart_table_ram_alloc()
83 PAGE_SIZE, 0, &rdev->gart.rg_table_seg, 1, &rsegs, BUS_DMA_WAITOK); in radeon_gart_table_ram_alloc()
87 error = bus_dmamap_create(rdev->ddev->dmat, rdev->gart.table_size, 1, in radeon_gart_table_ram_alloc()
88 rdev->gart.table_size, 0, BUS_DMA_WAITOK, in radeon_gart_table_ram_alloc()
89 &rdev->gart.rg_table_map); in radeon_gart_table_ram_alloc()
92 error = bus_dmamem_map(rdev->ddev->dmat, &rdev->gart.rg_table_seg, 1, in radeon_gart_table_ram_alloc()
93 rdev->gart.table_size, &rdev->gart.ptr, in radeon_gart_table_ram_alloc()
97 error = bus_dmamap_load(rdev->ddev->dmat, rdev->gart.rg_table_map, in radeon_gart_table_ram_alloc()
98 rdev->gart.ptr, rdev->gart.table_size, NULL, BUS_DMA_WAITOK); in radeon_gart_table_ram_alloc()
102 memset(rdev->gart.ptr, 0, rdev->gart.table_size); in radeon_gart_table_ram_alloc()
[all …]
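
The radeon_gart.c hits above trace the NetBSD bus_dma(9) sequence that backs the GART page table with system RAM: allocate DMA-safe memory, create a map, map it into kernel virtual address space, load it for the device, then zero it. Below is a minimal sketch of that sequence; the helper name is hypothetical and the cleanup that the real radeon_gart_table_ram_alloc() performs on each failure path is omitted.

#include <sys/param.h>
#include <sys/systm.h>
#include <sys/bus.h>

/*
 * Sketch only: allocate and map a GART table of table_size bytes in
 * system RAM via bus_dma(9).  Error-path cleanup is omitted.
 */
static int
gart_table_ram_alloc_sketch(bus_dma_tag_t dmat, bus_size_t table_size,
    bus_dma_segment_t *seg, bus_dmamap_t *map, void **ptr)
{
	int rsegs, error;

	/* One physically contiguous, page-aligned segment for the table. */
	error = bus_dmamem_alloc(dmat, table_size, PAGE_SIZE, 0, seg, 1,
	    &rsegs, BUS_DMA_WAITOK);
	if (error)
		return error;

	/* A DMA map able to hold the whole table in a single segment. */
	error = bus_dmamap_create(dmat, table_size, 1, table_size, 0,
	    BUS_DMA_WAITOK, map);
	if (error)
		return error;

	/* Map the segment into kernel virtual address space. */
	error = bus_dmamem_map(dmat, seg, 1, table_size, ptr, BUS_DMA_WAITOK);
	if (error)
		return error;

	/* Load the map so the device gets a bus address for the table. */
	error = bus_dmamap_load(dmat, *map, *ptr, table_size, NULL,
	    BUS_DMA_WAITOK);
	if (error)
		return error;

	/* Start with an empty page table, as the driver does. */
	memset(*ptr, 0, table_size);
	return 0;
}
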
radeon_rs400.c
90 if (rdev->gart.ptr) { in rs400_gart_init()
113 rdev->gart.table_size = rdev->gart.num_gpu_pages * 4; in rs400_gart_init()
171 tmp = (u32)rdev->gart.table_addr & 0xfffff000; in rs400_gart_enable()
172 tmp |= (upper_32_bits(rdev->gart.table_addr) & 0xff) << 4; in rs400_gart_enable()
199 (unsigned long long)rdev->gart.table_addr); in rs400_gart_enable()
200 rdev->gart.ready = true; in rs400_gart_enable()
243 u32 *gtt = rdev->gart.ptr; in rs400_gart_set_page()
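
Across these hits the page-table size follows directly from the aperture: one entry per GPU page, 4 bytes per entry on the older PCI/PCIe GARTs (r100, r300, rs400) and 8 bytes per entry on the newer ones (rs600, r600, and the amdgpu GMC blocks below). A quick worked example, assuming the usual 4 KiB GPU page size and a made-up 256 MiB aperture:

#include <inttypes.h>
#include <stdio.h>

/* Worked example with assumed numbers: a 256 MiB GART aperture. */
int main(void)
{
	const uint64_t gart_size     = 256ULL << 20; /* 256 MiB aperture      */
	const uint64_t gpu_page_size = 4096;         /* assumed GPU page size */
	const uint64_t num_gpu_pages = gart_size / gpu_page_size;

	/* 65536 entries */
	printf("entries: %" PRIu64 "\n", num_gpu_pages);
	/* 4-byte entries -> 256 KiB table (rs400/r100/r300 style) */
	printf("table @4B/entry: %" PRIu64 " KiB\n", num_gpu_pages * 4 / 1024);
	/* 8-byte entries -> 512 KiB table (rs600/r600/amdgpu style) */
	printf("table @8B/entry: %" PRIu64 " KiB\n", num_gpu_pages * 8 / 1024);
	return 0;
}
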
radeon_asic.c
172 rdev->asic->gart.tlb_flush = &rv370_pcie_gart_tlb_flush; in radeon_agp_disable()
173 rdev->asic->gart.get_page_entry = &rv370_pcie_gart_get_page_entry; in radeon_agp_disable()
174 rdev->asic->gart.set_page = &rv370_pcie_gart_set_page; in radeon_agp_disable()
178 rdev->asic->gart.tlb_flush = &r100_pci_gart_tlb_flush; in radeon_agp_disable()
179 rdev->asic->gart.get_page_entry = &r100_pci_gart_get_page_entry; in radeon_agp_disable()
180 rdev->asic->gart.set_page = &r100_pci_gart_set_page; in radeon_agp_disable()
214 .gart = {
282 .gart = {
378 .gart = {
446 .gart = {
[all …]
radeon_r300.c
147 void __iomem *ptr = rdev->gart.ptr; in rv370_pcie_gart_set_page()
164 if (rdev->gart.robj) { in rv370_pcie_gart_init()
175 rdev->gart.table_size = rdev->gart.num_gpu_pages * 4; in rv370_pcie_gart_init()
176 rdev->asic->gart.tlb_flush = &rv370_pcie_gart_tlb_flush; in rv370_pcie_gart_init()
177 rdev->asic->gart.get_page_entry = &rv370_pcie_gart_get_page_entry; in rv370_pcie_gart_init()
178 rdev->asic->gart.set_page = &rv370_pcie_gart_set_page; in rv370_pcie_gart_init()
188 if (rdev->gart.robj == NULL) { in rv370_pcie_gart_enable()
203 table_addr = rdev->gart.table_addr; in rv370_pcie_gart_enable()
218 rdev->gart.ready = true; in rv370_pcie_gart_enable()
radeon_rs600.c
548 if (rdev->gart.robj) { in rs600_gart_init()
557 rdev->gart.table_size = rdev->gart.num_gpu_pages * 8; in rs600_gart_init()
566 if (rdev->gart.robj == NULL) { in rs600_gart_enable()
603 rdev->gart.table_addr); in rs600_gart_enable()
620 (unsigned long long)rdev->gart.table_addr); in rs600_gart_enable()
621 rdev->gart.ready = true; in rs600_gart_enable()
674 void __iomem *ptr = (void *)rdev->gart.ptr; in rs600_gart_set_page()
radeon_rv770.c
907 if (rdev->gart.robj == NULL) { in rv770_pcie_gart_enable()
936 WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR, rdev->gart.table_addr >> 12); in rv770_pcie_gart_enable()
947 (unsigned long long)rdev->gart.table_addr); in rv770_pcie_gart_enable()
948 rdev->gart.ready = true; in rv770_pcie_gart_enable()
radeon_r100.c
650 if (rdev->gart.ptr) { in r100_pci_gart_init()
658 rdev->gart.table_size = rdev->gart.num_gpu_pages * 4; in r100_pci_gart_init()
659 rdev->asic->gart.tlb_flush = &r100_pci_gart_tlb_flush; in r100_pci_gart_init()
660 rdev->asic->gart.get_page_entry = &r100_pci_gart_get_page_entry; in r100_pci_gart_init()
661 rdev->asic->gart.set_page = &r100_pci_gart_set_page; in r100_pci_gart_init()
676 WREG32(RADEON_AIC_PT_BASE, rdev->gart.table_addr); in r100_pci_gart_enable()
682 (unsigned long long)rdev->gart.table_addr); in r100_pci_gart_enable()
683 rdev->gart.ready = true; in r100_pci_gart_enable()
706 u32 *gtt = rdev->gart.ptr; in r100_pci_gart_set_page()
radeon_ttm.c
1258 if (p >= rdev->gart.num_cpu_pages) in radeon_ttm_gtt_read()
1261 page = rdev->gart.pages[p]; in radeon_ttm_gtt_read()
1267 kunmap(rdev->gart.pages[p]); in radeon_ttm_gtt_read()
radeon_ni.c
1285 if (rdev->gart.robj == NULL) { in cayman_pcie_gart_enable()
1314 WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR, rdev->gart.table_addr >> 12); in cayman_pcie_gart_enable()
1360 (unsigned long long)rdev->gart.table_addr); in cayman_pcie_gart_enable()
1361 rdev->gart.ready = true; in cayman_pcie_gart_enable()
radeon_vm.c
375 uint64_t src = rdev->gart.table_addr + (addr >> 12) * 8; in radeon_vm_set_pages()
607 result = rdev->gart.pages_entry[addr >> RADEON_GPU_PAGE_SHIFT]; in radeon_vm_map_gart()
radeon_r600.c
1111 void __iomem *ptr = rdev->gart.ptr; in r600_pcie_gart_tlb_flush()
1150 if (rdev->gart.robj) { in r600_pcie_gart_init()
1158 rdev->gart.table_size = rdev->gart.num_gpu_pages * 8; in r600_pcie_gart_init()
1167 if (rdev->gart.robj == NULL) { in r600_pcie_gart_enable()
1204 WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR, rdev->gart.table_addr >> 12); in r600_pcie_gart_enable()
1215 (unsigned long long)rdev->gart.table_addr); in r600_pcie_gart_enable()
1216 rdev->gart.ready = true; in r600_pcie_gart_enable()
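
In the r600, rv770, and cayman enable paths above, VM_CONTEXT0_PAGE_TABLE_BASE_ADDR is written with table_addr >> 12: the register takes the page-table base in 4 KiB units, so the byte address is shifted down by 12 bits. A tiny illustration (the address value is made up):

#include <stdint.h>

/* Illustration only: the register holds the base in 4 KiB units. */
static uint32_t page_table_base_reg_value(uint64_t table_addr)
{
	return (uint32_t)(table_addr >> 12);
}

/* e.g. a table at byte address 0x00800000 programs the value 0x00000800. */
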
radeon.h
1913 } gart; member
2429 struct radeon_gart gart; member
2785 #define radeon_gart_tlb_flush(rdev) (rdev)->asic->gart.tlb_flush((rdev))
2786 #define radeon_gart_get_page_entry(a, f) (rdev)->asic->gart.get_page_entry((a), (f))
2787 #define radeon_gart_set_page(rdev, i, e) (rdev)->asic->gart.set_page((rdev), (i), (e))
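
Taken together, the radeon_asic.c and radeon.h hits show how the GART backend is selected per ASIC: each chip family installs its own tlb_flush/get_page_entry/set_page callbacks into the asic gart table, and the rest of the driver calls through the radeon_gart_* wrapper macros. Below is a trimmed-down illustration of that dispatch pattern; the struct and helper names are simplified stand-ins, not the driver's real definitions.

#include <stdint.h>

/* Simplified stand-in for the callback table reached via rdev->asic->gart. */
struct gart_ops_sketch {
	void     (*tlb_flush)(void *rdev);
	uint64_t (*get_page_entry)(uint64_t addr, uint32_t flags);
	void     (*set_page)(void *rdev, unsigned int i, uint64_t entry);
};

struct dev_sketch {
	struct gart_ops_sketch gart;
};

/*
 * Callers never name a chip family; they go through the table, the same
 * way radeon_gart_set_page(rdev, i, e) expands to
 * (rdev)->asic->gart.set_page((rdev), (i), (e)).
 */
static inline void
gart_set_page_sketch(struct dev_sketch *d, unsigned int i, uint64_t entry)
{
	d->gart.set_page(d, i, entry);
}

/*
 * Switching GART backends (what radeon_agp_disable() does above when it
 * falls back from AGP) is just repointing the three callbacks.
 */
static void
gart_use_backend_sketch(struct dev_sketch *d,
    const struct gart_ops_sketch *backend)
{
	d->gart = *backend;
}
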
/netbsd-src/sys/external/bsd/drm2/dist/drm/amd/amdgpu/
amdgpu_gart.c
177 if (adev->gart.bo == NULL) { in amdgpu_gart_table_vram_alloc()
181 bp.size = adev->gart.table_size; in amdgpu_gart_table_vram_alloc()
188 r = amdgpu_bo_create(adev, &bp, &adev->gart.bo); in amdgpu_gart_table_vram_alloc()
210 r = amdgpu_bo_reserve(adev->gart.bo, false); in amdgpu_gart_table_vram_pin()
213 r = amdgpu_bo_pin(adev->gart.bo, AMDGPU_GEM_DOMAIN_VRAM); in amdgpu_gart_table_vram_pin()
215 amdgpu_bo_unreserve(adev->gart.bo); in amdgpu_gart_table_vram_pin()
218 r = amdgpu_bo_kmap(adev->gart.bo, &adev->gart.ptr); in amdgpu_gart_table_vram_pin()
220 amdgpu_bo_unpin(adev->gart.bo); in amdgpu_gart_table_vram_pin()
221 amdgpu_bo_unreserve(adev->gart.bo); in amdgpu_gart_table_vram_pin()
237 if (adev->gart.bo == NULL) { in amdgpu_gart_table_vram_unpin()
[all …]
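
Where the radeon path above can keep the table in system RAM, the amdgpu hits put it in a VRAM buffer object: create a BO of gart.table_size bytes, reserve it, pin it in VRAM, then kmap it so the CPU can write entries through gart.ptr. A minimal sketch of the pin/kmap step, assuming the driver's own headers are in scope, the BO was already created as in amdgpu_gart_table_vram_alloc(), and with the unwinding simplified:

/* amdgpu driver-internal sketch; assumes the driver's amdgpu.h types. */
static int gart_table_vram_pin_sketch(struct amdgpu_device *adev)
{
	int r;

	/* Take the BO reservation before pinning or mapping it. */
	r = amdgpu_bo_reserve(adev->gart.bo, false);
	if (r)
		return r;

	/* Pin the page table in VRAM so its GPU address stays fixed. */
	r = amdgpu_bo_pin(adev->gart.bo, AMDGPU_GEM_DOMAIN_VRAM);
	if (r) {
		amdgpu_bo_unreserve(adev->gart.bo);
		return r;
	}

	/* Map it for the CPU; GART entries are written through gart.ptr. */
	r = amdgpu_bo_kmap(adev->gart.bo, &adev->gart.ptr);
	if (r)
		amdgpu_bo_unpin(adev->gart.bo);

	amdgpu_bo_unreserve(adev->gart.bo);
	return r;
}
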
amdgpu_gmc_v10_0.c
381 job->vm_pd_addr = amdgpu_gmc_pd_addr(adev->gart.bo); in gmc_v10_0_flush_gpu_tlb()
724 if (adev->gart.bo) { in gmc_v10_0_gart_init()
734 adev->gart.table_size = adev->gart.num_gpu_pages * 8; in gmc_v10_0_gart_init()
735 adev->gart.gart_pte_flags = AMDGPU_PTE_MTYPE_NV10(MTYPE_UC) | in gmc_v10_0_gart_init()
915 if (adev->gart.bo == NULL) { in gmc_v10_0_gart_enable()
952 (unsigned long long)amdgpu_bo_gpu_offset(adev->gart.bo)); in gmc_v10_0_gart_enable()
954 adev->gart.ready = true; in gmc_v10_0_gart_enable()
amdgpu_gmc_v6_0.c
494 if (adev->gart.bo == NULL) { in gmc_v6_0_gart_enable()
502 table_addr = amdgpu_bo_gpu_offset(adev->gart.bo); in gmc_v6_0_gart_enable()
580 adev->gart.ready = true; in gmc_v6_0_gart_enable()
588 if (adev->gart.bo) { in gmc_v6_0_gart_init()
595 adev->gart.table_size = adev->gart.num_gpu_pages * 8; in gmc_v6_0_gart_init()
596 adev->gart.gart_pte_flags = 0; in gmc_v6_0_gart_init()
amdgpu_gmc_v9_0.c
1019 if (adev->gart.bo) { in gmc_v9_0_gart_init()
1027 adev->gart.table_size = adev->gart.num_gpu_pages * 8; in gmc_v9_0_gart_init()
1028 adev->gart.gart_pte_flags = AMDGPU_PTE_MTYPE_VG10(MTYPE_UC) | in gmc_v9_0_gart_init()
1298 if (adev->gart.bo == NULL) { in gmc_v9_0_gart_enable()
1319 (unsigned long long)amdgpu_bo_gpu_offset(adev->gart.bo)); in gmc_v9_0_gart_enable()
1320 adev->gart.ready = true; in gmc_v9_0_gart_enable()
amdgpu_gmc_v7_0.c
628 if (adev->gart.bo == NULL) { in gmc_v7_0_gart_enable()
636 table_addr = amdgpu_bo_gpu_offset(adev->gart.bo); in gmc_v7_0_gart_enable()
724 adev->gart.ready = true; in gmc_v7_0_gart_enable()
732 if (adev->gart.bo) { in gmc_v7_0_gart_init()
740 adev->gart.table_size = adev->gart.num_gpu_pages * 8; in gmc_v7_0_gart_init()
741 adev->gart.gart_pte_flags = 0; in gmc_v7_0_gart_init()
amdgpu_gmc_v8_0.c
849 if (adev->gart.bo == NULL) { in gmc_v8_0_gart_enable()
857 table_addr = amdgpu_bo_gpu_offset(adev->gart.bo); in gmc_v8_0_gart_enable()
962 adev->gart.ready = true; in gmc_v8_0_gart_enable()
970 if (adev->gart.bo) { in gmc_v8_0_gart_init()
978 adev->gart.table_size = adev->gart.num_gpu_pages * 8; in gmc_v8_0_gart_init()
979 adev->gart.gart_pte_flags = AMDGPU_PTE_EXECUTABLE; in gmc_v8_0_gart_init()
amdgpu_ttm.c
1593 flags |= adev->gart.gart_pte_flags; in amdgpu_ttm_tt_pte_flags()
2206 dst_addr = amdgpu_bo_gpu_offset(adev->gart.bo); in amdgpu_map_buffer()
2270 job->vm_pd_addr = amdgpu_gmc_pd_addr(adev->gart.bo); in amdgpu_copy_buffer()
2545 if (p >= adev->gart.num_cpu_pages) in amdgpu_ttm_gtt_read()
2548 page = adev->gart.pages[p]; in amdgpu_ttm_gtt_read()
2554 kunmap(adev->gart.pages[p]); in amdgpu_ttm_gtt_read()
amdgpu_gtt_mgr.c
209 lpfn = adev->gart.num_cpu_pages; in amdgpu_gtt_mgr_alloc()
amdgpu_gfxhub_v2_0.c
70 uint64_t pt_base = amdgpu_gmc_pd_addr(adev->gart.bo); in gfxhub_v2_0_init_gart_aperture_regs()
amdgpu_gfxhub_v1_0.c
59 uint64_t pt_base = amdgpu_gmc_pd_addr(adev->gart.bo); in gfxhub_v1_0_init_gart_aperture_regs()
/netbsd-src/sys/external/bsd/drm2/dist/drm/nouveau/
nouveau_chan.c
111 nvif_object_fini(&chan->gart); in nouveau_channel_del()
362 nouveau_channel_init(struct nouveau_channel *chan, u32 vram, u32 gart) in nouveau_channel_init() argument
422 ret = nvif_object_init(&chan->user, gart, NV_DMA_IN_MEMORY, in nouveau_channel_init()
423 &args, sizeof(args), &chan->gart); in nouveau_channel_init()
nouveau_chan.h
20 struct nvif_object gart; member
/netbsd-src/sys/external/bsd/drm/dist/shared-core/
xgi_drm.h
52 struct drm_map gart; member
