
Searched refs:gpu_addr (Results 1 – 25 of 114) sorted by relevance

/netbsd-src/sys/external/bsd/drm2/dist/drm/radeon/
radeon_r600_dma.c
149 upper_32_bits(rdev->wb.gpu_addr + R600_WB_DMA_RPTR_OFFSET) & 0xFF); in r600_dma_resume()
151 ((rdev->wb.gpu_addr + R600_WB_DMA_RPTR_OFFSET) & 0xFFFFFFFC)); in r600_dma_resume()
156 WREG32(DMA_RB_BASE, ring->gpu_addr >> 8); in r600_dma_resume()
241 u64 gpu_addr; in r600_dma_ring_test() local
248 gpu_addr = rdev->wb.gpu_addr + index; in r600_dma_ring_test()
259 radeon_ring_write(ring, lower_32_bits(gpu_addr)); in r600_dma_ring_test()
260 radeon_ring_write(ring, upper_32_bits(gpu_addr) & 0xff); in r600_dma_ring_test()
295 u64 addr = rdev->fence_drv[fence->ring].gpu_addr; in r600_dma_fence_ring_emit()
322 u64 addr = semaphore->gpu_addr; in r600_dma_semaphore_ring_emit()
348 u64 gpu_addr; in r600_dma_ib_test() local
[all …]
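
The r600_dma_ring_test() matches above illustrate the recurring write-back self-test: reserve a slot in the write-back page, have the DMA ring write a known value to that slot's GPU address, then poll the CPU-visible copy. A minimal sketch of the idea, assuming the DMA_PACKET encoding shown by the driver and leaving slot allocation, ring locking, and the timeout loop elided:

    u32 tmp = 0xCAFEDEAD;                                     /* seed value */
    u64 gpu_addr = rdev->wb.gpu_addr + index;                 /* GPU view of the write-back slot (index: byte offset) */
    volatile u32 *cpu_ptr = &rdev->wb.wb[index / 4];          /* CPU view of the same slot */

    *cpu_ptr = tmp;                                           /* seed, so a stale read is detectable */
    radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_WRITE, 0, 0, 1));
    radeon_ring_write(ring, lower_32_bits(gpu_addr));         /* destination, low 32 bits */
    radeon_ring_write(ring, upper_32_bits(gpu_addr) & 0xff);  /* destination, bits 39:32 */
    radeon_ring_write(ring, 0xDEADBEEF);                      /* value the engine should land in the slot */
    /* commit the ring, then poll *cpu_ptr until it reads 0xDEADBEEF or the test times out */
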
radeon_cik_sdma.c
160 radeon_ring_write(ring, ib->gpu_addr & 0xffffffe0); /* base must be 32 byte aligned */ in cik_sdma_ring_ib_execute()
161 radeon_ring_write(ring, upper_32_bits(ib->gpu_addr)); in cik_sdma_ring_ib_execute()
209 u64 addr = rdev->fence_drv[fence->ring].gpu_addr; in cik_sdma_fence_ring_emit()
238 u64 addr = semaphore->gpu_addr; in cik_sdma_semaphore_ring_emit()
406 upper_32_bits(rdev->wb.gpu_addr + wb_offset) & 0xFFFFFFFF); in cik_sdma_gfx_resume()
408 ((rdev->wb.gpu_addr + wb_offset) & 0xFFFFFFFC)); in cik_sdma_gfx_resume()
413 WREG32(SDMA0_GFX_RB_BASE + reg_offset, ring->gpu_addr >> 8); in cik_sdma_gfx_resume()
414 WREG32(SDMA0_GFX_RB_BASE_HI + reg_offset, ring->gpu_addr >> 40); in cik_sdma_gfx_resume()
657 u64 gpu_addr; in cik_sdma_ring_test() local
664 gpu_addr = rdev->wb.gpu_addr + index; in cik_sdma_ring_test()
[all …]
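
Almost every match in this listing funnels a 64-bit gpu_addr through lower_32_bits()/upper_32_bits(), or shifts it so a 40-bit address splits across a 32-bit *_RB_BASE register (bits 39:8 of a 256-byte-aligned base) and a small *_RB_BASE_HI register (the bits above that). For reference, a sketch of the helper macros, matching the kernel's definitions, plus a typical use:

    #define lower_32_bits(n) ((u32)((n) & 0xffffffff))
    #define upper_32_bits(n) ((u32)(((n) >> 16) >> 16))  /* double shift sidesteps a >>32 warning on 32-bit values */

    u32 lo = lower_32_bits(ring->gpu_addr);   /* e.g. what lands in a *_RB_BASE_LO register */
    u32 hi = upper_32_bits(ring->gpu_addr);   /* e.g. what lands in the matching *_HI register */
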
radeon_uvd_v4_2.c
52 addr = (rdev->uvd.gpu_addr + 0x200) >> 3; in uvd_v4_2_resume()
54 addr = rdev->uvd.gpu_addr >> 3; in uvd_v4_2_resume()
72 addr = (rdev->uvd.gpu_addr >> 28) & 0xF; in uvd_v4_2_resume()
76 addr = (rdev->uvd.gpu_addr >> 32) & 0xFF; in uvd_v4_2_resume()
radeon_uvd_v2_2.c
48 uint64_t addr = rdev->fence_drv[fence->ring].gpu_addr; in uvd_v2_2_fence_emit()
82 uint64_t addr = semaphore->gpu_addr; in uvd_v2_2_semaphore_emit()
118 addr = rdev->uvd.gpu_addr >> 3; in uvd_v2_2_resume()
135 addr = (rdev->uvd.gpu_addr >> 28) & 0xF; in uvd_v2_2_resume()
139 addr = (rdev->uvd.gpu_addr >> 32) & 0xFF; in uvd_v2_2_resume()
radeon_uvd_v1_0.c
90 uint64_t addr = rdev->fence_drv[fence->ring].gpu_addr; in uvd_v1_0_fence_emit()
126 addr = (rdev->uvd.gpu_addr >> 3) + 16; in uvd_v1_0_resume()
143 addr = (rdev->uvd.gpu_addr >> 28) & 0xF; in uvd_v1_0_resume()
147 addr = (rdev->uvd.gpu_addr >> 32) & 0xFF; in uvd_v1_0_resume()
369 WREG32(UVD_LMI_EXT40_ADDR, upper_32_bits(ring->gpu_addr) | in uvd_v1_0_start()
379 WREG32(UVD_RBC_RB_BASE, ring->gpu_addr); in uvd_v1_0_start()
492 radeon_ring_write(ring, ib->gpu_addr); in uvd_v1_0_ib_execute()
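
The UVD resume paths above slice the same 64-bit gpu_addr three different ways before writing it to hardware. Treated purely as arithmetic (the mm register targets are left out of this sketch), the extracted fields are:

    uint64_t a = rdev->uvd.gpu_addr;
    uint32_t in_8byte_units = (uint32_t)(a >> 3);            /* the address expressed in 8-byte units */
    uint32_t bits_31_28     = (uint32_t)(a >> 28) & 0xF;     /* bits 31:28 of the address */
    uint32_t bits_39_32     = (uint32_t)(a >> 32) & 0xFF;    /* bits 39:32, i.e. everything above 4 GB */
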
radeon_semaphore.c
56 (*semaphore)->gpu_addr = radeon_sa_bo_gpu_addr((*semaphore)->sa_bo); in radeon_semaphore_create()
74 ring->last_semaphore_signal_addr = semaphore->gpu_addr; in radeon_semaphore_emit_signal()
91 ring->last_semaphore_wait_addr = semaphore->gpu_addr; in radeon_semaphore_emit_wait()
radeon_vce_v1_0.c
223 uint64_t addr = rdev->vce.gpu_addr; in vce_v1_0_resume()
305 WREG32(VCE_RB_BASE_LO, ring->gpu_addr); in vce_v1_0_start()
306 WREG32(VCE_RB_BASE_HI, upper_32_bits(ring->gpu_addr)); in vce_v1_0_start()
312 WREG32(VCE_RB_BASE_LO2, ring->gpu_addr); in vce_v1_0_start()
313 WREG32(VCE_RB_BASE_HI2, upper_32_bits(ring->gpu_addr)); in vce_v1_0_start()
radeon_trace.h
179 __field(uint64_t, gpu_addr)
185 __entry->gpu_addr = sem->gpu_addr;
189 __entry->waiters, __entry->gpu_addr)
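
radeon_trace.h records the semaphore's gpu_addr in a tracepoint. A minimal sketch of that TRACE_EVENT skeleton; the event name and print format here are illustrative, and only the fields shown in the matches are taken from the source:

    TRACE_EVENT(radeon_semaphore_signal,       /* event name: illustrative */
            TP_PROTO(int ring, struct radeon_semaphore *sem),
            TP_ARGS(ring, sem),
            TP_STRUCT__entry(
                    __field(int, ring)
                    __field(signed, waiters)
                    __field(uint64_t, gpu_addr)
            ),
            TP_fast_assign(
                    __entry->ring = ring;
                    __entry->waiters = sem->waiters;
                    __entry->gpu_addr = sem->gpu_addr;
            ),
            TP_printk("ring=%u, waiters=%d, addr=%010Lx",
                      __entry->ring, __entry->waiters, __entry->gpu_addr)
    );
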
/netbsd-src/sys/external/bsd/drm2/dist/drm/amd/amdgpu/
amdgpu_ih.c
108 ih->gpu_addr = dma_addr; in amdgpu_ih_ring_init()
128 &ih->ring_obj, &ih->gpu_addr, in amdgpu_ih_ring_init()
136 ih->wptr_addr = adev->wb.gpu_addr + wptr_offs * 4; in amdgpu_ih_ring_init()
138 ih->rptr_addr = adev->wb.gpu_addr + rptr_offs * 4; in amdgpu_ih_ring_init()
171 (void *)ih->ring, ih->gpu_addr); in amdgpu_ih_ring_fini()
175 amdgpu_bo_free_kernel(&ih->ring_obj, &ih->gpu_addr, in amdgpu_ih_ring_fini()
177 amdgpu_device_wb_free(adev, (ih->wptr_addr - ih->gpu_addr) / 4); in amdgpu_ih_ring_fini()
178 amdgpu_device_wb_free(adev, (ih->rptr_addr - ih->gpu_addr) / 4); in amdgpu_ih_ring_fini()
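
The amdgpu_ih.c matches show the amdgpu side of the same write-back idea: a dword slot carved out of adev->wb gives the hardware a GPU address to write its ring pointer to and gives the driver a CPU pointer to read it back. A sketch of that lifecycle, with error handling trimmed down:

    u32 offs;
    if (amdgpu_device_wb_get(adev, &offs) == 0) {          /* reserve one 32-bit slot */
            u64 gpu = adev->wb.gpu_addr + offs * 4;        /* address handed to the hardware */
            volatile u32 *cpu = &adev->wb.wb[offs];        /* what the driver polls */
            /* ... program gpu into the IH registers, read *cpu in the interrupt path ... */
            amdgpu_device_wb_free(adev, offs);             /* release on ring teardown */
    }
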
amdgpu_vce_v4_0.c
163 uint64_t addr = table->gpu_addr; in vce_v4_0_mmsch_start()
241 lower_32_bits(ring->gpu_addr)); in vce_v4_0_sriov_start()
243 upper_32_bits(ring->gpu_addr)); in vce_v4_0_sriov_start()
269 adev->vce.gpu_addr >> 8); in vce_v4_0_sriov_start()
272 (adev->vce.gpu_addr >> 40) & 0xff); in vce_v4_0_sriov_start()
279 adev->vce.gpu_addr >> 8); in vce_v4_0_sriov_start()
282 (adev->vce.gpu_addr >> 40) & 0xff); in vce_v4_0_sriov_start()
285 adev->vce.gpu_addr >> 8); in vce_v4_0_sriov_start()
288 (adev->vce.gpu_addr >> 40) & 0xff); in vce_v4_0_sriov_start()
351 WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_RB_BASE_LO), ring->gpu_addr); in vce_v4_0_start()
[all …]
amdgpu_vcn_v2_5.c
409 lower_32_bits(adev->vcn.inst[i].gpu_addr)); in vcn_v2_5_mc_resume()
411 upper_32_bits(adev->vcn.inst[i].gpu_addr)); in vcn_v2_5_mc_resume()
420 lower_32_bits(adev->vcn.inst[i].gpu_addr + offset)); in vcn_v2_5_mc_resume()
422 upper_32_bits(adev->vcn.inst[i].gpu_addr + offset)); in vcn_v2_5_mc_resume()
428 lower_32_bits(adev->vcn.inst[i].gpu_addr + offset + AMDGPU_VCN_STACK_SIZE)); in vcn_v2_5_mc_resume()
430 upper_32_bits(adev->vcn.inst[i].gpu_addr + offset + AMDGPU_VCN_STACK_SIZE)); in vcn_v2_5_mc_resume()
464 lower_32_bits(adev->vcn.inst[inst_idx].gpu_addr), 0, indirect); in vcn_v2_5_mc_resume_dpg_mode()
467 upper_32_bits(adev->vcn.inst[inst_idx].gpu_addr), 0, indirect); in vcn_v2_5_mc_resume_dpg_mode()
485 lower_32_bits(adev->vcn.inst[inst_idx].gpu_addr + offset), 0, indirect); in vcn_v2_5_mc_resume_dpg_mode()
488 upper_32_bits(adev->vcn.inst[inst_idx].gpu_addr + offset), 0, indirect); in vcn_v2_5_mc_resume_dpg_mode()
[all …]
amdgpu_vcn_v1_0.c
312 lower_32_bits(adev->vcn.inst->gpu_addr)); in vcn_v1_0_mc_resume_spg_mode()
314 upper_32_bits(adev->vcn.inst->gpu_addr)); in vcn_v1_0_mc_resume_spg_mode()
324 lower_32_bits(adev->vcn.inst->gpu_addr + offset)); in vcn_v1_0_mc_resume_spg_mode()
326 upper_32_bits(adev->vcn.inst->gpu_addr + offset)); in vcn_v1_0_mc_resume_spg_mode()
332 lower_32_bits(adev->vcn.inst->gpu_addr + offset + AMDGPU_VCN_STACK_SIZE)); in vcn_v1_0_mc_resume_spg_mode()
334 upper_32_bits(adev->vcn.inst->gpu_addr + offset + AMDGPU_VCN_STACK_SIZE)); in vcn_v1_0_mc_resume_spg_mode()
382 lower_32_bits(adev->vcn.inst->gpu_addr), 0xFFFFFFFF, 0); in vcn_v1_0_mc_resume_dpg_mode()
384 upper_32_bits(adev->vcn.inst->gpu_addr), 0xFFFFFFFF, 0); in vcn_v1_0_mc_resume_dpg_mode()
394 lower_32_bits(adev->vcn.inst->gpu_addr + offset), 0xFFFFFFFF, 0); in vcn_v1_0_mc_resume_dpg_mode()
396 upper_32_bits(adev->vcn.inst->gpu_addr + offset), 0xFFFFFFFF, 0); in vcn_v1_0_mc_resume_dpg_mode()
[all …]
amdgpu_si_dma.c
80 amdgpu_ring_write(ring, (ib->gpu_addr & 0xFFFFFFE0)); in si_dma_ring_emit_ib()
81 amdgpu_ring_write(ring, (ib->length_dw << 12) | (upper_32_bits(ib->gpu_addr) & 0xFF)); in si_dma_ring_emit_ib()
161 rptr_addr = adev->wb.gpu_addr + (ring->rptr_offs * 4); in si_dma_start()
168 WREG32(DMA_RB_BASE + sdma_offsets[i], ring->gpu_addr >> 8); in si_dma_start()
214 u64 gpu_addr; in si_dma_ring_test_ring() local
220 gpu_addr = adev->wb.gpu_addr + (index * 4); in si_dma_ring_test_ring()
229 amdgpu_ring_write(ring, lower_32_bits(gpu_addr)); in si_dma_ring_test_ring()
230 amdgpu_ring_write(ring, upper_32_bits(gpu_addr) & 0xff); in si_dma_ring_test_ring()
264 u64 gpu_addr; in si_dma_ring_test_ib() local
271 gpu_addr = adev->wb.gpu_addr + (index * 4); in si_dma_ring_test_ib()
[all …]
amdgpu_vcn_v2_0.c
324 lower_32_bits(adev->vcn.inst->gpu_addr)); in vcn_v2_0_mc_resume()
326 upper_32_bits(adev->vcn.inst->gpu_addr)); in vcn_v2_0_mc_resume()
336 lower_32_bits(adev->vcn.inst->gpu_addr + offset)); in vcn_v2_0_mc_resume()
338 upper_32_bits(adev->vcn.inst->gpu_addr + offset)); in vcn_v2_0_mc_resume()
344 lower_32_bits(adev->vcn.inst->gpu_addr + offset + AMDGPU_VCN_STACK_SIZE)); in vcn_v2_0_mc_resume()
346 upper_32_bits(adev->vcn.inst->gpu_addr + offset + AMDGPU_VCN_STACK_SIZE)); in vcn_v2_0_mc_resume()
381 lower_32_bits(adev->vcn.inst->gpu_addr), 0, indirect); in vcn_v2_0_mc_resume_dpg_mode()
384 upper_32_bits(adev->vcn.inst->gpu_addr), 0, indirect); in vcn_v2_0_mc_resume_dpg_mode()
402 lower_32_bits(adev->vcn.inst->gpu_addr + offset), 0, indirect); in vcn_v2_0_mc_resume_dpg_mode()
405 upper_32_bits(adev->vcn.inst->gpu_addr + offset), 0, indirect); in vcn_v2_0_mc_resume_dpg_mode()
[all …]
amdgpu_virt.c
186 if (!amdgpu_sriov_vf(adev) || adev->virt.mm_table.gpu_addr) in amdgpu_virt_alloc_mm_table()
192 &adev->virt.mm_table.gpu_addr, in amdgpu_virt_alloc_mm_table()
201 adev->virt.mm_table.gpu_addr, in amdgpu_virt_alloc_mm_table()
213 if (!amdgpu_sriov_vf(adev) || !adev->virt.mm_table.gpu_addr) in amdgpu_virt_free_mm_table()
217 &adev->virt.mm_table.gpu_addr, in amdgpu_virt_free_mm_table()
219 adev->virt.mm_table.gpu_addr = 0; in amdgpu_virt_free_mm_table()
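
amdgpu_virt.c shows where a gpu_addr of this kind usually originates: amdgpu_bo_create_kernel() allocates and pins a buffer object and hands back both its GPU address and a kernel CPU mapping, and amdgpu_bo_free_kernel() undoes all of it. A sketch of the pattern; the size, alignment, and memory domain below are illustrative choices, not the mm_table's actual parameters:

    struct amdgpu_bo *bo = NULL;
    u64 gpu_addr = 0;
    void *cpu_addr = NULL;
    int r;

    r = amdgpu_bo_create_kernel(adev, PAGE_SIZE, PAGE_SIZE,
                                AMDGPU_GEM_DOMAIN_VRAM,     /* domain: assumption for this sketch */
                                &bo, &gpu_addr, &cpu_addr);
    if (r == 0) {
            /* program gpu_addr into the engine, fill the buffer through cpu_addr */
            amdgpu_bo_free_kernel(&bo, &gpu_addr, &cpu_addr);   /* teardown mirrors the allocation */
    }
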
amdgpu_sdma_v2_4.c
269 amdgpu_ring_write(ring, lower_32_bits(ib->gpu_addr) & 0xffffffe0); in sdma_v2_4_ring_emit_ib()
270 amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr)); in sdma_v2_4_ring_emit_ib()
462 upper_32_bits(adev->wb.gpu_addr + wb_offset) & 0xFFFFFFFF); in sdma_v2_4_gfx_resume()
464 lower_32_bits(adev->wb.gpu_addr + wb_offset) & 0xFFFFFFFC); in sdma_v2_4_gfx_resume()
468 WREG32(mmSDMA0_GFX_RB_BASE + sdma_offsets[i], ring->gpu_addr >> 8); in sdma_v2_4_gfx_resume()
469 WREG32(mmSDMA0_GFX_RB_BASE_HI + sdma_offsets[i], ring->gpu_addr >> 40); in sdma_v2_4_gfx_resume()
560 u64 gpu_addr; in sdma_v2_4_ring_test_ring() local
566 gpu_addr = adev->wb.gpu_addr + (index * 4); in sdma_v2_4_ring_test_ring()
576 amdgpu_ring_write(ring, lower_32_bits(gpu_addr)); in sdma_v2_4_ring_test_ring()
577 amdgpu_ring_write(ring, upper_32_bits(gpu_addr)); in sdma_v2_4_ring_test_ring()
[all …]
amdgpu_uvd_v7_0.c
679 lower_32_bits(adev->uvd.inst[i].gpu_addr)); in uvd_v7_0_mc_resume()
681 upper_32_bits(adev->uvd.inst[i].gpu_addr)); in uvd_v7_0_mc_resume()
690 lower_32_bits(adev->uvd.inst[i].gpu_addr + offset)); in uvd_v7_0_mc_resume()
692 upper_32_bits(adev->uvd.inst[i].gpu_addr + offset)); in uvd_v7_0_mc_resume()
697 lower_32_bits(adev->uvd.inst[i].gpu_addr + offset + AMDGPU_UVD_HEAP_SIZE)); in uvd_v7_0_mc_resume()
699 upper_32_bits(adev->uvd.inst[i].gpu_addr + offset + AMDGPU_UVD_HEAP_SIZE)); in uvd_v7_0_mc_resume()
719 uint64_t addr = table->gpu_addr; in uvd_v7_0_mmsch_start()
821 lower_32_bits(adev->uvd.inst[i].gpu_addr)); in uvd_v7_0_sriov_start()
823 upper_32_bits(adev->uvd.inst[i].gpu_addr)); in uvd_v7_0_sriov_start()
833 lower_32_bits(adev->uvd.inst[i].gpu_addr + offset)); in uvd_v7_0_sriov_start()
[all …]
amdgpu_cik_sdma.c
239 amdgpu_ring_write(ring, ib->gpu_addr & 0xffffffe0); /* base must be 32 byte aligned */ in cik_sdma_ring_emit_ib()
240 amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr) & 0xffffffff); in cik_sdma_ring_emit_ib()
483 upper_32_bits(adev->wb.gpu_addr + wb_offset) & 0xFFFFFFFF); in cik_sdma_gfx_resume()
485 ((adev->wb.gpu_addr + wb_offset) & 0xFFFFFFFC)); in cik_sdma_gfx_resume()
489 WREG32(mmSDMA0_GFX_RB_BASE + sdma_offsets[i], ring->gpu_addr >> 8); in cik_sdma_gfx_resume()
490 WREG32(mmSDMA0_GFX_RB_BASE_HI + sdma_offsets[i], ring->gpu_addr >> 40); in cik_sdma_gfx_resume()
625 u64 gpu_addr; in cik_sdma_ring_test_ring() local
631 gpu_addr = adev->wb.gpu_addr + (index * 4); in cik_sdma_ring_test_ring()
640 amdgpu_ring_write(ring, lower_32_bits(gpu_addr)); in cik_sdma_ring_test_ring()
641 amdgpu_ring_write(ring, upper_32_bits(gpu_addr)); in cik_sdma_ring_test_ring()
[all …]
amdgpu_vce_v3_0.c
288 WREG32(mmVCE_RB_BASE_LO, ring->gpu_addr); in vce_v3_0_start()
289 WREG32(mmVCE_RB_BASE_HI, upper_32_bits(ring->gpu_addr)); in vce_v3_0_start()
295 WREG32(mmVCE_RB_BASE_LO2, ring->gpu_addr); in vce_v3_0_start()
296 WREG32(mmVCE_RB_BASE_HI2, upper_32_bits(ring->gpu_addr)); in vce_v3_0_start()
302 WREG32(mmVCE_RB_BASE_LO3, ring->gpu_addr); in vce_v3_0_start()
303 WREG32(mmVCE_RB_BASE_HI3, upper_32_bits(ring->gpu_addr)); in vce_v3_0_start()
546 WREG32(mmVCE_LMI_VCPU_CACHE_40BIT_BAR0, (adev->vce.gpu_addr >> 8)); in vce_v3_0_mc_resume()
547 WREG32(mmVCE_LMI_VCPU_CACHE_40BIT_BAR1, (adev->vce.gpu_addr >> 8)); in vce_v3_0_mc_resume()
548 WREG32(mmVCE_LMI_VCPU_CACHE_40BIT_BAR2, (adev->vce.gpu_addr >> 8)); in vce_v3_0_mc_resume()
550 WREG32(mmVCE_LMI_VCPU_CACHE_40BIT_BAR, (adev->vce.gpu_addr >> 8)); in vce_v3_0_mc_resume()
[all …]
amdgpu_fence.c
163 amdgpu_ring_emit_fence(ring, ring->fence_drv.gpu_addr, in amdgpu_fence_emit()
210 amdgpu_ring_emit_fence(ring, ring->fence_drv.gpu_addr, in amdgpu_fence_emit_polling()
410 ring->fence_drv.gpu_addr = adev->wb.gpu_addr + (ring->fence_offs * 4); in amdgpu_fence_driver_start_ring()
415 ring->fence_drv.gpu_addr = adev->uvd.inst[ring->me].gpu_addr + index; in amdgpu_fence_driver_start_ring()
426 ring->fence_drv.gpu_addr, ring->fence_drv.cpu_addr); in amdgpu_fence_driver_start_ring()
455 ring->fence_drv.gpu_addr = 0; in amdgpu_fence_driver_init_ring()
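
The amdgpu_fence.c matches pin down where fence values land: most rings use a write-back slot, while UVD rings use an offset inside the UVD instance's own BO. A sketch of that selection; uvd_ring and index stand in for the driver's actual ring check and offset calculation:

    if (!uvd_ring) {
            ring->fence_drv.cpu_addr = &adev->wb.wb[ring->fence_offs];
            ring->fence_drv.gpu_addr = adev->wb.gpu_addr + (ring->fence_offs * 4);
    } else {
            /* index: an offset within the UVD BO, past the firmware image */
            ring->fence_drv.cpu_addr = adev->uvd.inst[ring->me].cpu_addr + index;
            ring->fence_drv.gpu_addr = adev->uvd.inst[ring->me].gpu_addr + index;
    }
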
amdgpu_sdma_v3_0.c
443 amdgpu_ring_write(ring, lower_32_bits(ib->gpu_addr) & 0xffffffe0); in sdma_v3_0_ring_emit_ib()
444 amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr)); in sdma_v3_0_ring_emit_ib()
701 upper_32_bits(adev->wb.gpu_addr + wb_offset) & 0xFFFFFFFF); in sdma_v3_0_gfx_resume()
703 lower_32_bits(adev->wb.gpu_addr + wb_offset) & 0xFFFFFFFC); in sdma_v3_0_gfx_resume()
707 WREG32(mmSDMA0_GFX_RB_BASE + sdma_offsets[i], ring->gpu_addr >> 8); in sdma_v3_0_gfx_resume()
708 WREG32(mmSDMA0_GFX_RB_BASE_HI + sdma_offsets[i], ring->gpu_addr >> 40); in sdma_v3_0_gfx_resume()
722 wptr_gpu_addr = adev->wb.gpu_addr + (ring->wptr_offs * 4); in sdma_v3_0_gfx_resume()
832 u64 gpu_addr; in sdma_v3_0_ring_test_ring() local
838 gpu_addr = adev->wb.gpu_addr + (index * 4); in sdma_v3_0_ring_test_ring()
848 amdgpu_ring_write(ring, lower_32_bits(gpu_addr)); in sdma_v3_0_ring_test_ring()
[all …]
amdgpu_jpeg_v1_0.c
65 val = lower_32_bits(ring->gpu_addr); in jpeg_v1_0_decode_ring_set_patch_ring()
71 val = upper_32_bits(ring->gpu_addr); in jpeg_v1_0_decode_ring_set_patch_ring()
310 amdgpu_ring_write(ring, lower_32_bits(ib->gpu_addr)); in jpeg_v1_0_decode_ring_emit_ib()
314 amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr)); in jpeg_v1_0_decode_ring_emit_ib()
322 amdgpu_ring_write(ring, lower_32_bits(ring->gpu_addr)); in jpeg_v1_0_decode_ring_emit_ib()
326 amdgpu_ring_write(ring, upper_32_bits(ring->gpu_addr)); in jpeg_v1_0_decode_ring_emit_ib()
527 WREG32_SOC15(JPEG, 0, mmUVD_LMI_JRBC_RB_64BIT_BAR_LOW, lower_32_bits(ring->gpu_addr)); in jpeg_v1_0_start()
528 WREG32_SOC15(JPEG, 0, mmUVD_LMI_JRBC_RB_64BIT_BAR_HIGH, upper_32_bits(ring->gpu_addr)); in jpeg_v1_0_start()
/netbsd-src/sys/external/bsd/drm/dist/shared-core/
r600_blit.c
1202 set_render_target(drm_radeon_private_t *dev_priv, int format, int w, int h, u64 gpu_addr) in set_render_target() argument
1222 OUT_RING(gpu_addr >> 8); in set_render_target()
1229 OUT_RING(gpu_addr >> 8); in set_render_target()
1285 u64 gpu_addr; in set_shaders() local
1305 gpu_addr = dev_priv->gart_buffers_offset + dev_priv->blit_vb->offset; in set_shaders()
1314 OUT_RING(gpu_addr >> 8); in set_shaders()
1327 OUT_RING((gpu_addr + 256) >> 8); in set_shaders()
1343 R600_SH_ACTION_ENA, 512, gpu_addr); in set_shaders()
1347 set_vtx_resource(drm_radeon_private_t *dev_priv, u64 gpu_addr) in set_vtx_resource() argument
1353 sq_vtx_constant_word2 = (((gpu_addr >> 32) & 0xff) | (16 << 8)); in set_vtx_resource()
[all …]
/netbsd-src/sys/external/bsd/drm2/dist/drm/amd/amdkfd/
kfd_mqd_manager.c
62 mqd_mem_obj->gpu_addr = dev->dqm->hiq_sdma_mqd.gpu_addr; in allocate_hiq_mqd()
87 mqd_mem_obj->gpu_addr = dev->dqm->hiq_sdma_mqd.gpu_addr + offset; in allocate_sdma_mqd()
/netbsd-src/sys/external/bsd/drm2/dist/drm/amd/display/dmub/src/
amdgpu_dmub_srv.c
283 gpu_base = params->gpu_addr; in dmub_srv_calc_fb_info()
290 out->fb[i].gpu_addr = gpu_base + reg->base; in dmub_srv_calc_fb_info()
352 cw0.offset.quad_part = inst_fb->gpu_addr; in dmub_srv_hw_init()
356 cw1.offset.quad_part = stack_fb->gpu_addr; in dmub_srv_hw_init()
376 cw2.offset.quad_part = data_fb->gpu_addr; in dmub_srv_hw_init()
380 cw3.offset.quad_part = bios_fb->gpu_addr; in dmub_srv_hw_init()
384 cw4.offset.quad_part = mail_fb->gpu_addr; in dmub_srv_hw_init()
391 cw5.offset.quad_part = tracebuff_fb->gpu_addr; in dmub_srv_hw_init()
395 cw6.offset.quad_part = fw_state_fb->gpu_addr; in dmub_srv_hw_init()