/openbsd-src/sys/dev/pci/drm/amd/amdgpu/
amdgpu_ring.c
     40: * Most engines on the GPU are fed via ring buffers.  Ring
     46: * pointers are equal, the ring is idle.  When the host
     47: * writes commands to the ring buffer, it increments the
     55: * @type: ring type for which to return the limit.
     73: * amdgpu_ring_alloc - allocate space on the ring buffer
     75: * @ring: amdgpu_ring structure holding ring information
     76: * @ndw: number of dwords to allocate in the ring buffer
     78: * Allocate @ndw dwords in the ring buffer (all asics).
     81: int amdgpu_ring_alloc(struct amdgpu_ring *ring, unsigned ndw)
    109: amdgpu_ring_insert_nop(struct amdgpu_ring *ring, uint32_t count)
    125: amdgpu_ring_generic_pad_ib(struct amdgpu_ring *ring, struct amdgpu_ib *ib)
    140: amdgpu_ring_commit(struct amdgpu_ring *ring)
    164: amdgpu_ring_undo(struct amdgpu_ring *ring)
    172: amdgpu_ring_get_gpu_addr(ring, offset)  (macro)
    177: amdgpu_ring_get_cpu_addr(ring, offset)  (macro)
    196: amdgpu_ring_init(struct amdgpu_device *adev, struct amdgpu_ring *ring, unsigned int max_dw, struct amdgpu_irq_src *irq_src, unsigned int irq_type, unsigned int hw_prio, atomic_t *sched_score)
    372: amdgpu_ring_fini(struct amdgpu_ring *ring)
    416: amdgpu_ring_emit_reg_write_reg_wait_helper(struct amdgpu_ring *ring, uint32_t reg0, uint32_t reg1, uint32_t ref, uint32_t mask)
    433: amdgpu_ring_soft_recovery(struct amdgpu_ring *ring, unsigned int vmid, struct dma_fence *fence)
    471: struct amdgpu_ring *ring = file_inode(f)->i_private;  (in amdgpu_debugfs_ring_read)
    521: struct amdgpu_ring *ring = file_inode(f)->i_private;  (in amdgpu_debugfs_mqd_read)
    585: struct amdgpu_ring *ring = data;  (in amdgpu_debugfs_ring_error)
    597: amdgpu_debugfs_ring_init(struct amdgpu_device *adev, struct amdgpu_ring *ring)
    632: amdgpu_ring_test_helper(struct amdgpu_ring *ring)
    649: amdgpu_ring_to_mqd_prop(struct amdgpu_ring *ring, struct amdgpu_mqd_prop *prop)
    679: amdgpu_ring_init_mqd(struct amdgpu_ring *ring)
    697: amdgpu_ring_ib_begin(struct amdgpu_ring *ring)
    703: amdgpu_ring_ib_end(struct amdgpu_ring *ring)
    709: amdgpu_ring_ib_on_emit_cntl(struct amdgpu_ring *ring)
    715: amdgpu_ring_ib_on_emit_ce(struct amdgpu_ring *ring)
    721: amdgpu_ring_ib_on_emit_de(struct amdgpu_ring *ring)
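The header comment matched at lines 40-47 describes the protocol every entry in this listing builds on: the host writes commands at the write pointer (wptr), the GPU consumes at the read pointer (rptr), and equal pointers mean the ring is idle. Below is a minimal, self-contained sketch of that protocol; the names (toy_ring, toy_ring_alloc, ...) are illustrative, not the amdgpu API, whose helpers also notify the hardware and pad submissions.

```c
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define RING_DWORDS 256                 /* must be a power of two */

struct toy_ring {
	uint32_t buf[RING_DWORDS];
	uint32_t buf_mask;              /* RING_DWORDS - 1 */
	uint32_t rptr;                  /* where the GPU is reading */
	uint32_t wptr;                  /* where the host has written */
	uint32_t wptr_old;              /* saved so an aborted submission can undo */
};

static bool toy_ring_idle(const struct toy_ring *r)
{
	return r->rptr == r->wptr;      /* pointers equal -> ring is idle */
}

/* Reserve ndw dwords, mirroring the role of amdgpu_ring_alloc(). */
static int toy_ring_alloc(struct toy_ring *r, unsigned ndw)
{
	unsigned used = (r->wptr - r->rptr) & r->buf_mask;

	if (ndw > r->buf_mask - used)
		return -1;              /* not enough free space */
	r->wptr_old = r->wptr;          /* remember the start in case of undo */
	return 0;
}

static void toy_ring_write(struct toy_ring *r, uint32_t v)
{
	r->buf[r->wptr & r->buf_mask] = v;
	r->wptr++;                      /* hardware is only told at commit time */
}

int main(void)
{
	struct toy_ring r = { .buf_mask = RING_DWORDS - 1 };

	if (toy_ring_alloc(&r, 4) == 0) {
		for (uint32_t i = 0; i < 4; i++)
			toy_ring_write(&r, 0xdeadbeef + i);
		/* commit: a real driver writes the new wptr to a doorbell here */
	}
	printf("idle=%d used=%u\n", toy_ring_idle(&r),
	    (unsigned)((r.wptr - r.rptr) & r.buf_mask));
	return 0;
}
```

Undoing an abandoned submission, as amdgpu_ring_undo() does, is then just restoring `r->wptr = r->wptr_old`.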
jpeg_v1_0.c
     37: static void jpeg_v1_0_ring_begin_use(struct amdgpu_ring *ring);
     38: jpeg_v1_0_decode_ring_patch_wreg(struct amdgpu_ring *ring, uint32_t *ptr, uint32_t reg_offset, uint32_t val)
     44: struct amdgpu_device *adev = ring->adev;
     45: ring->ring[(*ptr)++] = PACKETJ(SOC15_REG_OFFSET(JPEG, 0, mmUVD_JRBC_EXTERNAL_REG_BASE), 0, 0, PACKETJ_TYPE0);
     48: ring->ring[(*ptr)++] = 0;
     49: ring->ring[(*ptr)++] = PACKETJ((reg_offset >> 2), 0, 0, PACKETJ_TYPE0);
     53: jpeg_v1_0_decode_ring_set_patch_ring(struct amdgpu_ring *ring, uint32_t ptr)
    136: jpeg_v1_0_decode_ring_get_rptr(struct amdgpu_ring *ring)
    150: jpeg_v1_0_decode_ring_get_wptr(struct amdgpu_ring *ring)
    164: jpeg_v1_0_decode_ring_set_wptr(struct amdgpu_ring *ring)
    178: jpeg_v1_0_decode_ring_insert_start(struct amdgpu_ring *ring)
    197: jpeg_v1_0_decode_ring_insert_end(struct amdgpu_ring *ring)
    219: jpeg_v1_0_decode_ring_emit_fence(struct amdgpu_ring *ring, u64 addr, u64 seq, unsigned flags)
    293: jpeg_v1_0_decode_ring_emit_ib(struct amdgpu_ring *ring, struct amdgpu_job *job, struct amdgpu_ib *ib, uint32_t flags)
    346: jpeg_v1_0_decode_ring_emit_reg_wait(struct amdgpu_ring *ring, uint32_t reg, uint32_t val, uint32_t mask)
    376: jpeg_v1_0_decode_ring_emit_vm_flush(struct amdgpu_ring *ring, unsigned vmid, uint64_t pd_addr)
    391: jpeg_v1_0_decode_ring_emit_wreg(struct amdgpu_ring *ring, uint32_t reg, uint32_t val)
    412: jpeg_v1_0_decode_ring_nop(struct amdgpu_ring *ring, uint32_t count)
    480: struct amdgpu_ring *ring;  (in jpeg_v1_0_sw_init)
    526: struct amdgpu_ring *ring = adev->jpeg.inst->ring_dec;  (in jpeg_v1_0_start)
    597: jpeg_v1_0_ring_begin_use(struct amdgpu_ring *ring)
amdgpu_fence.c
     56: struct amdgpu_ring *ring;  (struct member)
    110: static void amdgpu_fence_write(struct amdgpu_ring *ring, u32 seq)
    112: struct amdgpu_fence_driver *drv = &ring->fence_drv;
    126: static u32 amdgpu_fence_read(struct amdgpu_ring *ring)
    128: struct amdgpu_fence_driver *drv = &ring->fence_drv;
    150: int amdgpu_fence_emit(struct amdgpu_ring *ring, struct dma_fence **f, struct amdgpu_job *job,
    153: struct amdgpu_device *adev = ring->adev;
    170: am_fence->ring = ring;
    176: seq = ++ring->fence_drv.sync_seq;
    185: &ring->fence_drv.lock,
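amdgpu_fence_emit() hands out a monotonically increasing per-ring sequence number (`seq = ++ring->fence_drv.sync_seq`), and the GPU later writes the completed value back where amdgpu_fence_read() can see it. A minimal sketch of that bookkeeping with a wrap-safe signaled test; the struct and function names here are illustrative, and the real driver keeps the hardware-visible seqno in GPU-accessible memory:

```c
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/* Illustrative stand-in for the per-ring fence bookkeeping. */
struct toy_fence_drv {
	uint32_t sync_seq;      /* last seqno handed out, as in fence_drv.sync_seq */
	uint32_t hw_seq;        /* stands in for the seqno the GPU wrote back */
};

static uint32_t toy_fence_emit(struct toy_fence_drv *drv)
{
	return ++drv->sync_seq; /* mirrors: seq = ++ring->fence_drv.sync_seq */
}

static bool toy_fence_signaled(const struct toy_fence_drv *drv, uint32_t seq)
{
	/* Wrap-safe compare: correct even after the 32-bit counter overflows. */
	return (int32_t)(drv->hw_seq - seq) >= 0;
}

int main(void)
{
	struct toy_fence_drv drv = { .sync_seq = 0xfffffffe };
	uint32_t a = toy_fence_emit(&drv);      /* 0xffffffff */
	uint32_t b = toy_fence_emit(&drv);      /* 0x00000000: counter wrapped */

	drv.hw_seq = a;                         /* pretend the GPU reached 'a' */
	printf("a signaled: %d, b signaled: %d\n",
	    toy_fence_signaled(&drv, a), toy_fence_signaled(&drv, b));
	return 0;
}
```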
amdgpu_ib.c
    126: int amdgpu_ib_schedule(struct amdgpu_ring *ring, unsigned int num_ibs,
    130: struct amdgpu_device *adev = ring->adev;
    168: if (!ring->sched.ready && !ring->is_mes_queue) {
    169: dev_err(adev->dev, "couldn't schedule ib on ring <%s>\n", ring->name);
    173: if (vm && !job->vmid && !ring->is_mes_queue) {
    179: (!ring->funcs->secure_submission_supported)) {
    180: dev_err(adev->dev, "secure submissions not supported on ring <%s>\n", ring->name);
    184: alloc_size = ring->funcs->emit_frame_size + num_ibs *
    185: ring->funcs->emit_ib_size;
    187: r = amdgpu_ring_alloc(ring, alloc_size);
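The allocation at lines 184-187 sizes the ring request as a fixed per-frame overhead plus a per-IB cost, both advertised by the ring's function table. A hedged arithmetic sketch with made-up dword counts; the real values come from `ring->funcs` and vary per engine:

```c
#include <stdio.h>

int main(void)
{
	/* Hypothetical per-ring costs in dwords; real drivers read these from
	 * ring->funcs->emit_frame_size and ->emit_ib_size. */
	unsigned emit_frame_size = 64;  /* fixed overhead: fences, syncs, flushes */
	unsigned emit_ib_size = 4;      /* one INDIRECT_BUFFER packet */
	unsigned num_ibs = 3;

	unsigned alloc_size = emit_frame_size + num_ibs * emit_ib_size;

	printf("reserve %u dwords on the ring\n", alloc_size);  /* 76 */
	return 0;
}
```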
amdgpu_ring_mux.c
     44: struct amdgpu_ring *ring)  (in amdgpu_ring_mux_sw_entry)
     46: return ring->entry_index < mux->ring_entry_size ?
     47: &mux->ring_entry[ring->entry_index] : NULL;
     52: struct amdgpu_ring *ring,  (in amdgpu_ring_mux_copy_pkt_from_sw_ring)
     58: start = s_start & ring->buf_mask;
     59: end = s_end & ring->buf_mask;
     66: amdgpu_ring_alloc(real_ring, (ring->ring_size >> 2) + end - start);
     67: amdgpu_ring_write_multiple(real_ring, (void *)&ring->ring[start],
     68: (ring->ring_size >> 2) - start);
     69: amdgpu_ring_write_multiple(real_ring, (void *)&ring->ring[0], end);
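amdgpu_ring_mux_copy_pkt_from_sw_ring() masks both endpoints with `buf_mask` and, when the range wraps past the top of the software ring, copies in two pieces (lines 67-69). A self-contained sketch of that split copy; `copy_from_ring` is a stand-in, not the driver function:

```c
#include <stdint.h>
#include <stdio.h>
#include <string.h>

#define SW_DWORDS 8     /* tiny power-of-two software ring for the demo */

/* Copy dwords [s_start, s_end) out of a circular source, splitting the
 * copy in two when the range wraps past the top of the buffer. */
static unsigned copy_from_ring(const uint32_t *src, uint32_t buf_mask,
    uint32_t s_start, uint32_t s_end, uint32_t *dst)
{
	uint32_t start = s_start & buf_mask;
	uint32_t end = s_end & buf_mask;
	unsigned n;

	if (start > end) {      /* wrapped: tail of the buffer, then the head */
		n = (buf_mask + 1) - start;
		memcpy(dst, &src[start], n * sizeof(uint32_t));
		memcpy(dst + n, &src[0], end * sizeof(uint32_t));
		n += end;
	} else {
		n = end - start;
		memcpy(dst, &src[start], n * sizeof(uint32_t));
	}
	return n;
}

int main(void)
{
	uint32_t ring[SW_DWORDS] = { 0, 1, 2, 3, 4, 5, 6, 7 };
	uint32_t out[SW_DWORDS];
	unsigned n = copy_from_ring(ring, SW_DWORDS - 1, 6, 10, out);

	for (unsigned i = 0; i < n; i++)
		printf("%u ", (unsigned)out[i]);    /* prints: 6 7 0 1 */
	printf("\n");
	return 0;
}
```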
jpeg_v2_0.c
     45: * Set ring and irq function pointers
     70: struct amdgpu_ring *ring;  (in jpeg_v2_0_sw_init)
     87: ring = adev->jpeg.inst->ring_dec;
     88: ring->use_doorbell = true;
     89: ring->doorbell_index = (adev->doorbell_index.vcn.vcn_ring0_1 << 1) + 1;
     90: ring->vm_hub = AMDGPU_MMHUB0(0);
     91: snprintf(ring->name, sizeof(ring->name), "jpeg_dec");
     92: r = amdgpu_ring_init(adev, ring, 512, &adev->jpeg.inst->irq,
    133: struct amdgpu_ring *ring;  (in jpeg_v2_0_hw_init)
    316: struct amdgpu_ring *ring = adev->jpeg.inst->ring_dec;  (in jpeg_v2_0_start)
    393: jpeg_v2_0_dec_ring_get_rptr(struct amdgpu_ring *ring)
    407: jpeg_v2_0_dec_ring_get_wptr(struct amdgpu_ring *ring)
    424: jpeg_v2_0_dec_ring_set_wptr(struct amdgpu_ring *ring)
    443: jpeg_v2_0_dec_ring_insert_start(struct amdgpu_ring *ring)
    461: jpeg_v2_0_dec_ring_insert_end(struct amdgpu_ring *ring)
    482: jpeg_v2_0_dec_ring_emit_fence(struct amdgpu_ring *ring, u64 addr, u64 seq, unsigned flags)
    533: jpeg_v2_0_dec_ring_emit_ib(struct amdgpu_ring *ring, struct amdgpu_job *job, struct amdgpu_ib *ib, uint32_t flags)
    588: jpeg_v2_0_dec_ring_emit_reg_wait(struct amdgpu_ring *ring, uint32_t reg, uint32_t val, uint32_t mask)
    615: jpeg_v2_0_dec_ring_emit_vm_flush(struct amdgpu_ring *ring, unsigned vmid, uint64_t pd_addr)
    630: jpeg_v2_0_dec_ring_emit_wreg(struct amdgpu_ring *ring, uint32_t reg, uint32_t val)
    648: jpeg_v2_0_dec_ring_nop(struct amdgpu_ring *ring, uint32_t count)
amdgpu_ring.h
    128: void amdgpu_fence_driver_clear_job_fences(struct amdgpu_ring *ring);
    129: void amdgpu_fence_driver_set_error(struct amdgpu_ring *ring, int error);
    130: void amdgpu_fence_driver_force_completion(struct amdgpu_ring *ring);
    132: int amdgpu_fence_driver_init_ring(struct amdgpu_ring *ring);
    133: int amdgpu_fence_driver_start_ring(struct amdgpu_ring *ring,
    140: int amdgpu_fence_emit(struct amdgpu_ring *ring, struct dma_fence **fence, struct amdgpu_job *job,
    142: int amdgpu_fence_emit_polling(struct amdgpu_ring *ring, uint32_t *s,
    144: bool amdgpu_fence_process(struct amdgpu_ring *ring);
    145: int amdgpu_fence_wait_empty(struct amdgpu_ring *ring);
    146: signed long amdgpu_fence_wait_polling(struct amdgpu_ring *ring,
uvd_v7_0.c
     67: * @ring: amdgpu_ring pointer
     71: static uint64_t uvd_v7_0_ring_get_rptr(struct amdgpu_ring *ring)
     73: struct amdgpu_device *adev = ring->adev;
     75: return RREG32_SOC15(UVD, ring->me, mmUVD_RBC_RB_RPTR);
     81: * @ring: amdgpu_ring pointer
     85: static uint64_t uvd_v7_0_enc_ring_get_rptr(struct amdgpu_ring *ring)
     87: struct amdgpu_device *adev = ring->adev;
     89: if (ring == &adev->uvd.inst[ring->me].ring_enc[0])
    102: uvd_v7_0_ring_get_wptr(struct amdgpu_ring *ring)
    116: uvd_v7_0_enc_ring_get_wptr(struct amdgpu_ring *ring)
    136: uvd_v7_0_ring_set_wptr(struct amdgpu_ring *ring)
    150: uvd_v7_0_enc_ring_set_wptr(struct amdgpu_ring *ring)
    175: uvd_v7_0_enc_ring_test_ring(struct amdgpu_ring *ring)
    216: uvd_v7_0_enc_get_create_msg(struct amdgpu_ring *ring, u32 handle, struct amdgpu_bo *bo, struct dma_fence **fence)
    279: uvd_v7_0_enc_get_destroy_msg(struct amdgpu_ring *ring, u32 handle, struct amdgpu_bo *bo, struct dma_fence **fence)
    339: uvd_v7_0_enc_ring_test_ib(struct amdgpu_ring *ring, long timeout)
    400: struct amdgpu_ring *ring;  (in uvd_v7_0_sw_init)
    524: struct amdgpu_ring *ring;  (in uvd_v7_0_hw_init)
    786: struct amdgpu_ring *ring;  (in uvd_v7_0_sriov_start)
    952: struct amdgpu_ring *ring;  (in uvd_v7_0_start)
   1177: uvd_v7_0_ring_emit_fence(struct amdgpu_ring *ring, u64 addr, u64 seq, unsigned flags)
   1218: uvd_v7_0_enc_ring_emit_fence(struct amdgpu_ring *ring, u64 addr, u64 seq, unsigned flags)
   1236: uvd_v7_0_ring_emit_hdp_flush(struct amdgpu_ring *ring)
   1248: uvd_v7_0_ring_test_ring(struct amdgpu_ring *ring)
   1289: struct amdgpu_ring *ring = to_amdgpu_ring(job->base.sched);  (in uvd_v7_0_ring_patch_cs_in_place)
   1317: uvd_v7_0_ring_emit_ib(struct amdgpu_ring *ring, struct amdgpu_job *job, struct amdgpu_ib *ib, uint32_t flags)
   1350: uvd_v7_0_enc_ring_emit_ib(struct amdgpu_ring *ring, struct amdgpu_job *job, struct amdgpu_ib *ib, uint32_t flags)
   1364: uvd_v7_0_ring_emit_wreg(struct amdgpu_ring *ring, uint32_t reg, uint32_t val)
   1380: uvd_v7_0_ring_emit_reg_wait(struct amdgpu_ring *ring, uint32_t reg, uint32_t val, uint32_t mask)
   1399: uvd_v7_0_ring_emit_vm_flush(struct amdgpu_ring *ring, unsigned vmid, uint64_t pd_addr)
   1414: uvd_v7_0_ring_insert_nop(struct amdgpu_ring *ring, uint32_t count)
   1427: uvd_v7_0_enc_ring_insert_end(struct amdgpu_ring *ring)
   1432: uvd_v7_0_enc_ring_emit_reg_wait(struct amdgpu_ring *ring, uint32_t reg, uint32_t val, uint32_t mask)
   1442: uvd_v7_0_enc_ring_emit_vm_flush(struct amdgpu_ring *ring, unsigned int vmid, uint64_t pd_addr)
   1455: uvd_v7_0_enc_ring_emit_wreg(struct amdgpu_ring *ring, uint32_t reg, uint32_t val)
vcn_sw_ring.c
     27: void vcn_dec_sw_ring_emit_fence(struct amdgpu_ring *ring, u64 addr,
     32: amdgpu_ring_write(ring, VCN_DEC_SW_CMD_FENCE);
     33: amdgpu_ring_write(ring, addr);
     34: amdgpu_ring_write(ring, upper_32_bits(addr));
     35: amdgpu_ring_write(ring, seq);
     36: amdgpu_ring_write(ring, VCN_DEC_SW_CMD_TRAP);
     39: void vcn_dec_sw_ring_insert_end(struct amdgpu_ring *ring)
     41: amdgpu_ring_write(ring, VCN_DEC_SW_CMD_END);
     44: void vcn_dec_sw_ring_emit_ib(struct amdgpu_ring *ring, struct amdgpu_job *job,
     49: amdgpu_ring_write(ring, VCN_DEC_SW_CMD_IB);
sdma_v6_0.c
     82: static unsigned sdma_v6_0_ring_init_cond_exec(struct amdgpu_ring *ring)
     86: amdgpu_ring_write(ring, SDMA_PKT_COPY_LINEAR_HEADER_OP(SDMA_OP_COND_EXE));
     87: amdgpu_ring_write(ring, lower_32_bits(ring->cond_exe_gpu_addr));
     88: amdgpu_ring_write(ring, upper_32_bits(ring->cond_exe_gpu_addr));
     89: amdgpu_ring_write(ring, 1);
     90: ret = ring->wptr & ring->buf_mask;  /* this is the offset we need patch later */
     91: amdgpu_ring_write(ring, 0x55aa55aa);  /* insert dummy here and patch it later */
     96: static void sdma_v6_0_ring_patch_cond_exec(struct amdgpu_ring *ring,
    101: BUG_ON(offset > ring->buf_mask);
    102: BUG_ON(ring->ring[offset] != 0x55aa55aa);
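The COND_EXE sequence writes a recognizable dummy dword (0x55aa55aa), remembers its masked offset, and patches in the real dword count once the guarded packets have been emitted; the BUG_ON checks verify the placeholder is still intact before patching. A toy version of that init-then-patch pattern; the header value and packet layout here are stand-ins, not the SDMA packet format:

```c
#include <assert.h>
#include <stdint.h>
#include <stdio.h>

#define RING_DWORDS 16
#define DUMMY 0x55aa55aa        /* recognizable placeholder, as in the driver */

struct toy_ring {
	uint32_t ring[RING_DWORDS];
	uint32_t buf_mask;
	uint32_t wptr;
};

static void ring_write(struct toy_ring *r, uint32_t v)
{
	r->ring[r->wptr++ & r->buf_mask] = v;
}

/* Emit a "conditional execute" packet whose dword count isn't known yet;
 * return the offset of the placeholder so it can be patched later. */
static unsigned init_cond_exec(struct toy_ring *r)
{
	unsigned ret;

	ring_write(r, 0x1234);          /* stand-in for the COND_EXE header */
	ring_write(r, 0);               /* test address, low dword */
	ring_write(r, 0);               /* test address, high dword */
	ring_write(r, 1);               /* reference value */
	ret = r->wptr & r->buf_mask;    /* offset we patch later */
	ring_write(r, DUMMY);
	return ret;
}

static void patch_cond_exec(struct toy_ring *r, unsigned offset)
{
	unsigned cur;

	assert(offset <= r->buf_mask);
	assert(r->ring[offset] == DUMMY);       /* placeholder still intact */
	cur = (r->wptr - 1) & r->buf_mask;      /* last dword written */
	if (cur > offset)
		r->ring[offset] = cur - offset;
	else
		r->ring[offset] = r->buf_mask + 1 - offset + cur;
}

int main(void)
{
	struct toy_ring r = { .buf_mask = RING_DWORDS - 1 };
	unsigned off = init_cond_exec(&r);

	ring_write(&r, 0xc0ffee);       /* the payload being guarded */
	ring_write(&r, 0xc0ffee);
	patch_cond_exec(&r, off);
	printf("patched count = %u\n", (unsigned)r.ring[off]);  /* 2 */
	return 0;
}
```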
jpeg_v4_0_3.c
     46: static void jpeg_v4_0_3_dec_ring_set_wptr(struct amdgpu_ring *ring);
     64: * Set ring and irq function pointers
     88: struct amdgpu_ring *ring;  (in jpeg_v4_0_3_sw_init)
    112: ring = &adev->jpeg.inst[i].ring_dec[j];
    113: ring->use_doorbell = true;
    114: ring->vm_hub = AMDGPU_MMHUB0(adev->jpeg.inst[i].aid_id);
    116: ring->doorbell_index =
    121: ring->doorbell_index =
    125: ring->doorbell_index =
    178: struct amdgpu_ring *ring;  (in jpeg_v4_0_3_start_sriov)
    298: struct amdgpu_ring *ring;  (in jpeg_v4_0_3_hw_init)
    472: struct amdgpu_ring *ring;  (in jpeg_v4_0_3_start)
    596: jpeg_v4_0_3_dec_ring_get_rptr(struct amdgpu_ring *ring)
    612: jpeg_v4_0_3_dec_ring_get_wptr(struct amdgpu_ring *ring)
    632: jpeg_v4_0_3_dec_ring_set_wptr(struct amdgpu_ring *ring)
    655: jpeg_v4_0_3_dec_ring_insert_start(struct amdgpu_ring *ring)
    673: jpeg_v4_0_3_dec_ring_insert_end(struct amdgpu_ring *ring)
    694: jpeg_v4_0_3_dec_ring_emit_fence(struct amdgpu_ring *ring, u64 addr, u64 seq, unsigned int flags)
    763: jpeg_v4_0_3_dec_ring_emit_ib(struct amdgpu_ring *ring, struct amdgpu_job *job, struct amdgpu_ib *ib, uint32_t flags)
    814: jpeg_v4_0_3_dec_ring_emit_reg_wait(struct amdgpu_ring *ring, uint32_t reg, uint32_t val, uint32_t mask)
    841: jpeg_v4_0_3_dec_ring_emit_vm_flush(struct amdgpu_ring *ring, unsigned int vmid, uint64_t pd_addr)
    856: jpeg_v4_0_3_dec_ring_emit_wreg(struct amdgpu_ring *ring, uint32_t reg, uint32_t val)
    874: jpeg_v4_0_3_dec_ring_nop(struct amdgpu_ring *ring, uint32_t count)
uvd_v6_0.c
     77: static uint64_t uvd_v6_0_ring_get_rptr(struct amdgpu_ring *ring)
     79: struct amdgpu_device *adev = ring->adev;
     91: static uint64_t uvd_v6_0_enc_ring_get_rptr(struct amdgpu_ring *ring)
     93: struct amdgpu_device *adev = ring->adev;
     95: if (ring == &adev->uvd.inst->ring_enc[0])
    107: static uint64_t uvd_v6_0_ring_get_wptr(struct amdgpu_ring *ring)
    109: struct amdgpu_device *adev = ring->adev;
    121: static uint64_t uvd_v6_0_enc_ring_get_wptr(struct amdgpu_ring *ring)
    123: struct amdgpu_device *adev = ring->adev;
    125: if (ring == &adev->uvd.inst->ring_enc[0])
sdma_v5_2.c
     92: static unsigned sdma_v5_2_ring_init_cond_exec(struct amdgpu_ring *ring)
     96: amdgpu_ring_write(ring, SDMA_PKT_HEADER_OP(SDMA_OP_COND_EXE));
     97: amdgpu_ring_write(ring, lower_32_bits(ring->cond_exe_gpu_addr));
     98: amdgpu_ring_write(ring, upper_32_bits(ring->cond_exe_gpu_addr));
     99: amdgpu_ring_write(ring, 1);
    100: ret = ring->wptr & ring->buf_mask;  /* this is the offset we need patch later */
    106: sdma_v5_2_ring_patch_cond_exec(struct amdgpu_ring *ring, unsigned offset)
    128: sdma_v5_2_ring_get_rptr(struct amdgpu_ring *ring)
    146: sdma_v5_2_ring_get_wptr(struct amdgpu_ring *ring)
    172: sdma_v5_2_ring_set_wptr(struct amdgpu_ring *ring)
    206: sdma_v5_2_ring_insert_nop(struct amdgpu_ring *ring, uint32_t count)
    229: sdma_v5_2_ring_emit_ib(struct amdgpu_ring *ring, struct amdgpu_job *job, struct amdgpu_ib *ib, uint32_t flags)
    264: sdma_v5_2_ring_emit_mem_sync(struct amdgpu_ring *ring)
    289: sdma_v5_2_ring_emit_hdp_flush(struct amdgpu_ring *ring)
    324: sdma_v5_2_ring_emit_fence(struct amdgpu_ring *ring, u64 addr, u64 seq, unsigned flags)
    489: struct amdgpu_ring *ring;  (in sdma_v5_2_gfx_resume)
    826: sdma_v5_2_ring_test_ring(struct amdgpu_ring *ring)
    902: sdma_v5_2_ring_test_ib(struct amdgpu_ring *ring, long timeout)
   1083: sdma_v5_2_ring_pad_ib(struct amdgpu_ring *ring, struct amdgpu_ib *ib)
   1108: sdma_v5_2_ring_emit_pipeline_sync(struct amdgpu_ring *ring)
   1137: sdma_v5_2_ring_emit_vm_flush(struct amdgpu_ring *ring, unsigned vmid, uint64_t pd_addr)
   1143: sdma_v5_2_ring_emit_wreg(struct amdgpu_ring *ring, uint32_t reg, uint32_t val)
   1152: sdma_v5_2_ring_emit_reg_wait(struct amdgpu_ring *ring, uint32_t reg, uint32_t val, uint32_t mask)
   1166: sdma_v5_2_ring_emit_reg_write_reg_wait(struct amdgpu_ring *ring, uint32_t reg0, uint32_t reg1, uint32_t ref, uint32_t mask)
   1225: struct amdgpu_ring *ring;  (in sdma_v5_2_sw_init)
   1352: sdma_v5_2_ring_preempt_ib(struct amdgpu_ring *ring)
   1658: sdma_v5_2_ring_begin_use(struct amdgpu_ring *ring)
   1673: sdma_v5_2_ring_end_use(struct amdgpu_ring *ring)
sdma_v5_0.c
    252: static unsigned sdma_v5_0_ring_init_cond_exec(struct amdgpu_ring *ring)
    256: amdgpu_ring_write(ring, SDMA_PKT_HEADER_OP(SDMA_OP_COND_EXE));
    257: amdgpu_ring_write(ring, lower_32_bits(ring->cond_exe_gpu_addr));
    258: amdgpu_ring_write(ring, upper_32_bits(ring->cond_exe_gpu_addr));
    259: amdgpu_ring_write(ring, 1);
    260: ret = ring->wptr & ring->buf_mask;  /* this is the offset we need patch later */
    261: amdgpu_ring_write(ring, 0x55aa55aa);  /* insert dummy here and patch it later */
    266: static void sdma_v5_0_ring_patch_cond_exec(struct amdgpu_ring *ring,
    271: BUG_ON(offset > ring->buf_mask);
    272: BUG_ON(ring->ring[offset] != 0x55aa55aa);
/openbsd-src/usr.bin/telnet/
ring.c
     77: ring_init(Ring *ring, unsigned char *buffer, int count)
     79: memset(ring, 0, sizeof *ring);
     81: ring->size = count;
     83: ring->supply = ring->consume = ring->bottom = buffer;
     85: ring->top = ring->bottom + ring->size;
     95: ring_mark(Ring *ring)
     97: ring->mark = ring_decrement(ring, ring->supply, 1);
    105: ring_at_mark(Ring *ring)
    107: if (ring->mark == ring->consume) {
    119: ring_clear_mark(Ring *ring)
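Unlike the mask-based GPU rings above, telnet's Ring works with raw pointers: `bottom`/`top` bound the buffer, `supply` is the producer position and `consume` the reader, and wrapping is an explicit compare against `top`. A compilable sketch of init plus a wrap-aware increment; `ring_increment` is inferred from the matched `ring_decrement` and should be treated as illustrative:

```c
#include <stdio.h>
#include <string.h>

typedef struct {
	unsigned char *bottom, *top;    /* buffer bounds: [bottom, top) */
	unsigned char *supply;          /* where new data is written */
	unsigned char *consume;         /* where data is read out */
	int size;
} Ring;

static void ring_init(Ring *ring, unsigned char *buffer, int count)
{
	memset(ring, 0, sizeof *ring);
	ring->size = count;
	ring->supply = ring->consume = ring->bottom = buffer;
	ring->top = ring->bottom + ring->size;
}

/* Advance a pointer by one, wrapping from top back to bottom. */
static unsigned char *ring_increment(Ring *ring, unsigned char *p)
{
	return (p + 1 == ring->top) ? ring->bottom : p + 1;
}

int main(void)
{
	unsigned char buf[4];
	Ring r;

	ring_init(&r, buf, sizeof buf);
	for (int i = 0; i < 5; i++)     /* walk past the top once */
		r.supply = ring_increment(&r, r.supply);
	printf("supply offset after 5 steps: %td\n", r.supply - r.bottom); /* 1 */
	return 0;
}
```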
/openbsd-src/sys/dev/pci/drm/radeon/
radeon_ring.c
     37: * Most engines on the GPU are fed via ring buffers.  Ring
     43: * pointers are equal, the ring is idle.  When the host
     44: * writes commands to the ring buffer, it increments the
     48: static void radeon_debugfs_ring_init(struct radeon_device *rdev, struct radeon_ring *ring);
     51: * radeon_ring_supports_scratch_reg - check if the ring supports
     55: * @ring: radeon_ring structure holding ring information
     57: * Check if a specific ring supports writing to scratch registers (all asics).
     58: * Returns true if the ring supports writing to scratch regs, false if not.
     61: bool radeon_ring_supports_scratch_reg(struct radeon_device *rdev, struct radeon_ring *ring)
     81: radeon_ring_free_size(struct radeon_device *rdev, struct radeon_ring *ring)
    107: radeon_ring_alloc(struct radeon_device *rdev, struct radeon_ring *ring, unsigned ndw)
    143: radeon_ring_lock(struct radeon_device *rdev, struct radeon_ring *ring, unsigned ndw)
    167: radeon_ring_commit(struct radeon_device *rdev, struct radeon_ring *ring, bool hdp_flush)
    198: radeon_ring_unlock_commit(struct radeon_device *rdev, struct radeon_ring *ring, bool hdp_flush)
    212: radeon_ring_undo(struct radeon_ring *ring)
    225: radeon_ring_unlock_undo(struct radeon_device *rdev, struct radeon_ring *ring)
    240: radeon_ring_lockup_update(struct radeon_device *rdev, struct radeon_ring *ring)
    252: radeon_ring_test_lockup(struct radeon_device *rdev, struct radeon_ring *ring)
    283: radeon_ring_backup(struct radeon_device *rdev, struct radeon_ring *ring, uint32_t **data)
    347: radeon_ring_restore(struct radeon_device *rdev, struct radeon_ring *ring, unsigned size, uint32_t *data)
    381: radeon_ring_init(struct radeon_device *rdev, struct radeon_ring *ring, unsigned ring_size, unsigned rptr_offs, u32 nop)
    437: radeon_ring_fini(struct radeon_device *rdev, struct radeon_ring *ring)
    467: struct radeon_ring *ring = m->private;  (in radeon_debugfs_ring_info_show)
    547: radeon_debugfs_ring_init(struct radeon_device *rdev, struct radeon_ring *ring)
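radeon_ring_lockup_update() and radeon_ring_test_lockup() implement a simple watchdog: remember the last rptr and when it last moved; if it stays put past a timeout, declare a lockup. A toy version using wall-clock seconds instead of jiffies, with illustrative names:

```c
#include <stdbool.h>
#include <stdio.h>
#include <time.h>

struct toy_lockup {
	unsigned last_rptr;
	time_t last_activity;
};

static void lockup_update(struct toy_lockup *l, unsigned rptr)
{
	l->last_rptr = rptr;
	l->last_activity = time(NULL);
}

/* Returns true when the GPU made no progress for longer than timeout seconds. */
static bool test_lockup(struct toy_lockup *l, unsigned rptr, double timeout)
{
	if (rptr != l->last_rptr) {     /* progress: remember it and move on */
		lockup_update(l, rptr);
		return false;
	}
	return difftime(time(NULL), l->last_activity) > timeout;
}

int main(void)
{
	struct toy_lockup l;

	lockup_update(&l, 0);
	printf("lockup: %d\n", test_lockup(&l, 0, 10.0)); /* same rptr, <10s: 0 */
	printf("lockup: %d\n", test_lockup(&l, 4, 10.0)); /* progress: 0 */
	return 0;
}
```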
radeon_fence.c
     52: * are no longer in use by the associated ring on the GPU and
     63: * @ring: ring index the fence is associated with
     67: static void radeon_fence_write(struct radeon_device *rdev, u32 seq, int ring)
     69: struct radeon_fence_driver *drv = &rdev->fence_drv[ring];
     83: * @ring: ring index the fence is associated with
     88: static u32 radeon_fence_read(struct radeon_device *rdev, int ring)
     90: struct radeon_fence_driver *drv = &rdev->fence_drv[ring];
    113: radeon_fence_schedule_check(struct radeon_device *rdev, int ring)
    136: radeon_fence_emit(struct radeon_device *rdev, struct radeon_fence **fence, int ring)
    197: radeon_fence_activity(struct radeon_device *rdev, int ring)
    271: int ring;  (in radeon_fence_check_lockup)
    320: radeon_fence_process(struct radeon_device *rdev, int ring)
    341: radeon_fence_seq_signaled(struct radeon_device *rdev, u64 seq, unsigned ring)
    358: unsigned ring = fence->ring;  (in radeon_fence_is_signaled)
    634: radeon_fence_wait_next(struct radeon_device *rdev, int ring)
    661: radeon_fence_wait_empty(struct radeon_device *rdev, int ring)
    722: radeon_fence_count_emitted(struct radeon_device *rdev, int ring)
    816: radeon_fence_driver_start_ring(struct radeon_device *rdev, int ring)
    866: radeon_fence_driver_init_ring(struct radeon_device *rdev, int ring)
    895: int ring;  (in radeon_fence_driver_init)
    915: int ring, r;  (in radeon_fence_driver_fini)
    943: radeon_fence_driver_force_completion(struct radeon_device *rdev, int ring)
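Note that the radeon API addresses rings by integer index (`int ring` into `rdev->fence_drv[ring]`) where amdgpu passes ring pointers. The arithmetic behind radeon_fence_count_emitted() is just the gap between the last emitted and the last signaled sequence number; a toy sketch with illustrative field names:

```c
#include <stdint.h>
#include <stdio.h>

struct toy_fence_ring {
	uint64_t sync_seq;      /* last seqno handed out by emit */
	uint64_t signaled_seq;  /* last seqno the GPU completed */
};

/* Number of fences still outstanding on this ring. */
static uint64_t fence_count_emitted(const struct toy_fence_ring *f)
{
	return f->sync_seq - f->signaled_seq;
}

int main(void)
{
	struct toy_fence_ring f = { .sync_seq = 107, .signaled_seq = 103 };

	printf("outstanding: %llu\n",
	    (unsigned long long)fence_count_emitted(&f));   /* 4 */
	return 0;
}
```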
evergreen_dma.c
     43: struct radeon_ring *ring = &rdev->ring[fence->ring];  (in evergreen_dma_fence_ring_emit)
     44: u64 addr = rdev->fence_drv[fence->ring].gpu_addr;
     46: radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_FENCE, 0, 0));
     47: radeon_ring_write(ring, addr & 0xfffffffc);
     48: radeon_ring_write(ring, (upper_32_bits(addr) & 0xff));
     49: radeon_ring_write(ring, fence->seq);
     51: radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_TRAP, 0, 0));
     53: radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_SRBM_WRITE, 0, 0));
     54: radeon_ring_write(ring, (0xf << 16) | (HDP_MEM_COHERENCY_FLUSH_CNTL >> 2));
     55: radeon_ring_write(ring, 1);
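The fence packet splits the 64-bit fence address across two dwords, masking the low word to dword alignment and keeping only 8 high bits, which amounts to a 40-bit address space on this engine. A small sketch of that split; the address value is hypothetical:

```c
#include <stdint.h>
#include <stdio.h>

int main(void)
{
	uint64_t addr = 0x0000008012345678ULL;  /* hypothetical fence GPU address */

	uint32_t lo = (uint32_t)(addr & 0xfffffffc);    /* dword-aligned low word */
	uint32_t hi = (uint32_t)(addr >> 32) & 0xff;    /* only 8 high bits fit */

	printf("dword0=0x%08x dword1=0x%02x\n", lo, hi);
	return 0;
}
```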
r600_dma.c
     51: struct radeon_ring *ring)  (in r600_dma_get_rptr)
     56: rptr = rdev->wb.wb[ring->rptr_offs/4];
     72: struct radeon_ring *ring)  (in r600_dma_get_wptr)
     86: struct radeon_ring *ring)  (in r600_dma_set_wptr)
     88: WREG32(DMA_RB_WPTR, (ring->wptr << 2) & 0x3fffc);
    108: rdev->ring[R600_RING_TYPE_DMA_INDEX].ready = false;  (in r600_dma_stop)
    121: struct radeon_ring *ring = &rdev->ring[R600_RING_TYPE_DMA_INDEX];  (in r600_dma_resume)
    130: rb_bufsz = order_base_2(ring->ring_size / 4);
    150: WREG32(DMA_RB_BASE, ring->gpu_addr >> 8);
    166: ring->wptr = 0;
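Ring-control registers usually take a size exponent rather than a byte count, which is what `order_base_2(ring->ring_size / 4)` computes at line 130. A sketch with a local reimplementation; for the power-of-two sizes used here it matches the kernel helper, which rounds up:

```c
#include <stdio.h>

/* log2 rounded up: 1 << order_base_2(n) >= n. */
static unsigned order_base_2(unsigned n)
{
	unsigned order = 0;

	while ((1u << order) < n)
		order++;
	return order;
}

int main(void)
{
	unsigned ring_size = 64 * 1024;                    /* bytes */
	unsigned rb_bufsz = order_base_2(ring_size / 4);   /* size in dwords */

	printf("rb_bufsz field = %u\n", rb_bufsz);  /* 14, i.e. 16384 dwords */
	return 0;
}
```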
uvd_v1_0.c
     40: struct radeon_ring *ring)  (in uvd_v1_0_get_rptr)
     54: struct radeon_ring *ring)  (in uvd_v1_0_get_wptr)
     68: struct radeon_ring *ring)  (in uvd_v1_0_set_wptr)
     70: WREG32(UVD_RBC_RB_WPTR, ring->wptr);
     84: struct radeon_ring *ring = &rdev->ring[fence->ring];  (in uvd_v1_0_fence_emit)
     85: uint64_t addr = rdev->fence_drv[fence->ring].gpu_addr;
     87: radeon_ring_write(ring, PACKET0(UVD_GPCOM_VCPU_DATA0, 0));
     88: radeon_ring_write(ring, addr & 0xffffffff);
     89: radeon_ring_write(ring, PACKET0(UVD_GPCOM_VCPU_DATA1, 0));
     90: radeon_ring_write(ring, fence->seq);
cik_sdma.c
     63: struct radeon_ring *ring)  (in cik_sdma_get_rptr)
     68: rptr = rdev->wb.wb[ring->rptr_offs/4];
     70: if (ring->idx == R600_RING_TYPE_DMA_INDEX)
     90: struct radeon_ring *ring)  (in cik_sdma_get_wptr)
     94: if (ring->idx == R600_RING_TYPE_DMA_INDEX)
    111: struct radeon_ring *ring)  (in cik_sdma_set_wptr)
    115: if (ring->idx == R600_RING_TYPE_DMA_INDEX)
    120: WREG32(reg, (ring->wptr << 2) & 0x3fffc);
    135: struct radeon_ring *ring = &rdev->ring[ib->ring];  (in cik_sdma_ring_ib_execute)
    136: u32 extra_bits = (ib->vm ? ib->vm->ids[ib->ring].id : 0) & 0xf;
ni_dma.c
     53: struct radeon_ring *ring)  (in cayman_dma_get_rptr)
     58: rptr = rdev->wb.wb[ring->rptr_offs/4];
     60: if (ring->idx == R600_RING_TYPE_DMA_INDEX)
     80: struct radeon_ring *ring)  (in cayman_dma_get_wptr)
     84: if (ring->idx == R600_RING_TYPE_DMA_INDEX)
    101: struct radeon_ring *ring)  (in cayman_dma_set_wptr)
    105: if (ring->idx == R600_RING_TYPE_DMA_INDEX)
    110: WREG32(reg, (ring->wptr << 2) & 0x3fffc);
    124: struct radeon_ring *ring = &rdev->ring[ib->ring];  (in cayman_dma_ring_ib_execute)
    125: unsigned vm_id = ib->vm ? ib->vm->ids[ib->ring].id : 0;
radeon_trace.h
     34: __field(u32, ring)
     40: __entry->ring = p->ring;
     43: p->rdev, p->ring);
     46: __entry->ring, __entry->dw,
     51: TP_PROTO(unsigned vmid, int ring),
     52: TP_ARGS(vmid, ring),
     55: __field(u32, ring)
     60: __entry->ring = ring;
     62: TP_printk("vmid=%u, ring=%u", __entry->vmid, __entry->ring)
    108: TP_PROTO(uint64_t pd_addr, unsigned ring, unsigned id),
/openbsd-src/sys/dev/pci/drm/i915/gt/
intel_ring.c
     19: unsigned int intel_ring_update_space(struct intel_ring *ring)
     23: space = __intel_ring_space(ring->head, ring->emit, ring->size);
     25: ring->space = space;
     29: void __intel_ring_pin(struct intel_ring *ring)
     31: GEM_BUG_ON(!atomic_read(&ring->pin_count));
     32: atomic_inc(&ring->pin_count);
     35: int intel_ring_pin(struct intel_ring *ring, struct i915_gem_ww_ctx *ww)
     37: struct i915_vma *vma = ring->vma;
     42: if (atomic_fetch_inc(&ring->pin_count))
     73: intel_ring_reset(ring, ring->emit);
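intel_ring_update_space() derives free space from `head` (the hardware read position), `emit` (the next software write position), and the power-of-two ring size; i915 deliberately keeps a gap so the two positions never meet. A sketch of that computation with an assumed 64-byte gap standing in for the driver's cacheline constant:

```c
#include <stdio.h>

#define GAP 64          /* assumed safety gap between tail and head */

/* Free bytes between tail (next write) and head (hardware read) on a
 * power-of-two ring, minus the safety gap; unsigned wraparound makes the
 * subtraction correct when tail is ahead of head. */
static unsigned ring_space(unsigned head, unsigned tail, unsigned size)
{
	return (head - tail - GAP) & (size - 1);
}

int main(void)
{
	printf("%u\n", ring_space(0, 0, 4096));     /* empty ring: 4032 usable */
	printf("%u\n", ring_space(512, 256, 4096)); /* 192 */
	return 0;
}
```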
intel_ring.h
     21: unsigned int intel_ring_update_space(struct intel_ring *ring);
     23: void __intel_ring_pin(struct intel_ring *ring);
     24: int intel_ring_pin(struct intel_ring *ring, struct i915_gem_ww_ctx *ww);
     25: void intel_ring_unpin(struct intel_ring *ring);
     26: void intel_ring_reset(struct intel_ring *ring, u32 tail);
     30: static inline struct intel_ring *intel_ring_get(struct intel_ring *ring)
     32: kref_get(&ring->ref);
     33: return ring;
     36: static inline void intel_ring_put(struct intel_ring *ring)
     38: kref_put(&ring->ref, intel_ring_free);
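intel_ring_get()/intel_ring_put() are thin kref wrappers: get bumps the reference count and returns the ring so calls can be chained, put drops it and frees on the last reference. A toy single-threaded equivalent; the real kref is atomic and invokes intel_ring_free as its release hook:

```c
#include <stdio.h>
#include <stdlib.h>

struct toy_ring {
	unsigned refcount;
};

static struct toy_ring *ring_get(struct toy_ring *ring)
{
	ring->refcount++;       /* kref_get(&ring->ref) in the driver */
	return ring;            /* returned so callers can chain it */
}

static void ring_put(struct toy_ring *ring)
{
	if (--ring->refcount == 0) {    /* kref_put() calls the release hook */
		printf("freeing ring\n");
		free(ring);
	}
}

int main(void)
{
	struct toy_ring *r = malloc(sizeof(*r));

	r->refcount = 1;        /* creator holds the first reference */
	ring_get(r);            /* a second user takes a reference */
	ring_put(r);
	ring_put(r);            /* last put frees the ring */
	return 0;
}
```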