Lines matching refs:seq in drivers/gpu/drm/radeon/radeon_fence.c (cross-reference listing: each entry gives the source line number, the matching line, and the enclosing function; "argument" and "local" note how seq is bound at that site)
75 static void radeon_fence_write(struct radeon_device *rdev, u32 seq, int ring) in radeon_fence_write() argument
80 *drv->cpu_addr = cpu_to_le32(seq); in radeon_fence_write()
83 WREG32(drv->scratch_reg, seq); in radeon_fence_write()
99 u32 seq = 0; in radeon_fence_read() local
103 seq = le32_to_cpu(*drv->cpu_addr); in radeon_fence_read()
105 seq = lower_32_bits(atomic64_read(&drv->last_seq)); in radeon_fence_read()
108 seq = RREG32(drv->scratch_reg); in radeon_fence_read()
110 return seq; in radeon_fence_read()
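
Read together, the hits in radeon_fence_write() and radeon_fence_read() show the two channels the driver uses to publish the hardware's last-retired sequence number: a GPU-writeback word in memory (stored little-endian, hence the cpu_to_le32()/le32_to_cpu() conversions) or a scratch register accessed with WREG32()/RREG32(), with the cached last_seq as a fallback while writeback is still coming up. A minimal host-side sketch of the read path, using a hypothetical struct fence_drv_model in place of struct radeon_fence_driver:

    #include <stdint.h>
    #include <stdbool.h>

    /* Hypothetical stand-in for struct radeon_fence_driver; field
     * names are illustrative, not the kernel's. */
    struct fence_drv_model {
        bool      wb_enabled;   /* GPU writeback ready (rdev->wb.enabled)  */
        uint32_t *cpu_addr;     /* CPU view of the writeback word          */
        uint32_t  scratch_val;  /* what RREG32(scratch_reg) would return   */
        uint64_t  last_seq;     /* newest sequence the driver has seen     */
    };

    /* Model of radeon_fence_read(): prefer the writeback word, fall
     * back to last_seq while writeback is still being set up, else
     * read the scratch register. The kernel additionally byte-swaps
     * the writeback word with le32_to_cpu(); this host-only model
     * skips that. */
    static uint32_t fence_read_model(const struct fence_drv_model *drv)
    {
        if (drv->wb_enabled) {
            if (drv->cpu_addr)
                return *drv->cpu_addr;
            return (uint32_t)drv->last_seq;  /* lower_32_bits(last_seq) */
        }
        return drv->scratch_val;             /* RREG32(drv->scratch_reg) */
    }
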
146 u64 seq; in radeon_fence_emit() local
154 (*fence)->seq = seq = ++rdev->fence_drv[ring].sync_seq[ring]; in radeon_fence_emit()
160 seq); in radeon_fence_emit()
162 trace_radeon_fence_emit(rdev->ddev, ring, (*fence)->seq); in radeon_fence_emit()
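
The hit at line 154 is where sequence numbers are born: radeon_fence_emit() pre-increments the per-ring 64-bit counter sync_seq[ring], so valid fences are numbered 1, 2, 3, ... and a stored sequence of 0 can safely mean "no fence" in the wait paths further down. A toy allocator showing that invariant (NUM_RINGS is an illustrative stand-in for RADEON_NUM_RINGS):

    #include <stdint.h>

    #define NUM_RINGS 8  /* stands in for RADEON_NUM_RINGS */

    /* Zero-initialized: no fence has been emitted on any ring yet. */
    static uint64_t sync_seq[NUM_RINGS];

    /* Model of the allocation in radeon_fence_emit(): pre-increment
     * means the first fence on a ring gets sequence 1, never 0. */
    static uint64_t fence_emit_seq(unsigned ring)
    {
        return ++sync_seq[ring];
    }
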
176 u64 seq; in radeon_fence_check_signaled() local
184 seq = atomic64_read(&fence->rdev->fence_drv[fence->ring].last_seq); in radeon_fence_check_signaled()
185 if (seq >= fence->seq) { in radeon_fence_check_signaled()
225 uint64_t seq, last_seq, last_emitted; in radeon_fence_activity() local
255 seq = radeon_fence_read(rdev, ring); in radeon_fence_activity()
256 seq |= last_seq & 0xffffffff00000000LL; in radeon_fence_activity()
257 if (seq < last_seq) { in radeon_fence_activity()
258 seq &= 0xffffffff; in radeon_fence_activity()
259 seq |= last_emitted & 0xffffffff00000000LL; in radeon_fence_activity()
262 if (seq <= last_seq || seq > last_emitted) { in radeon_fence_activity()
270 last_seq = seq; in radeon_fence_activity()
279 } while (atomic64_xchg(&rdev->fence_drv[ring].last_seq, seq) > seq); in radeon_fence_activity()
281 if (seq < last_emitted) in radeon_fence_activity()
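
The cluster at lines 255-279 is the core trick of the file: the hardware only writes back 32 bits, and radeon_fence_activity() widens that into the driver's 64-bit sequence space. The upper word is borrowed from last_seq; if the result moves backwards, the 32-bit counter must have wrapped, so the upper word of last_emitted is used instead, and anything still outside (last_seq, last_emitted] is discarded. The atomic64_xchg() loop at line 279 then publishes the result, retrying only while a concurrent caller has stored a larger value. A self-contained sketch of the widening step (fence_extend_seq() is a made-up name):

    #include <stdint.h>
    #include <stdbool.h>

    /* Model of the 32->64 bit widening in radeon_fence_activity().
     * hw_seq is the 32-bit value read back from the hardware; last_seq
     * and last_emitted are the driver's 64-bit bookkeeping. Returns
     * true and stores the widened value only when it shows real
     * progress. */
    static bool fence_extend_seq(uint32_t hw_seq, uint64_t last_seq,
                                 uint64_t last_emitted, uint64_t *out)
    {
        uint64_t seq = (uint64_t)hw_seq | (last_seq & 0xffffffff00000000ULL);

        if (seq < last_seq) {
            /* The 32-bit counter wrapped between two reads: borrow
             * the upper word from the newest emitted sequence. */
            seq = (uint64_t)hw_seq | (last_emitted & 0xffffffff00000000ULL);
        }
        if (seq <= last_seq || seq > last_emitted)
            return false;  /* no new activity, or a glitched read */

        *out = seq;
        return true;
    }

This scheme assumes activity is sampled often enough that the hardware counter cannot wrap more than once between two reads.
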
381 u64 seq, unsigned ring) in radeon_fence_seq_signaled() argument
384 if (atomic64_read(&rdev->fence_drv[ring].last_seq) >= seq) { in radeon_fence_seq_signaled()
389 if (atomic64_read(&rdev->fence_drv[ring].last_seq) >= seq) { in radeon_fence_seq_signaled()
400 u64 seq = fence->seq; in radeon_fence_is_signaled() local
404 if (atomic64_read(&rdev->fence_drv[ring].last_seq) >= seq) { in radeon_fence_is_signaled()
412 if (atomic64_read(&rdev->fence_drv[ring].last_seq) >= seq) { in radeon_fence_is_signaled()
434 if (atomic64_read(&rdev->fence_drv[fence->ring].last_seq) >= fence->seq) in radeon_fence_enable_signaling()
444 if (atomic64_read(&rdev->fence_drv[fence->ring].last_seq) >= fence->seq) { in radeon_fence_enable_signaling()
479 if (radeon_fence_seq_signaled(fence->rdev, fence->seq, fence->ring)) { in radeon_fence_signaled()
503 static bool radeon_fence_any_seq_signaled(struct radeon_device *rdev, u64 *seq) in radeon_fence_any_seq_signaled() argument
510 if (seq[i] && radeon_fence_seq_signaled(rdev, seq[i], i)) in radeon_fence_any_seq_signaled()
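
Every *_signaled() variant between lines 381 and 479 boils down to one comparison: a fence is done once the ring's last_seq has caught up to its sequence number. radeon_fence_any_seq_signaled() lifts that test over an array indexed by ring, where an entry of 0 means "nothing to wait for on this ring" (safe because emit never hands out 0). A compact model of the pair, reusing the illustrative NUM_RINGS:

    #include <stdint.h>
    #include <stdbool.h>

    #define NUM_RINGS 8  /* stands in for RADEON_NUM_RINGS */

    static uint64_t last_seq[NUM_RINGS]; /* models fence_drv[ring].last_seq */

    /* Model of radeon_fence_seq_signaled(): signaled once the ring's
     * last_seq has reached the fence's sequence number. */
    static bool seq_signaled(uint64_t seq, unsigned ring)
    {
        return last_seq[ring] >= seq;
    }

    /* Model of radeon_fence_any_seq_signaled(): seq[i] == 0 means
     * "no fence to wait for on ring i" and is skipped. */
    static bool any_seq_signaled(const uint64_t *seq)
    {
        for (unsigned i = 0; i < NUM_RINGS; i++)
            if (seq[i] && seq_signaled(seq[i], i))
                return true;
        return false;
    }
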
604 uint64_t seq[RADEON_NUM_RINGS] = {}; in radeon_fence_wait_timeout() local
617 seq[fence->ring] = fence->seq; in radeon_fence_wait_timeout()
618 r = radeon_fence_wait_seq_timeout(fence->rdev, seq, intr, timeout); in radeon_fence_wait_timeout()
667 uint64_t seq[RADEON_NUM_RINGS]; in radeon_fence_wait_any() local
672 seq[i] = 0; in radeon_fence_wait_any()
678 seq[i] = fences[i]->seq; in radeon_fence_wait_any()
686 r = radeon_fence_wait_seq_timeout(rdev, seq, intr, MAX_SCHEDULE_TIMEOUT); in radeon_fence_wait_any()
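
Both public wait entry points reduce to the same primitive by filling a seq[RADEON_NUM_RINGS] array: radeon_fence_wait_timeout() sets exactly one slot, while radeon_fence_wait_any() sets one slot per supplied fence so whichever ring catches up first satisfies the wait. A sketch of the array assembly in the wait-any case, with a hypothetical minimal struct fence_model in place of struct radeon_fence:

    #include <stdint.h>

    #define NUM_RINGS 8  /* stands in for RADEON_NUM_RINGS */

    /* Hypothetical minimal fence, standing in for struct radeon_fence. */
    struct fence_model {
        uint64_t seq;
        unsigned ring;
    };

    /* Model of the setup loop in radeon_fence_wait_any(): one target
     * sequence per ring, left at 0 (i.e. "don't wait") for rings with
     * no fence supplied. */
    static void build_wait_any_seq(struct fence_model *const fences[NUM_RINGS],
                                   uint64_t seq[NUM_RINGS])
    {
        for (unsigned i = 0; i < NUM_RINGS; i++) {
            seq[i] = 0;
            if (fences[i])
                seq[i] = fences[i]->seq;
        }
    }
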
705 uint64_t seq[RADEON_NUM_RINGS] = {}; in radeon_fence_wait_next() local
708 seq[ring] = atomic64_read(&rdev->fence_drv[ring].last_seq) + 1ULL; in radeon_fence_wait_next()
709 if (seq[ring] >= rdev->fence_drv[ring].sync_seq[ring]) { in radeon_fence_wait_next()
714 r = radeon_fence_wait_seq_timeout(rdev, seq, false, MAX_SCHEDULE_TIMEOUT); in radeon_fence_wait_next()
732 uint64_t seq[RADEON_NUM_RINGS] = {}; in radeon_fence_wait_empty() local
735 seq[ring] = rdev->fence_drv[ring].sync_seq[ring]; in radeon_fence_wait_empty()
736 if (!seq[ring]) in radeon_fence_wait_empty()
739 r = radeon_fence_wait_seq_timeout(rdev, seq, false, MAX_SCHEDULE_TIMEOUT); in radeon_fence_wait_empty()
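
The last two waiters are just particular choices of that seq[] array: radeon_fence_wait_next() targets last_seq + 1 and declines (per the kernel's ">=" check at line 709) when that target is at or past the newest emitted sequence, while radeon_fence_wait_empty() targets sync_seq itself, i.e. everything emitted so far, skipping rings that never emitted a fence. Modeled on the same toy state (names with the _m suffix are illustrative):

    #include <stdint.h>
    #include <stdbool.h>

    #define NUM_RINGS 8  /* stands in for RADEON_NUM_RINGS */

    static uint64_t last_seq_m[NUM_RINGS]; /* models fence_drv[ring].last_seq       */
    static uint64_t sync_seq_m[NUM_RINGS]; /* models fence_drv[ring].sync_seq[ring] */

    /* Model of radeon_fence_wait_next()'s target: the fence right
     * after the newest one seen; mirrors the kernel's check, which
     * declines when the target reaches the newest emitted sequence. */
    static bool wait_next_target(unsigned ring, uint64_t *target)
    {
        *target = last_seq_m[ring] + 1;
        return *target < sync_seq_m[ring];
    }

    /* Model of radeon_fence_wait_empty()'s target: everything emitted
     * so far; a ring that never emitted (sync_seq == 0) is skipped. */
    static bool wait_empty_target(unsigned ring, uint64_t *target)
    {
        *target = sync_seq_m[ring];
        return *target != 0;
    }
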
833 if (fence->seq <= fdrv->sync_seq[fence->ring]) { in radeon_fence_need_sync()
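
Finally, the hit in radeon_fence_need_sync() uses the cross-ring half of the sync_seq table: fence_drv[dst].sync_seq[src] records the newest fence from ring src that ring dst has already synchronized against, so a fence at or below that mark needs no fresh semaphore wait. A sketch of the test with that table modeled directly (names are illustrative):

    #include <stdint.h>
    #include <stdbool.h>

    #define NUM_RINGS 8  /* stands in for RADEON_NUM_RINGS */

    /* Models fence_drv[dst].sync_seq[src]: newest fence from ring src
     * that ring dst has already synchronized to. */
    static uint64_t sync_seq_tbl[NUM_RINGS][NUM_RINGS];

    /* Model of radeon_fence_need_sync(): no new sync is needed for a
     * fence on the destination ring itself, or for one the destination
     * has already waited past. */
    static bool need_sync(uint64_t fence_seq, unsigned src, unsigned dst)
    {
        if (src == dst)
            return false;
        return fence_seq > sync_seq_tbl[dst][src];
    }
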