xref: /dflybsd-src/sys/dev/drm/amd/amdgpu/vcn_v1_0.c (revision 789731325bde747251c28a37e0a00ed4efb88c46)
1b843c749SSergey Zigachev /*
2b843c749SSergey Zigachev  * Copyright 2016 Advanced Micro Devices, Inc.
3b843c749SSergey Zigachev  *
4b843c749SSergey Zigachev  * Permission is hereby granted, free of charge, to any person obtaining a
5b843c749SSergey Zigachev  * copy of this software and associated documentation files (the "Software"),
6b843c749SSergey Zigachev  * to deal in the Software without restriction, including without limitation
7b843c749SSergey Zigachev  * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8b843c749SSergey Zigachev  * and/or sell copies of the Software, and to permit persons to whom the
9b843c749SSergey Zigachev  * Software is furnished to do so, subject to the following conditions:
10b843c749SSergey Zigachev  *
11b843c749SSergey Zigachev  * The above copyright notice and this permission notice shall be included in
12b843c749SSergey Zigachev  * all copies or substantial portions of the Software.
13b843c749SSergey Zigachev  *
14b843c749SSergey Zigachev  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15b843c749SSergey Zigachev  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16b843c749SSergey Zigachev  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
17b843c749SSergey Zigachev  * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
18b843c749SSergey Zigachev  * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19b843c749SSergey Zigachev  * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20b843c749SSergey Zigachev  * OTHER DEALINGS IN THE SOFTWARE.
21b843c749SSergey Zigachev  *
22b843c749SSergey Zigachev  */
23b843c749SSergey Zigachev 
24b843c749SSergey Zigachev #include <linux/firmware.h>
25b843c749SSergey Zigachev #include <drm/drmP.h>
26b843c749SSergey Zigachev #include "amdgpu.h"
27b843c749SSergey Zigachev #include "amdgpu_vcn.h"
28b843c749SSergey Zigachev #include "soc15.h"
29b843c749SSergey Zigachev #include "soc15d.h"
30b843c749SSergey Zigachev #include "soc15_common.h"
31b843c749SSergey Zigachev 
32b843c749SSergey Zigachev #include "vcn/vcn_1_0_offset.h"
33b843c749SSergey Zigachev #include "vcn/vcn_1_0_sh_mask.h"
34b843c749SSergey Zigachev #include "hdp/hdp_4_0_offset.h"
35b843c749SSergey Zigachev #include "mmhub/mmhub_9_1_offset.h"
36b843c749SSergey Zigachev #include "mmhub/mmhub_9_1_sh_mask.h"
37b843c749SSergey Zigachev 
38b843c749SSergey Zigachev #include "ivsrcid/vcn/irqsrcs_vcn_1_0.h"
39b843c749SSergey Zigachev 
40b843c749SSergey Zigachev static int vcn_v1_0_stop(struct amdgpu_device *adev);
41b843c749SSergey Zigachev static void vcn_v1_0_set_dec_ring_funcs(struct amdgpu_device *adev);
42b843c749SSergey Zigachev static void vcn_v1_0_set_enc_ring_funcs(struct amdgpu_device *adev);
43b843c749SSergey Zigachev static void vcn_v1_0_set_jpeg_ring_funcs(struct amdgpu_device *adev);
44b843c749SSergey Zigachev static void vcn_v1_0_set_irq_funcs(struct amdgpu_device *adev);
45b843c749SSergey Zigachev static void vcn_v1_0_jpeg_ring_set_patch_ring(struct amdgpu_ring *ring, uint32_t ptr);
46b843c749SSergey Zigachev static int vcn_v1_0_set_powergating_state(void *handle, enum amd_powergating_state state);
47b843c749SSergey Zigachev 
48b843c749SSergey Zigachev /**
49b843c749SSergey Zigachev  * vcn_v1_0_early_init - set function pointers
50b843c749SSergey Zigachev  *
51b843c749SSergey Zigachev  * @handle: amdgpu_device pointer
52b843c749SSergey Zigachev  *
53b843c749SSergey Zigachev  * Set ring and irq function pointers
54b843c749SSergey Zigachev  */
vcn_v1_0_early_init(void * handle)55b843c749SSergey Zigachev static int vcn_v1_0_early_init(void *handle)
56b843c749SSergey Zigachev {
57b843c749SSergey Zigachev 	struct amdgpu_device *adev = (struct amdgpu_device *)handle;
58b843c749SSergey Zigachev 
59b843c749SSergey Zigachev 	adev->vcn.num_enc_rings = 2;
60b843c749SSergey Zigachev 
61b843c749SSergey Zigachev 	vcn_v1_0_set_dec_ring_funcs(adev);
62b843c749SSergey Zigachev 	vcn_v1_0_set_enc_ring_funcs(adev);
63b843c749SSergey Zigachev 	vcn_v1_0_set_jpeg_ring_funcs(adev);
64b843c749SSergey Zigachev 	vcn_v1_0_set_irq_funcs(adev);
65b843c749SSergey Zigachev 
66b843c749SSergey Zigachev 	return 0;
67b843c749SSergey Zigachev }
68b843c749SSergey Zigachev 
/**
 * vcn_v1_0_sw_init - sw init for VCN block
 *
 * @handle: amdgpu_device pointer
 *
 * Register interrupt sources, load firmware, and initialize the decode,
 * encode, and JPEG rings. Returns 0 on success or a negative error code
 * from the first failing step (no partial-teardown is done here; sw_fini
 * handles cleanup).
 */
static int vcn_v1_0_sw_init(void *handle)
{
	struct amdgpu_ring *ring;
	int i, r;
	struct amdgpu_device *adev = (struct amdgpu_device *)handle;

	/* VCN DEC TRAP */
	r = amdgpu_irq_add_id(adev, SOC15_IH_CLIENTID_VCN, VCN_1_0__SRCID__UVD_SYSTEM_MESSAGE_INTERRUPT, &adev->vcn.irq);
	if (r)
		return r;

	/* VCN ENC TRAP: one consecutive source id per encode ring. */
	for (i = 0; i < adev->vcn.num_enc_rings; ++i) {
		r = amdgpu_irq_add_id(adev, SOC15_IH_CLIENTID_VCN, i + VCN_1_0__SRCID__UVD_ENC_GENERAL_PURPOSE,
					&adev->vcn.irq);
		if (r)
			return r;
	}

	/* VCN JPEG TRAP: 126 is presumably the JPEG decode source id
	 * (VCN_1_0__SRCID__JPEG_DECODE in later headers) — TODO confirm
	 * against irqsrcs_vcn_1_0.h and replace the magic number. */
	r = amdgpu_irq_add_id(adev, SOC15_IH_CLIENTID_VCN, 126, &adev->vcn.irq);
	if (r)
		return r;

	r = amdgpu_vcn_sw_init(adev);
	if (r)
		return r;

	/* With PSP front-door loading, account the VCN ucode in the
	 * firmware list so PSP places it in the TMR. */
	if (adev->firmware.load_type == AMDGPU_FW_LOAD_PSP) {
		const struct common_firmware_header *hdr;
		hdr = (const struct common_firmware_header *)adev->vcn.fw->data;
		adev->firmware.ucode[AMDGPU_UCODE_ID_VCN].ucode_id = AMDGPU_UCODE_ID_VCN;
		adev->firmware.ucode[AMDGPU_UCODE_ID_VCN].fw = adev->vcn.fw;
		adev->firmware.fw_size +=
			ALIGN(le32_to_cpu(hdr->ucode_size_bytes), PAGE_SIZE);
		DRM_INFO("PSP loading VCN firmware\n");
	}

	r = amdgpu_vcn_resume(adev);
	if (r)
		return r;

	ring = &adev->vcn.ring_dec;
	/* NOTE(review): unbounded ksprintf into ring->name — assumes the
	 * name buffer is large enough for these short constants; confirm
	 * sizeof(ring->name) or switch to ksnprintf. */
	ksprintf(ring->name, "vcn_dec");
	r = amdgpu_ring_init(adev, ring, 512, &adev->vcn.irq, 0);
	if (r)
		return r;

	for (i = 0; i < adev->vcn.num_enc_rings; ++i) {
		ring = &adev->vcn.ring_enc[i];
		ksprintf(ring->name, "vcn_enc%d", i);
		r = amdgpu_ring_init(adev, ring, 512, &adev->vcn.irq, 0);
		if (r)
			return r;
	}

	ring = &adev->vcn.ring_jpeg;
	ksprintf(ring->name, "vcn_jpeg");
	r = amdgpu_ring_init(adev, ring, 512, &adev->vcn.irq, 0);
	if (r)
		return r;

	return r;
}
140b843c749SSergey Zigachev 
/**
 * vcn_v1_0_sw_fini - sw fini for VCN block
 *
 * @handle: amdgpu_device pointer
 *
 * Suspend the VCN block, then release its software state. Returns 0 on
 * success or the first error encountered.
 */
static int vcn_v1_0_sw_fini(void *handle)
{
	struct amdgpu_device *adev = handle;
	int ret;

	ret = amdgpu_vcn_suspend(adev);
	if (ret != 0)
		return ret;

	return amdgpu_vcn_sw_fini(adev);
}
161b843c749SSergey Zigachev 
162b843c749SSergey Zigachev /**
163b843c749SSergey Zigachev  * vcn_v1_0_hw_init - start and test VCN block
164b843c749SSergey Zigachev  *
165b843c749SSergey Zigachev  * @handle: amdgpu_device pointer
166b843c749SSergey Zigachev  *
167b843c749SSergey Zigachev  * Initialize the hardware, boot up the VCPU and do some testing
168b843c749SSergey Zigachev  */
vcn_v1_0_hw_init(void * handle)169b843c749SSergey Zigachev static int vcn_v1_0_hw_init(void *handle)
170b843c749SSergey Zigachev {
171b843c749SSergey Zigachev 	struct amdgpu_device *adev = (struct amdgpu_device *)handle;
172b843c749SSergey Zigachev 	struct amdgpu_ring *ring = &adev->vcn.ring_dec;
173b843c749SSergey Zigachev 	int i, r;
174b843c749SSergey Zigachev 
175b843c749SSergey Zigachev 	ring->ready = true;
176b843c749SSergey Zigachev 	r = amdgpu_ring_test_ring(ring);
177b843c749SSergey Zigachev 	if (r) {
178b843c749SSergey Zigachev 		ring->ready = false;
179b843c749SSergey Zigachev 		goto done;
180b843c749SSergey Zigachev 	}
181b843c749SSergey Zigachev 
182b843c749SSergey Zigachev 	for (i = 0; i < adev->vcn.num_enc_rings; ++i) {
183b843c749SSergey Zigachev 		ring = &adev->vcn.ring_enc[i];
184b843c749SSergey Zigachev 		ring->ready = true;
185b843c749SSergey Zigachev 		r = amdgpu_ring_test_ring(ring);
186b843c749SSergey Zigachev 		if (r) {
187b843c749SSergey Zigachev 			ring->ready = false;
188b843c749SSergey Zigachev 			goto done;
189b843c749SSergey Zigachev 		}
190b843c749SSergey Zigachev 	}
191b843c749SSergey Zigachev 
192b843c749SSergey Zigachev 	ring = &adev->vcn.ring_jpeg;
193b843c749SSergey Zigachev 	ring->ready = true;
194b843c749SSergey Zigachev 	r = amdgpu_ring_test_ring(ring);
195b843c749SSergey Zigachev 	if (r) {
196b843c749SSergey Zigachev 		ring->ready = false;
197b843c749SSergey Zigachev 		goto done;
198b843c749SSergey Zigachev 	}
199b843c749SSergey Zigachev 
200b843c749SSergey Zigachev done:
201b843c749SSergey Zigachev 	if (!r)
202b843c749SSergey Zigachev 		DRM_INFO("VCN decode and encode initialized successfully.\n");
203b843c749SSergey Zigachev 
204b843c749SSergey Zigachev 	return r;
205b843c749SSergey Zigachev }
206b843c749SSergey Zigachev 
207b843c749SSergey Zigachev /**
208b843c749SSergey Zigachev  * vcn_v1_0_hw_fini - stop the hardware block
209b843c749SSergey Zigachev  *
210b843c749SSergey Zigachev  * @handle: amdgpu_device pointer
211b843c749SSergey Zigachev  *
212b843c749SSergey Zigachev  * Stop the VCN block, mark ring as not ready any more
213b843c749SSergey Zigachev  */
vcn_v1_0_hw_fini(void * handle)214b843c749SSergey Zigachev static int vcn_v1_0_hw_fini(void *handle)
215b843c749SSergey Zigachev {
216b843c749SSergey Zigachev 	struct amdgpu_device *adev = (struct amdgpu_device *)handle;
217b843c749SSergey Zigachev 	struct amdgpu_ring *ring = &adev->vcn.ring_dec;
218b843c749SSergey Zigachev 
219b843c749SSergey Zigachev 	if (RREG32_SOC15(VCN, 0, mmUVD_STATUS))
220b843c749SSergey Zigachev 		vcn_v1_0_set_powergating_state(adev, AMD_PG_STATE_GATE);
221b843c749SSergey Zigachev 
222b843c749SSergey Zigachev 	ring->ready = false;
223b843c749SSergey Zigachev 
224b843c749SSergey Zigachev 	return 0;
225b843c749SSergey Zigachev }
226b843c749SSergey Zigachev 
/**
 * vcn_v1_0_suspend - suspend VCN block
 *
 * @handle: amdgpu_device pointer
 *
 * Stop the hardware, then suspend the VCN software state. Returns 0 on
 * success or the first error encountered.
 */
static int vcn_v1_0_suspend(void *handle)
{
	struct amdgpu_device *adev = handle;
	int ret;

	ret = vcn_v1_0_hw_fini(adev);
	if (ret == 0)
		ret = amdgpu_vcn_suspend(adev);

	return ret;
}
247b843c749SSergey Zigachev 
/**
 * vcn_v1_0_resume - resume VCN block
 *
 * @handle: amdgpu_device pointer
 *
 * Restore firmware/software state, then re-run hardware init. Returns 0
 * on success or the first error encountered.
 */
static int vcn_v1_0_resume(void *handle)
{
	struct amdgpu_device *adev = handle;
	int ret;

	ret = amdgpu_vcn_resume(adev);
	if (ret == 0)
		ret = vcn_v1_0_hw_init(adev);

	return ret;
}
268b843c749SSergey Zigachev 
/**
 * vcn_v1_0_mc_resume - memory controller programming
 *
 * @adev: amdgpu_device pointer
 *
 * Let the VCN memory controller know its offsets: program the three VCPU
 * cache windows (firmware image, heap, stack/session area) and the
 * tiling configuration.
 */
static void vcn_v1_0_mc_resume(struct amdgpu_device *adev)
{
	/* Firmware image size, GPU-page aligned; the extra 4 bytes
	 * presumably cover a trailer appended by the loader — TODO
	 * confirm against amdgpu_vcn firmware layout. */
	uint32_t size = AMDGPU_GPU_PAGE_ALIGN(adev->vcn.fw->datasize + 4);
	uint32_t offset;

	if (adev->firmware.load_type == AMDGPU_FW_LOAD_PSP) {
		/* PSP placed the ucode in the TMR; point window 0 there. */
		WREG32_SOC15(UVD, 0, mmUVD_LMI_VCPU_CACHE_64BIT_BAR_LOW,
			     (adev->firmware.ucode[AMDGPU_UCODE_ID_VCN].tmr_mc_addr_lo));
		WREG32_SOC15(UVD, 0, mmUVD_LMI_VCPU_CACHE_64BIT_BAR_HIGH,
			     (adev->firmware.ucode[AMDGPU_UCODE_ID_VCN].tmr_mc_addr_hi));
		WREG32_SOC15(UVD, 0, mmUVD_VCPU_CACHE_OFFSET0, 0);
		/* Heap/stack follow the driver BO from its start. */
		offset = 0;
	} else {
		/* Direct load: window 0 is the driver's VCN BO. */
		WREG32_SOC15(UVD, 0, mmUVD_LMI_VCPU_CACHE_64BIT_BAR_LOW,
			lower_32_bits(adev->vcn.gpu_addr));
		WREG32_SOC15(UVD, 0, mmUVD_LMI_VCPU_CACHE_64BIT_BAR_HIGH,
			upper_32_bits(adev->vcn.gpu_addr));
		/* Heap/stack sit after the firmware image in the BO. */
		offset = size;
		WREG32_SOC15(UVD, 0, mmUVD_VCPU_CACHE_OFFSET0,
			     AMDGPU_UVD_FIRMWARE_OFFSET >> 3);
	}

	WREG32_SOC15(UVD, 0, mmUVD_VCPU_CACHE_SIZE0, size);

	/* Window 1: VCN heap. */
	WREG32_SOC15(UVD, 0, mmUVD_LMI_VCPU_CACHE1_64BIT_BAR_LOW,
		     lower_32_bits(adev->vcn.gpu_addr + offset));
	WREG32_SOC15(UVD, 0, mmUVD_LMI_VCPU_CACHE1_64BIT_BAR_HIGH,
		     upper_32_bits(adev->vcn.gpu_addr + offset));
	WREG32_SOC15(UVD, 0, mmUVD_VCPU_CACHE_OFFSET1, 0);
	WREG32_SOC15(UVD, 0, mmUVD_VCPU_CACHE_SIZE1, AMDGPU_VCN_HEAP_SIZE);

	/* Window 2: stack plus per-session state (40 sessions). */
	WREG32_SOC15(UVD, 0, mmUVD_LMI_VCPU_CACHE2_64BIT_BAR_LOW,
		     lower_32_bits(adev->vcn.gpu_addr + offset + AMDGPU_VCN_HEAP_SIZE));
	WREG32_SOC15(UVD, 0, mmUVD_LMI_VCPU_CACHE2_64BIT_BAR_HIGH,
		     upper_32_bits(adev->vcn.gpu_addr + offset + AMDGPU_VCN_HEAP_SIZE));
	WREG32_SOC15(UVD, 0, mmUVD_VCPU_CACHE_OFFSET2, 0);
	WREG32_SOC15(UVD, 0, mmUVD_VCPU_CACHE_SIZE2,
			AMDGPU_VCN_STACK_SIZE + (AMDGPU_VCN_SESSION_SIZE * 40));

	/* Mirror the GFX tiling config into the UVD decoder blocks. */
	WREG32_SOC15(UVD, 0, mmUVD_UDEC_ADDR_CONFIG,
			adev->gfx.config.gb_addr_config);
	WREG32_SOC15(UVD, 0, mmUVD_UDEC_DB_ADDR_CONFIG,
			adev->gfx.config.gb_addr_config);
	WREG32_SOC15(UVD, 0, mmUVD_UDEC_DBW_ADDR_CONFIG,
			adev->gfx.config.gb_addr_config);
}
322b843c749SSergey Zigachev 
/**
 * vcn_v1_0_disable_clock_gating - disable VCN clock gating
 *
 * @adev: amdgpu_device pointer
 *
 * Disable clock gating for the VCN block (JPEG, UVD, and SUVD
 * sub-blocks) so all clocks run freely; used while the block is active.
 * When MGCG is supported, dynamic clock mode stays selected in the CGC
 * control registers even though the gates themselves are opened.
 */
static void vcn_v1_0_disable_clock_gating(struct amdgpu_device *adev)
{
	uint32_t data;

	/* JPEG disable CGC */
	data = RREG32_SOC15(VCN, 0, mmJPEG_CGC_CTRL);

	if (adev->cg_flags & AMD_CG_SUPPORT_VCN_MGCG)
		data |= 1 << JPEG_CGC_CTRL__DYN_CLOCK_MODE__SHIFT;
	else
		data &= ~JPEG_CGC_CTRL__DYN_CLOCK_MODE_MASK;

	data |= 1 << JPEG_CGC_CTRL__CLK_GATE_DLY_TIMER__SHIFT;
	data |= 4 << JPEG_CGC_CTRL__CLK_OFF_DELAY__SHIFT;
	WREG32_SOC15(VCN, 0, mmJPEG_CGC_CTRL, data);

	/* Open the JPEG clock gates. */
	data = RREG32_SOC15(VCN, 0, mmJPEG_CGC_GATE);
	data &= ~(JPEG_CGC_GATE__JPEG_MASK | JPEG_CGC_GATE__JPEG2_MASK);
	WREG32_SOC15(VCN, 0, mmJPEG_CGC_GATE, data);

	/* UVD disable CGC */
	data = RREG32_SOC15(VCN, 0, mmUVD_CGC_CTRL);
	if (adev->cg_flags & AMD_CG_SUPPORT_VCN_MGCG)
		data |= 1 << UVD_CGC_CTRL__DYN_CLOCK_MODE__SHIFT;
	else
		data &= ~ UVD_CGC_CTRL__DYN_CLOCK_MODE_MASK;

	data |= 1 << UVD_CGC_CTRL__CLK_GATE_DLY_TIMER__SHIFT;
	data |= 4 << UVD_CGC_CTRL__CLK_OFF_DELAY__SHIFT;
	WREG32_SOC15(VCN, 0, mmUVD_CGC_CTRL, data);

	/* Open all UVD clock gates. */
	data = RREG32_SOC15(VCN, 0, mmUVD_CGC_GATE);
	data &= ~(UVD_CGC_GATE__SYS_MASK
		| UVD_CGC_GATE__UDEC_MASK
		| UVD_CGC_GATE__MPEG2_MASK
		| UVD_CGC_GATE__REGS_MASK
		| UVD_CGC_GATE__RBC_MASK
		| UVD_CGC_GATE__LMI_MC_MASK
		| UVD_CGC_GATE__LMI_UMC_MASK
		| UVD_CGC_GATE__IDCT_MASK
		| UVD_CGC_GATE__MPRD_MASK
		| UVD_CGC_GATE__MPC_MASK
		| UVD_CGC_GATE__LBSI_MASK
		| UVD_CGC_GATE__LRBBM_MASK
		| UVD_CGC_GATE__UDEC_RE_MASK
		| UVD_CGC_GATE__UDEC_CM_MASK
		| UVD_CGC_GATE__UDEC_IT_MASK
		| UVD_CGC_GATE__UDEC_DB_MASK
		| UVD_CGC_GATE__UDEC_MP_MASK
		| UVD_CGC_GATE__WCB_MASK
		| UVD_CGC_GATE__VCPU_MASK
		| UVD_CGC_GATE__SCPU_MASK);
	WREG32_SOC15(VCN, 0, mmUVD_CGC_GATE, data);

	/* Clear per-engine gating mode bits. */
	data = RREG32_SOC15(VCN, 0, mmUVD_CGC_CTRL);
	data &= ~(UVD_CGC_CTRL__UDEC_RE_MODE_MASK
		| UVD_CGC_CTRL__UDEC_CM_MODE_MASK
		| UVD_CGC_CTRL__UDEC_IT_MODE_MASK
		| UVD_CGC_CTRL__UDEC_DB_MODE_MASK
		| UVD_CGC_CTRL__UDEC_MP_MODE_MASK
		| UVD_CGC_CTRL__SYS_MODE_MASK
		| UVD_CGC_CTRL__UDEC_MODE_MASK
		| UVD_CGC_CTRL__MPEG2_MODE_MASK
		| UVD_CGC_CTRL__REGS_MODE_MASK
		| UVD_CGC_CTRL__RBC_MODE_MASK
		| UVD_CGC_CTRL__LMI_MC_MODE_MASK
		| UVD_CGC_CTRL__LMI_UMC_MODE_MASK
		| UVD_CGC_CTRL__IDCT_MODE_MASK
		| UVD_CGC_CTRL__MPRD_MODE_MASK
		| UVD_CGC_CTRL__MPC_MODE_MASK
		| UVD_CGC_CTRL__LBSI_MODE_MASK
		| UVD_CGC_CTRL__LRBBM_MODE_MASK
		| UVD_CGC_CTRL__WCB_MODE_MASK
		| UVD_CGC_CTRL__VCPU_MODE_MASK
		| UVD_CGC_CTRL__SCPU_MODE_MASK);
	WREG32_SOC15(VCN, 0, mmUVD_CGC_CTRL, data);

	/* turn on */
	data = RREG32_SOC15(VCN, 0, mmUVD_SUVD_CGC_GATE);
	data |= (UVD_SUVD_CGC_GATE__SRE_MASK
		| UVD_SUVD_CGC_GATE__SIT_MASK
		| UVD_SUVD_CGC_GATE__SMP_MASK
		| UVD_SUVD_CGC_GATE__SCM_MASK
		| UVD_SUVD_CGC_GATE__SDB_MASK
		| UVD_SUVD_CGC_GATE__SRE_H264_MASK
		| UVD_SUVD_CGC_GATE__SRE_HEVC_MASK
		| UVD_SUVD_CGC_GATE__SIT_H264_MASK
		| UVD_SUVD_CGC_GATE__SIT_HEVC_MASK
		| UVD_SUVD_CGC_GATE__SCM_H264_MASK
		| UVD_SUVD_CGC_GATE__SCM_HEVC_MASK
		| UVD_SUVD_CGC_GATE__SDB_H264_MASK
		| UVD_SUVD_CGC_GATE__SDB_HEVC_MASK
		| UVD_SUVD_CGC_GATE__SCLR_MASK
		| UVD_SUVD_CGC_GATE__UVD_SC_MASK
		| UVD_SUVD_CGC_GATE__ENT_MASK
		| UVD_SUVD_CGC_GATE__SIT_HEVC_DEC_MASK
		| UVD_SUVD_CGC_GATE__SIT_HEVC_ENC_MASK
		| UVD_SUVD_CGC_GATE__SITE_MASK
		| UVD_SUVD_CGC_GATE__SRE_VP9_MASK
		| UVD_SUVD_CGC_GATE__SCM_VP9_MASK
		| UVD_SUVD_CGC_GATE__SIT_VP9_DEC_MASK
		| UVD_SUVD_CGC_GATE__SDB_VP9_MASK
		| UVD_SUVD_CGC_GATE__IME_HEVC_MASK);
	WREG32_SOC15(VCN, 0, mmUVD_SUVD_CGC_GATE, data);

	/* Clear SUVD gating mode bits. */
	data = RREG32_SOC15(VCN, 0, mmUVD_SUVD_CGC_CTRL);
	data &= ~(UVD_SUVD_CGC_CTRL__SRE_MODE_MASK
		| UVD_SUVD_CGC_CTRL__SIT_MODE_MASK
		| UVD_SUVD_CGC_CTRL__SMP_MODE_MASK
		| UVD_SUVD_CGC_CTRL__SCM_MODE_MASK
		| UVD_SUVD_CGC_CTRL__SDB_MODE_MASK
		| UVD_SUVD_CGC_CTRL__SCLR_MODE_MASK
		| UVD_SUVD_CGC_CTRL__UVD_SC_MODE_MASK
		| UVD_SUVD_CGC_CTRL__ENT_MODE_MASK
		| UVD_SUVD_CGC_CTRL__IME_MODE_MASK
		| UVD_SUVD_CGC_CTRL__SITE_MODE_MASK);
	WREG32_SOC15(VCN, 0, mmUVD_SUVD_CGC_CTRL, data);
}
449b843c749SSergey Zigachev 
/**
 * vcn_v1_0_enable_clock_gating - enable VCN clock gating
 *
 * @adev: amdgpu_device pointer
 *
 * Enable clock gating for the VCN block (JPEG, UVD, and SUVD
 * sub-blocks); used when the block goes idle to save power. Dynamic
 * clock mode is selected only when MGCG is supported.
 */
static void vcn_v1_0_enable_clock_gating(struct amdgpu_device *adev)
{
	uint32_t data = 0;

	/* enable JPEG CGC */
	data = RREG32_SOC15(VCN, 0, mmJPEG_CGC_CTRL);
	if (adev->cg_flags & AMD_CG_SUPPORT_VCN_MGCG)
		data |= 1 << JPEG_CGC_CTRL__DYN_CLOCK_MODE__SHIFT;
	else
		data |= 0 << JPEG_CGC_CTRL__DYN_CLOCK_MODE__SHIFT;
	data |= 1 << JPEG_CGC_CTRL__CLK_GATE_DLY_TIMER__SHIFT;
	data |= 4 << JPEG_CGC_CTRL__CLK_OFF_DELAY__SHIFT;
	WREG32_SOC15(VCN, 0, mmJPEG_CGC_CTRL, data);

	/* Close the JPEG clock gates. */
	data = RREG32_SOC15(VCN, 0, mmJPEG_CGC_GATE);
	data |= (JPEG_CGC_GATE__JPEG_MASK | JPEG_CGC_GATE__JPEG2_MASK);
	WREG32_SOC15(VCN, 0, mmJPEG_CGC_GATE, data);

	/* enable UVD CGC */
	data = RREG32_SOC15(VCN, 0, mmUVD_CGC_CTRL);
	if (adev->cg_flags & AMD_CG_SUPPORT_VCN_MGCG)
		data |= 1 << UVD_CGC_CTRL__DYN_CLOCK_MODE__SHIFT;
	else
		data |= 0 << UVD_CGC_CTRL__DYN_CLOCK_MODE__SHIFT;
	data |= 1 << UVD_CGC_CTRL__CLK_GATE_DLY_TIMER__SHIFT;
	data |= 4 << UVD_CGC_CTRL__CLK_OFF_DELAY__SHIFT;
	WREG32_SOC15(VCN, 0, mmUVD_CGC_CTRL, data);

	/* Set per-engine gating mode bits for all UVD engines. */
	data = RREG32_SOC15(VCN, 0, mmUVD_CGC_CTRL);
	data |= (UVD_CGC_CTRL__UDEC_RE_MODE_MASK
		| UVD_CGC_CTRL__UDEC_CM_MODE_MASK
		| UVD_CGC_CTRL__UDEC_IT_MODE_MASK
		| UVD_CGC_CTRL__UDEC_DB_MODE_MASK
		| UVD_CGC_CTRL__UDEC_MP_MODE_MASK
		| UVD_CGC_CTRL__SYS_MODE_MASK
		| UVD_CGC_CTRL__UDEC_MODE_MASK
		| UVD_CGC_CTRL__MPEG2_MODE_MASK
		| UVD_CGC_CTRL__REGS_MODE_MASK
		| UVD_CGC_CTRL__RBC_MODE_MASK
		| UVD_CGC_CTRL__LMI_MC_MODE_MASK
		| UVD_CGC_CTRL__LMI_UMC_MODE_MASK
		| UVD_CGC_CTRL__IDCT_MODE_MASK
		| UVD_CGC_CTRL__MPRD_MODE_MASK
		| UVD_CGC_CTRL__MPC_MODE_MASK
		| UVD_CGC_CTRL__LBSI_MODE_MASK
		| UVD_CGC_CTRL__LRBBM_MODE_MASK
		| UVD_CGC_CTRL__WCB_MODE_MASK
		| UVD_CGC_CTRL__VCPU_MODE_MASK
		| UVD_CGC_CTRL__SCPU_MODE_MASK);
	WREG32_SOC15(VCN, 0, mmUVD_CGC_CTRL, data);

	/* Set SUVD gating mode bits. */
	data = RREG32_SOC15(VCN, 0, mmUVD_SUVD_CGC_CTRL);
	data |= (UVD_SUVD_CGC_CTRL__SRE_MODE_MASK
		| UVD_SUVD_CGC_CTRL__SIT_MODE_MASK
		| UVD_SUVD_CGC_CTRL__SMP_MODE_MASK
		| UVD_SUVD_CGC_CTRL__SCM_MODE_MASK
		| UVD_SUVD_CGC_CTRL__SDB_MODE_MASK
		| UVD_SUVD_CGC_CTRL__SCLR_MODE_MASK
		| UVD_SUVD_CGC_CTRL__UVD_SC_MODE_MASK
		| UVD_SUVD_CGC_CTRL__ENT_MODE_MASK
		| UVD_SUVD_CGC_CTRL__IME_MODE_MASK
		| UVD_SUVD_CGC_CTRL__SITE_MODE_MASK);
	WREG32_SOC15(VCN, 0, mmUVD_SUVD_CGC_CTRL, data);
}
522b843c749SSergey Zigachev 
/*
 * vcn_1_0_disable_static_power_gating - power the VCN tiles on
 *
 * @adev: amdgpu_device pointer
 *
 * Program the PGFSM so every VCN power tile comes up, then wait for the
 * FSM to report the expected status. Finally configure UVD_POWER_STATUS
 * according to whether runtime power gating is supported.
 */
static void vcn_1_0_disable_static_power_gating(struct amdgpu_device *adev)
{
	uint32_t data = 0;
	int ret;

	if (adev->pg_flags & AMD_PG_SUPPORT_VCN) {
		/* PG supported: request power-up per tile (values 1/2
		 * select different PGFSM commands per field). */
		data = (1 << UVD_PGFSM_CONFIG__UVDM_PWR_CONFIG__SHIFT
			| 1 << UVD_PGFSM_CONFIG__UVDU_PWR_CONFIG__SHIFT
			| 2 << UVD_PGFSM_CONFIG__UVDF_PWR_CONFIG__SHIFT
			| 2 << UVD_PGFSM_CONFIG__UVDC_PWR_CONFIG__SHIFT
			| 2 << UVD_PGFSM_CONFIG__UVDB_PWR_CONFIG__SHIFT
			| 2 << UVD_PGFSM_CONFIG__UVDIL_PWR_CONFIG__SHIFT
			| 2 << UVD_PGFSM_CONFIG__UVDIR_PWR_CONFIG__SHIFT
			| 2 << UVD_PGFSM_CONFIG__UVDTD_PWR_CONFIG__SHIFT
			| 2 << UVD_PGFSM_CONFIG__UVDTE_PWR_CONFIG__SHIFT
			| 2 << UVD_PGFSM_CONFIG__UVDE_PWR_CONFIG__SHIFT
			| 2 << UVD_PGFSM_CONFIG__UVDW_PWR_CONFIG__SHIFT);

		WREG32_SOC15(VCN, 0, mmUVD_PGFSM_CONFIG, data);
		/* NOTE(review): 'ret' from the wait is ignored; a timeout
		 * here goes unreported. */
		SOC15_WAIT_ON_RREG(VCN, 0, mmUVD_PGFSM_STATUS, UVD_PGFSM_STATUS__UVDM_UVDU_PWR_ON, 0xFFFFFF, ret);
	} else {
		data = (1 << UVD_PGFSM_CONFIG__UVDM_PWR_CONFIG__SHIFT
			| 1 << UVD_PGFSM_CONFIG__UVDU_PWR_CONFIG__SHIFT
			| 1 << UVD_PGFSM_CONFIG__UVDF_PWR_CONFIG__SHIFT
			| 1 << UVD_PGFSM_CONFIG__UVDC_PWR_CONFIG__SHIFT
			| 1 << UVD_PGFSM_CONFIG__UVDB_PWR_CONFIG__SHIFT
			| 1 << UVD_PGFSM_CONFIG__UVDIL_PWR_CONFIG__SHIFT
			| 1 << UVD_PGFSM_CONFIG__UVDIR_PWR_CONFIG__SHIFT
			| 1 << UVD_PGFSM_CONFIG__UVDTD_PWR_CONFIG__SHIFT
			| 1 << UVD_PGFSM_CONFIG__UVDTE_PWR_CONFIG__SHIFT
			| 1 << UVD_PGFSM_CONFIG__UVDE_PWR_CONFIG__SHIFT
			| 1 << UVD_PGFSM_CONFIG__UVDW_PWR_CONFIG__SHIFT);
		WREG32_SOC15(VCN, 0, mmUVD_PGFSM_CONFIG, data);
		SOC15_WAIT_ON_RREG(VCN, 0, mmUVD_PGFSM_STATUS, 0,  0xFFFFFFFF, ret);
	}

	/* polling UVD_PGFSM_STATUS to confirm UVDM_PWR_STATUS , UVDU_PWR_STATUS are 0 (power on) */

	data = RREG32_SOC15(VCN, 0, mmUVD_POWER_STATUS);
	data &= ~0x103;
	if (adev->pg_flags & AMD_PG_SUPPORT_VCN)
		/* NOTE(review): a UVD_PGFSM_CONFIG constant is OR'ed into
		 * UVD_POWER_STATUS here; this matches the upstream Linux
		 * driver but is worth confirming against the register
		 * spec. */
		data |= UVD_PGFSM_CONFIG__UVDM_UVDU_PWR_ON | UVD_POWER_STATUS__UVD_PG_EN_MASK;

	WREG32_SOC15(VCN, 0, mmUVD_POWER_STATUS, data);
}
568b843c749SSergey Zigachev 
/*
 * vcn_1_0_enable_static_power_gating - power the VCN tiles down (gate)
 *
 * @adev: amdgpu_device pointer
 *
 * If power gating is supported, flags the block as "tiles off" in
 * UVD_POWER_STATUS, then requests config 2 for every PGFSM tile and
 * polls UVD_PGFSM_STATUS until all tiles report status 2.
 * Does nothing when AMD_PG_SUPPORT_VCN is not set.
 */
static void vcn_1_0_enable_static_power_gating(struct amdgpu_device *adev)
{
	uint32_t data = 0;
	int ret;	/* written by SOC15_WAIT_ON_RREG; result intentionally unchecked */

	if (adev->pg_flags & AMD_PG_SUPPORT_VCN) {
		/* Before power off, this indicator has to be turned on */
		data = RREG32_SOC15(VCN, 0, mmUVD_POWER_STATUS);
		data &= ~UVD_POWER_STATUS__UVD_POWER_STATUS_MASK;
		data |= UVD_POWER_STATUS__UVD_POWER_STATUS_TILES_OFF;
		WREG32_SOC15(VCN, 0, mmUVD_POWER_STATUS, data);


		/* Request config 2 ("gated") for every tile. */
		data = (2 << UVD_PGFSM_CONFIG__UVDM_PWR_CONFIG__SHIFT
			| 2 << UVD_PGFSM_CONFIG__UVDU_PWR_CONFIG__SHIFT
			| 2 << UVD_PGFSM_CONFIG__UVDF_PWR_CONFIG__SHIFT
			| 2 << UVD_PGFSM_CONFIG__UVDC_PWR_CONFIG__SHIFT
			| 2 << UVD_PGFSM_CONFIG__UVDB_PWR_CONFIG__SHIFT
			| 2 << UVD_PGFSM_CONFIG__UVDIL_PWR_CONFIG__SHIFT
			| 2 << UVD_PGFSM_CONFIG__UVDIR_PWR_CONFIG__SHIFT
			| 2 << UVD_PGFSM_CONFIG__UVDTD_PWR_CONFIG__SHIFT
			| 2 << UVD_PGFSM_CONFIG__UVDTE_PWR_CONFIG__SHIFT
			| 2 << UVD_PGFSM_CONFIG__UVDE_PWR_CONFIG__SHIFT
			| 2 << UVD_PGFSM_CONFIG__UVDW_PWR_CONFIG__SHIFT);

		WREG32_SOC15(VCN, 0, mmUVD_PGFSM_CONFIG, data);

		/* Wait until every tile reports status 2 in PGFSM_STATUS. */
		data = (2 << UVD_PGFSM_STATUS__UVDM_PWR_STATUS__SHIFT
			| 2 << UVD_PGFSM_STATUS__UVDU_PWR_STATUS__SHIFT
			| 2 << UVD_PGFSM_STATUS__UVDF_PWR_STATUS__SHIFT
			| 2 << UVD_PGFSM_STATUS__UVDC_PWR_STATUS__SHIFT
			| 2 << UVD_PGFSM_STATUS__UVDB_PWR_STATUS__SHIFT
			| 2 << UVD_PGFSM_STATUS__UVDIL_PWR_STATUS__SHIFT
			| 2 << UVD_PGFSM_STATUS__UVDIR_PWR_STATUS__SHIFT
			| 2 << UVD_PGFSM_STATUS__UVDTD_PWR_STATUS__SHIFT
			| 2 << UVD_PGFSM_STATUS__UVDTE_PWR_STATUS__SHIFT
			| 2 << UVD_PGFSM_STATUS__UVDE_PWR_STATUS__SHIFT
			| 2 << UVD_PGFSM_STATUS__UVDW_PWR_STATUS__SHIFT);
		SOC15_WAIT_ON_RREG(VCN, 0, mmUVD_PGFSM_STATUS, data, 0xFFFFFFFF, ret);
	}
}
610b843c749SSergey Zigachev 
/**
 * vcn_v1_0_start - start VCN block
 *
 * @adev: amdgpu_device pointer
 *
 * Setup and start the VCN block: power/clock ungate, program the memory
 * controller and ring buffers (decode, two encode rings, JPEG ring) and
 * boot the VCPU.
 *
 * Returns 0 on success, -1 if the decode VCPU never reported ready.
 */
static int vcn_v1_0_start(struct amdgpu_device *adev)
{
	struct amdgpu_ring *ring = &adev->vcn.ring_dec;
	uint32_t rb_bufsz, tmp;
	uint32_t lmi_swap_cntl;
	int i, j, r;

	/* disable byte swapping */
	lmi_swap_cntl = 0;

	vcn_1_0_disable_static_power_gating(adev);
	/* disable clock gating */
	vcn_v1_0_disable_clock_gating(adev);

	vcn_v1_0_mc_resume(adev);

	/* disable interrupt */
	WREG32_P(SOC15_REG_OFFSET(UVD, 0, mmUVD_MASTINT_EN), 0,
			~UVD_MASTINT_EN__VCPU_EN_MASK);

	/* stall UMC and register bus before resetting VCPU */
	WREG32_P(SOC15_REG_OFFSET(UVD, 0, mmUVD_LMI_CTRL2),
			UVD_LMI_CTRL2__STALL_ARB_UMC_MASK,
			~UVD_LMI_CTRL2__STALL_ARB_UMC_MASK);
	mdelay(1);

	/* put LMI, VCPU, RBC etc... into reset */
	WREG32_SOC15(UVD, 0, mmUVD_SOFT_RESET,
		UVD_SOFT_RESET__LMI_SOFT_RESET_MASK |
		UVD_SOFT_RESET__VCPU_SOFT_RESET_MASK |
		UVD_SOFT_RESET__LBSI_SOFT_RESET_MASK |
		UVD_SOFT_RESET__RBC_SOFT_RESET_MASK |
		UVD_SOFT_RESET__CSM_SOFT_RESET_MASK |
		UVD_SOFT_RESET__CXW_SOFT_RESET_MASK |
		UVD_SOFT_RESET__TAP_SOFT_RESET_MASK |
		UVD_SOFT_RESET__LMI_UMC_SOFT_RESET_MASK);
	mdelay(5);

	/* initialize VCN memory controller */
	WREG32_SOC15(UVD, 0, mmUVD_LMI_CTRL,
		(0x40 << UVD_LMI_CTRL__WRITE_CLEAN_TIMER__SHIFT) |
		UVD_LMI_CTRL__WRITE_CLEAN_TIMER_EN_MASK |
		UVD_LMI_CTRL__DATA_COHERENCY_EN_MASK |
		UVD_LMI_CTRL__VCPU_DATA_COHERENCY_EN_MASK |
		UVD_LMI_CTRL__REQ_MODE_MASK |
		0x00100000L);

#ifdef __BIG_ENDIAN
	/* swap (8 in 32) RB and IB */
	lmi_swap_cntl = 0xa;
#endif
	WREG32_SOC15(UVD, 0, mmUVD_LMI_SWAP_CNTL, lmi_swap_cntl);

	/* program the MPC mux/ALU configuration */
	WREG32_SOC15(UVD, 0, mmUVD_MPC_SET_MUXA0, 0x40c2040);
	WREG32_SOC15(UVD, 0, mmUVD_MPC_SET_MUXA1, 0x0);
	WREG32_SOC15(UVD, 0, mmUVD_MPC_SET_MUXB0, 0x40c2040);
	WREG32_SOC15(UVD, 0, mmUVD_MPC_SET_MUXB1, 0x0);
	WREG32_SOC15(UVD, 0, mmUVD_MPC_SET_ALU, 0);
	WREG32_SOC15(UVD, 0, mmUVD_MPC_SET_MUX, 0x88);

	/* take all subblocks out of reset, except VCPU */
	WREG32_SOC15(UVD, 0, mmUVD_SOFT_RESET,
			UVD_SOFT_RESET__VCPU_SOFT_RESET_MASK);
	mdelay(5);

	/* enable VCPU clock */
	WREG32_SOC15(UVD, 0, mmUVD_VCPU_CNTL,
			UVD_VCPU_CNTL__CLK_EN_MASK);

	/* enable UMC */
	WREG32_P(SOC15_REG_OFFSET(UVD, 0, mmUVD_LMI_CTRL2), 0,
			~UVD_LMI_CTRL2__STALL_ARB_UMC_MASK);

	/* boot up the VCPU */
	WREG32_SOC15(UVD, 0, mmUVD_SOFT_RESET, 0);
	mdelay(10);

	/*
	 * Poll UVD_STATUS for the VCPU-ready bit; on timeout, pulse a VCPU
	 * soft reset and retry, up to 10 attempts.
	 */
	for (i = 0; i < 10; ++i) {
		uint32_t status;

		for (j = 0; j < 100; ++j) {
			status = RREG32_SOC15(UVD, 0, mmUVD_STATUS);
			if (status & 2)
				break;
			mdelay(10);
		}
		r = 0;
		if (status & 2)
			break;

		DRM_ERROR("VCN decode not responding, trying to reset the VCPU!!!\n");
		WREG32_P(SOC15_REG_OFFSET(UVD, 0, mmUVD_SOFT_RESET),
				UVD_SOFT_RESET__VCPU_SOFT_RESET_MASK,
				~UVD_SOFT_RESET__VCPU_SOFT_RESET_MASK);
		mdelay(10);
		WREG32_P(SOC15_REG_OFFSET(UVD, 0, mmUVD_SOFT_RESET), 0,
				~UVD_SOFT_RESET__VCPU_SOFT_RESET_MASK);
		mdelay(10);
		r = -1;
	}

	if (r) {
		DRM_ERROR("VCN decode not responding, giving up!!!\n");
		return r;
	}
	/* enable master interrupt */
	WREG32_P(SOC15_REG_OFFSET(UVD, 0, mmUVD_MASTINT_EN),
		(UVD_MASTINT_EN__VCPU_EN_MASK|UVD_MASTINT_EN__SYS_EN_MASK),
		~(UVD_MASTINT_EN__VCPU_EN_MASK|UVD_MASTINT_EN__SYS_EN_MASK));

	/* clear the bit 4 of VCN_STATUS */
	WREG32_P(SOC15_REG_OFFSET(UVD, 0, mmUVD_STATUS), 0,
			~(2 << UVD_STATUS__VCPU_REPORT__SHIFT));

	/* force RBC into idle state */
	rb_bufsz = order_base_2(ring->ring_size);
	tmp = REG_SET_FIELD(0, UVD_RBC_RB_CNTL, RB_BUFSZ, rb_bufsz);
	tmp = REG_SET_FIELD(tmp, UVD_RBC_RB_CNTL, RB_BLKSZ, 1);
	tmp = REG_SET_FIELD(tmp, UVD_RBC_RB_CNTL, RB_NO_FETCH, 1);
	tmp = REG_SET_FIELD(tmp, UVD_RBC_RB_CNTL, RB_WPTR_POLL_EN, 0);
	tmp = REG_SET_FIELD(tmp, UVD_RBC_RB_CNTL, RB_NO_UPDATE, 1);
	tmp = REG_SET_FIELD(tmp, UVD_RBC_RB_CNTL, RB_RPTR_WR_EN, 1);
	WREG32_SOC15(UVD, 0, mmUVD_RBC_RB_CNTL, tmp);

	/* set the write pointer delay */
	WREG32_SOC15(UVD, 0, mmUVD_RBC_RB_WPTR_CNTL, 0);

	/* set the wb address */
	WREG32_SOC15(UVD, 0, mmUVD_RBC_RB_RPTR_ADDR,
			(upper_32_bits(ring->gpu_addr) >> 2));

	/* program the RB_BASE for ring buffer */
	WREG32_SOC15(UVD, 0, mmUVD_LMI_RBC_RB_64BIT_BAR_LOW,
			lower_32_bits(ring->gpu_addr));
	WREG32_SOC15(UVD, 0, mmUVD_LMI_RBC_RB_64BIT_BAR_HIGH,
			upper_32_bits(ring->gpu_addr));

	/* Initialize the ring buffer's read and write pointers */
	WREG32_SOC15(UVD, 0, mmUVD_RBC_RB_RPTR, 0);

	ring->wptr = RREG32_SOC15(UVD, 0, mmUVD_RBC_RB_RPTR);
	WREG32_SOC15(UVD, 0, mmUVD_RBC_RB_WPTR,
			lower_32_bits(ring->wptr));

	/* re-enable ring fetch now that the ring is programmed */
	WREG32_P(SOC15_REG_OFFSET(UVD, 0, mmUVD_RBC_RB_CNTL), 0,
			~UVD_RBC_RB_CNTL__RB_NO_FETCH_MASK);

	/* program the first encode ring */
	ring = &adev->vcn.ring_enc[0];
	WREG32_SOC15(UVD, 0, mmUVD_RB_RPTR, lower_32_bits(ring->wptr));
	WREG32_SOC15(UVD, 0, mmUVD_RB_WPTR, lower_32_bits(ring->wptr));
	WREG32_SOC15(UVD, 0, mmUVD_RB_BASE_LO, ring->gpu_addr);
	WREG32_SOC15(UVD, 0, mmUVD_RB_BASE_HI, upper_32_bits(ring->gpu_addr));
	WREG32_SOC15(UVD, 0, mmUVD_RB_SIZE, ring->ring_size / 4);

	/* program the second encode ring */
	ring = &adev->vcn.ring_enc[1];
	WREG32_SOC15(UVD, 0, mmUVD_RB_RPTR2, lower_32_bits(ring->wptr));
	WREG32_SOC15(UVD, 0, mmUVD_RB_WPTR2, lower_32_bits(ring->wptr));
	WREG32_SOC15(UVD, 0, mmUVD_RB_BASE_LO2, ring->gpu_addr);
	WREG32_SOC15(UVD, 0, mmUVD_RB_BASE_HI2, upper_32_bits(ring->gpu_addr));
	WREG32_SOC15(UVD, 0, mmUVD_RB_SIZE2, ring->ring_size / 4);

	/* program the JPEG ring */
	ring = &adev->vcn.ring_jpeg;
	WREG32_SOC15(UVD, 0, mmUVD_LMI_JRBC_RB_VMID, 0);
	WREG32_SOC15(UVD, 0, mmUVD_JRBC_RB_CNTL, (0x00000001L | 0x00000002L));
	WREG32_SOC15(UVD, 0, mmUVD_LMI_JRBC_RB_64BIT_BAR_LOW, lower_32_bits(ring->gpu_addr));
	WREG32_SOC15(UVD, 0, mmUVD_LMI_JRBC_RB_64BIT_BAR_HIGH, upper_32_bits(ring->gpu_addr));
	WREG32_SOC15(UVD, 0, mmUVD_JRBC_RB_RPTR, 0);
	WREG32_SOC15(UVD, 0, mmUVD_JRBC_RB_WPTR, 0);
	WREG32_SOC15(UVD, 0, mmUVD_JRBC_RB_CNTL, 0x00000002L);

	/* initialize wptr */
	ring->wptr = RREG32_SOC15(UVD, 0, mmUVD_JRBC_RB_WPTR);

	/* copy patch commands to the jpeg ring */
	vcn_v1_0_jpeg_ring_set_patch_ring(ring,
		(ring->wptr + ring->max_dw * amdgpu_sched_hw_submission));

	return 0;
}
797b843c749SSergey Zigachev 
/**
 * vcn_v1_0_stop - stop VCN block
 *
 * @adev: amdgpu_device pointer
 *
 * stop the VCN block: quiesce the ring controller, reset the VCPU,
 * then re-enable clock and static power gating.
 *
 * Always returns 0.
 */
static int vcn_v1_0_stop(struct amdgpu_device *adev)
{
	/* force RBC into idle state */
	WREG32_SOC15(UVD, 0, mmUVD_RBC_RB_CNTL, 0x11010101);

	/* Stall UMC and register bus before resetting VCPU */
	WREG32_P(SOC15_REG_OFFSET(UVD, 0, mmUVD_LMI_CTRL2),
			UVD_LMI_CTRL2__STALL_ARB_UMC_MASK,
			~UVD_LMI_CTRL2__STALL_ARB_UMC_MASK);
	mdelay(1);

	/* put VCPU into reset */
	WREG32_SOC15(UVD, 0, mmUVD_SOFT_RESET,
			UVD_SOFT_RESET__VCPU_SOFT_RESET_MASK);
	mdelay(5);

	/* disable VCPU clock */
	WREG32_SOC15(UVD, 0, mmUVD_VCPU_CNTL, 0x0);

	/* Unstall UMC and register bus */
	WREG32_P(SOC15_REG_OFFSET(UVD, 0, mmUVD_LMI_CTRL2), 0,
			~UVD_LMI_CTRL2__STALL_ARB_UMC_MASK);

	/* clear the status register so is_idle()/wait_for_idle() see "stopped" */
	WREG32_SOC15(VCN, 0, mmUVD_STATUS, 0);

	vcn_v1_0_enable_clock_gating(adev);
	vcn_1_0_enable_static_power_gating(adev);
	return 0;
}
834b843c749SSergey Zigachev 
vcn_v1_0_is_idle(void * handle)835b843c749SSergey Zigachev static bool vcn_v1_0_is_idle(void *handle)
836b843c749SSergey Zigachev {
837b843c749SSergey Zigachev 	struct amdgpu_device *adev = (struct amdgpu_device *)handle;
838b843c749SSergey Zigachev 
839b843c749SSergey Zigachev 	return (RREG32_SOC15(VCN, 0, mmUVD_STATUS) == 0x2);
840b843c749SSergey Zigachev }
841b843c749SSergey Zigachev 
vcn_v1_0_wait_for_idle(void * handle)842b843c749SSergey Zigachev static int vcn_v1_0_wait_for_idle(void *handle)
843b843c749SSergey Zigachev {
844b843c749SSergey Zigachev 	struct amdgpu_device *adev = (struct amdgpu_device *)handle;
845b843c749SSergey Zigachev 	int ret = 0;
846b843c749SSergey Zigachev 
847b843c749SSergey Zigachev 	SOC15_WAIT_ON_RREG(VCN, 0, mmUVD_STATUS, 0x2, 0x2, ret);
848b843c749SSergey Zigachev 
849b843c749SSergey Zigachev 	return ret;
850b843c749SSergey Zigachev }
851b843c749SSergey Zigachev 
vcn_v1_0_set_clockgating_state(void * handle,enum amd_clockgating_state state)852b843c749SSergey Zigachev static int vcn_v1_0_set_clockgating_state(void *handle,
853b843c749SSergey Zigachev 					  enum amd_clockgating_state state)
854b843c749SSergey Zigachev {
855b843c749SSergey Zigachev 	struct amdgpu_device *adev = (struct amdgpu_device *)handle;
856b843c749SSergey Zigachev 	bool enable = (state == AMD_CG_STATE_GATE) ? true : false;
857b843c749SSergey Zigachev 
858b843c749SSergey Zigachev 	if (enable) {
859b843c749SSergey Zigachev 		/* wait for STATUS to clear */
860b843c749SSergey Zigachev 		if (!vcn_v1_0_is_idle(handle))
861b843c749SSergey Zigachev 			return -EBUSY;
862b843c749SSergey Zigachev 		vcn_v1_0_enable_clock_gating(adev);
863b843c749SSergey Zigachev 	} else {
864b843c749SSergey Zigachev 		/* disable HW gating and enable Sw gating */
865b843c749SSergey Zigachev 		vcn_v1_0_disable_clock_gating(adev);
866b843c749SSergey Zigachev 	}
867b843c749SSergey Zigachev 	return 0;
868b843c749SSergey Zigachev }
869b843c749SSergey Zigachev 
870b843c749SSergey Zigachev /**
871b843c749SSergey Zigachev  * vcn_v1_0_dec_ring_get_rptr - get read pointer
872b843c749SSergey Zigachev  *
873b843c749SSergey Zigachev  * @ring: amdgpu_ring pointer
874b843c749SSergey Zigachev  *
875b843c749SSergey Zigachev  * Returns the current hardware read pointer
876b843c749SSergey Zigachev  */
vcn_v1_0_dec_ring_get_rptr(struct amdgpu_ring * ring)877b843c749SSergey Zigachev static uint64_t vcn_v1_0_dec_ring_get_rptr(struct amdgpu_ring *ring)
878b843c749SSergey Zigachev {
879b843c749SSergey Zigachev 	struct amdgpu_device *adev = ring->adev;
880b843c749SSergey Zigachev 
881b843c749SSergey Zigachev 	return RREG32_SOC15(UVD, 0, mmUVD_RBC_RB_RPTR);
882b843c749SSergey Zigachev }
883b843c749SSergey Zigachev 
884b843c749SSergey Zigachev /**
885b843c749SSergey Zigachev  * vcn_v1_0_dec_ring_get_wptr - get write pointer
886b843c749SSergey Zigachev  *
887b843c749SSergey Zigachev  * @ring: amdgpu_ring pointer
888b843c749SSergey Zigachev  *
889b843c749SSergey Zigachev  * Returns the current hardware write pointer
890b843c749SSergey Zigachev  */
vcn_v1_0_dec_ring_get_wptr(struct amdgpu_ring * ring)891b843c749SSergey Zigachev static uint64_t vcn_v1_0_dec_ring_get_wptr(struct amdgpu_ring *ring)
892b843c749SSergey Zigachev {
893b843c749SSergey Zigachev 	struct amdgpu_device *adev = ring->adev;
894b843c749SSergey Zigachev 
895b843c749SSergey Zigachev 	return RREG32_SOC15(UVD, 0, mmUVD_RBC_RB_WPTR);
896b843c749SSergey Zigachev }
897b843c749SSergey Zigachev 
898b843c749SSergey Zigachev /**
899b843c749SSergey Zigachev  * vcn_v1_0_dec_ring_set_wptr - set write pointer
900b843c749SSergey Zigachev  *
901b843c749SSergey Zigachev  * @ring: amdgpu_ring pointer
902b843c749SSergey Zigachev  *
903b843c749SSergey Zigachev  * Commits the write pointer to the hardware
904b843c749SSergey Zigachev  */
vcn_v1_0_dec_ring_set_wptr(struct amdgpu_ring * ring)905b843c749SSergey Zigachev static void vcn_v1_0_dec_ring_set_wptr(struct amdgpu_ring *ring)
906b843c749SSergey Zigachev {
907b843c749SSergey Zigachev 	struct amdgpu_device *adev = ring->adev;
908b843c749SSergey Zigachev 
909b843c749SSergey Zigachev 	WREG32_SOC15(UVD, 0, mmUVD_RBC_RB_WPTR, lower_32_bits(ring->wptr));
910b843c749SSergey Zigachev }
911b843c749SSergey Zigachev 
912b843c749SSergey Zigachev /**
913b843c749SSergey Zigachev  * vcn_v1_0_dec_ring_insert_start - insert a start command
914b843c749SSergey Zigachev  *
915b843c749SSergey Zigachev  * @ring: amdgpu_ring pointer
916b843c749SSergey Zigachev  *
917b843c749SSergey Zigachev  * Write a start command to the ring.
918b843c749SSergey Zigachev  */
vcn_v1_0_dec_ring_insert_start(struct amdgpu_ring * ring)919b843c749SSergey Zigachev static void vcn_v1_0_dec_ring_insert_start(struct amdgpu_ring *ring)
920b843c749SSergey Zigachev {
921b843c749SSergey Zigachev 	struct amdgpu_device *adev = ring->adev;
922b843c749SSergey Zigachev 
923b843c749SSergey Zigachev 	amdgpu_ring_write(ring,
924b843c749SSergey Zigachev 		PACKET0(SOC15_REG_OFFSET(UVD, 0, mmUVD_GPCOM_VCPU_DATA0), 0));
925b843c749SSergey Zigachev 	amdgpu_ring_write(ring, 0);
926b843c749SSergey Zigachev 	amdgpu_ring_write(ring,
927b843c749SSergey Zigachev 		PACKET0(SOC15_REG_OFFSET(UVD, 0, mmUVD_GPCOM_VCPU_CMD), 0));
928b843c749SSergey Zigachev 	amdgpu_ring_write(ring, VCN_DEC_CMD_PACKET_START << 1);
929b843c749SSergey Zigachev }
930b843c749SSergey Zigachev 
931b843c749SSergey Zigachev /**
932b843c749SSergey Zigachev  * vcn_v1_0_dec_ring_insert_end - insert a end command
933b843c749SSergey Zigachev  *
934b843c749SSergey Zigachev  * @ring: amdgpu_ring pointer
935b843c749SSergey Zigachev  *
936b843c749SSergey Zigachev  * Write a end command to the ring.
937b843c749SSergey Zigachev  */
vcn_v1_0_dec_ring_insert_end(struct amdgpu_ring * ring)938b843c749SSergey Zigachev static void vcn_v1_0_dec_ring_insert_end(struct amdgpu_ring *ring)
939b843c749SSergey Zigachev {
940b843c749SSergey Zigachev 	struct amdgpu_device *adev = ring->adev;
941b843c749SSergey Zigachev 
942b843c749SSergey Zigachev 	amdgpu_ring_write(ring,
943b843c749SSergey Zigachev 		PACKET0(SOC15_REG_OFFSET(UVD, 0, mmUVD_GPCOM_VCPU_CMD), 0));
944b843c749SSergey Zigachev 	amdgpu_ring_write(ring, VCN_DEC_CMD_PACKET_END << 1);
945b843c749SSergey Zigachev }
946b843c749SSergey Zigachev 
/**
 * vcn_v1_0_dec_ring_emit_fence - emit an fence & trap command
 *
 * @ring: amdgpu_ring pointer
 * @addr: GPU address the fence value is written to
 * @seq: sequence number written at @addr
 * @flags: AMDGPU_FENCE_FLAG_* bits; 64-bit fences are not supported here
 *
 * Write a fence and a trap command to the ring.
 */
static void vcn_v1_0_dec_ring_emit_fence(struct amdgpu_ring *ring, uint64_t addr, uint64_t seq,
				     unsigned flags)
{
	struct amdgpu_device *adev = ring->adev;

	WARN_ON(flags & AMDGPU_FENCE_FLAG_64BIT);

	/* fence value */
	amdgpu_ring_write(ring,
		PACKET0(SOC15_REG_OFFSET(UVD, 0, mmUVD_CONTEXT_ID), 0));
	amdgpu_ring_write(ring, seq);
	/* fence address: low 32 bits, then bits 39:32, then the FENCE command */
	amdgpu_ring_write(ring,
		PACKET0(SOC15_REG_OFFSET(UVD, 0, mmUVD_GPCOM_VCPU_DATA0), 0));
	amdgpu_ring_write(ring, addr & 0xffffffff);
	amdgpu_ring_write(ring,
		PACKET0(SOC15_REG_OFFSET(UVD, 0, mmUVD_GPCOM_VCPU_DATA1), 0));
	amdgpu_ring_write(ring, upper_32_bits(addr) & 0xff);
	amdgpu_ring_write(ring,
		PACKET0(SOC15_REG_OFFSET(UVD, 0, mmUVD_GPCOM_VCPU_CMD), 0));
	amdgpu_ring_write(ring, VCN_DEC_CMD_FENCE << 1);

	/* trap command with zeroed data words */
	amdgpu_ring_write(ring,
		PACKET0(SOC15_REG_OFFSET(UVD, 0, mmUVD_GPCOM_VCPU_DATA0), 0));
	amdgpu_ring_write(ring, 0);
	amdgpu_ring_write(ring,
		PACKET0(SOC15_REG_OFFSET(UVD, 0, mmUVD_GPCOM_VCPU_DATA1), 0));
	amdgpu_ring_write(ring, 0);
	amdgpu_ring_write(ring,
		PACKET0(SOC15_REG_OFFSET(UVD, 0, mmUVD_GPCOM_VCPU_CMD), 0));
	amdgpu_ring_write(ring, VCN_DEC_CMD_TRAP << 1);
}
985b843c749SSergey Zigachev 
986b843c749SSergey Zigachev /**
987b843c749SSergey Zigachev  * vcn_v1_0_dec_ring_emit_ib - execute indirect buffer
988b843c749SSergey Zigachev  *
989b843c749SSergey Zigachev  * @ring: amdgpu_ring pointer
990b843c749SSergey Zigachev  * @ib: indirect buffer to execute
991b843c749SSergey Zigachev  *
992b843c749SSergey Zigachev  * Write ring commands to execute the indirect buffer
993b843c749SSergey Zigachev  */
vcn_v1_0_dec_ring_emit_ib(struct amdgpu_ring * ring,struct amdgpu_ib * ib,unsigned vmid,bool ctx_switch)994b843c749SSergey Zigachev static void vcn_v1_0_dec_ring_emit_ib(struct amdgpu_ring *ring,
995b843c749SSergey Zigachev 				  struct amdgpu_ib *ib,
996b843c749SSergey Zigachev 				  unsigned vmid, bool ctx_switch)
997b843c749SSergey Zigachev {
998b843c749SSergey Zigachev 	struct amdgpu_device *adev = ring->adev;
999b843c749SSergey Zigachev 
1000b843c749SSergey Zigachev 	amdgpu_ring_write(ring,
1001b843c749SSergey Zigachev 		PACKET0(SOC15_REG_OFFSET(UVD, 0, mmUVD_LMI_RBC_IB_VMID), 0));
1002b843c749SSergey Zigachev 	amdgpu_ring_write(ring, vmid);
1003b843c749SSergey Zigachev 
1004b843c749SSergey Zigachev 	amdgpu_ring_write(ring,
1005b843c749SSergey Zigachev 		PACKET0(SOC15_REG_OFFSET(UVD, 0, mmUVD_LMI_RBC_IB_64BIT_BAR_LOW), 0));
1006b843c749SSergey Zigachev 	amdgpu_ring_write(ring, lower_32_bits(ib->gpu_addr));
1007b843c749SSergey Zigachev 	amdgpu_ring_write(ring,
1008b843c749SSergey Zigachev 		PACKET0(SOC15_REG_OFFSET(UVD, 0, mmUVD_LMI_RBC_IB_64BIT_BAR_HIGH), 0));
1009b843c749SSergey Zigachev 	amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr));
1010b843c749SSergey Zigachev 	amdgpu_ring_write(ring,
1011b843c749SSergey Zigachev 		PACKET0(SOC15_REG_OFFSET(UVD, 0, mmUVD_RBC_IB_SIZE), 0));
1012b843c749SSergey Zigachev 	amdgpu_ring_write(ring, ib->length_dw);
1013b843c749SSergey Zigachev }
1014b843c749SSergey Zigachev 
/*
 * vcn_v1_0_dec_ring_emit_reg_wait - emit a masked register-poll command
 *
 * @ring: amdgpu_ring pointer
 * @reg: register dword offset to poll
 * @val: expected value
 * @mask: bits of the register to compare
 *
 * Emits a REG_READ_COND_WAIT command: the VCPU waits until
 * (reg & mask) == val before continuing.
 */
static void vcn_v1_0_dec_ring_emit_reg_wait(struct amdgpu_ring *ring,
					    uint32_t reg, uint32_t val,
					    uint32_t mask)
{
	struct amdgpu_device *adev = ring->adev;

	/* register byte address */
	amdgpu_ring_write(ring, PACKET0(SOC15_REG_OFFSET(UVD, 0, mmUVD_GPCOM_VCPU_DATA0), 0));
	amdgpu_ring_write(ring, reg << 2);
	/* expected value */
	amdgpu_ring_write(ring, PACKET0(SOC15_REG_OFFSET(UVD, 0, mmUVD_GPCOM_VCPU_DATA1), 0));
	amdgpu_ring_write(ring, val);
	/* compare mask */
	amdgpu_ring_write(ring, PACKET0(SOC15_REG_OFFSET(UVD, 0, mmUVD_GP_SCRATCH8), 0));
	amdgpu_ring_write(ring, mask);
	/* conditional-wait command */
	amdgpu_ring_write(ring, PACKET0(SOC15_REG_OFFSET(UVD, 0, mmUVD_GPCOM_VCPU_CMD), 0));
	amdgpu_ring_write(ring, VCN_DEC_CMD_REG_READ_COND_WAIT << 1);
}
1034b843c749SSergey Zigachev 
/*
 * vcn_v1_0_dec_ring_emit_vm_flush - emit a VM TLB flush for the decode ring
 *
 * @ring: amdgpu_ring pointer
 * @vmid: VM id whose mappings are flushed
 * @pd_addr: page-directory base address
 *
 * Emits the common GMC flush, then waits until the hub's ctx0 PTB
 * register for @vmid reads back the page-directory address.
 */
static void vcn_v1_0_dec_ring_emit_vm_flush(struct amdgpu_ring *ring,
					    unsigned vmid, uint64_t pd_addr)
{
	struct amdgpu_vmhub *hub = &ring->adev->vmhub[ring->funcs->vmhub];
	uint32_t reg, val;

	pd_addr = amdgpu_gmc_emit_flush_gpu_tlb(ring, vmid, pd_addr);

	/* wait for register write */
	reg = hub->ctx0_ptb_addr_lo32 + vmid * 2;
	val = lower_32_bits(pd_addr);
	vcn_v1_0_dec_ring_emit_reg_wait(ring, reg, val, 0xffffffff);
}
1049b843c749SSergey Zigachev 
/*
 * vcn_v1_0_dec_ring_emit_wreg - emit a register write through the decode ring
 *
 * @ring: amdgpu_ring pointer
 * @reg: register dword offset to write
 * @val: value to write
 *
 * Emits a WRITE_REG command: register byte address, value, then the
 * write command itself.
 */
static void vcn_v1_0_dec_ring_emit_wreg(struct amdgpu_ring *ring,
					uint32_t reg, uint32_t val)
{
	struct amdgpu_device *adev = ring->adev;

	/* register byte address */
	amdgpu_ring_write(ring, PACKET0(SOC15_REG_OFFSET(UVD, 0, mmUVD_GPCOM_VCPU_DATA0), 0));
	amdgpu_ring_write(ring, reg << 2);
	/* value */
	amdgpu_ring_write(ring, PACKET0(SOC15_REG_OFFSET(UVD, 0, mmUVD_GPCOM_VCPU_DATA1), 0));
	amdgpu_ring_write(ring, val);
	/* write command */
	amdgpu_ring_write(ring, PACKET0(SOC15_REG_OFFSET(UVD, 0, mmUVD_GPCOM_VCPU_CMD), 0));
	amdgpu_ring_write(ring, VCN_DEC_CMD_WRITE_REG << 1);
}
1065b843c749SSergey Zigachev 
1066b843c749SSergey Zigachev /**
1067b843c749SSergey Zigachev  * vcn_v1_0_enc_ring_get_rptr - get enc read pointer
1068b843c749SSergey Zigachev  *
1069b843c749SSergey Zigachev  * @ring: amdgpu_ring pointer
1070b843c749SSergey Zigachev  *
1071b843c749SSergey Zigachev  * Returns the current hardware enc read pointer
1072b843c749SSergey Zigachev  */
vcn_v1_0_enc_ring_get_rptr(struct amdgpu_ring * ring)1073b843c749SSergey Zigachev static uint64_t vcn_v1_0_enc_ring_get_rptr(struct amdgpu_ring *ring)
1074b843c749SSergey Zigachev {
1075b843c749SSergey Zigachev 	struct amdgpu_device *adev = ring->adev;
1076b843c749SSergey Zigachev 
1077b843c749SSergey Zigachev 	if (ring == &adev->vcn.ring_enc[0])
1078b843c749SSergey Zigachev 		return RREG32_SOC15(UVD, 0, mmUVD_RB_RPTR);
1079b843c749SSergey Zigachev 	else
1080b843c749SSergey Zigachev 		return RREG32_SOC15(UVD, 0, mmUVD_RB_RPTR2);
1081b843c749SSergey Zigachev }
1082b843c749SSergey Zigachev 
1083b843c749SSergey Zigachev  /**
1084b843c749SSergey Zigachev  * vcn_v1_0_enc_ring_get_wptr - get enc write pointer
1085b843c749SSergey Zigachev  *
1086b843c749SSergey Zigachev  * @ring: amdgpu_ring pointer
1087b843c749SSergey Zigachev  *
1088b843c749SSergey Zigachev  * Returns the current hardware enc write pointer
1089b843c749SSergey Zigachev  */
vcn_v1_0_enc_ring_get_wptr(struct amdgpu_ring * ring)1090b843c749SSergey Zigachev static uint64_t vcn_v1_0_enc_ring_get_wptr(struct amdgpu_ring *ring)
1091b843c749SSergey Zigachev {
1092b843c749SSergey Zigachev 	struct amdgpu_device *adev = ring->adev;
1093b843c749SSergey Zigachev 
1094b843c749SSergey Zigachev 	if (ring == &adev->vcn.ring_enc[0])
1095b843c749SSergey Zigachev 		return RREG32_SOC15(UVD, 0, mmUVD_RB_WPTR);
1096b843c749SSergey Zigachev 	else
1097b843c749SSergey Zigachev 		return RREG32_SOC15(UVD, 0, mmUVD_RB_WPTR2);
1098b843c749SSergey Zigachev }
1099b843c749SSergey Zigachev 
1100b843c749SSergey Zigachev  /**
1101b843c749SSergey Zigachev  * vcn_v1_0_enc_ring_set_wptr - set enc write pointer
1102b843c749SSergey Zigachev  *
1103b843c749SSergey Zigachev  * @ring: amdgpu_ring pointer
1104b843c749SSergey Zigachev  *
1105b843c749SSergey Zigachev  * Commits the enc write pointer to the hardware
1106b843c749SSergey Zigachev  */
vcn_v1_0_enc_ring_set_wptr(struct amdgpu_ring * ring)1107b843c749SSergey Zigachev static void vcn_v1_0_enc_ring_set_wptr(struct amdgpu_ring *ring)
1108b843c749SSergey Zigachev {
1109b843c749SSergey Zigachev 	struct amdgpu_device *adev = ring->adev;
1110b843c749SSergey Zigachev 
1111b843c749SSergey Zigachev 	if (ring == &adev->vcn.ring_enc[0])
1112b843c749SSergey Zigachev 		WREG32_SOC15(UVD, 0, mmUVD_RB_WPTR,
1113b843c749SSergey Zigachev 			lower_32_bits(ring->wptr));
1114b843c749SSergey Zigachev 	else
1115b843c749SSergey Zigachev 		WREG32_SOC15(UVD, 0, mmUVD_RB_WPTR2,
1116b843c749SSergey Zigachev 			lower_32_bits(ring->wptr));
1117b843c749SSergey Zigachev }
1118b843c749SSergey Zigachev 
/**
 * vcn_v1_0_enc_ring_emit_fence - emit an enc fence & trap command
 *
 * @ring: amdgpu_ring pointer
 * @addr: GPU address the fence sequence number is written to
 * @seq: sequence number to signal
 * @flags: AMDGPU_FENCE_FLAG_* bits
 *
 * Write enc a fence and a trap command to the ring.
 */
static void vcn_v1_0_enc_ring_emit_fence(struct amdgpu_ring *ring, uint64_t addr,
			uint64_t seq, unsigned flags)
{
	/* only 32-bit sequence numbers are handled by this fence command */
	WARN_ON(flags & AMDGPU_FENCE_FLAG_64BIT);

	/* FENCE command: 64-bit address followed by the sequence value */
	amdgpu_ring_write(ring, VCN_ENC_CMD_FENCE);
	amdgpu_ring_write(ring, addr);
	amdgpu_ring_write(ring, upper_32_bits(addr));
	amdgpu_ring_write(ring, seq);
	/* raise an interrupt once the fence has been written */
	amdgpu_ring_write(ring, VCN_ENC_CMD_TRAP);
}
1138b843c749SSergey Zigachev 
/**
 * vcn_v1_0_enc_ring_insert_end - insert an end command on the enc ring
 *
 * @ring: amdgpu_ring pointer
 *
 * Write the enc END command to terminate the current command stream.
 */
static void vcn_v1_0_enc_ring_insert_end(struct amdgpu_ring *ring)
{
	amdgpu_ring_write(ring, VCN_ENC_CMD_END);
}
1143b843c749SSergey Zigachev 
/**
 * vcn_v1_0_enc_ring_emit_ib - enc execute indirect buffer
 *
 * @ring: amdgpu_ring pointer
 * @ib: indirect buffer to execute
 * @vmid: VM id the IB runs under
 * @ctx_switch: unused by the enc ring
 *
 * Write enc ring commands to execute the indirect buffer
 */
static void vcn_v1_0_enc_ring_emit_ib(struct amdgpu_ring *ring,
		struct amdgpu_ib *ib, unsigned int vmid, bool ctx_switch)
{
	/* IB command: vmid, 64-bit IB address, then size in dwords */
	amdgpu_ring_write(ring, VCN_ENC_CMD_IB);
	amdgpu_ring_write(ring, vmid);
	amdgpu_ring_write(ring, lower_32_bits(ib->gpu_addr));
	amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr));
	amdgpu_ring_write(ring, ib->length_dw);
}
1161b843c749SSergey Zigachev 
/**
 * vcn_v1_0_enc_ring_emit_reg_wait - emit a wait-for-register command
 *
 * @ring: amdgpu_ring pointer
 * @reg: dword register offset to poll
 * @val: expected value
 * @mask: bits of the register to compare
 *
 * Emit a REG_WAIT command that stalls the enc ring until
 * (register & @mask) matches @val.
 */
static void vcn_v1_0_enc_ring_emit_reg_wait(struct amdgpu_ring *ring,
					    uint32_t reg, uint32_t val,
					    uint32_t mask)
{
	amdgpu_ring_write(ring, VCN_ENC_CMD_REG_WAIT);
	amdgpu_ring_write(ring, reg << 2);	/* dword offset -> byte offset */
	amdgpu_ring_write(ring, mask);
	amdgpu_ring_write(ring, val);
}
1171b843c749SSergey Zigachev 
/**
 * vcn_v1_0_enc_ring_emit_vm_flush - emit a VM TLB flush on the enc ring
 *
 * @ring: amdgpu_ring pointer
 * @vmid: VM id whose mappings are flushed
 * @pd_addr: page directory address
 *
 * Emits the GMC flush request, then waits until the hub's per-vmid
 * page-table base register reflects the new @pd_addr, which signals
 * that the flush has been applied.
 */
static void vcn_v1_0_enc_ring_emit_vm_flush(struct amdgpu_ring *ring,
					    unsigned int vmid, uint64_t pd_addr)
{
	struct amdgpu_vmhub *hub = &ring->adev->vmhub[ring->funcs->vmhub];

	pd_addr = amdgpu_gmc_emit_flush_gpu_tlb(ring, vmid, pd_addr);

	/* wait for reg writes */
	vcn_v1_0_enc_ring_emit_reg_wait(ring, hub->ctx0_ptb_addr_lo32 + vmid * 2,
					lower_32_bits(pd_addr), 0xffffffff);
}
1183b843c749SSergey Zigachev 
/**
 * vcn_v1_0_enc_ring_emit_wreg - emit a register write on the enc ring
 *
 * @ring: amdgpu_ring pointer
 * @reg: dword register offset to write
 * @val: value to write
 *
 * Emit a REG_WRITE command carrying the byte offset and the value.
 */
static void vcn_v1_0_enc_ring_emit_wreg(struct amdgpu_ring *ring,
					uint32_t reg, uint32_t val)
{
	amdgpu_ring_write(ring, VCN_ENC_CMD_REG_WRITE);
	amdgpu_ring_write(ring,	reg << 2);	/* dword offset -> byte offset */
	amdgpu_ring_write(ring, val);
}
1191b843c749SSergey Zigachev 
1192b843c749SSergey Zigachev 
/**
 * vcn_v1_0_jpeg_ring_get_rptr - get read pointer
 *
 * @ring: amdgpu_ring pointer
 *
 * Returns the current hardware read pointer
 */
static uint64_t vcn_v1_0_jpeg_ring_get_rptr(struct amdgpu_ring *ring)
{
	/* adev is used implicitly by the RREG32_SOC15() macro */
	struct amdgpu_device *adev = ring->adev;

	return RREG32_SOC15(UVD, 0, mmUVD_JRBC_RB_RPTR);
}
1206b843c749SSergey Zigachev 
/**
 * vcn_v1_0_jpeg_ring_get_wptr - get write pointer
 *
 * @ring: amdgpu_ring pointer
 *
 * Returns the current hardware write pointer
 */
static uint64_t vcn_v1_0_jpeg_ring_get_wptr(struct amdgpu_ring *ring)
{
	/* adev is used implicitly by the RREG32_SOC15() macro */
	struct amdgpu_device *adev = ring->adev;

	return RREG32_SOC15(UVD, 0, mmUVD_JRBC_RB_WPTR);
}
1220b843c749SSergey Zigachev 
/**
 * vcn_v1_0_jpeg_ring_set_wptr - set write pointer
 *
 * @ring: amdgpu_ring pointer
 *
 * Commits the write pointer to the hardware
 */
static void vcn_v1_0_jpeg_ring_set_wptr(struct amdgpu_ring *ring)
{
	/* adev is used implicitly by the WREG32_SOC15() macro */
	struct amdgpu_device *adev = ring->adev;

	WREG32_SOC15(UVD, 0, mmUVD_JRBC_RB_WPTR, lower_32_bits(ring->wptr));
}
1234b843c749SSergey Zigachev 
/**
 * vcn_v1_0_jpeg_ring_insert_start - insert a start command
 *
 * @ring: amdgpu_ring pointer
 *
 * Write a start command to the ring.
 */
static void vcn_v1_0_jpeg_ring_insert_start(struct amdgpu_ring *ring)
{
	/* adev is used implicitly by the SOC15_REG_OFFSET() macro */
	struct amdgpu_device *adev = ring->adev;

	/* select the target register via the external-register base window;
	 * 0x68e04 is presumably the byte offset of the start/stop control
	 * register — TODO confirm against the JPEG programming docs */
	amdgpu_ring_write(ring,
		PACKETJ(SOC15_REG_OFFSET(UVD, 0, mmUVD_JRBC_EXTERNAL_REG_BASE), 0, 0, PACKETJ_TYPE0));
	amdgpu_ring_write(ring, 0x68e04);

	/* write the "start" magic; insert_end writes 0x00010000 instead */
	amdgpu_ring_write(ring, PACKETJ(0, 0, 0, PACKETJ_TYPE0));
	amdgpu_ring_write(ring, 0x80010000);
}
1253b843c749SSergey Zigachev 
/**
 * vcn_v1_0_jpeg_ring_insert_end - insert a end command
 *
 * @ring: amdgpu_ring pointer
 *
 * Write a end command to the ring.
 */
static void vcn_v1_0_jpeg_ring_insert_end(struct amdgpu_ring *ring)
{
	/* adev is used implicitly by the SOC15_REG_OFFSET() macro */
	struct amdgpu_device *adev = ring->adev;

	/* same register selection as insert_start (offset 0x68e04) */
	amdgpu_ring_write(ring,
		PACKETJ(SOC15_REG_OFFSET(UVD, 0, mmUVD_JRBC_EXTERNAL_REG_BASE), 0, 0, PACKETJ_TYPE0));
	amdgpu_ring_write(ring, 0x68e04);

	/* write the "end" magic; insert_start writes 0x80010000 instead */
	amdgpu_ring_write(ring, PACKETJ(0, 0, 0, PACKETJ_TYPE0));
	amdgpu_ring_write(ring, 0x00010000);
}
1272b843c749SSergey Zigachev 
/**
 * vcn_v1_0_jpeg_ring_emit_fence - emit an fence & trap command
 *
 * @ring: amdgpu_ring pointer
 * @addr: GPU address the fence sequence number is written to
 * @seq: sequence number to signal
 * @flags: AMDGPU_FENCE_FLAG_* bits
 *
 * Write a fence and a trap command to the ring.  The sequence below is
 * order-sensitive: it programs the fence value and target address, issues
 * the GPCOM command, then polls the memory location until the fence value
 * has landed before raising the trap.
 */
static void vcn_v1_0_jpeg_ring_emit_fence(struct amdgpu_ring *ring, uint64_t addr, uint64_t seq,
				     unsigned flags)
{
	/* adev is used implicitly by the SOC15_REG_OFFSET() macro */
	struct amdgpu_device *adev = ring->adev;

	/* only 32-bit sequence numbers are handled by this fence command */
	WARN_ON(flags & AMDGPU_FENCE_FLAG_64BIT);

	/* load the sequence number into both GPCOM data registers */
	amdgpu_ring_write(ring,
		PACKETJ(SOC15_REG_OFFSET(UVD, 0, mmUVD_JPEG_GPCOM_DATA0), 0, 0, PACKETJ_TYPE0));
	amdgpu_ring_write(ring, seq);

	amdgpu_ring_write(ring,
		PACKETJ(SOC15_REG_OFFSET(UVD, 0, mmUVD_JPEG_GPCOM_DATA1), 0, 0, PACKETJ_TYPE0));
	amdgpu_ring_write(ring, seq);

	/* 64-bit destination address for the fence memory write */
	amdgpu_ring_write(ring,
		PACKETJ(SOC15_REG_OFFSET(UVD, 0, mmUVD_LMI_JRBC_RB_MEM_WR_64BIT_BAR_LOW), 0, 0, PACKETJ_TYPE0));
	amdgpu_ring_write(ring, lower_32_bits(addr));

	amdgpu_ring_write(ring,
		PACKETJ(SOC15_REG_OFFSET(UVD, 0, mmUVD_LMI_JRBC_RB_MEM_WR_64BIT_BAR_HIGH), 0, 0, PACKETJ_TYPE0));
	amdgpu_ring_write(ring, upper_32_bits(addr));

	/* issue the GPCOM command (0x8 — presumably the fence opcode) */
	amdgpu_ring_write(ring,
		PACKETJ(SOC15_REG_OFFSET(UVD, 0, mmUVD_JPEG_GPCOM_CMD), 0, 0, PACKETJ_TYPE0));
	amdgpu_ring_write(ring, 0x8);

	/* conditional TYPE4 packet on GPCOM_CMD with check0 */
	amdgpu_ring_write(ring,
		PACKETJ(SOC15_REG_OFFSET(UVD, 0, mmUVD_JPEG_GPCOM_CMD), 0, PACKETJ_CONDITION_CHECK0, PACKETJ_TYPE4));
	amdgpu_ring_write(ring, 0);

	/* poll timeout/interval; 0x01400200 matches the other reg-wait paths */
	amdgpu_ring_write(ring,
		PACKETJ(SOC15_REG_OFFSET(UVD, 0, mmUVD_JRBC_RB_COND_RD_TIMER), 0, 0, PACKETJ_TYPE0));
	amdgpu_ring_write(ring, 0x01400200);

	/* reference value the poll compares against: the fence seq */
	amdgpu_ring_write(ring,
		PACKETJ(SOC15_REG_OFFSET(UVD, 0, mmUVD_JRBC_RB_REF_DATA), 0, 0, PACKETJ_TYPE0));
	amdgpu_ring_write(ring, seq);

	/* 64-bit source address to read back: the fence location itself */
	amdgpu_ring_write(ring,
		PACKETJ(SOC15_REG_OFFSET(UVD, 0, mmUVD_LMI_JRBC_RB_MEM_RD_64BIT_BAR_LOW), 0, 0, PACKETJ_TYPE0));
	amdgpu_ring_write(ring, lower_32_bits(addr));

	amdgpu_ring_write(ring,
		PACKETJ(SOC15_REG_OFFSET(UVD, 0, mmUVD_LMI_JRBC_RB_MEM_RD_64BIT_BAR_HIGH), 0, 0, PACKETJ_TYPE0));
	amdgpu_ring_write(ring, upper_32_bits(addr));

	/* conditional TYPE2 packet: wait on the memory read with full mask */
	amdgpu_ring_write(ring,
		PACKETJ(0, 0, PACKETJ_CONDITION_CHECK3, PACKETJ_TYPE2));
	amdgpu_ring_write(ring, 0xffffffff);

	/* final register write through the external-reg window; 0x3fbc/0x1
	 * presumably triggers the trap interrupt — TODO confirm */
	amdgpu_ring_write(ring,
		PACKETJ(SOC15_REG_OFFSET(UVD, 0, mmUVD_JRBC_EXTERNAL_REG_BASE), 0, 0, PACKETJ_TYPE0));
	amdgpu_ring_write(ring, 0x3fbc);

	amdgpu_ring_write(ring,
		PACKETJ(0, 0, 0, PACKETJ_TYPE0));
	amdgpu_ring_write(ring, 0x1);
}
1340b843c749SSergey Zigachev 
/**
 * vcn_v1_0_jpeg_ring_emit_ib - execute indirect buffer
 *
 * @ring: amdgpu_ring pointer
 * @ib: indirect buffer to execute
 * @vmid: VM id the IB runs under
 * @ctx_switch: unused by the jpeg ring
 *
 * Write ring commands to execute the indirect buffer.  Programs the IB
 * address/size and VM ids, kicks off execution, then polls JRBC_STATUS
 * until the engine reports completion.
 */
static void vcn_v1_0_jpeg_ring_emit_ib(struct amdgpu_ring *ring,
				  struct amdgpu_ib *ib,
				  unsigned vmid, bool ctx_switch)
{
	/* adev is used implicitly by the SOC15_REG_OFFSET() macro */
	struct amdgpu_device *adev = ring->adev;

	/* vmid is replicated into the low and high nibbles for both
	 * the JRBC IB vmid and the JPEG engine vmid */
	amdgpu_ring_write(ring,
		PACKETJ(SOC15_REG_OFFSET(UVD, 0, mmUVD_LMI_JRBC_IB_VMID), 0, 0, PACKETJ_TYPE0));
	amdgpu_ring_write(ring, (vmid | (vmid << 4)));

	amdgpu_ring_write(ring,
		PACKETJ(SOC15_REG_OFFSET(UVD, 0, mmUVD_LMI_JPEG_VMID), 0, 0, PACKETJ_TYPE0));
	amdgpu_ring_write(ring, (vmid | (vmid << 4)));

	/* 64-bit IB base address */
	amdgpu_ring_write(ring,
		PACKETJ(SOC15_REG_OFFSET(UVD, 0, mmUVD_LMI_JRBC_IB_64BIT_BAR_LOW), 0, 0, PACKETJ_TYPE0));
	amdgpu_ring_write(ring, lower_32_bits(ib->gpu_addr));

	amdgpu_ring_write(ring,
		PACKETJ(SOC15_REG_OFFSET(UVD, 0, mmUVD_LMI_JRBC_IB_64BIT_BAR_HIGH), 0, 0, PACKETJ_TYPE0));
	amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr));

	/* IB size in dwords */
	amdgpu_ring_write(ring,
		PACKETJ(SOC15_REG_OFFSET(UVD, 0, mmUVD_JRBC_IB_SIZE), 0, 0, PACKETJ_TYPE0));
	amdgpu_ring_write(ring, ib->length_dw);

	/* point the read-back BAR at the ring buffer itself */
	amdgpu_ring_write(ring,
		PACKETJ(SOC15_REG_OFFSET(UVD, 0, mmUVD_LMI_JRBC_RB_MEM_RD_64BIT_BAR_LOW), 0, 0, PACKETJ_TYPE0));
	amdgpu_ring_write(ring, lower_32_bits(ring->gpu_addr));

	amdgpu_ring_write(ring,
		PACKETJ(SOC15_REG_OFFSET(UVD, 0, mmUVD_LMI_JRBC_RB_MEM_RD_64BIT_BAR_HIGH), 0, 0, PACKETJ_TYPE0));
	amdgpu_ring_write(ring, upper_32_bits(ring->gpu_addr));

	/* TYPE2 packet with check0 — kicks off/fences the IB execution */
	amdgpu_ring_write(ring,
		PACKETJ(0, 0, PACKETJ_CONDITION_CHECK0, PACKETJ_TYPE2));
	amdgpu_ring_write(ring, 0);

	/* poll setup: timer value matches the other reg-wait paths */
	amdgpu_ring_write(ring,
		PACKETJ(SOC15_REG_OFFSET(UVD, 0, mmUVD_JRBC_RB_COND_RD_TIMER), 0, 0, PACKETJ_TYPE0));
	amdgpu_ring_write(ring, 0x01400200);

	amdgpu_ring_write(ring,
		PACKETJ(SOC15_REG_OFFSET(UVD, 0, mmUVD_JRBC_RB_REF_DATA), 0, 0, PACKETJ_TYPE0));
	amdgpu_ring_write(ring, 0x2);

	/* wait until JRBC_STATUS (masked with 0x2) matches the ref data */
	amdgpu_ring_write(ring,
		PACKETJ(SOC15_REG_OFFSET(UVD, 0, mmUVD_JRBC_STATUS), 0, PACKETJ_CONDITION_CHECK3, PACKETJ_TYPE3));
	amdgpu_ring_write(ring, 0x2);
}
1399b843c749SSergey Zigachev 
/**
 * vcn_v1_0_jpeg_ring_emit_reg_wait - emit a wait-for-register command
 *
 * @ring: amdgpu_ring pointer
 * @reg: dword register offset to poll
 * @val: expected value
 * @mask: bits of the register to compare
 *
 * Programs the poll timer and reference value, then emits a TYPE3
 * (conditional) packet that waits until (register & @mask) == @val.
 */
static void vcn_v1_0_jpeg_ring_emit_reg_wait(struct amdgpu_ring *ring,
					    uint32_t reg, uint32_t val,
					    uint32_t mask)
{
	/* adev is used implicitly by the SOC15_REG_OFFSET() macro */
	struct amdgpu_device *adev = ring->adev;
	uint32_t reg_offset = (reg << 2);	/* dword offset -> byte offset */

	amdgpu_ring_write(ring,
		PACKETJ(SOC15_REG_OFFSET(UVD, 0, mmUVD_JRBC_RB_COND_RD_TIMER), 0, 0, PACKETJ_TYPE0));
	amdgpu_ring_write(ring, 0x01400200);

	amdgpu_ring_write(ring,
		PACKETJ(SOC15_REG_OFFSET(UVD, 0, mmUVD_JRBC_RB_REF_DATA), 0, 0, PACKETJ_TYPE0));
	amdgpu_ring_write(ring, val);

	amdgpu_ring_write(ring,
		PACKETJ(SOC15_REG_OFFSET(UVD, 0, mmUVD_JRBC_EXTERNAL_REG_BASE), 0, 0, PACKETJ_TYPE0));
	/* offsets in these two windows are encoded in the packet header;
	 * all other offsets go through the external-reg-base indirection —
	 * TODO confirm the exact window semantics against hw docs */
	if (((reg_offset >= 0x1f800) && (reg_offset <= 0x21fff)) ||
		((reg_offset >= 0x1e000) && (reg_offset <= 0x1e1ff))) {
		amdgpu_ring_write(ring, 0);
		amdgpu_ring_write(ring,
			PACKETJ((reg_offset >> 2), 0, 0, PACKETJ_TYPE3));
	} else {
		amdgpu_ring_write(ring, reg_offset);
		amdgpu_ring_write(ring,
			PACKETJ(0, 0, 0, PACKETJ_TYPE3));
	}
	amdgpu_ring_write(ring, mask);
}
1429b843c749SSergey Zigachev 
/**
 * vcn_v1_0_jpeg_ring_emit_vm_flush - emit a VM TLB flush on the jpeg ring
 *
 * @ring: amdgpu_ring pointer
 * @vmid: VM id whose mappings are flushed
 * @pd_addr: page directory address
 *
 * Emits the GMC flush request, then waits until the hub's per-vmid
 * page-table base register reflects the new @pd_addr.
 */
static void vcn_v1_0_jpeg_ring_emit_vm_flush(struct amdgpu_ring *ring,
		unsigned vmid, uint64_t pd_addr)
{
	struct amdgpu_vmhub *hub = &ring->adev->vmhub[ring->funcs->vmhub];
	uint32_t data0, data1, mask;

	pd_addr = amdgpu_gmc_emit_flush_gpu_tlb(ring, vmid, pd_addr);

	/* wait for register write */
	data0 = hub->ctx0_ptb_addr_lo32 + vmid * 2;
	data1 = lower_32_bits(pd_addr);
	mask = 0xffffffff;
	vcn_v1_0_jpeg_ring_emit_reg_wait(ring, data0, data1, mask);
}
1444b843c749SSergey Zigachev 
/**
 * vcn_v1_0_jpeg_ring_emit_wreg - emit a register write on the jpeg ring
 *
 * @ring: amdgpu_ring pointer
 * @reg: dword register offset to write
 * @val: value to write
 *
 * Emits a TYPE0 write packet; the encoding differs depending on whether
 * the byte offset falls inside the directly addressable windows.
 */
static void vcn_v1_0_jpeg_ring_emit_wreg(struct amdgpu_ring *ring,
					uint32_t reg, uint32_t val)
{
	/* adev is used implicitly by the SOC15_REG_OFFSET() macro */
	struct amdgpu_device *adev = ring->adev;
	uint32_t reg_offset = (reg << 2);	/* dword offset -> byte offset */

	amdgpu_ring_write(ring,
		PACKETJ(SOC15_REG_OFFSET(UVD, 0, mmUVD_JRBC_EXTERNAL_REG_BASE), 0, 0, PACKETJ_TYPE0));
	/* same window split as vcn_v1_0_jpeg_ring_emit_reg_wait():
	 * in-window offsets ride in the packet header, others are written
	 * as data via the external-reg-base indirection */
	if (((reg_offset >= 0x1f800) && (reg_offset <= 0x21fff)) ||
			((reg_offset >= 0x1e000) && (reg_offset <= 0x1e1ff))) {
		amdgpu_ring_write(ring, 0);
		amdgpu_ring_write(ring,
			PACKETJ((reg_offset >> 2), 0, 0, PACKETJ_TYPE0));
	} else {
		amdgpu_ring_write(ring, reg_offset);
		amdgpu_ring_write(ring,
			PACKETJ(0, 0, 0, PACKETJ_TYPE0));
	}
	amdgpu_ring_write(ring, val);
}
1465b843c749SSergey Zigachev 
/**
 * vcn_v1_0_jpeg_ring_nop - pad the jpeg ring with no-op packets
 *
 * @ring: amdgpu_ring pointer
 * @count: number of dwords to fill (must be even)
 *
 * NOPs are emitted as (PACKETJ TYPE6, 0) dword pairs, so both the
 * current write pointer and @count must be even.
 */
static void vcn_v1_0_jpeg_ring_nop(struct amdgpu_ring *ring, uint32_t count)
{
	uint32_t pairs;

	WARN_ON(ring->wptr % 2 || count % 2);

	for (pairs = count / 2; pairs != 0; pairs--) {
		amdgpu_ring_write(ring, PACKETJ(0, 0, 0, PACKETJ_TYPE6));
		amdgpu_ring_write(ring, 0);
	}
}
1477b843c749SSergey Zigachev 
/*
 * vcn_v1_0_jpeg_ring_patch_wreg - place a register-write command directly
 * into the ring buffer at dword index *ptr (advancing *ptr), bypassing
 * amdgpu_ring_write().  Used by vcn_v1_0_jpeg_ring_set_patch_ring() to
 * build the fixed preamble in place.  Same window-split encoding as
 * vcn_v1_0_jpeg_ring_emit_wreg().
 */
static void vcn_v1_0_jpeg_ring_patch_wreg(struct amdgpu_ring *ring, uint32_t *ptr, uint32_t reg_offset, uint32_t val)
{
	/* adev is used implicitly by the SOC15_REG_OFFSET() macro */
	struct amdgpu_device *adev = ring->adev;
	ring->ring[(*ptr)++] = PACKETJ(SOC15_REG_OFFSET(UVD, 0, mmUVD_JRBC_EXTERNAL_REG_BASE), 0, 0, PACKETJ_TYPE0);
	if (((reg_offset >= 0x1f800) && (reg_offset <= 0x21fff)) ||
		((reg_offset >= 0x1e000) && (reg_offset <= 0x1e1ff))) {
		/* in-window offset rides in the packet header */
		ring->ring[(*ptr)++] = 0;
		ring->ring[(*ptr)++] = PACKETJ((reg_offset >> 2), 0, 0, PACKETJ_TYPE0);
	} else {
		/* out-of-window offset goes through external-reg-base */
		ring->ring[(*ptr)++] = reg_offset;
		ring->ring[(*ptr)++] = PACKETJ(0, 0, 0, PACKETJ_TYPE0);
	}
	ring->ring[(*ptr)++] = val;
}
1492b843c749SSergey Zigachev 
/*
 * vcn_v1_0_jpeg_ring_set_patch_ring - write a fixed 23-command preamble
 * directly into ring->ring[] starting at dword index @ptr.
 *
 * The sequence re-points the JRBC read-back BAR at the ring, toggles
 * NO_FETCH/RPTR-write ability in JRBC_RB_CNTL, performs a conditional
 * read, resets the read pointer and restores JRBC_RB_CNTL.  The exact
 * CNTL bit meanings (0x13/0x12/0x1) follow the inline step comments;
 * they are presumably NO_FETCH + RPTR-write enables — confirm against
 * the JRBC register docs before changing.
 */
static void vcn_v1_0_jpeg_ring_set_patch_ring(struct amdgpu_ring *ring, uint32_t ptr)
{
	/* adev is used implicitly by the SOC15_REG_OFFSET() macro */
	struct amdgpu_device *adev = ring->adev;

	uint32_t reg, reg_offset, val, mask, i;

	// 1st: program mmUVD_LMI_JRBC_RB_MEM_RD_64BIT_BAR_LOW
	reg = SOC15_REG_OFFSET(UVD, 0, mmUVD_LMI_JRBC_RB_MEM_RD_64BIT_BAR_LOW);
	reg_offset = (reg << 2);
	val = lower_32_bits(ring->gpu_addr);
	vcn_v1_0_jpeg_ring_patch_wreg(ring, &ptr, reg_offset, val);

	// 2nd: program mmUVD_LMI_JRBC_RB_MEM_RD_64BIT_BAR_HIGH
	reg = SOC15_REG_OFFSET(UVD, 0, mmUVD_LMI_JRBC_RB_MEM_RD_64BIT_BAR_HIGH);
	reg_offset = (reg << 2);
	val = upper_32_bits(ring->gpu_addr);
	vcn_v1_0_jpeg_ring_patch_wreg(ring, &ptr, reg_offset, val);

	// 3rd to 5th: issue MEM_READ commands
	for (i = 0; i <= 2; i++) {
		ring->ring[ptr++] = PACKETJ(0, 0, 0, PACKETJ_TYPE2);
		ring->ring[ptr++] = 0;
	}

	// 6th: program mmUVD_JRBC_RB_CNTL register to enable NO_FETCH and RPTR write ability
	reg = SOC15_REG_OFFSET(UVD, 0, mmUVD_JRBC_RB_CNTL);
	reg_offset = (reg << 2);
	val = 0x13;
	vcn_v1_0_jpeg_ring_patch_wreg(ring, &ptr, reg_offset, val);

	// 7th: program mmUVD_JRBC_RB_REF_DATA
	reg = SOC15_REG_OFFSET(UVD, 0, mmUVD_JRBC_RB_REF_DATA);
	reg_offset = (reg << 2);
	val = 0x1;
	vcn_v1_0_jpeg_ring_patch_wreg(ring, &ptr, reg_offset, val);

	// 8th: issue conditional register read mmUVD_JRBC_RB_CNTL
	reg = SOC15_REG_OFFSET(UVD, 0, mmUVD_JRBC_RB_CNTL);
	reg_offset = (reg << 2);
	val = 0x1;
	mask = 0x1;

	/* open-coded conditional read: same shape as
	 * vcn_v1_0_jpeg_ring_emit_reg_wait() but written straight into
	 * ring->ring[] (TYPE3 packet, window-split encoding) */
	ring->ring[ptr++] = PACKETJ(SOC15_REG_OFFSET(UVD, 0, mmUVD_JRBC_RB_COND_RD_TIMER), 0, 0, PACKETJ_TYPE0);
	ring->ring[ptr++] = 0x01400200;
	ring->ring[ptr++] = PACKETJ(SOC15_REG_OFFSET(UVD, 0, mmUVD_JRBC_RB_REF_DATA), 0, 0, PACKETJ_TYPE0);
	ring->ring[ptr++] = val;
	ring->ring[ptr++] = PACKETJ(SOC15_REG_OFFSET(UVD, 0, mmUVD_JRBC_EXTERNAL_REG_BASE), 0, 0, PACKETJ_TYPE0);
	if (((reg_offset >= 0x1f800) && (reg_offset <= 0x21fff)) ||
		((reg_offset >= 0x1e000) && (reg_offset <= 0x1e1ff))) {
		ring->ring[ptr++] = 0;
		ring->ring[ptr++] = PACKETJ((reg_offset >> 2), 0, 0, PACKETJ_TYPE3);
	} else {
		ring->ring[ptr++] = reg_offset;
		ring->ring[ptr++] = PACKETJ(0, 0, 0, PACKETJ_TYPE3);
	}
	ring->ring[ptr++] = mask;

	//9th to 21st: insert no-op
	for (i = 0; i <= 12; i++) {
		ring->ring[ptr++] = PACKETJ(0, 0, 0, PACKETJ_TYPE6);
		ring->ring[ptr++] = 0;
	}

	//22nd: reset mmUVD_JRBC_RB_RPTR
	reg = SOC15_REG_OFFSET(UVD, 0, mmUVD_JRBC_RB_RPTR);
	reg_offset = (reg << 2);
	val = 0;
	vcn_v1_0_jpeg_ring_patch_wreg(ring, &ptr, reg_offset, val);

	//23rd: program mmUVD_JRBC_RB_CNTL to disable no_fetch
	reg = SOC15_REG_OFFSET(UVD, 0, mmUVD_JRBC_RB_CNTL);
	reg_offset = (reg << 2);
	val = 0x12;
	vcn_v1_0_jpeg_ring_patch_wreg(ring, &ptr, reg_offset, val);
}
1568b843c749SSergey Zigachev 
/*
 * vcn_v1_0_set_interrupt_state - interrupt source enable/disable hook
 *
 * Intentionally a no-op that always reports success: presumably no
 * per-source enable/disable programming is needed for VCN 1.0, and the
 * function exists only to satisfy the amdgpu_irq_src_funcs interface.
 */
static int vcn_v1_0_set_interrupt_state(struct amdgpu_device *adev,
					struct amdgpu_irq_src *source,
					unsigned type,
					enum amdgpu_interrupt_state state)
{
	return 0;
}
1576b843c749SSergey Zigachev 
/*
 * vcn_v1_0_process_interrupt - dispatch a VCN trap interrupt
 *
 * Routes the IV-ring entry to the fence processing of the ring that
 * raised it, keyed by the hardware source id.  Unknown ids are logged
 * but still return success.
 */
static int vcn_v1_0_process_interrupt(struct amdgpu_device *adev,
				      struct amdgpu_irq_src *source,
				      struct amdgpu_iv_entry *entry)
{
	DRM_DEBUG("IH: VCN TRAP\n");

	switch (entry->src_id) {
	case 124:	/* decode ring */
		amdgpu_fence_process(&adev->vcn.ring_dec);
		break;
	case 119:	/* encode ring 0 */
		amdgpu_fence_process(&adev->vcn.ring_enc[0]);
		break;
	case 120:	/* encode ring 1 */
		amdgpu_fence_process(&adev->vcn.ring_enc[1]);
		break;
	case 126:	/* jpeg ring */
		amdgpu_fence_process(&adev->vcn.ring_jpeg);
		break;
	default:
		DRM_ERROR("Unhandled interrupt: %d %d\n",
			  entry->src_id, entry->src_data[0]);
		break;
	}

	return 0;
}
1604b843c749SSergey Zigachev 
/**
 * vcn_v1_0_dec_ring_insert_nop - pad the decode ring with no-op packets
 *
 * @ring: amdgpu ring pointer
 * @count: number of dwords of padding to insert (must be even)
 *
 * Emits @count dwords of padding as register-write/value pairs targeting
 * mmUVD_NO_OP.  Each pair consumes two dwords, so both the write pointer
 * and @count are expected to be even; a violation triggers WARN_ON.
 */
static void vcn_v1_0_dec_ring_insert_nop(struct amdgpu_ring *ring, uint32_t count)
{
	struct amdgpu_device *adev = ring->adev;
	uint32_t pairs = count / 2;
	uint32_t n;

	WARN_ON(ring->wptr % 2 || count % 2);

	for (n = 0; n < pairs; ++n) {
		amdgpu_ring_write(ring,
				  PACKET0(SOC15_REG_OFFSET(UVD, 0, mmUVD_NO_OP), 0));
		amdgpu_ring_write(ring, 0);
	}
}
1617b843c749SSergey Zigachev 
vcn_v1_0_set_powergating_state(void * handle,enum amd_powergating_state state)1618b843c749SSergey Zigachev static int vcn_v1_0_set_powergating_state(void *handle,
1619b843c749SSergey Zigachev 					  enum amd_powergating_state state)
1620b843c749SSergey Zigachev {
1621b843c749SSergey Zigachev 	/* This doesn't actually powergate the VCN block.
1622b843c749SSergey Zigachev 	 * That's done in the dpm code via the SMC.  This
1623b843c749SSergey Zigachev 	 * just re-inits the block as necessary.  The actual
1624b843c749SSergey Zigachev 	 * gating still happens in the dpm code.  We should
1625b843c749SSergey Zigachev 	 * revisit this when there is a cleaner line between
1626b843c749SSergey Zigachev 	 * the smc and the hw blocks
1627b843c749SSergey Zigachev 	 */
1628b843c749SSergey Zigachev 	struct amdgpu_device *adev = (struct amdgpu_device *)handle;
1629b843c749SSergey Zigachev 
1630b843c749SSergey Zigachev 	if (state == AMD_PG_STATE_GATE)
1631b843c749SSergey Zigachev 		return vcn_v1_0_stop(adev);
1632b843c749SSergey Zigachev 	else
1633b843c749SSergey Zigachev 		return vcn_v1_0_start(adev);
1634b843c749SSergey Zigachev }
1635b843c749SSergey Zigachev 
1636b843c749SSergey Zigachev static const struct amd_ip_funcs vcn_v1_0_ip_funcs = {
1637b843c749SSergey Zigachev 	.name = "vcn_v1_0",
1638b843c749SSergey Zigachev 	.early_init = vcn_v1_0_early_init,
1639b843c749SSergey Zigachev 	.late_init = NULL,
1640b843c749SSergey Zigachev 	.sw_init = vcn_v1_0_sw_init,
1641b843c749SSergey Zigachev 	.sw_fini = vcn_v1_0_sw_fini,
1642b843c749SSergey Zigachev 	.hw_init = vcn_v1_0_hw_init,
1643b843c749SSergey Zigachev 	.hw_fini = vcn_v1_0_hw_fini,
1644b843c749SSergey Zigachev 	.suspend = vcn_v1_0_suspend,
1645b843c749SSergey Zigachev 	.resume = vcn_v1_0_resume,
1646b843c749SSergey Zigachev 	.is_idle = vcn_v1_0_is_idle,
1647b843c749SSergey Zigachev 	.wait_for_idle = vcn_v1_0_wait_for_idle,
1648b843c749SSergey Zigachev 	.check_soft_reset = NULL /* vcn_v1_0_check_soft_reset */,
1649b843c749SSergey Zigachev 	.pre_soft_reset = NULL /* vcn_v1_0_pre_soft_reset */,
1650b843c749SSergey Zigachev 	.soft_reset = NULL /* vcn_v1_0_soft_reset */,
1651b843c749SSergey Zigachev 	.post_soft_reset = NULL /* vcn_v1_0_post_soft_reset */,
1652b843c749SSergey Zigachev 	.set_clockgating_state = vcn_v1_0_set_clockgating_state,
1653b843c749SSergey Zigachev 	.set_powergating_state = vcn_v1_0_set_powergating_state,
1654b843c749SSergey Zigachev };
1655b843c749SSergey Zigachev 
1656b843c749SSergey Zigachev static const struct amdgpu_ring_funcs vcn_v1_0_dec_ring_vm_funcs = {
1657b843c749SSergey Zigachev 	.type = AMDGPU_RING_TYPE_VCN_DEC,
1658b843c749SSergey Zigachev 	.align_mask = 0xf,
1659b843c749SSergey Zigachev 	.support_64bit_ptrs = false,
1660b843c749SSergey Zigachev 	.vmhub = AMDGPU_MMHUB,
1661b843c749SSergey Zigachev 	.get_rptr = vcn_v1_0_dec_ring_get_rptr,
1662b843c749SSergey Zigachev 	.get_wptr = vcn_v1_0_dec_ring_get_wptr,
1663b843c749SSergey Zigachev 	.set_wptr = vcn_v1_0_dec_ring_set_wptr,
1664b843c749SSergey Zigachev 	.emit_frame_size =
1665b843c749SSergey Zigachev 		6 + 6 + /* hdp invalidate / flush */
1666b843c749SSergey Zigachev 		SOC15_FLUSH_GPU_TLB_NUM_WREG * 6 +
1667b843c749SSergey Zigachev 		SOC15_FLUSH_GPU_TLB_NUM_REG_WAIT * 8 +
1668b843c749SSergey Zigachev 		8 + /* vcn_v1_0_dec_ring_emit_vm_flush */
1669b843c749SSergey Zigachev 		14 + 14 + /* vcn_v1_0_dec_ring_emit_fence x2 vm fence */
1670b843c749SSergey Zigachev 		6,
1671b843c749SSergey Zigachev 	.emit_ib_size = 8, /* vcn_v1_0_dec_ring_emit_ib */
1672b843c749SSergey Zigachev 	.emit_ib = vcn_v1_0_dec_ring_emit_ib,
1673b843c749SSergey Zigachev 	.emit_fence = vcn_v1_0_dec_ring_emit_fence,
1674b843c749SSergey Zigachev 	.emit_vm_flush = vcn_v1_0_dec_ring_emit_vm_flush,
1675b843c749SSergey Zigachev 	.test_ring = amdgpu_vcn_dec_ring_test_ring,
1676b843c749SSergey Zigachev 	.test_ib = amdgpu_vcn_dec_ring_test_ib,
1677b843c749SSergey Zigachev 	.insert_nop = vcn_v1_0_dec_ring_insert_nop,
1678b843c749SSergey Zigachev 	.insert_start = vcn_v1_0_dec_ring_insert_start,
1679b843c749SSergey Zigachev 	.insert_end = vcn_v1_0_dec_ring_insert_end,
1680b843c749SSergey Zigachev 	.pad_ib = amdgpu_ring_generic_pad_ib,
1681b843c749SSergey Zigachev 	.begin_use = amdgpu_vcn_ring_begin_use,
1682b843c749SSergey Zigachev 	.end_use = amdgpu_vcn_ring_end_use,
1683b843c749SSergey Zigachev 	.emit_wreg = vcn_v1_0_dec_ring_emit_wreg,
1684b843c749SSergey Zigachev 	.emit_reg_wait = vcn_v1_0_dec_ring_emit_reg_wait,
1685b843c749SSergey Zigachev 	.emit_reg_write_reg_wait = amdgpu_ring_emit_reg_write_reg_wait_helper,
1686b843c749SSergey Zigachev };
1687b843c749SSergey Zigachev 
1688b843c749SSergey Zigachev static const struct amdgpu_ring_funcs vcn_v1_0_enc_ring_vm_funcs = {
1689b843c749SSergey Zigachev 	.type = AMDGPU_RING_TYPE_VCN_ENC,
1690b843c749SSergey Zigachev 	.align_mask = 0x3f,
1691b843c749SSergey Zigachev 	.nop = VCN_ENC_CMD_NO_OP,
1692b843c749SSergey Zigachev 	.support_64bit_ptrs = false,
1693b843c749SSergey Zigachev 	.vmhub = AMDGPU_MMHUB,
1694b843c749SSergey Zigachev 	.get_rptr = vcn_v1_0_enc_ring_get_rptr,
1695b843c749SSergey Zigachev 	.get_wptr = vcn_v1_0_enc_ring_get_wptr,
1696b843c749SSergey Zigachev 	.set_wptr = vcn_v1_0_enc_ring_set_wptr,
1697b843c749SSergey Zigachev 	.emit_frame_size =
1698b843c749SSergey Zigachev 		SOC15_FLUSH_GPU_TLB_NUM_WREG * 3 +
1699b843c749SSergey Zigachev 		SOC15_FLUSH_GPU_TLB_NUM_REG_WAIT * 4 +
1700b843c749SSergey Zigachev 		4 + /* vcn_v1_0_enc_ring_emit_vm_flush */
1701b843c749SSergey Zigachev 		5 + 5 + /* vcn_v1_0_enc_ring_emit_fence x2 vm fence */
1702b843c749SSergey Zigachev 		1, /* vcn_v1_0_enc_ring_insert_end */
1703b843c749SSergey Zigachev 	.emit_ib_size = 5, /* vcn_v1_0_enc_ring_emit_ib */
1704b843c749SSergey Zigachev 	.emit_ib = vcn_v1_0_enc_ring_emit_ib,
1705b843c749SSergey Zigachev 	.emit_fence = vcn_v1_0_enc_ring_emit_fence,
1706b843c749SSergey Zigachev 	.emit_vm_flush = vcn_v1_0_enc_ring_emit_vm_flush,
1707b843c749SSergey Zigachev 	.test_ring = amdgpu_vcn_enc_ring_test_ring,
1708b843c749SSergey Zigachev 	.test_ib = amdgpu_vcn_enc_ring_test_ib,
1709b843c749SSergey Zigachev 	.insert_nop = amdgpu_ring_insert_nop,
1710b843c749SSergey Zigachev 	.insert_end = vcn_v1_0_enc_ring_insert_end,
1711b843c749SSergey Zigachev 	.pad_ib = amdgpu_ring_generic_pad_ib,
1712b843c749SSergey Zigachev 	.begin_use = amdgpu_vcn_ring_begin_use,
1713b843c749SSergey Zigachev 	.end_use = amdgpu_vcn_ring_end_use,
1714b843c749SSergey Zigachev 	.emit_wreg = vcn_v1_0_enc_ring_emit_wreg,
1715b843c749SSergey Zigachev 	.emit_reg_wait = vcn_v1_0_enc_ring_emit_reg_wait,
1716b843c749SSergey Zigachev 	.emit_reg_write_reg_wait = amdgpu_ring_emit_reg_write_reg_wait_helper,
1717b843c749SSergey Zigachev };
1718b843c749SSergey Zigachev 
1719b843c749SSergey Zigachev static const struct amdgpu_ring_funcs vcn_v1_0_jpeg_ring_vm_funcs = {
1720b843c749SSergey Zigachev 	.type = AMDGPU_RING_TYPE_VCN_JPEG,
1721b843c749SSergey Zigachev 	.align_mask = 0xf,
1722b843c749SSergey Zigachev 	.nop = PACKET0(0x81ff, 0),
1723b843c749SSergey Zigachev 	.support_64bit_ptrs = false,
1724b843c749SSergey Zigachev 	.vmhub = AMDGPU_MMHUB,
1725b843c749SSergey Zigachev 	.extra_dw = 64,
1726b843c749SSergey Zigachev 	.get_rptr = vcn_v1_0_jpeg_ring_get_rptr,
1727b843c749SSergey Zigachev 	.get_wptr = vcn_v1_0_jpeg_ring_get_wptr,
1728b843c749SSergey Zigachev 	.set_wptr = vcn_v1_0_jpeg_ring_set_wptr,
1729b843c749SSergey Zigachev 	.emit_frame_size =
1730b843c749SSergey Zigachev 		6 + 6 + /* hdp invalidate / flush */
1731b843c749SSergey Zigachev 		SOC15_FLUSH_GPU_TLB_NUM_WREG * 6 +
1732b843c749SSergey Zigachev 		SOC15_FLUSH_GPU_TLB_NUM_REG_WAIT * 8 +
1733b843c749SSergey Zigachev 		8 + /* vcn_v1_0_dec_ring_emit_vm_flush */
1734b843c749SSergey Zigachev 		14 + 14 + /* vcn_v1_0_dec_ring_emit_fence x2 vm fence */
1735b843c749SSergey Zigachev 		6,
1736b843c749SSergey Zigachev 	.emit_ib_size = 22, /* vcn_v1_0_dec_ring_emit_ib */
1737b843c749SSergey Zigachev 	.emit_ib = vcn_v1_0_jpeg_ring_emit_ib,
1738b843c749SSergey Zigachev 	.emit_fence = vcn_v1_0_jpeg_ring_emit_fence,
1739b843c749SSergey Zigachev 	.emit_vm_flush = vcn_v1_0_jpeg_ring_emit_vm_flush,
1740b843c749SSergey Zigachev 	.test_ring = amdgpu_vcn_jpeg_ring_test_ring,
1741b843c749SSergey Zigachev 	.test_ib = amdgpu_vcn_jpeg_ring_test_ib,
1742b843c749SSergey Zigachev 	.insert_nop = vcn_v1_0_jpeg_ring_nop,
1743b843c749SSergey Zigachev 	.insert_start = vcn_v1_0_jpeg_ring_insert_start,
1744b843c749SSergey Zigachev 	.insert_end = vcn_v1_0_jpeg_ring_insert_end,
1745b843c749SSergey Zigachev 	.pad_ib = amdgpu_ring_generic_pad_ib,
1746b843c749SSergey Zigachev 	.begin_use = amdgpu_vcn_ring_begin_use,
1747b843c749SSergey Zigachev 	.end_use = amdgpu_vcn_ring_end_use,
1748b843c749SSergey Zigachev 	.emit_wreg = vcn_v1_0_jpeg_ring_emit_wreg,
1749b843c749SSergey Zigachev 	.emit_reg_wait = vcn_v1_0_jpeg_ring_emit_reg_wait,
1750b843c749SSergey Zigachev };
1751b843c749SSergey Zigachev 
/* Attach the VM-mode decode ring callback table to the device. */
static void vcn_v1_0_set_dec_ring_funcs(struct amdgpu_device *adev)
{
	adev->vcn.ring_dec.funcs = &vcn_v1_0_dec_ring_vm_funcs;
	DRM_INFO("VCN decode is enabled in VM mode\n");
}
1757b843c749SSergey Zigachev 
vcn_v1_0_set_enc_ring_funcs(struct amdgpu_device * adev)1758b843c749SSergey Zigachev static void vcn_v1_0_set_enc_ring_funcs(struct amdgpu_device *adev)
1759b843c749SSergey Zigachev {
1760b843c749SSergey Zigachev 	int i;
1761b843c749SSergey Zigachev 
1762b843c749SSergey Zigachev 	for (i = 0; i < adev->vcn.num_enc_rings; ++i)
1763b843c749SSergey Zigachev 		adev->vcn.ring_enc[i].funcs = &vcn_v1_0_enc_ring_vm_funcs;
1764b843c749SSergey Zigachev 
1765b843c749SSergey Zigachev 	DRM_INFO("VCN encode is enabled in VM mode\n");
1766b843c749SSergey Zigachev }
1767b843c749SSergey Zigachev 
/* Attach the VM-mode jpeg decode ring callback table to the device. */
static void vcn_v1_0_set_jpeg_ring_funcs(struct amdgpu_device *adev)
{
	adev->vcn.ring_jpeg.funcs = &vcn_v1_0_jpeg_ring_vm_funcs;
	DRM_INFO("VCN jpeg decode is enabled in VM mode\n");
}
1773b843c749SSergey Zigachev 
1774b843c749SSergey Zigachev static const struct amdgpu_irq_src_funcs vcn_v1_0_irq_funcs = {
1775b843c749SSergey Zigachev 	.set = vcn_v1_0_set_interrupt_state,
1776b843c749SSergey Zigachev 	.process = vcn_v1_0_process_interrupt,
1777b843c749SSergey Zigachev };
1778b843c749SSergey Zigachev 
/* Register the VCN interrupt callbacks and the number of irq types.
 * num_types covers one type per encode ring plus one for decode.
 * NOTE(review): the jpeg ring also raises interrupts (src_id 126 in
 * vcn_v1_0_process_interrupt) yet is not counted here — confirm whether
 * num_types should be num_enc_rings + 2. */
static void vcn_v1_0_set_irq_funcs(struct amdgpu_device *adev)
{
	adev->vcn.irq.num_types = adev->vcn.num_enc_rings + 1;
	adev->vcn.irq.funcs = &vcn_v1_0_irq_funcs;
}
1784b843c749SSergey Zigachev 
1785b843c749SSergey Zigachev const struct amdgpu_ip_block_version vcn_v1_0_ip_block =
1786b843c749SSergey Zigachev {
1787b843c749SSergey Zigachev 		.type = AMD_IP_BLOCK_TYPE_VCN,
1788b843c749SSergey Zigachev 		.major = 1,
1789b843c749SSergey Zigachev 		.minor = 0,
1790b843c749SSergey Zigachev 		.rev = 0,
1791b843c749SSergey Zigachev 		.funcs = &vcn_v1_0_ip_funcs,
1792b843c749SSergey Zigachev };
1793