/*
 * Copyright 2011 Advanced Micro Devices, Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Alex Deucher
 */

#include <drm/drmP.h>
#include "amdgpu.h"
#include "amdgpu_atombios.h"
#include "amdgpu_i2c.h"
#include "amdgpu_dpm.h"
#include "atom.h"
#include "amd_pcie.h"

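/*
 * amdgpu_dpm_print_class_info - decode an ATOM power state classification.
 *
 * Prints the UI class (none/battery/balanced/performance) selected in
 * class, then every internal classification flag set in class/class2
 * (boot, thermal, UVD, ULV, ...) as a single debug line.
 */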
void amdgpu_dpm_print_class_info(u32 class, u32 class2)
{
	const char *s;

	switch (class & ATOM_PPLIB_CLASSIFICATION_UI_MASK) {
	case ATOM_PPLIB_CLASSIFICATION_UI_NONE:
	default:
		s = "none";
		break;
	case ATOM_PPLIB_CLASSIFICATION_UI_BATTERY:
		s = "battery";
		break;
	case ATOM_PPLIB_CLASSIFICATION_UI_BALANCED:
		s = "balanced";
		break;
	case ATOM_PPLIB_CLASSIFICATION_UI_PERFORMANCE:
		s = "performance";
		break;
	}
	printk("\tui class: %s\n", s);
	printk("\tinternal class:");
	if (((class & ~ATOM_PPLIB_CLASSIFICATION_UI_MASK) == 0) &&
	    (class2 == 0))
		pr_cont(" none");
	else {
		if (class & ATOM_PPLIB_CLASSIFICATION_BOOT)
			pr_cont(" boot");
		if (class & ATOM_PPLIB_CLASSIFICATION_THERMAL)
			pr_cont(" thermal");
		if (class & ATOM_PPLIB_CLASSIFICATION_LIMITEDPOWERSOURCE)
			pr_cont(" limited_pwr");
		if (class & ATOM_PPLIB_CLASSIFICATION_REST)
			pr_cont(" rest");
		if (class & ATOM_PPLIB_CLASSIFICATION_FORCED)
			pr_cont(" forced");
		if (class & ATOM_PPLIB_CLASSIFICATION_3DPERFORMANCE)
			pr_cont(" 3d_perf");
		if (class & ATOM_PPLIB_CLASSIFICATION_OVERDRIVETEMPLATE)
			pr_cont(" ovrdrv");
		if (class & ATOM_PPLIB_CLASSIFICATION_UVDSTATE)
			pr_cont(" uvd");
		if (class & ATOM_PPLIB_CLASSIFICATION_3DLOW)
			pr_cont(" 3d_low");
		if (class & ATOM_PPLIB_CLASSIFICATION_ACPI)
			pr_cont(" acpi");
		if (class & ATOM_PPLIB_CLASSIFICATION_HD2STATE)
			pr_cont(" uvd_hd2");
		if (class & ATOM_PPLIB_CLASSIFICATION_HDSTATE)
			pr_cont(" uvd_hd");
		if (class & ATOM_PPLIB_CLASSIFICATION_SDSTATE)
			pr_cont(" uvd_sd");
		if (class2 & ATOM_PPLIB_CLASSIFICATION2_LIMITEDPOWERSOURCE_2)
			pr_cont(" limited_pwr2");
		if (class2 & ATOM_PPLIB_CLASSIFICATION2_ULV)
			pr_cont(" ulv");
		if (class2 & ATOM_PPLIB_CLASSIFICATION2_MVC)
			pr_cont(" uvd_mvc");
	}
	pr_cont("\n");
}

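/*
 * amdgpu_dpm_print_cap_info - print the power state capability flags
 * (single display only, video playback optimized, disallowed on DC power)
 * that are set in caps.
 */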
void amdgpu_dpm_print_cap_info(u32 caps)
{
	printk("\tcaps:");
	if (caps & ATOM_PPLIB_SINGLE_DISPLAY_ONLY)
		pr_cont(" single_disp");
	if (caps & ATOM_PPLIB_SUPPORTS_VIDEO_PLAYBACK)
		pr_cont(" video");
	if (caps & ATOM_PPLIB_DISALLOW_ON_DC)
		pr_cont(" no_dc");
	pr_cont("\n");
}

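/*
 * amdgpu_dpm_print_ps_status - print whether rps is the current (c),
 * requested (r) and/or boot (b) power state of the device.
 */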
void amdgpu_dpm_print_ps_status(struct amdgpu_device *adev,
				struct amdgpu_ps *rps)
{
	printk("\tstatus:");
	if (rps == adev->pm.dpm.current_ps)
		pr_cont(" c");
	if (rps == adev->pm.dpm.requested_ps)
		pr_cont(" r");
	if (rps == adev->pm.dpm.boot_ps)
		pr_cont(" b");
	pr_cont("\n");
}

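/*
 * amdgpu_dpm_get_active_displays - refresh the active display bookkeeping.
 *
 * Walks the CRTC list and records a bitmask of the enabled CRTCs in
 * adev->pm.dpm.new_active_crtcs and their number in new_active_crtc_count
 * for use by the rest of the DPM code.
 */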
void amdgpu_dpm_get_active_displays(struct amdgpu_device *adev)
{
	struct drm_device *ddev = adev->ddev;
	struct drm_crtc *crtc;
	struct amdgpu_crtc *amdgpu_crtc;

	adev->pm.dpm.new_active_crtcs = 0;
	adev->pm.dpm.new_active_crtc_count = 0;
	if (adev->mode_info.num_crtc && adev->mode_info.mode_config_initialized) {
		list_for_each_entry(crtc,
				    &ddev->mode_config.crtc_list, head) {
			amdgpu_crtc = to_amdgpu_crtc(crtc);
			if (amdgpu_crtc->enabled) {
				adev->pm.dpm.new_active_crtcs |= (1 << amdgpu_crtc->crtc_id);
				adev->pm.dpm.new_active_crtc_count++;
			}
		}
	}
}


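/*
 * amdgpu_dpm_get_vblank_time - vertical blanking time of the first active
 * CRTC, in microseconds.
 *
 * For the first enabled CRTC with a valid pixel clock this evaluates
 *
 *   vblank_in_pixels = crtc_htotal *
 *                      (crtc_vblank_end - crtc_vdisplay + 2 * v_border)
 *   vblank_time_us   = vblank_in_pixels * 1000 / clock_in_kHz
 *
 * As an illustrative example (numbers not taken from any real table), a
 * 1080p mode with htotal 2200, 45 blanking lines and a 148500 kHz clock
 * gives 2200 * 45 * 1000 / 148500, i.e. about 666 us.  If no display is
 * active, 0xffffffff is returned so the blanking interval is treated as
 * effectively unlimited.
 */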
u32 amdgpu_dpm_get_vblank_time(struct amdgpu_device *adev)
{
	struct drm_device *dev = adev->ddev;
	struct drm_crtc *crtc;
	struct amdgpu_crtc *amdgpu_crtc;
	u32 vblank_in_pixels;
	u32 vblank_time_us = 0xffffffff; /* if the displays are off, vblank time is max */

	if (adev->mode_info.num_crtc && adev->mode_info.mode_config_initialized) {
		list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
			amdgpu_crtc = to_amdgpu_crtc(crtc);
			if (crtc->enabled && amdgpu_crtc->enabled && amdgpu_crtc->hw_mode.clock) {
				vblank_in_pixels =
					amdgpu_crtc->hw_mode.crtc_htotal *
					(amdgpu_crtc->hw_mode.crtc_vblank_end -
					amdgpu_crtc->hw_mode.crtc_vdisplay +
					(amdgpu_crtc->v_border * 2));

				vblank_time_us = vblank_in_pixels * 1000 / amdgpu_crtc->hw_mode.clock;
				break;
			}
		}
	}

	return vblank_time_us;
}

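/*
 * amdgpu_dpm_get_vrefresh - vertical refresh rate, in Hz, of the first
 * enabled CRTC with a valid pixel clock, or 0 if no display is active.
 */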
u32 amdgpu_dpm_get_vrefresh(struct amdgpu_device *adev)
{
	struct drm_device *dev = adev->ddev;
	struct drm_crtc *crtc;
	struct amdgpu_crtc *amdgpu_crtc;
	u32 vrefresh = 0;

	if (adev->mode_info.num_crtc && adev->mode_info.mode_config_initialized) {
		list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
			amdgpu_crtc = to_amdgpu_crtc(crtc);
			if (crtc->enabled && amdgpu_crtc->enabled && amdgpu_crtc->hw_mode.clock) {
				vrefresh = drm_mode_vrefresh(&amdgpu_crtc->hw_mode);
				break;
			}
		}
	}

	return vrefresh;
}

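/*
 * amdgpu_calculate_u_and_p - integer helper for the per-ASIC dpm code.
 *
 * Computes i_c = i * r_c / 100, lets b_c be the bit width of (i_c >> p_b),
 * then returns *u = (b_c + 1) / 2 and *p = i_c / (1 << (2 * *u)).  The
 * meaning of the individual inputs is defined by the callers.
 */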
void amdgpu_calculate_u_and_p(u32 i, u32 r_c, u32 p_b,
			      u32 *p, u32 *u)
{
	u32 b_c = 0;
	u32 i_c;
	u32 tmp;

	i_c = (i * r_c) / 100;
	tmp = i_c >> p_b;

	while (tmp) {
		b_c++;
		tmp >>= 1;
	}

	*u = (b_c + 1) / 2;
	*p = i_c / (1 << (2 * (*u)));
}

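/*
 * amdgpu_calculate_at - derive a low/high threshold pair around t from a
 * hysteresis value h and a high/low clock pair fh/fl.
 *
 * With k = 100 * fh / fl and t1 = t * (k - 100) this computes
 *
 *   a  = round((1000 * (100 * h + t1)) / (10000 + t1 / 100) / 10)
 *   ah = round(a * t / 10000), al = a - ah
 *
 * and returns *th = t - ah and *tl = t + al.  Returns -EINVAL when fl or
 * fh is zero or fl > fh.
 */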
int amdgpu_calculate_at(u32 t, u32 h, u32 fh, u32 fl, u32 *tl, u32 *th)
{
	u32 k, a, ah, al;
	u32 t1;

	if ((fl == 0) || (fh == 0) || (fl > fh))
		return -EINVAL;

	k = (100 * fh) / fl;
	t1 = (t * (k - 100));
	a = (1000 * (100 * h + t1)) / (10000 + (t1 / 100));
	a = (a + 5) / 10;
	ah = ((a * t) + 5000) / 10000;
	al = a - ah;

	*th = t - ah;
	*tl = t + al;

	return 0;
}

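/*
 * amdgpu_is_uvd_state - true if the classification flags mark this power
 * state as a UVD (video decode) state: UVD, HD, HD2, SD or MVC.
 */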
bool amdgpu_is_uvd_state(u32 class, u32 class2)
{
	if (class & ATOM_PPLIB_CLASSIFICATION_UVDSTATE)
		return true;
	if (class & ATOM_PPLIB_CLASSIFICATION_HD2STATE)
		return true;
	if (class & ATOM_PPLIB_CLASSIFICATION_HDSTATE)
		return true;
	if (class & ATOM_PPLIB_CLASSIFICATION_SDSTATE)
		return true;
	if (class2 & ATOM_PPLIB_CLASSIFICATION2_MVC)
		return true;
	return false;
}

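/*
 * amdgpu_is_internal_thermal_sensor - true for sensor types that are read
 * directly from the GPU (RV6xx through KV).  External controllers, and the
 * ADT7473/EMC2103 "with internal" combinations that need special handling,
 * report false.
 */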
bool amdgpu_is_internal_thermal_sensor(enum amdgpu_int_thermal_type sensor)
{
	switch (sensor) {
	case THERMAL_TYPE_RV6XX:
	case THERMAL_TYPE_RV770:
	case THERMAL_TYPE_EVERGREEN:
	case THERMAL_TYPE_SUMO:
	case THERMAL_TYPE_NI:
	case THERMAL_TYPE_SI:
	case THERMAL_TYPE_CI:
	case THERMAL_TYPE_KV:
		return true;
	case THERMAL_TYPE_ADT7473_WITH_INTERNAL:
	case THERMAL_TYPE_EMC2103_WITH_INTERNAL:
		return false; /* need special handling */
	case THERMAL_TYPE_NONE:
	case THERMAL_TYPE_EXTERNAL:
	case THERMAL_TYPE_EXTERNAL_GPIO:
	default:
		return false;
	}
}

union power_info {
	struct _ATOM_POWERPLAY_INFO info;
	struct _ATOM_POWERPLAY_INFO_V2 info_2;
	struct _ATOM_POWERPLAY_INFO_V3 info_3;
	struct _ATOM_PPLIB_POWERPLAYTABLE pplib;
	struct _ATOM_PPLIB_POWERPLAYTABLE2 pplib2;
	struct _ATOM_PPLIB_POWERPLAYTABLE3 pplib3;
	struct _ATOM_PPLIB_POWERPLAYTABLE4 pplib4;
	struct _ATOM_PPLIB_POWERPLAYTABLE5 pplib5;
};

union fan_info {
	struct _ATOM_PPLIB_FANTABLE fan;
	struct _ATOM_PPLIB_FANTABLE2 fan2;
	struct _ATOM_PPLIB_FANTABLE3 fan3;
};

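/*
 * amdgpu_parse_clk_voltage_dep_table - copy one ATOM clock/voltage
 * dependency table from the BIOS image into a driver-allocated
 * amdgpu_clock_voltage_dependency_table.  Returns -ENOMEM if the entry
 * array cannot be allocated; the array is later released by
 * amdgpu_free_extended_power_table().
 */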
static int amdgpu_parse_clk_voltage_dep_table(struct amdgpu_clock_voltage_dependency_table *amdgpu_table,
					      ATOM_PPLIB_Clock_Voltage_Dependency_Table *atom_table)
{
	u32 size = atom_table->ucNumEntries *
		sizeof(struct amdgpu_clock_voltage_dependency_entry);
	int i;
	ATOM_PPLIB_Clock_Voltage_Dependency_Record *entry;

	amdgpu_table->entries = kzalloc(size, GFP_KERNEL);
	if (!amdgpu_table->entries)
		return -ENOMEM;

	entry = &atom_table->entries[0];
	for (i = 0; i < atom_table->ucNumEntries; i++) {
		amdgpu_table->entries[i].clk = le16_to_cpu(entry->usClockLow) |
			(entry->ucClockHigh << 16);
		amdgpu_table->entries[i].v = le16_to_cpu(entry->usVoltage);
		entry = (ATOM_PPLIB_Clock_Voltage_Dependency_Record *)
			((u8 *)entry + sizeof(ATOM_PPLIB_Clock_Voltage_Dependency_Record));
	}
	amdgpu_table->count = atom_table->ucNumEntries;

	return 0;
}

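/*
 * amdgpu_get_platform_caps - read the PowerPlay platform capabilities,
 * back-bias response time and voltage response time from the PowerPlayInfo
 * data table into adev->pm.dpm.  Returns -EINVAL if the table header cannot
 * be parsed.
 */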
int amdgpu_get_platform_caps(struct amdgpu_device *adev)
{
	struct amdgpu_mode_info *mode_info = &adev->mode_info;
	union power_info *power_info;
	int index = GetIndexIntoMasterTable(DATA, PowerPlayInfo);
	u16 data_offset;
	u8 frev, crev;

	if (!amdgpu_atom_parse_data_header(mode_info->atom_context, index, NULL,
				   &frev, &crev, &data_offset))
		return -EINVAL;
	power_info = (union power_info *)(mode_info->atom_context->bios + data_offset);

	adev->pm.dpm.platform_caps = le32_to_cpu(power_info->pplib.ulPlatformCaps);
	adev->pm.dpm.backbias_response_time = le16_to_cpu(power_info->pplib.usBackbiasTime);
	adev->pm.dpm.voltage_response_time = le16_to_cpu(power_info->pplib.usVoltageTime);

	return 0;
}

/* sizeof(ATOM_PPLIB_EXTENDEDHEADER) */
#define SIZE_OF_ATOM_PPLIB_EXTENDEDHEADER_V2 12
#define SIZE_OF_ATOM_PPLIB_EXTENDEDHEADER_V3 14
#define SIZE_OF_ATOM_PPLIB_EXTENDEDHEADER_V4 16
#define SIZE_OF_ATOM_PPLIB_EXTENDEDHEADER_V5 18
#define SIZE_OF_ATOM_PPLIB_EXTENDEDHEADER_V6 20
#define SIZE_OF_ATOM_PPLIB_EXTENDEDHEADER_V7 22
#define SIZE_OF_ATOM_PPLIB_EXTENDEDHEADER_V8 24
#define SIZE_OF_ATOM_PPLIB_EXTENDEDHEADER_V9 26

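/*
 * amdgpu_parse_extended_power_table - parse the optional parts of the
 * PowerPlay table: fan table, clock/voltage dependency and phase shedding
 * limits, CAC leakage data and the extended header sub-tables (VCE, UVD,
 * SAMU, PPM, ACP, PowerTune, SCLK/VDDGFX).
 *
 * On allocation failure everything parsed so far is released through
 * amdgpu_free_extended_power_table() and an error is returned; on success
 * the caller is expected to call amdgpu_free_extended_power_table() at
 * teardown.
 */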
int amdgpu_parse_extended_power_table(struct amdgpu_device *adev)
{
	struct amdgpu_mode_info *mode_info = &adev->mode_info;
	union power_info *power_info;
	union fan_info *fan_info;
	ATOM_PPLIB_Clock_Voltage_Dependency_Table *dep_table;
	int index = GetIndexIntoMasterTable(DATA, PowerPlayInfo);
	u16 data_offset;
	u8 frev, crev;
	int ret, i;

	if (!amdgpu_atom_parse_data_header(mode_info->atom_context, index, NULL,
				   &frev, &crev, &data_offset))
		return -EINVAL;
	power_info = (union power_info *)(mode_info->atom_context->bios + data_offset);

	/* fan table */
	if (le16_to_cpu(power_info->pplib.usTableSize) >=
	    sizeof(struct _ATOM_PPLIB_POWERPLAYTABLE3)) {
		if (power_info->pplib3.usFanTableOffset) {
			fan_info = (union fan_info *)(mode_info->atom_context->bios + data_offset +
						      le16_to_cpu(power_info->pplib3.usFanTableOffset));
			adev->pm.dpm.fan.t_hyst = fan_info->fan.ucTHyst;
			adev->pm.dpm.fan.t_min = le16_to_cpu(fan_info->fan.usTMin);
			adev->pm.dpm.fan.t_med = le16_to_cpu(fan_info->fan.usTMed);
			adev->pm.dpm.fan.t_high = le16_to_cpu(fan_info->fan.usTHigh);
			adev->pm.dpm.fan.pwm_min = le16_to_cpu(fan_info->fan.usPWMMin);
			adev->pm.dpm.fan.pwm_med = le16_to_cpu(fan_info->fan.usPWMMed);
			adev->pm.dpm.fan.pwm_high = le16_to_cpu(fan_info->fan.usPWMHigh);
			if (fan_info->fan.ucFanTableFormat >= 2)
				adev->pm.dpm.fan.t_max = le16_to_cpu(fan_info->fan2.usTMax);
			else
				adev->pm.dpm.fan.t_max = 10900;
			adev->pm.dpm.fan.cycle_delay = 100000;
			if (fan_info->fan.ucFanTableFormat >= 3) {
				adev->pm.dpm.fan.control_mode = fan_info->fan3.ucFanControlMode;
				adev->pm.dpm.fan.default_max_fan_pwm =
					le16_to_cpu(fan_info->fan3.usFanPWMMax);
				adev->pm.dpm.fan.default_fan_output_sensitivity = 4836;
				adev->pm.dpm.fan.fan_output_sensitivity =
					le16_to_cpu(fan_info->fan3.usFanOutputSensitivity);
			}
			adev->pm.dpm.fan.ucode_fan_control = true;
		}
	}

	/* clock dependency tables, shedding tables */
	if (le16_to_cpu(power_info->pplib.usTableSize) >=
	    sizeof(struct _ATOM_PPLIB_POWERPLAYTABLE4)) {
		if (power_info->pplib4.usVddcDependencyOnSCLKOffset) {
			dep_table = (ATOM_PPLIB_Clock_Voltage_Dependency_Table *)
				(mode_info->atom_context->bios + data_offset +
				 le16_to_cpu(power_info->pplib4.usVddcDependencyOnSCLKOffset));
			ret = amdgpu_parse_clk_voltage_dep_table(&adev->pm.dpm.dyn_state.vddc_dependency_on_sclk,
								 dep_table);
			if (ret) {
				amdgpu_free_extended_power_table(adev);
				return ret;
			}
		}
		if (power_info->pplib4.usVddciDependencyOnMCLKOffset) {
			dep_table = (ATOM_PPLIB_Clock_Voltage_Dependency_Table *)
				(mode_info->atom_context->bios + data_offset +
				 le16_to_cpu(power_info->pplib4.usVddciDependencyOnMCLKOffset));
			ret = amdgpu_parse_clk_voltage_dep_table(&adev->pm.dpm.dyn_state.vddci_dependency_on_mclk,
								 dep_table);
			if (ret) {
				amdgpu_free_extended_power_table(adev);
				return ret;
			}
		}
		if (power_info->pplib4.usVddcDependencyOnMCLKOffset) {
			dep_table = (ATOM_PPLIB_Clock_Voltage_Dependency_Table *)
				(mode_info->atom_context->bios + data_offset +
				 le16_to_cpu(power_info->pplib4.usVddcDependencyOnMCLKOffset));
			ret = amdgpu_parse_clk_voltage_dep_table(&adev->pm.dpm.dyn_state.vddc_dependency_on_mclk,
								 dep_table);
			if (ret) {
				amdgpu_free_extended_power_table(adev);
				return ret;
			}
		}
		if (power_info->pplib4.usMvddDependencyOnMCLKOffset) {
			dep_table = (ATOM_PPLIB_Clock_Voltage_Dependency_Table *)
				(mode_info->atom_context->bios + data_offset +
				 le16_to_cpu(power_info->pplib4.usMvddDependencyOnMCLKOffset));
			ret = amdgpu_parse_clk_voltage_dep_table(&adev->pm.dpm.dyn_state.mvdd_dependency_on_mclk,
								 dep_table);
			if (ret) {
				amdgpu_free_extended_power_table(adev);
				return ret;
			}
		}
		if (power_info->pplib4.usMaxClockVoltageOnDCOffset) {
			ATOM_PPLIB_Clock_Voltage_Limit_Table *clk_v =
				(ATOM_PPLIB_Clock_Voltage_Limit_Table *)
				(mode_info->atom_context->bios + data_offset +
				 le16_to_cpu(power_info->pplib4.usMaxClockVoltageOnDCOffset));
			if (clk_v->ucNumEntries) {
				adev->pm.dpm.dyn_state.max_clock_voltage_on_dc.sclk =
					le16_to_cpu(clk_v->entries[0].usSclkLow) |
					(clk_v->entries[0].ucSclkHigh << 16);
				adev->pm.dpm.dyn_state.max_clock_voltage_on_dc.mclk =
					le16_to_cpu(clk_v->entries[0].usMclkLow) |
					(clk_v->entries[0].ucMclkHigh << 16);
				adev->pm.dpm.dyn_state.max_clock_voltage_on_dc.vddc =
					le16_to_cpu(clk_v->entries[0].usVddc);
				adev->pm.dpm.dyn_state.max_clock_voltage_on_dc.vddci =
					le16_to_cpu(clk_v->entries[0].usVddci);
			}
		}
		if (power_info->pplib4.usVddcPhaseShedLimitsTableOffset) {
			ATOM_PPLIB_PhaseSheddingLimits_Table *psl =
				(ATOM_PPLIB_PhaseSheddingLimits_Table *)
				(mode_info->atom_context->bios + data_offset +
				 le16_to_cpu(power_info->pplib4.usVddcPhaseShedLimitsTableOffset));
			ATOM_PPLIB_PhaseSheddingLimits_Record *entry;

			adev->pm.dpm.dyn_state.phase_shedding_limits_table.entries =
				kcalloc(psl->ucNumEntries,
					sizeof(struct amdgpu_phase_shedding_limits_entry),
					GFP_KERNEL);
			if (!adev->pm.dpm.dyn_state.phase_shedding_limits_table.entries) {
				amdgpu_free_extended_power_table(adev);
				return -ENOMEM;
			}

			entry = &psl->entries[0];
			for (i = 0; i < psl->ucNumEntries; i++) {
				adev->pm.dpm.dyn_state.phase_shedding_limits_table.entries[i].sclk =
					le16_to_cpu(entry->usSclkLow) | (entry->ucSclkHigh << 16);
				adev->pm.dpm.dyn_state.phase_shedding_limits_table.entries[i].mclk =
					le16_to_cpu(entry->usMclkLow) | (entry->ucMclkHigh << 16);
				adev->pm.dpm.dyn_state.phase_shedding_limits_table.entries[i].voltage =
					le16_to_cpu(entry->usVoltage);
				entry = (ATOM_PPLIB_PhaseSheddingLimits_Record *)
					((u8 *)entry + sizeof(ATOM_PPLIB_PhaseSheddingLimits_Record));
			}
			adev->pm.dpm.dyn_state.phase_shedding_limits_table.count =
				psl->ucNumEntries;
		}
	}

	/* cac data */
	if (le16_to_cpu(power_info->pplib.usTableSize) >=
	    sizeof(struct _ATOM_PPLIB_POWERPLAYTABLE5)) {
		adev->pm.dpm.tdp_limit = le32_to_cpu(power_info->pplib5.ulTDPLimit);
		adev->pm.dpm.near_tdp_limit = le32_to_cpu(power_info->pplib5.ulNearTDPLimit);
		adev->pm.dpm.near_tdp_limit_adjusted = adev->pm.dpm.near_tdp_limit;
		adev->pm.dpm.tdp_od_limit = le16_to_cpu(power_info->pplib5.usTDPODLimit);
		if (adev->pm.dpm.tdp_od_limit)
			adev->pm.dpm.power_control = true;
		else
			adev->pm.dpm.power_control = false;
		adev->pm.dpm.tdp_adjustment = 0;
		adev->pm.dpm.sq_ramping_threshold = le32_to_cpu(power_info->pplib5.ulSQRampingThreshold);
		adev->pm.dpm.cac_leakage = le32_to_cpu(power_info->pplib5.ulCACLeakage);
		adev->pm.dpm.load_line_slope = le16_to_cpu(power_info->pplib5.usLoadLineSlope);
		if (power_info->pplib5.usCACLeakageTableOffset) {
			ATOM_PPLIB_CAC_Leakage_Table *cac_table =
				(ATOM_PPLIB_CAC_Leakage_Table *)
				(mode_info->atom_context->bios + data_offset +
				 le16_to_cpu(power_info->pplib5.usCACLeakageTableOffset));
			ATOM_PPLIB_CAC_Leakage_Record *entry;
			u32 size = cac_table->ucNumEntries * sizeof(struct amdgpu_cac_leakage_table);
			adev->pm.dpm.dyn_state.cac_leakage_table.entries = kzalloc(size, GFP_KERNEL);
			if (!adev->pm.dpm.dyn_state.cac_leakage_table.entries) {
				amdgpu_free_extended_power_table(adev);
				return -ENOMEM;
			}
			entry = &cac_table->entries[0];
			for (i = 0; i < cac_table->ucNumEntries; i++) {
				if (adev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_EVV) {
					adev->pm.dpm.dyn_state.cac_leakage_table.entries[i].vddc1 =
						le16_to_cpu(entry->usVddc1);
					adev->pm.dpm.dyn_state.cac_leakage_table.entries[i].vddc2 =
						le16_to_cpu(entry->usVddc2);
					adev->pm.dpm.dyn_state.cac_leakage_table.entries[i].vddc3 =
						le16_to_cpu(entry->usVddc3);
				} else {
					adev->pm.dpm.dyn_state.cac_leakage_table.entries[i].vddc =
						le16_to_cpu(entry->usVddc);
					adev->pm.dpm.dyn_state.cac_leakage_table.entries[i].leakage =
						le32_to_cpu(entry->ulLeakageValue);
				}
				entry = (ATOM_PPLIB_CAC_Leakage_Record *)
					((u8 *)entry + sizeof(ATOM_PPLIB_CAC_Leakage_Record));
			}
			adev->pm.dpm.dyn_state.cac_leakage_table.count = cac_table->ucNumEntries;
		}
	}

	/* ext tables */
	if (le16_to_cpu(power_info->pplib.usTableSize) >=
	    sizeof(struct _ATOM_PPLIB_POWERPLAYTABLE3)) {
		ATOM_PPLIB_EXTENDEDHEADER *ext_hdr = (ATOM_PPLIB_EXTENDEDHEADER *)
			(mode_info->atom_context->bios + data_offset +
			 le16_to_cpu(power_info->pplib3.usExtendendedHeaderOffset));
		if ((le16_to_cpu(ext_hdr->usSize) >= SIZE_OF_ATOM_PPLIB_EXTENDEDHEADER_V2) &&
			ext_hdr->usVCETableOffset) {
			VCEClockInfoArray *array = (VCEClockInfoArray *)
				(mode_info->atom_context->bios + data_offset +
				 le16_to_cpu(ext_hdr->usVCETableOffset) + 1);
			ATOM_PPLIB_VCE_Clock_Voltage_Limit_Table *limits =
				(ATOM_PPLIB_VCE_Clock_Voltage_Limit_Table *)
				(mode_info->atom_context->bios + data_offset +
				 le16_to_cpu(ext_hdr->usVCETableOffset) + 1 +
				 1 + array->ucNumEntries * sizeof(VCEClockInfo));
			ATOM_PPLIB_VCE_State_Table *states =
				(ATOM_PPLIB_VCE_State_Table *)
				(mode_info->atom_context->bios + data_offset +
				 le16_to_cpu(ext_hdr->usVCETableOffset) + 1 +
				 1 + (array->ucNumEntries * sizeof (VCEClockInfo)) +
				 1 + (limits->numEntries * sizeof(ATOM_PPLIB_VCE_Clock_Voltage_Limit_Record)));
			ATOM_PPLIB_VCE_Clock_Voltage_Limit_Record *entry;
			ATOM_PPLIB_VCE_State_Record *state_entry;
			VCEClockInfo *vce_clk;
			u32 size = limits->numEntries *
				sizeof(struct amdgpu_vce_clock_voltage_dependency_entry);
			adev->pm.dpm.dyn_state.vce_clock_voltage_dependency_table.entries =
				kzalloc(size, GFP_KERNEL);
			if (!adev->pm.dpm.dyn_state.vce_clock_voltage_dependency_table.entries) {
				amdgpu_free_extended_power_table(adev);
				return -ENOMEM;
			}
			adev->pm.dpm.dyn_state.vce_clock_voltage_dependency_table.count =
				limits->numEntries;
			entry = &limits->entries[0];
			state_entry = &states->entries[0];
			for (i = 0; i < limits->numEntries; i++) {
				vce_clk = (VCEClockInfo *)
					((u8 *)&array->entries[0] +
					 (entry->ucVCEClockInfoIndex * sizeof(VCEClockInfo)));
				adev->pm.dpm.dyn_state.vce_clock_voltage_dependency_table.entries[i].evclk =
					le16_to_cpu(vce_clk->usEVClkLow) | (vce_clk->ucEVClkHigh << 16);
				adev->pm.dpm.dyn_state.vce_clock_voltage_dependency_table.entries[i].ecclk =
					le16_to_cpu(vce_clk->usECClkLow) | (vce_clk->ucECClkHigh << 16);
				adev->pm.dpm.dyn_state.vce_clock_voltage_dependency_table.entries[i].v =
					le16_to_cpu(entry->usVoltage);
				entry = (ATOM_PPLIB_VCE_Clock_Voltage_Limit_Record *)
					((u8 *)entry + sizeof(ATOM_PPLIB_VCE_Clock_Voltage_Limit_Record));
			}
			adev->pm.dpm.num_of_vce_states =
					states->numEntries > AMD_MAX_VCE_LEVELS ?
					AMD_MAX_VCE_LEVELS : states->numEntries;
			for (i = 0; i < adev->pm.dpm.num_of_vce_states; i++) {
				vce_clk = (VCEClockInfo *)
					((u8 *)&array->entries[0] +
					 (state_entry->ucVCEClockInfoIndex * sizeof(VCEClockInfo)));
				adev->pm.dpm.vce_states[i].evclk =
					le16_to_cpu(vce_clk->usEVClkLow) | (vce_clk->ucEVClkHigh << 16);
				adev->pm.dpm.vce_states[i].ecclk =
					le16_to_cpu(vce_clk->usECClkLow) | (vce_clk->ucECClkHigh << 16);
				adev->pm.dpm.vce_states[i].clk_idx =
					state_entry->ucClockInfoIndex & 0x3f;
				adev->pm.dpm.vce_states[i].pstate =
					(state_entry->ucClockInfoIndex & 0xc0) >> 6;
				state_entry = (ATOM_PPLIB_VCE_State_Record *)
					((u8 *)state_entry + sizeof(ATOM_PPLIB_VCE_State_Record));
			}
		}
		if ((le16_to_cpu(ext_hdr->usSize) >= SIZE_OF_ATOM_PPLIB_EXTENDEDHEADER_V3) &&
			ext_hdr->usUVDTableOffset) {
			UVDClockInfoArray *array = (UVDClockInfoArray *)
				(mode_info->atom_context->bios + data_offset +
				 le16_to_cpu(ext_hdr->usUVDTableOffset) + 1);
			ATOM_PPLIB_UVD_Clock_Voltage_Limit_Table *limits =
				(ATOM_PPLIB_UVD_Clock_Voltage_Limit_Table *)
				(mode_info->atom_context->bios + data_offset +
				 le16_to_cpu(ext_hdr->usUVDTableOffset) + 1 +
				 1 + (array->ucNumEntries * sizeof (UVDClockInfo)));
			ATOM_PPLIB_UVD_Clock_Voltage_Limit_Record *entry;
			u32 size = limits->numEntries *
				sizeof(struct amdgpu_uvd_clock_voltage_dependency_entry);
			adev->pm.dpm.dyn_state.uvd_clock_voltage_dependency_table.entries =
				kzalloc(size, GFP_KERNEL);
			if (!adev->pm.dpm.dyn_state.uvd_clock_voltage_dependency_table.entries) {
				amdgpu_free_extended_power_table(adev);
				return -ENOMEM;
			}
			adev->pm.dpm.dyn_state.uvd_clock_voltage_dependency_table.count =
				limits->numEntries;
			entry = &limits->entries[0];
			for (i = 0; i < limits->numEntries; i++) {
				UVDClockInfo *uvd_clk = (UVDClockInfo *)
					((u8 *)&array->entries[0] +
					 (entry->ucUVDClockInfoIndex * sizeof(UVDClockInfo)));
				adev->pm.dpm.dyn_state.uvd_clock_voltage_dependency_table.entries[i].vclk =
					le16_to_cpu(uvd_clk->usVClkLow) | (uvd_clk->ucVClkHigh << 16);
				adev->pm.dpm.dyn_state.uvd_clock_voltage_dependency_table.entries[i].dclk =
					le16_to_cpu(uvd_clk->usDClkLow) | (uvd_clk->ucDClkHigh << 16);
				adev->pm.dpm.dyn_state.uvd_clock_voltage_dependency_table.entries[i].v =
					le16_to_cpu(entry->usVoltage);
				entry = (ATOM_PPLIB_UVD_Clock_Voltage_Limit_Record *)
					((u8 *)entry + sizeof(ATOM_PPLIB_UVD_Clock_Voltage_Limit_Record));
			}
		}
		if ((le16_to_cpu(ext_hdr->usSize) >= SIZE_OF_ATOM_PPLIB_EXTENDEDHEADER_V4) &&
			ext_hdr->usSAMUTableOffset) {
			ATOM_PPLIB_SAMClk_Voltage_Limit_Table *limits =
				(ATOM_PPLIB_SAMClk_Voltage_Limit_Table *)
				(mode_info->atom_context->bios + data_offset +
				 le16_to_cpu(ext_hdr->usSAMUTableOffset) + 1);
			ATOM_PPLIB_SAMClk_Voltage_Limit_Record *entry;
			u32 size = limits->numEntries *
				sizeof(struct amdgpu_clock_voltage_dependency_entry);
			adev->pm.dpm.dyn_state.samu_clock_voltage_dependency_table.entries =
				kzalloc(size, GFP_KERNEL);
			if (!adev->pm.dpm.dyn_state.samu_clock_voltage_dependency_table.entries) {
				amdgpu_free_extended_power_table(adev);
				return -ENOMEM;
			}
			adev->pm.dpm.dyn_state.samu_clock_voltage_dependency_table.count =
				limits->numEntries;
			entry = &limits->entries[0];
			for (i = 0; i < limits->numEntries; i++) {
				adev->pm.dpm.dyn_state.samu_clock_voltage_dependency_table.entries[i].clk =
					le16_to_cpu(entry->usSAMClockLow) | (entry->ucSAMClockHigh << 16);
				adev->pm.dpm.dyn_state.samu_clock_voltage_dependency_table.entries[i].v =
					le16_to_cpu(entry->usVoltage);
				entry = (ATOM_PPLIB_SAMClk_Voltage_Limit_Record *)
					((u8 *)entry + sizeof(ATOM_PPLIB_SAMClk_Voltage_Limit_Record));
			}
		}
		if ((le16_to_cpu(ext_hdr->usSize) >= SIZE_OF_ATOM_PPLIB_EXTENDEDHEADER_V5) &&
		    ext_hdr->usPPMTableOffset) {
			ATOM_PPLIB_PPM_Table *ppm = (ATOM_PPLIB_PPM_Table *)
				(mode_info->atom_context->bios + data_offset +
				 le16_to_cpu(ext_hdr->usPPMTableOffset));
			adev->pm.dpm.dyn_state.ppm_table =
				kzalloc(sizeof(struct amdgpu_ppm_table), GFP_KERNEL);
			if (!adev->pm.dpm.dyn_state.ppm_table) {
				amdgpu_free_extended_power_table(adev);
				return -ENOMEM;
			}
			adev->pm.dpm.dyn_state.ppm_table->ppm_design = ppm->ucPpmDesign;
			adev->pm.dpm.dyn_state.ppm_table->cpu_core_number =
				le16_to_cpu(ppm->usCpuCoreNumber);
			adev->pm.dpm.dyn_state.ppm_table->platform_tdp =
				le32_to_cpu(ppm->ulPlatformTDP);
			adev->pm.dpm.dyn_state.ppm_table->small_ac_platform_tdp =
				le32_to_cpu(ppm->ulSmallACPlatformTDP);
			adev->pm.dpm.dyn_state.ppm_table->platform_tdc =
				le32_to_cpu(ppm->ulPlatformTDC);
			adev->pm.dpm.dyn_state.ppm_table->small_ac_platform_tdc =
				le32_to_cpu(ppm->ulSmallACPlatformTDC);
			adev->pm.dpm.dyn_state.ppm_table->apu_tdp =
				le32_to_cpu(ppm->ulApuTDP);
			adev->pm.dpm.dyn_state.ppm_table->dgpu_tdp =
				le32_to_cpu(ppm->ulDGpuTDP);
			adev->pm.dpm.dyn_state.ppm_table->dgpu_ulv_power =
				le32_to_cpu(ppm->ulDGpuUlvPower);
			adev->pm.dpm.dyn_state.ppm_table->tj_max =
				le32_to_cpu(ppm->ulTjmax);
		}
		if ((le16_to_cpu(ext_hdr->usSize) >= SIZE_OF_ATOM_PPLIB_EXTENDEDHEADER_V6) &&
			ext_hdr->usACPTableOffset) {
			ATOM_PPLIB_ACPClk_Voltage_Limit_Table *limits =
				(ATOM_PPLIB_ACPClk_Voltage_Limit_Table *)
				(mode_info->atom_context->bios + data_offset +
				 le16_to_cpu(ext_hdr->usACPTableOffset) + 1);
			ATOM_PPLIB_ACPClk_Voltage_Limit_Record *entry;
			u32 size = limits->numEntries *
				sizeof(struct amdgpu_clock_voltage_dependency_entry);
			adev->pm.dpm.dyn_state.acp_clock_voltage_dependency_table.entries =
				kzalloc(size, GFP_KERNEL);
			if (!adev->pm.dpm.dyn_state.acp_clock_voltage_dependency_table.entries) {
				amdgpu_free_extended_power_table(adev);
				return -ENOMEM;
			}
			adev->pm.dpm.dyn_state.acp_clock_voltage_dependency_table.count =
				limits->numEntries;
			entry = &limits->entries[0];
			for (i = 0; i < limits->numEntries; i++) {
				adev->pm.dpm.dyn_state.acp_clock_voltage_dependency_table.entries[i].clk =
					le16_to_cpu(entry->usACPClockLow) | (entry->ucACPClockHigh << 16);
				adev->pm.dpm.dyn_state.acp_clock_voltage_dependency_table.entries[i].v =
					le16_to_cpu(entry->usVoltage);
				entry = (ATOM_PPLIB_ACPClk_Voltage_Limit_Record *)
					((u8 *)entry + sizeof(ATOM_PPLIB_ACPClk_Voltage_Limit_Record));
			}
		}
		if ((le16_to_cpu(ext_hdr->usSize) >= SIZE_OF_ATOM_PPLIB_EXTENDEDHEADER_V7) &&
			ext_hdr->usPowerTuneTableOffset) {
			u8 rev = *(u8 *)(mode_info->atom_context->bios + data_offset +
					 le16_to_cpu(ext_hdr->usPowerTuneTableOffset));
			ATOM_PowerTune_Table *pt;
			adev->pm.dpm.dyn_state.cac_tdp_table =
				kzalloc(sizeof(struct amdgpu_cac_tdp_table), GFP_KERNEL);
			if (!adev->pm.dpm.dyn_state.cac_tdp_table) {
				amdgpu_free_extended_power_table(adev);
				return -ENOMEM;
			}
			if (rev > 0) {
				ATOM_PPLIB_POWERTUNE_Table_V1 *ppt = (ATOM_PPLIB_POWERTUNE_Table_V1 *)
					(mode_info->atom_context->bios + data_offset +
					 le16_to_cpu(ext_hdr->usPowerTuneTableOffset));
				adev->pm.dpm.dyn_state.cac_tdp_table->maximum_power_delivery_limit =
					ppt->usMaximumPowerDeliveryLimit;
				pt = &ppt->power_tune_table;
			} else {
				ATOM_PPLIB_POWERTUNE_Table *ppt = (ATOM_PPLIB_POWERTUNE_Table *)
					(mode_info->atom_context->bios + data_offset +
					 le16_to_cpu(ext_hdr->usPowerTuneTableOffset));
				adev->pm.dpm.dyn_state.cac_tdp_table->maximum_power_delivery_limit = 255;
				pt = &ppt->power_tune_table;
			}
			adev->pm.dpm.dyn_state.cac_tdp_table->tdp = le16_to_cpu(pt->usTDP);
			adev->pm.dpm.dyn_state.cac_tdp_table->configurable_tdp =
				le16_to_cpu(pt->usConfigurableTDP);
			adev->pm.dpm.dyn_state.cac_tdp_table->tdc = le16_to_cpu(pt->usTDC);
			adev->pm.dpm.dyn_state.cac_tdp_table->battery_power_limit =
				le16_to_cpu(pt->usBatteryPowerLimit);
			adev->pm.dpm.dyn_state.cac_tdp_table->small_power_limit =
				le16_to_cpu(pt->usSmallPowerLimit);
			adev->pm.dpm.dyn_state.cac_tdp_table->low_cac_leakage =
				le16_to_cpu(pt->usLowCACLeakage);
			adev->pm.dpm.dyn_state.cac_tdp_table->high_cac_leakage =
				le16_to_cpu(pt->usHighCACLeakage);
		}
		if ((le16_to_cpu(ext_hdr->usSize) >= SIZE_OF_ATOM_PPLIB_EXTENDEDHEADER_V8) &&
				ext_hdr->usSclkVddgfxTableOffset) {
			dep_table = (ATOM_PPLIB_Clock_Voltage_Dependency_Table *)
				(mode_info->atom_context->bios + data_offset +
				 le16_to_cpu(ext_hdr->usSclkVddgfxTableOffset));
			ret = amdgpu_parse_clk_voltage_dep_table(
					&adev->pm.dpm.dyn_state.vddgfx_dependency_on_sclk,
					dep_table);
			if (ret) {
				kfree(adev->pm.dpm.dyn_state.vddgfx_dependency_on_sclk.entries);
				return ret;
			}
		}
	}

	return 0;
}

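/*
 * amdgpu_free_extended_power_table - release every table allocated by
 * amdgpu_parse_extended_power_table().  kfree() tolerates NULL pointers,
 * so this is safe to call even if parsing failed part way through.
 */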
void amdgpu_free_extended_power_table(struct amdgpu_device *adev)
{
	struct amdgpu_dpm_dynamic_state *dyn_state = &adev->pm.dpm.dyn_state;

	kfree(dyn_state->vddc_dependency_on_sclk.entries);
	kfree(dyn_state->vddci_dependency_on_mclk.entries);
	kfree(dyn_state->vddc_dependency_on_mclk.entries);
	kfree(dyn_state->mvdd_dependency_on_mclk.entries);
	kfree(dyn_state->cac_leakage_table.entries);
	kfree(dyn_state->phase_shedding_limits_table.entries);
	kfree(dyn_state->ppm_table);
	kfree(dyn_state->cac_tdp_table);
	kfree(dyn_state->vce_clock_voltage_dependency_table.entries);
	kfree(dyn_state->uvd_clock_voltage_dependency_table.entries);
	kfree(dyn_state->samu_clock_voltage_dependency_table.entries);
	kfree(dyn_state->acp_clock_voltage_dependency_table.entries);
	kfree(dyn_state->vddgfx_dependency_on_sclk.entries);
}

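/*
 * Human-readable names for the thermal controller types reported in the
 * PowerPlay table (indexed by ATOM_PP_THERMALCONTROLLER_*).
 */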
static const char *pp_lib_thermal_controller_names[] = {
	"NONE",
	"lm63",
	"adm1032",
	"adm1030",
	"max6649",
	"lm64",
	"f75375",
	"RV6xx",
	"RV770",
	"adt7473",
	"NONE",
	"External GPIO",
	"Evergreen",
	"emc2103",
	"Sumo",
	"Northern Islands",
	"Southern Islands",
	"lm96163",
	"Sea Islands",
	"Kaveri/Kabini",
};

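/*
 * amdgpu_add_thermal_controller - record the thermal controller described
 * in the PowerPlay table: whether a fan is present, the tachometer
 * parameters (pulses per revolution, min/max RPM) and the internal thermal
 * sensor type; for externally attached fan/thermal chips the matching i2c
 * bus is registered as well.
 */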
void amdgpu_add_thermal_controller(struct amdgpu_device *adev)
{
	struct amdgpu_mode_info *mode_info = &adev->mode_info;
	ATOM_PPLIB_POWERPLAYTABLE *power_table;
	int index = GetIndexIntoMasterTable(DATA, PowerPlayInfo);
	ATOM_PPLIB_THERMALCONTROLLER *controller;
	struct amdgpu_i2c_bus_rec i2c_bus;
	u16 data_offset;
	u8 frev, crev;

	if (!amdgpu_atom_parse_data_header(mode_info->atom_context, index, NULL,
				   &frev, &crev, &data_offset))
		return;
	power_table = (ATOM_PPLIB_POWERPLAYTABLE *)
		(mode_info->atom_context->bios + data_offset);
	controller = &power_table->sThermalController;

	/* add the i2c bus for thermal/fan chip */
	if (controller->ucType > 0) {
		if (controller->ucFanParameters & ATOM_PP_FANPARAMETERS_NOFAN)
			adev->pm.no_fan = true;
		adev->pm.fan_pulses_per_revolution =
			controller->ucFanParameters & ATOM_PP_FANPARAMETERS_TACHOMETER_PULSES_PER_REVOLUTION_MASK;
		if (adev->pm.fan_pulses_per_revolution) {
			adev->pm.fan_min_rpm = controller->ucFanMinRPM;
			adev->pm.fan_max_rpm = controller->ucFanMaxRPM;
		}
		if (controller->ucType == ATOM_PP_THERMALCONTROLLER_RV6xx) {
			DRM_INFO("Internal thermal controller %s fan control\n",
				 (controller->ucFanParameters &
				  ATOM_PP_FANPARAMETERS_NOFAN) ? "without" : "with");
			adev->pm.int_thermal_type = THERMAL_TYPE_RV6XX;
		} else if (controller->ucType == ATOM_PP_THERMALCONTROLLER_RV770) {
			DRM_INFO("Internal thermal controller %s fan control\n",
				 (controller->ucFanParameters &
				  ATOM_PP_FANPARAMETERS_NOFAN) ? "without" : "with");
			adev->pm.int_thermal_type = THERMAL_TYPE_RV770;
		} else if (controller->ucType == ATOM_PP_THERMALCONTROLLER_EVERGREEN) {
			DRM_INFO("Internal thermal controller %s fan control\n",
				 (controller->ucFanParameters &
				  ATOM_PP_FANPARAMETERS_NOFAN) ? "without" : "with");
			adev->pm.int_thermal_type = THERMAL_TYPE_EVERGREEN;
		} else if (controller->ucType == ATOM_PP_THERMALCONTROLLER_SUMO) {
			DRM_INFO("Internal thermal controller %s fan control\n",
				 (controller->ucFanParameters &
				  ATOM_PP_FANPARAMETERS_NOFAN) ? "without" : "with");
			adev->pm.int_thermal_type = THERMAL_TYPE_SUMO;
		} else if (controller->ucType == ATOM_PP_THERMALCONTROLLER_NISLANDS) {
			DRM_INFO("Internal thermal controller %s fan control\n",
				 (controller->ucFanParameters &
				  ATOM_PP_FANPARAMETERS_NOFAN) ? "without" : "with");
			adev->pm.int_thermal_type = THERMAL_TYPE_NI;
		} else if (controller->ucType == ATOM_PP_THERMALCONTROLLER_SISLANDS) {
			DRM_INFO("Internal thermal controller %s fan control\n",
				 (controller->ucFanParameters &
				  ATOM_PP_FANPARAMETERS_NOFAN) ? "without" : "with");
			adev->pm.int_thermal_type = THERMAL_TYPE_SI;
		} else if (controller->ucType == ATOM_PP_THERMALCONTROLLER_CISLANDS) {
			DRM_INFO("Internal thermal controller %s fan control\n",
				 (controller->ucFanParameters &
				  ATOM_PP_FANPARAMETERS_NOFAN) ? "without" : "with");
			adev->pm.int_thermal_type = THERMAL_TYPE_CI;
		} else if (controller->ucType == ATOM_PP_THERMALCONTROLLER_KAVERI) {
			DRM_INFO("Internal thermal controller %s fan control\n",
				 (controller->ucFanParameters &
				  ATOM_PP_FANPARAMETERS_NOFAN) ? "without" : "with");
			adev->pm.int_thermal_type = THERMAL_TYPE_KV;
		} else if (controller->ucType == ATOM_PP_THERMALCONTROLLER_EXTERNAL_GPIO) {
			DRM_INFO("External GPIO thermal controller %s fan control\n",
				 (controller->ucFanParameters &
				  ATOM_PP_FANPARAMETERS_NOFAN) ? "without" : "with");
888*b843c749SSergey Zigachev 			adev->pm.int_thermal_type = THERMAL_TYPE_EXTERNAL_GPIO;
889*b843c749SSergey Zigachev 		} else if (controller->ucType ==
890*b843c749SSergey Zigachev 			   ATOM_PP_THERMALCONTROLLER_ADT7473_WITH_INTERNAL) {
891*b843c749SSergey Zigachev 			DRM_INFO("ADT7473 with internal thermal controller %s fan control\n",
892*b843c749SSergey Zigachev 				 (controller->ucFanParameters &
893*b843c749SSergey Zigachev 				  ATOM_PP_FANPARAMETERS_NOFAN) ? "without" : "with");
894*b843c749SSergey Zigachev 			adev->pm.int_thermal_type = THERMAL_TYPE_ADT7473_WITH_INTERNAL;
895*b843c749SSergey Zigachev 		} else if (controller->ucType ==
896*b843c749SSergey Zigachev 			   ATOM_PP_THERMALCONTROLLER_EMC2103_WITH_INTERNAL) {
897*b843c749SSergey Zigachev 			DRM_INFO("EMC2103 with internal thermal controller %s fan control\n",
898*b843c749SSergey Zigachev 				 (controller->ucFanParameters &
899*b843c749SSergey Zigachev 				  ATOM_PP_FANPARAMETERS_NOFAN) ? "without" : "with");
900*b843c749SSergey Zigachev 			adev->pm.int_thermal_type = THERMAL_TYPE_EMC2103_WITH_INTERNAL;
901*b843c749SSergey Zigachev 		} else if (controller->ucType < ARRAY_SIZE(pp_lib_thermal_controller_names)) {
902*b843c749SSergey Zigachev 			DRM_INFO("Possible %s thermal controller at 0x%02x %s fan control\n",
903*b843c749SSergey Zigachev 				 pp_lib_thermal_controller_names[controller->ucType],
904*b843c749SSergey Zigachev 				 controller->ucI2cAddress >> 1,
905*b843c749SSergey Zigachev 				 (controller->ucFanParameters &
906*b843c749SSergey Zigachev 				  ATOM_PP_FANPARAMETERS_NOFAN) ? "without" : "with");
907*b843c749SSergey Zigachev 			adev->pm.int_thermal_type = THERMAL_TYPE_EXTERNAL;
908*b843c749SSergey Zigachev 			i2c_bus = amdgpu_atombios_lookup_i2c_gpio(adev, controller->ucI2cLine);
909*b843c749SSergey Zigachev 			adev->pm.i2c_bus = amdgpu_i2c_lookup(adev, &i2c_bus);
910*b843c749SSergey Zigachev 			if (adev->pm.i2c_bus) {
911*b843c749SSergey Zigachev 				struct i2c_board_info info = { };
912*b843c749SSergey Zigachev 				const char *name = pp_lib_thermal_controller_names[controller->ucType];
913*b843c749SSergey Zigachev 				info.addr = controller->ucI2cAddress >> 1;
914*b843c749SSergey Zigachev 				strlcpy(info.type, name, sizeof(info.type));
915*b843c749SSergey Zigachev 				i2c_new_device(&adev->pm.i2c_bus->adapter, &info);
916*b843c749SSergey Zigachev 			}
917*b843c749SSergey Zigachev 		} else {
918*b843c749SSergey Zigachev 			DRM_INFO("Unknown thermal controller type %d at 0x%02x %s fan control\n",
919*b843c749SSergey Zigachev 				 controller->ucType,
920*b843c749SSergey Zigachev 				 controller->ucI2cAddress >> 1,
921*b843c749SSergey Zigachev 				 (controller->ucFanParameters &
922*b843c749SSergey Zigachev 				  ATOM_PP_FANPARAMETERS_NOFAN) ? "without" : "with");
923*b843c749SSergey Zigachev 		}
924*b843c749SSergey Zigachev 	}
925*b843c749SSergey Zigachev }
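/*
 * Hedged usage sketch: a dpm backend would typically register the thermal
 * controller while parsing its power-play table.  The caller below is
 * illustrative only, not a function from this file:
 *
 *	static int example_parse_power_table(struct amdgpu_device *adev)
 *	{
 *		amdgpu_add_thermal_controller(adev);
 *		... parse states, clock dependency tables, etc. ...
 *		return 0;
 *	}
 */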
926*b843c749SSergey Zigachev 
927*b843c749SSergey Zigachev enum amdgpu_pcie_gen amdgpu_get_pcie_gen_support(struct amdgpu_device *adev,
928*b843c749SSergey Zigachev 						 u32 sys_mask,
929*b843c749SSergey Zigachev 						 enum amdgpu_pcie_gen asic_gen,
930*b843c749SSergey Zigachev 						 enum amdgpu_pcie_gen default_gen)
931*b843c749SSergey Zigachev {
932*b843c749SSergey Zigachev 	switch (asic_gen) {
933*b843c749SSergey Zigachev 	case AMDGPU_PCIE_GEN1:
934*b843c749SSergey Zigachev 		return AMDGPU_PCIE_GEN1;
935*b843c749SSergey Zigachev 	case AMDGPU_PCIE_GEN2:
936*b843c749SSergey Zigachev 		return AMDGPU_PCIE_GEN2;
937*b843c749SSergey Zigachev 	case AMDGPU_PCIE_GEN3:
938*b843c749SSergey Zigachev 		return AMDGPU_PCIE_GEN3;
939*b843c749SSergey Zigachev 	default:
940*b843c749SSergey Zigachev 		if ((sys_mask & CAIL_PCIE_LINK_SPEED_SUPPORT_GEN3) &&
941*b843c749SSergey Zigachev 		    (default_gen == AMDGPU_PCIE_GEN3))
942*b843c749SSergey Zigachev 			return AMDGPU_PCIE_GEN3;
943*b843c749SSergey Zigachev 		else if ((sys_mask & CAIL_PCIE_LINK_SPEED_SUPPORT_GEN2) &&
944*b843c749SSergey Zigachev 			 (default_gen == AMDGPU_PCIE_GEN2))
945*b843c749SSergey Zigachev 			return AMDGPU_PCIE_GEN2;
946*b843c749SSergey Zigachev 		else
947*b843c749SSergey Zigachev 			return AMDGPU_PCIE_GEN1;
948*b843c749SSergey Zigachev 	}
949*b843c749SSergey Zigachev 	return AMDGPU_PCIE_GEN1;
950*b843c749SSergey Zigachev }
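/*
 * Hedged note: a specific gen requested by the ASIC power state is trusted
 * as-is; sys_mask/default_gen are only consulted in the default case (e.g.
 * AMDGPU_PCIE_GEN_INVALID), where default_gen is returned if sys_mask
 * reports support for it and GEN1 otherwise.  Illustrative caller (the
 * field names are assumptions, not from this file):
 *
 *	pl->pcie_gen = amdgpu_get_pcie_gen_support(adev,
 *						   pi->sys_pcie_mask,
 *						   pl->pcie_gen,
 *						   pi->boot_pcie_gen);
 */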
951*b843c749SSergey Zigachev 
952*b843c749SSergey Zigachev u16 amdgpu_get_pcie_lane_support(struct amdgpu_device *adev,
953*b843c749SSergey Zigachev 				 u16 asic_lanes,
954*b843c749SSergey Zigachev 				 u16 default_lanes)
955*b843c749SSergey Zigachev {
956*b843c749SSergey Zigachev 	switch (asic_lanes) {
957*b843c749SSergey Zigachev 	case 0:
958*b843c749SSergey Zigachev 	default:
959*b843c749SSergey Zigachev 		return default_lanes;
960*b843c749SSergey Zigachev 	case 1:
961*b843c749SSergey Zigachev 		return 1;
962*b843c749SSergey Zigachev 	case 2:
963*b843c749SSergey Zigachev 		return 2;
964*b843c749SSergey Zigachev 	case 4:
965*b843c749SSergey Zigachev 		return 4;
966*b843c749SSergey Zigachev 	case 8:
967*b843c749SSergey Zigachev 		return 8;
968*b843c749SSergey Zigachev 	case 12:
969*b843c749SSergey Zigachev 		return 12;
970*b843c749SSergey Zigachev 	case 16:
971*b843c749SSergey Zigachev 		return 16;
972*b843c749SSergey Zigachev 	}
973*b843c749SSergey Zigachev }
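/*
 * Hedged note: only the standard PCIe link widths (x1/x2/x4/x8/x12/x16)
 * are passed through unchanged; zero or any non-standard width collapses
 * to default_lanes.  Illustrative call (the power-state field name is an
 * assumption):
 *
 *	u16 lanes = amdgpu_get_pcie_lane_support(adev, pl->pcie_lane, 16);
 */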
974*b843c749SSergey Zigachev 
975*b843c749SSergey Zigachev u8 amdgpu_encode_pci_lane_width(u32 lanes)
976*b843c749SSergey Zigachev {
977*b843c749SSergey Zigachev 	u8 encoded_lanes[] = { 0, 1, 2, 0, 3, 0, 0, 0, 4, 0, 0, 0, 5, 0, 0, 0, 6 };
978*b843c749SSergey Zigachev 
979*b843c749SSergey Zigachev 	if (lanes > 16)
980*b843c749SSergey Zigachev 		return 0;
981*b843c749SSergey Zigachev 
982*b843c749SSergey Zigachev 	return encoded_lanes[lanes];
983*b843c749SSergey Zigachev }
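/*
 * The lookup table above converts a raw lane count into the compact
 * encoding consumed elsewhere in the dpm code: 1->1, 2->2, 4->3, 8->4,
 * 12->5, 16->6, with every unsupported width (including anything above
 * 16) encoding to 0.  Hedged example:
 *
 *	u8 enc = amdgpu_encode_pci_lane_width(8);	(enc == 4)
 */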
984*b843c749SSergey Zigachev 
985*b843c749SSergey Zigachev struct amd_vce_state*
986*b843c749SSergey Zigachev amdgpu_get_vce_clock_state(void *handle, u32 idx)
987*b843c749SSergey Zigachev {
988*b843c749SSergey Zigachev 	struct amdgpu_device *adev = (struct amdgpu_device *)handle;
989*b843c749SSergey Zigachev 
990*b843c749SSergey Zigachev 	if (idx < adev->pm.dpm.num_of_vce_states)
991*b843c749SSergey Zigachev 		return &adev->pm.dpm.vce_states[idx];
992*b843c749SSergey Zigachev 
993*b843c749SSergey Zigachev 	return NULL;
994*b843c749SSergey Zigachev }
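/*
 * Hedged usage sketch: callers can walk the VCE clock states by index
 * until NULL is returned.  The evclk field follows struct amd_vce_state;
 * treat the loop itself as illustrative:
 *
 *	struct amd_vce_state *state;
 *	u32 i = 0;
 *
 *	while ((state = amdgpu_get_vce_clock_state(adev, i++)) != NULL)
 *		DRM_INFO("vce state %u: evclk %u\n", i - 1, state->evclk);
 */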
995