xref: /dflybsd-src/sys/dev/drm/radeon/ni_dpm.c (revision 28637087e34e261812af71c17a10136d03b207b6)
1 /*
2  * Copyright 2012 Advanced Micro Devices, Inc.
3  *
4  * Permission is hereby granted, free of charge, to any person obtaining a
5  * copy of this software and associated documentation files (the "Software"),
6  * to deal in the Software without restriction, including without limitation
7  * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8  * and/or sell copies of the Software, and to permit persons to whom the
9  * Software is furnished to do so, subject to the following conditions:
10  *
11  * The above copyright notice and this permission notice shall be included in
12  * all copies or substantial portions of the Software.
13  *
14  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
17  * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
18  * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19  * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20  * OTHER DEALINGS IN THE SOFTWARE.
21  *
22  */
23 
24 #include <drm/drmP.h>
25 #include "radeon.h"
26 #include "radeon_asic.h"
27 #include "nid.h"
28 #include "r600_dpm.h"
29 #include "ni_dpm.h"
30 #include "atom.h"
31 #include <linux/math64.h>
32 #include <linux/seq_file.h>
33 
/*
 * MC arbiter register-set selectors (F0..F3); presumably indices used
 * when copying/selecting memory-controller timing sets -- confirm
 * against the NI register documentation.
 */
#define MC_CG_ARB_FREQ_F0           0x0a
#define MC_CG_ARB_FREQ_F1           0x0b
#define MC_CG_ARB_FREQ_F2           0x0c
#define MC_CG_ARB_FREQ_F3           0x0d

/* upper bound of SMC SRAM addresses touched by the driver -- TODO confirm */
#define SMC_RAM_END 0xC000
/*
 * CAC (capacitance/activity) weight table for Cayman XT boards.
 * Positional initializer: the entries must stay in the exact field
 * order of struct ni_cac_weights (declared in ni_dpm.h) -- confirm
 * individual field meanings against that header before editing.
 */
static const struct ni_cac_weights cac_weights_cayman_xt =
{
	0x15,
	0x2,
	0x19,
	0x2,
	0x8,
	0x14,
	0x2,
	0x16,
	0xE,
	0x17,
	0x13,
	0x2B,
	0x10,
	0x7,
	0x5,
	0x5,
	0x5,
	0x2,
	0x3,
	0x9,
	0x10,
	0x10,
	0x2B,
	0xA,
	0x9,
	0x4,
	0xD,
	0xD,
	0x3E,
	0x18,
	0x14,
	0,
	0x3,
	0x3,
	0x5,
	0,
	0x2,
	0,
	0,
	0,
	0,
	0,
	0,
	0,
	0,
	0,
	0x1CC,
	0,
	0x164,
	1,
	1,
	1,
	1,
	12,
	12,
	12,
	0x12,
	0x1F,
	132,
	5,
	7,
	0,
	{ 0, 0, 0, 0, 0, 0, 0, 0 },
	{ 0, 0, 0, 0 },
	true
};
109 
/*
 * CAC weight table for Cayman PRO boards.  Positional initializer --
 * entries must match the field order of struct ni_cac_weights
 * (ni_dpm.h); see that header for field meanings.
 */
static const struct ni_cac_weights cac_weights_cayman_pro =
{
	0x16,
	0x4,
	0x10,
	0x2,
	0xA,
	0x16,
	0x2,
	0x18,
	0x10,
	0x1A,
	0x16,
	0x2D,
	0x12,
	0xA,
	0x6,
	0x6,
	0x6,
	0x2,
	0x4,
	0xB,
	0x11,
	0x11,
	0x2D,
	0xC,
	0xC,
	0x7,
	0x10,
	0x10,
	0x3F,
	0x1A,
	0x16,
	0,
	0x7,
	0x4,
	0x6,
	1,
	0x2,
	0x1,
	0,
	0,
	0,
	0,
	0,
	0,
	0x30,
	0,
	0x1CF,
	0,
	0x166,
	1,
	1,
	1,
	1,
	12,
	12,
	12,
	0x15,
	0x1F,
	132,
	6,
	6,
	0,
	{ 0, 0, 0, 0, 0, 0, 0, 0 },
	{ 0, 0, 0, 0 },
	true
};
178 
/*
 * CAC weight table for Cayman LE boards.  Positional initializer --
 * entries must match the field order of struct ni_cac_weights
 * (ni_dpm.h); see that header for field meanings.
 */
static const struct ni_cac_weights cac_weights_cayman_le =
{
	0x7,
	0xE,
	0x1,
	0xA,
	0x1,
	0x3F,
	0x2,
	0x18,
	0x10,
	0x1A,
	0x1,
	0x3F,
	0x1,
	0xE,
	0x6,
	0x6,
	0x6,
	0x2,
	0x4,
	0x9,
	0x1A,
	0x1A,
	0x2C,
	0xA,
	0x11,
	0x8,
	0x19,
	0x19,
	0x1,
	0x1,
	0x1A,
	0,
	0x8,
	0x5,
	0x8,
	0x1,
	0x3,
	0x1,
	0,
	0,
	0,
	0,
	0,
	0,
	0x38,
	0x38,
	0x239,
	0x3,
	0x18A,
	1,
	1,
	1,
	1,
	12,
	12,
	12,
	0x15,
	0x22,
	132,
	6,
	6,
	0,
	{ 0, 0, 0, 0, 0, 0, 0, 0 },
	{ 0, 0, 0, 0 },
	true
};
247 
248 #define NISLANDS_MGCG_SEQUENCE  300
249 
/* Default coarse-grain clockgating (CGCG/CGLS) hardware sequence. */
static const u32 cayman_cgcg_cgls_default[] =
{
	/* Register,   Value,     Mask bits */
	0x000008f8, 0x00000010, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000011, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000012, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000013, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000014, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000015, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000016, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000017, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000018, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000019, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x0000001a, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x0000001b, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000020, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000021, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000022, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000023, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000024, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000025, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000026, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000027, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000028, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000029, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x0000002a, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x0000002b, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff
};
/* number of (register, value, mask) triples; fully parenthesized for safe expansion */
#define CAYMAN_CGCG_CGLS_DEFAULT_LENGTH (sizeof(cayman_cgcg_cgls_default) / (3 * sizeof(u32)))
302 
/* Sequence that disables coarse-grain clockgating (CGCG/CGLS). */
static const u32 cayman_cgcg_cgls_disable[] =
{
	/* Register,   Value,     Mask bits */
	0x000008f8, 0x00000010, 0xffffffff,
	0x000008fc, 0xffffffff, 0xffffffff,
	0x000008f8, 0x00000011, 0xffffffff,
	0x000008fc, 0xffffffff, 0xffffffff,
	0x000008f8, 0x00000012, 0xffffffff,
	0x000008fc, 0xffffffff, 0xffffffff,
	0x000008f8, 0x00000013, 0xffffffff,
	0x000008fc, 0xffffffff, 0xffffffff,
	0x000008f8, 0x00000014, 0xffffffff,
	0x000008fc, 0xffffffff, 0xffffffff,
	0x000008f8, 0x00000015, 0xffffffff,
	0x000008fc, 0xffffffff, 0xffffffff,
	0x000008f8, 0x00000016, 0xffffffff,
	0x000008fc, 0xffffffff, 0xffffffff,
	0x000008f8, 0x00000017, 0xffffffff,
	0x000008fc, 0xffffffff, 0xffffffff,
	0x000008f8, 0x00000018, 0xffffffff,
	0x000008fc, 0xffffffff, 0xffffffff,
	0x000008f8, 0x00000019, 0xffffffff,
	0x000008fc, 0xffffffff, 0xffffffff,
	0x000008f8, 0x0000001a, 0xffffffff,
	0x000008fc, 0xffffffff, 0xffffffff,
	0x000008f8, 0x0000001b, 0xffffffff,
	0x000008fc, 0xffffffff, 0xffffffff,
	0x000008f8, 0x00000020, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000021, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000022, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000023, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000024, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000025, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000026, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000027, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000028, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000029, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x0000002a, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x0000002b, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x00000644, 0x000f7902, 0x001f4180,
	0x00000644, 0x000f3802, 0x001f4180
};
/* number of (register, value, mask) triples; fully parenthesized for safe expansion */
#define CAYMAN_CGCG_CGLS_DISABLE_LENGTH (sizeof(cayman_cgcg_cgls_disable) / (3 * sizeof(u32)))
357 
/* Sequence that enables coarse-grain clockgating (CGCG/CGLS). */
static const u32 cayman_cgcg_cgls_enable[] =
{
	/* Register,   Value,     Mask bits */
	0x00000644, 0x000f7882, 0x001f4080,
	0x000008f8, 0x00000010, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000011, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000012, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000013, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000014, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000015, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000016, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000017, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000018, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000019, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x0000001a, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x0000001b, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000020, 0xffffffff,
	0x000008fc, 0xffffffff, 0xffffffff,
	0x000008f8, 0x00000021, 0xffffffff,
	0x000008fc, 0xffffffff, 0xffffffff,
	0x000008f8, 0x00000022, 0xffffffff,
	0x000008fc, 0xffffffff, 0xffffffff,
	0x000008f8, 0x00000023, 0xffffffff,
	0x000008fc, 0xffffffff, 0xffffffff,
	0x000008f8, 0x00000024, 0xffffffff,
	0x000008fc, 0xffffffff, 0xffffffff,
	0x000008f8, 0x00000025, 0xffffffff,
	0x000008fc, 0xffffffff, 0xffffffff,
	0x000008f8, 0x00000026, 0xffffffff,
	0x000008fc, 0xffffffff, 0xffffffff,
	0x000008f8, 0x00000027, 0xffffffff,
	0x000008fc, 0xffffffff, 0xffffffff,
	0x000008f8, 0x00000028, 0xffffffff,
	0x000008fc, 0xffffffff, 0xffffffff,
	0x000008f8, 0x00000029, 0xffffffff,
	0x000008fc, 0xffffffff, 0xffffffff,
	0x000008f8, 0x0000002a, 0xffffffff,
	0x000008fc, 0xffffffff, 0xffffffff,
	0x000008f8, 0x0000002b, 0xffffffff,
	0x000008fc, 0xffffffff, 0xffffffff
};
/* number of (register, value, mask) triples; fully parenthesized for safe expansion */
#define CAYMAN_CGCG_CGLS_ENABLE_LENGTH  (sizeof(cayman_cgcg_cgls_enable) / (3 * sizeof(u32)))
411 
/*
 * Default medium-grain clockgating (MGCG) hardware sequence.
 * NOTE(review): the repeated 0x0000802c/0x00003fc4 writes appear to
 * select an instance/bank before each run of 0x91xx writes -- confirm
 * against the NI register documentation before modifying.
 */
static const u32 cayman_mgcg_default[] =
{
	/* Register,   Value,     Mask bits */
	0x0000802c, 0xc0000000, 0xffffffff,
	0x00003fc4, 0xc0000000, 0xffffffff,
	0x00005448, 0x00000100, 0xffffffff,
	0x000055e4, 0x00000100, 0xffffffff,
	0x0000160c, 0x00000100, 0xffffffff,
	0x00008984, 0x06000100, 0xffffffff,
	0x0000c164, 0x00000100, 0xffffffff,
	0x00008a18, 0x00000100, 0xffffffff,
	0x0000897c, 0x06000100, 0xffffffff,
	0x00008b28, 0x00000100, 0xffffffff,
	0x00009144, 0x00800200, 0xffffffff,
	0x00009a60, 0x00000100, 0xffffffff,
	0x00009868, 0x00000100, 0xffffffff,
	0x00008d58, 0x00000100, 0xffffffff,
	0x00009510, 0x00000100, 0xffffffff,
	0x0000949c, 0x00000100, 0xffffffff,
	0x00009654, 0x00000100, 0xffffffff,
	0x00009030, 0x00000100, 0xffffffff,
	0x00009034, 0x00000100, 0xffffffff,
	0x00009038, 0x00000100, 0xffffffff,
	0x0000903c, 0x00000100, 0xffffffff,
	0x00009040, 0x00000100, 0xffffffff,
	0x0000a200, 0x00000100, 0xffffffff,
	0x0000a204, 0x00000100, 0xffffffff,
	0x0000a208, 0x00000100, 0xffffffff,
	0x0000a20c, 0x00000100, 0xffffffff,
	0x00009744, 0x00000100, 0xffffffff,
	0x00003f80, 0x00000100, 0xffffffff,
	0x0000a210, 0x00000100, 0xffffffff,
	0x0000a214, 0x00000100, 0xffffffff,
	0x000004d8, 0x00000100, 0xffffffff,
	0x00009664, 0x00000100, 0xffffffff,
	0x00009698, 0x00000100, 0xffffffff,
	0x000004d4, 0x00000200, 0xffffffff,
	0x000004d0, 0x00000000, 0xffffffff,
	0x000030cc, 0x00000104, 0xffffffff,
	0x0000d0c0, 0x00000100, 0xffffffff,
	0x0000d8c0, 0x00000100, 0xffffffff,
	0x0000802c, 0x40000000, 0xffffffff,
	0x00003fc4, 0x40000000, 0xffffffff,
	0x0000915c, 0x00010000, 0xffffffff,
	0x00009160, 0x00030002, 0xffffffff,
	0x00009164, 0x00050004, 0xffffffff,
	0x00009168, 0x00070006, 0xffffffff,
	0x00009178, 0x00070000, 0xffffffff,
	0x0000917c, 0x00030002, 0xffffffff,
	0x00009180, 0x00050004, 0xffffffff,
	0x0000918c, 0x00010006, 0xffffffff,
	0x00009190, 0x00090008, 0xffffffff,
	0x00009194, 0x00070000, 0xffffffff,
	0x00009198, 0x00030002, 0xffffffff,
	0x0000919c, 0x00050004, 0xffffffff,
	0x000091a8, 0x00010006, 0xffffffff,
	0x000091ac, 0x00090008, 0xffffffff,
	0x000091b0, 0x00070000, 0xffffffff,
	0x000091b4, 0x00030002, 0xffffffff,
	0x000091b8, 0x00050004, 0xffffffff,
	0x000091c4, 0x00010006, 0xffffffff,
	0x000091c8, 0x00090008, 0xffffffff,
	0x000091cc, 0x00070000, 0xffffffff,
	0x000091d0, 0x00030002, 0xffffffff,
	0x000091d4, 0x00050004, 0xffffffff,
	0x000091e0, 0x00010006, 0xffffffff,
	0x000091e4, 0x00090008, 0xffffffff,
	0x000091e8, 0x00000000, 0xffffffff,
	0x000091ec, 0x00070000, 0xffffffff,
	0x000091f0, 0x00030002, 0xffffffff,
	0x000091f4, 0x00050004, 0xffffffff,
	0x00009200, 0x00010006, 0xffffffff,
	0x00009204, 0x00090008, 0xffffffff,
	0x00009208, 0x00070000, 0xffffffff,
	0x0000920c, 0x00030002, 0xffffffff,
	0x00009210, 0x00050004, 0xffffffff,
	0x0000921c, 0x00010006, 0xffffffff,
	0x00009220, 0x00090008, 0xffffffff,
	0x00009224, 0x00070000, 0xffffffff,
	0x00009228, 0x00030002, 0xffffffff,
	0x0000922c, 0x00050004, 0xffffffff,
	0x00009238, 0x00010006, 0xffffffff,
	0x0000923c, 0x00090008, 0xffffffff,
	0x00009240, 0x00070000, 0xffffffff,
	0x00009244, 0x00030002, 0xffffffff,
	0x00009248, 0x00050004, 0xffffffff,
	0x00009254, 0x00010006, 0xffffffff,
	0x00009258, 0x00090008, 0xffffffff,
	0x0000925c, 0x00070000, 0xffffffff,
	0x00009260, 0x00030002, 0xffffffff,
	0x00009264, 0x00050004, 0xffffffff,
	0x00009270, 0x00010006, 0xffffffff,
	0x00009274, 0x00090008, 0xffffffff,
	0x00009278, 0x00070000, 0xffffffff,
	0x0000927c, 0x00030002, 0xffffffff,
	0x00009280, 0x00050004, 0xffffffff,
	0x0000928c, 0x00010006, 0xffffffff,
	0x00009290, 0x00090008, 0xffffffff,
	0x000092a8, 0x00070000, 0xffffffff,
	0x000092ac, 0x00030002, 0xffffffff,
	0x000092b0, 0x00050004, 0xffffffff,
	0x000092bc, 0x00010006, 0xffffffff,
	0x000092c0, 0x00090008, 0xffffffff,
	0x000092c4, 0x00070000, 0xffffffff,
	0x000092c8, 0x00030002, 0xffffffff,
	0x000092cc, 0x00050004, 0xffffffff,
	0x000092d8, 0x00010006, 0xffffffff,
	0x000092dc, 0x00090008, 0xffffffff,
	0x00009294, 0x00000000, 0xffffffff,
	0x0000802c, 0x40010000, 0xffffffff,
	0x00003fc4, 0x40010000, 0xffffffff,
	0x0000915c, 0x00010000, 0xffffffff,
	0x00009160, 0x00030002, 0xffffffff,
	0x00009164, 0x00050004, 0xffffffff,
	0x00009168, 0x00070006, 0xffffffff,
	0x00009178, 0x00070000, 0xffffffff,
	0x0000917c, 0x00030002, 0xffffffff,
	0x00009180, 0x00050004, 0xffffffff,
	0x0000918c, 0x00010006, 0xffffffff,
	0x00009190, 0x00090008, 0xffffffff,
	0x00009194, 0x00070000, 0xffffffff,
	0x00009198, 0x00030002, 0xffffffff,
	0x0000919c, 0x00050004, 0xffffffff,
	0x000091a8, 0x00010006, 0xffffffff,
	0x000091ac, 0x00090008, 0xffffffff,
	0x000091b0, 0x00070000, 0xffffffff,
	0x000091b4, 0x00030002, 0xffffffff,
	0x000091b8, 0x00050004, 0xffffffff,
	0x000091c4, 0x00010006, 0xffffffff,
	0x000091c8, 0x00090008, 0xffffffff,
	0x000091cc, 0x00070000, 0xffffffff,
	0x000091d0, 0x00030002, 0xffffffff,
	0x000091d4, 0x00050004, 0xffffffff,
	0x000091e0, 0x00010006, 0xffffffff,
	0x000091e4, 0x00090008, 0xffffffff,
	0x000091e8, 0x00000000, 0xffffffff,
	0x000091ec, 0x00070000, 0xffffffff,
	0x000091f0, 0x00030002, 0xffffffff,
	0x000091f4, 0x00050004, 0xffffffff,
	0x00009200, 0x00010006, 0xffffffff,
	0x00009204, 0x00090008, 0xffffffff,
	0x00009208, 0x00070000, 0xffffffff,
	0x0000920c, 0x00030002, 0xffffffff,
	0x00009210, 0x00050004, 0xffffffff,
	0x0000921c, 0x00010006, 0xffffffff,
	0x00009220, 0x00090008, 0xffffffff,
	0x00009224, 0x00070000, 0xffffffff,
	0x00009228, 0x00030002, 0xffffffff,
	0x0000922c, 0x00050004, 0xffffffff,
	0x00009238, 0x00010006, 0xffffffff,
	0x0000923c, 0x00090008, 0xffffffff,
	0x00009240, 0x00070000, 0xffffffff,
	0x00009244, 0x00030002, 0xffffffff,
	0x00009248, 0x00050004, 0xffffffff,
	0x00009254, 0x00010006, 0xffffffff,
	0x00009258, 0x00090008, 0xffffffff,
	0x0000925c, 0x00070000, 0xffffffff,
	0x00009260, 0x00030002, 0xffffffff,
	0x00009264, 0x00050004, 0xffffffff,
	0x00009270, 0x00010006, 0xffffffff,
	0x00009274, 0x00090008, 0xffffffff,
	0x00009278, 0x00070000, 0xffffffff,
	0x0000927c, 0x00030002, 0xffffffff,
	0x00009280, 0x00050004, 0xffffffff,
	0x0000928c, 0x00010006, 0xffffffff,
	0x00009290, 0x00090008, 0xffffffff,
	0x000092a8, 0x00070000, 0xffffffff,
	0x000092ac, 0x00030002, 0xffffffff,
	0x000092b0, 0x00050004, 0xffffffff,
	0x000092bc, 0x00010006, 0xffffffff,
	0x000092c0, 0x00090008, 0xffffffff,
	0x000092c4, 0x00070000, 0xffffffff,
	0x000092c8, 0x00030002, 0xffffffff,
	0x000092cc, 0x00050004, 0xffffffff,
	0x000092d8, 0x00010006, 0xffffffff,
	0x000092dc, 0x00090008, 0xffffffff,
	0x00009294, 0x00000000, 0xffffffff,
	0x0000802c, 0xc0000000, 0xffffffff,
	0x00003fc4, 0xc0000000, 0xffffffff,
	0x000008f8, 0x00000010, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000011, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000012, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000013, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000014, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000015, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000016, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000017, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000018, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000019, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x0000001a, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x0000001b, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff
};
/* number of (register, value, mask) triples; fully parenthesized for safe expansion */
#define CAYMAN_MGCG_DEFAULT_LENGTH (sizeof(cayman_mgcg_default) / (3 * sizeof(u32)))
616 
/* Sequence that disables medium-grain clockgating (MGCG). */
static const u32 cayman_mgcg_disable[] =
{
	/* Register,   Value,     Mask bits */
	0x0000802c, 0xc0000000, 0xffffffff,
	0x000008f8, 0x00000000, 0xffffffff,
	0x000008fc, 0xffffffff, 0xffffffff,
	0x000008f8, 0x00000001, 0xffffffff,
	0x000008fc, 0xffffffff, 0xffffffff,
	0x000008f8, 0x00000002, 0xffffffff,
	0x000008fc, 0xffffffff, 0xffffffff,
	0x000008f8, 0x00000003, 0xffffffff,
	0x000008fc, 0xffffffff, 0xffffffff,
	0x00009150, 0x00600000, 0xffffffff
};
/* number of (register, value, mask) triples; fully parenthesized for safe expansion */
#define CAYMAN_MGCG_DISABLE_LENGTH   (sizeof(cayman_mgcg_disable) / (3 * sizeof(u32)))
631 
/* Sequence that enables medium-grain clockgating (MGCG). */
static const u32 cayman_mgcg_enable[] =
{
	/* Register,   Value,     Mask bits */
	0x0000802c, 0xc0000000, 0xffffffff,
	0x000008f8, 0x00000000, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000001, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000002, 0xffffffff,
	0x000008fc, 0x00600000, 0xffffffff,
	0x000008f8, 0x00000003, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x00009150, 0x96944200, 0xffffffff
};
645 
/* number of (register, value, mask) triples; fully parenthesized for safe expansion */
#define CAYMAN_MGCG_ENABLE_LENGTH   (sizeof(cayman_mgcg_enable) / (3 * sizeof(u32)))
647 
648 #define NISLANDS_SYSLS_SEQUENCE  100
649 
/* Default system light-sleep (LS) register settings. */
static const u32 cayman_sysls_default[] =
{
	/* Register,   Value,     Mask bits */
	0x000055e8, 0x00000000, 0xffffffff,
	0x0000d0bc, 0x00000000, 0xffffffff,
	0x0000d8bc, 0x00000000, 0xffffffff,
	0x000015c0, 0x000c1401, 0xffffffff,
	0x0000264c, 0x000c0400, 0xffffffff,
	0x00002648, 0x000c0400, 0xffffffff,
	0x00002650, 0x000c0400, 0xffffffff,
	0x000020b8, 0x000c0400, 0xffffffff,
	0x000020bc, 0x000c0400, 0xffffffff,
	0x000020c0, 0x000c0c80, 0xffffffff,
	0x0000f4a0, 0x000000c0, 0xffffffff,
	0x0000f4a4, 0x00680fff, 0xffffffff,
	0x00002f50, 0x00000404, 0xffffffff,
	0x000004c8, 0x00000001, 0xffffffff,
	0x000064ec, 0x00000000, 0xffffffff,
	0x00000c7c, 0x00000000, 0xffffffff,
	0x00008dfc, 0x00000000, 0xffffffff
};
/* number of (register, value, mask) triples; fully parenthesized for safe expansion */
#define CAYMAN_SYSLS_DEFAULT_LENGTH (sizeof(cayman_sysls_default) / (3 * sizeof(u32)))
672 
/* Sequence that disables system light-sleep (LS). */
static const u32 cayman_sysls_disable[] =
{
	/* Register,   Value,     Mask bits */
	0x0000d0c0, 0x00000000, 0xffffffff,
	0x0000d8c0, 0x00000000, 0xffffffff,
	0x000055e8, 0x00000000, 0xffffffff,
	0x0000d0bc, 0x00000000, 0xffffffff,
	0x0000d8bc, 0x00000000, 0xffffffff,
	0x000015c0, 0x00041401, 0xffffffff,
	0x0000264c, 0x00040400, 0xffffffff,
	0x00002648, 0x00040400, 0xffffffff,
	0x00002650, 0x00040400, 0xffffffff,
	0x000020b8, 0x00040400, 0xffffffff,
	0x000020bc, 0x00040400, 0xffffffff,
	0x000020c0, 0x00040c80, 0xffffffff,
	0x0000f4a0, 0x000000c0, 0xffffffff,
	0x0000f4a4, 0x00680000, 0xffffffff,
	0x00002f50, 0x00000404, 0xffffffff,
	0x000004c8, 0x00000001, 0xffffffff,
	0x000064ec, 0x00007ffd, 0xffffffff,
	0x00000c7c, 0x0000ff00, 0xffffffff,
	0x00008dfc, 0x0000007f, 0xffffffff
};
/* number of (register, value, mask) triples; fully parenthesized for safe expansion */
#define CAYMAN_SYSLS_DISABLE_LENGTH (sizeof(cayman_sysls_disable) / (3 * sizeof(u32)))
697 
/* Sequence that enables system light-sleep (LS). */
static const u32 cayman_sysls_enable[] =
{
	/* Register,   Value,     Mask bits */
	0x000055e8, 0x00000001, 0xffffffff,
	0x0000d0bc, 0x00000100, 0xffffffff,
	0x0000d8bc, 0x00000100, 0xffffffff,
	0x000015c0, 0x000c1401, 0xffffffff,
	0x0000264c, 0x000c0400, 0xffffffff,
	0x00002648, 0x000c0400, 0xffffffff,
	0x00002650, 0x000c0400, 0xffffffff,
	0x000020b8, 0x000c0400, 0xffffffff,
	0x000020bc, 0x000c0400, 0xffffffff,
	0x000020c0, 0x000c0c80, 0xffffffff,
	0x0000f4a0, 0x000000c0, 0xffffffff,
	0x0000f4a4, 0x00680fff, 0xffffffff,
	0x00002f50, 0x00000903, 0xffffffff,
	0x000004c8, 0x00000000, 0xffffffff,
	0x000064ec, 0x00000000, 0xffffffff,
	0x00000c7c, 0x00000000, 0xffffffff,
	0x00008dfc, 0x00000000, 0xffffffff
};
/* number of (register, value, mask) triples; fully parenthesized for safe expansion */
#define CAYMAN_SYSLS_ENABLE_LENGTH (sizeof(cayman_sysls_enable) / (3 * sizeof(u32)))
720 
/*
 * Forward declarations.  The rv770/evergreen getters are presumably
 * implemented in their respective DPM files (rv770_dpm.c /
 * cypress_dpm.c) -- confirm; ni_get_pi/ni_get_ps are defined below.
 */
struct rv7xx_power_info *rv770_get_pi(struct radeon_device *rdev);
struct evergreen_power_info *evergreen_get_pi(struct radeon_device *rdev);
struct ni_power_info *ni_get_pi(struct radeon_device *rdev);
struct ni_ps *ni_get_ps(struct radeon_ps *rps);
void ni_dpm_reset_asic(struct radeon_device *rdev);
726 
727 struct ni_power_info *ni_get_pi(struct radeon_device *rdev)
728 {
729         struct ni_power_info *pi = rdev->pm.dpm.priv;
730 
731         return pi;
732 }
733 
734 struct ni_ps *ni_get_ps(struct radeon_ps *rps)
735 {
736 	struct ni_ps *ps = rps->ps_priv;
737 
738 	return ps;
739 }
740 
/*
 * Evaluate the leakage-power formula:
 *
 *	leakage = i_leakage * kt * kv * V
 *	kt = at * exp(bt * T),  kv = av * exp(bv * V)
 *
 * @coeff:    board leakage coefficients (at/bt/av/bv, milli-scaled)
 * @v:        voltage (milli-scaled; presumably mV -- confirm at callers)
 * @t:        temperature (milli-scaled)
 * @ileakage: leakage current (milli-scaled)
 * @leakage:  out: result, re-scaled by 1000 and truncated to integer
 *
 * All arithmetic uses the drm 32.32 fixed-point helpers; every input is
 * divided by 1000 to undo the milli-scaling.  The multiplication order
 * affects fixed-point truncation -- do not reorder.
 */
static void ni_calculate_leakage_for_v_and_t_formula(const struct ni_leakage_coeffients *coeff,
						     u16 v, s32 t,
						     u32 ileakage,
						     u32 *leakage)
{
	s64 kt, kv, leakage_w, i_leakage, vddc, temperature;

	/* convert milli-scaled integers to fixed point */
	i_leakage = div64_s64(drm_int2fixp(ileakage), 1000);
	vddc = div64_s64(drm_int2fixp(v), 1000);
	temperature = div64_s64(drm_int2fixp(t), 1000);

	/* kt = at * e^(bt * T), kv = av * e^(bv * V) */
	kt = drm_fixp_mul(div64_s64(drm_int2fixp(coeff->at), 1000),
			  drm_fixp_exp(drm_fixp_mul(div64_s64(drm_int2fixp(coeff->bt), 1000), temperature)));
	kv = drm_fixp_mul(div64_s64(drm_int2fixp(coeff->av), 1000),
			  drm_fixp_exp(drm_fixp_mul(div64_s64(drm_int2fixp(coeff->bv), 1000), vddc)));

	leakage_w = drm_fixp_mul(drm_fixp_mul(drm_fixp_mul(i_leakage, kt), kv), vddc);

	/* scale back up by 1000 before truncating to an integer */
	*leakage = drm_fixp2int(leakage_w * 1000);
}
761 
/*
 * ni_calculate_leakage_for_v_and_t - thin wrapper around the leakage
 * formula above.
 * @rdev: radeon device (unused here; kept for call-site symmetry)
 *
 * Remaining parameters and the result are passed straight through to
 * ni_calculate_leakage_for_v_and_t_formula().
 */
static void ni_calculate_leakage_for_v_and_t(struct radeon_device *rdev,
					     const struct ni_leakage_coeffients *coeff,
					     u16 v,
					     s32 t,
					     u32 i_leakage,
					     u32 *leakage)
{
	ni_calculate_leakage_for_v_and_t_formula(coeff, v, t, i_leakage, leakage);
}
771 
772 bool ni_dpm_vblank_too_short(struct radeon_device *rdev)
773 {
774 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
775 	u32 vblank_time = r600_dpm_get_vblank_time(rdev);
776 	/* we never hit the non-gddr5 limit so disable it */
777 	u32 switch_limit = pi->mem_gddr5 ? 450 : 0;
778 
779 	if (vblank_time < switch_limit)
780 		return true;
781 	else
782 		return false;
783 
784 }
785 
/*
 * ni_apply_state_adjust_rules - clamp and reconcile a requested power
 * state so every performance level is achievable on this board.
 * @rdev: radeon device
 * @rps:  generic power state; its NI-private levels are adjusted in place
 *
 * Applies, in order: mclk-switching restrictions (multiple CRTCs or a
 * short vblank), DC power limits when on battery, voltage-dependency
 * clock caps, low-state selection, per-level monotonicity, blacklist
 * clock skipping, clock-combination fixups, voltage dependency and
 * delta rules, and finally DC-compatibility / PCIe gen2 flags.
 */
static void ni_apply_state_adjust_rules(struct radeon_device *rdev,
					struct radeon_ps *rps)
{
	struct ni_ps *ps = ni_get_ps(rps);
	struct radeon_clock_and_voltage_limits *max_limits;
	bool disable_mclk_switching;
	u32 mclk, sclk;
	u16 vddc, vddci;
	u32 max_sclk_vddc, max_mclk_vddci, max_mclk_vddc;
	int i;

	/* mclk switching must be disabled with >1 active display or a
	 * vblank too short to hide the switch
	 */
	if ((rdev->pm.dpm.new_active_crtc_count > 1) ||
	    ni_dpm_vblank_too_short(rdev))
		disable_mclk_switching = true;
	else
		disable_mclk_switching = false;

	if (rdev->pm.dpm.ac_power)
		max_limits = &rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac;
	else
		max_limits = &rdev->pm.dpm.dyn_state.max_clock_voltage_on_dc;

	/* on battery, clamp every level to the DC limits */
	if (rdev->pm.dpm.ac_power == false) {
		for (i = 0; i < ps->performance_level_count; i++) {
			if (ps->performance_levels[i].mclk > max_limits->mclk)
				ps->performance_levels[i].mclk = max_limits->mclk;
			if (ps->performance_levels[i].sclk > max_limits->sclk)
				ps->performance_levels[i].sclk = max_limits->sclk;
			if (ps->performance_levels[i].vddc > max_limits->vddc)
				ps->performance_levels[i].vddc = max_limits->vddc;
			if (ps->performance_levels[i].vddci > max_limits->vddci)
				ps->performance_levels[i].vddci = max_limits->vddci;
		}
	}

	/* limit clocks to max supported clocks based on voltage dependency tables */
	btc_get_max_clock_from_voltage_dependency_table(&rdev->pm.dpm.dyn_state.vddc_dependency_on_sclk,
							&max_sclk_vddc);
	btc_get_max_clock_from_voltage_dependency_table(&rdev->pm.dpm.dyn_state.vddci_dependency_on_mclk,
							&max_mclk_vddci);
	btc_get_max_clock_from_voltage_dependency_table(&rdev->pm.dpm.dyn_state.vddc_dependency_on_mclk,
							&max_mclk_vddc);

	/* a cap of 0 means the dependency table imposed no limit */
	for (i = 0; i < ps->performance_level_count; i++) {
		if (max_sclk_vddc) {
			if (ps->performance_levels[i].sclk > max_sclk_vddc)
				ps->performance_levels[i].sclk = max_sclk_vddc;
		}
		if (max_mclk_vddci) {
			if (ps->performance_levels[i].mclk > max_mclk_vddci)
				ps->performance_levels[i].mclk = max_mclk_vddci;
		}
		if (max_mclk_vddc) {
			if (ps->performance_levels[i].mclk > max_mclk_vddc)
				ps->performance_levels[i].mclk = max_mclk_vddc;
		}
	}

	/* XXX validate the min clocks required for display */

	/* pick the low state's clocks; with mclk switching disabled, use
	 * the highest level's mclk/vddci so no switch is ever needed
	 */
	if (disable_mclk_switching) {
		mclk  = ps->performance_levels[ps->performance_level_count - 1].mclk;
		sclk = ps->performance_levels[0].sclk;
		vddc = ps->performance_levels[0].vddc;
		vddci = ps->performance_levels[ps->performance_level_count - 1].vddci;
	} else {
		sclk = ps->performance_levels[0].sclk;
		mclk = ps->performance_levels[0].mclk;
		vddc = ps->performance_levels[0].vddc;
		vddci = ps->performance_levels[0].vddci;
	}

	/* adjusted low state */
	ps->performance_levels[0].sclk = sclk;
	ps->performance_levels[0].mclk = mclk;
	ps->performance_levels[0].vddc = vddc;
	ps->performance_levels[0].vddci = vddci;

	btc_skip_blacklist_clocks(rdev, max_limits->sclk, max_limits->mclk,
				  &ps->performance_levels[0].sclk,
				  &ps->performance_levels[0].mclk);

	/* enforce non-decreasing sclk/vddc across levels */
	for (i = 1; i < ps->performance_level_count; i++) {
		if (ps->performance_levels[i].sclk < ps->performance_levels[i - 1].sclk)
			ps->performance_levels[i].sclk = ps->performance_levels[i - 1].sclk;
		if (ps->performance_levels[i].vddc < ps->performance_levels[i - 1].vddc)
			ps->performance_levels[i].vddc = ps->performance_levels[i - 1].vddc;
	}

	if (disable_mclk_switching) {
		/* force a single mclk/vddci (the maximum) for all levels */
		mclk = ps->performance_levels[0].mclk;
		for (i = 1; i < ps->performance_level_count; i++) {
			if (mclk < ps->performance_levels[i].mclk)
				mclk = ps->performance_levels[i].mclk;
		}
		for (i = 0; i < ps->performance_level_count; i++) {
			ps->performance_levels[i].mclk = mclk;
			ps->performance_levels[i].vddci = vddci;
		}
	} else {
		/* enforce non-decreasing mclk/vddci across levels */
		for (i = 1; i < ps->performance_level_count; i++) {
			if (ps->performance_levels[i].mclk < ps->performance_levels[i - 1].mclk)
				ps->performance_levels[i].mclk = ps->performance_levels[i - 1].mclk;
			if (ps->performance_levels[i].vddci < ps->performance_levels[i - 1].vddci)
				ps->performance_levels[i].vddci = ps->performance_levels[i - 1].vddci;
		}
	}

	for (i = 1; i < ps->performance_level_count; i++)
		btc_skip_blacklist_clocks(rdev, max_limits->sclk, max_limits->mclk,
					  &ps->performance_levels[i].sclk,
					  &ps->performance_levels[i].mclk);

	for (i = 0; i < ps->performance_level_count; i++)
		btc_adjust_clock_combinations(rdev, max_limits,
					      &ps->performance_levels[i]);

	/* raise voltages to whatever the dependency tables require for
	 * the chosen sclk/mclk/dispclk
	 */
	for (i = 0; i < ps->performance_level_count; i++) {
		btc_apply_voltage_dependency_rules(&rdev->pm.dpm.dyn_state.vddc_dependency_on_sclk,
						   ps->performance_levels[i].sclk,
						   max_limits->vddc,  &ps->performance_levels[i].vddc);
		btc_apply_voltage_dependency_rules(&rdev->pm.dpm.dyn_state.vddci_dependency_on_mclk,
						   ps->performance_levels[i].mclk,
						   max_limits->vddci, &ps->performance_levels[i].vddci);
		btc_apply_voltage_dependency_rules(&rdev->pm.dpm.dyn_state.vddc_dependency_on_mclk,
						   ps->performance_levels[i].mclk,
						   max_limits->vddc,  &ps->performance_levels[i].vddc);
		btc_apply_voltage_dependency_rules(&rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk,
						   rdev->clock.current_dispclk,
						   max_limits->vddc,  &ps->performance_levels[i].vddc);
	}

	for (i = 0; i < ps->performance_level_count; i++) {
		btc_apply_voltage_delta_rules(rdev,
					      max_limits->vddc, max_limits->vddci,
					      &ps->performance_levels[i].vddc,
					      &ps->performance_levels[i].vddci);
	}

	/* a state is DC-compatible only if every level fits the DC vddc
	 * limit; levels below the PCIe gen2 voltage lose the gen2 flag
	 */
	ps->dc_compatible = true;
	for (i = 0; i < ps->performance_level_count; i++) {
		if (ps->performance_levels[i].vddc > rdev->pm.dpm.dyn_state.max_clock_voltage_on_dc.vddc)
			ps->dc_compatible = false;

		if (ps->performance_levels[i].vddc < rdev->pm.dpm.dyn_state.min_vddc_for_pcie_gen2)
			ps->performance_levels[i].flags &= ~ATOM_PPLIB_R600_FLAGS_PCIEGEN2;
	}
}
934 
935 static void ni_cg_clockgating_default(struct radeon_device *rdev)
936 {
937 	u32 count;
938 	const u32 *ps = NULL;
939 
940 	ps = (const u32 *)&cayman_cgcg_cgls_default;
941 	count = CAYMAN_CGCG_CGLS_DEFAULT_LENGTH;
942 
943 	btc_program_mgcg_hw_sequence(rdev, ps, count);
944 }
945 
946 static void ni_gfx_clockgating_enable(struct radeon_device *rdev,
947 				      bool enable)
948 {
949 	u32 count;
950 	const u32 *ps = NULL;
951 
952 	if (enable) {
953 		ps = (const u32 *)&cayman_cgcg_cgls_enable;
954 		count = CAYMAN_CGCG_CGLS_ENABLE_LENGTH;
955 	} else {
956 		ps = (const u32 *)&cayman_cgcg_cgls_disable;
957 		count = CAYMAN_CGCG_CGLS_DISABLE_LENGTH;
958 	}
959 
960 	btc_program_mgcg_hw_sequence(rdev, ps, count);
961 }
962 
963 static void ni_mg_clockgating_default(struct radeon_device *rdev)
964 {
965 	u32 count;
966 	const u32 *ps = NULL;
967 
968 	ps = (const u32 *)&cayman_mgcg_default;
969 	count = CAYMAN_MGCG_DEFAULT_LENGTH;
970 
971 	btc_program_mgcg_hw_sequence(rdev, ps, count);
972 }
973 
974 static void ni_mg_clockgating_enable(struct radeon_device *rdev,
975 				     bool enable)
976 {
977 	u32 count;
978 	const u32 *ps = NULL;
979 
980 	if (enable) {
981 		ps = (const u32 *)&cayman_mgcg_enable;
982 		count = CAYMAN_MGCG_ENABLE_LENGTH;
983 	} else {
984 		ps = (const u32 *)&cayman_mgcg_disable;
985 		count = CAYMAN_MGCG_DISABLE_LENGTH;
986 	}
987 
988 	btc_program_mgcg_hw_sequence(rdev, ps, count);
989 }
990 
991 static void ni_ls_clockgating_default(struct radeon_device *rdev)
992 {
993 	u32 count;
994 	const u32 *ps = NULL;
995 
996 	ps = (const u32 *)&cayman_sysls_default;
997 	count = CAYMAN_SYSLS_DEFAULT_LENGTH;
998 
999 	btc_program_mgcg_hw_sequence(rdev, ps, count);
1000 }
1001 
1002 static void ni_ls_clockgating_enable(struct radeon_device *rdev,
1003 				     bool enable)
1004 {
1005 	u32 count;
1006 	const u32 *ps = NULL;
1007 
1008 	if (enable) {
1009 		ps = (const u32 *)&cayman_sysls_enable;
1010 		count = CAYMAN_SYSLS_ENABLE_LENGTH;
1011 	} else {
1012 		ps = (const u32 *)&cayman_sysls_disable;
1013 		count = CAYMAN_SYSLS_DISABLE_LENGTH;
1014 	}
1015 
1016 	btc_program_mgcg_hw_sequence(rdev, ps, count);
1017 
1018 }
1019 
1020 static int ni_patch_single_dependency_table_based_on_leakage(struct radeon_device *rdev,
1021 							     struct radeon_clock_voltage_dependency_table *table)
1022 {
1023 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1024 	u32 i;
1025 
1026 	if (table) {
1027 		for (i = 0; i < table->count; i++) {
1028 			if (0xff01 == table->entries[i].v) {
1029 				if (pi->max_vddc == 0)
1030 					return -EINVAL;
1031 				table->entries[i].v = pi->max_vddc;
1032 			}
1033 		}
1034 	}
1035 	return 0;
1036 }
1037 
1038 static int ni_patch_dependency_tables_based_on_leakage(struct radeon_device *rdev)
1039 {
1040 	int ret = 0;
1041 
1042 	ret = ni_patch_single_dependency_table_based_on_leakage(rdev,
1043 								&rdev->pm.dpm.dyn_state.vddc_dependency_on_sclk);
1044 
1045 	ret = ni_patch_single_dependency_table_based_on_leakage(rdev,
1046 								&rdev->pm.dpm.dyn_state.vddc_dependency_on_mclk);
1047 	return ret;
1048 }
1049 
/* Disable dynamic power management by clearing GLOBAL_PWRMGT_EN. */
static void ni_stop_dpm(struct radeon_device *rdev)
{
	WREG32_P(GENERAL_PWRMGT, 0, ~GLOBAL_PWRMGT_EN);
}
1054 
#if 0
/*
 * Notify the SMC that we are running on AC power (no message is sent
 * for battery).  Currently compiled out / unused.
 */
static int ni_notify_hw_of_power_source(struct radeon_device *rdev,
					bool ac_power)
{
	if (ac_power)
		return (rv770_send_msg_to_smc(rdev, PPSMC_MSG_RunningOnAC) == PPSMC_Result_OK) ?
			0 : -EINVAL;

	return 0;
}
#endif
1066 
/*
 * Send a message to the SMC with a parameter.  The parameter is passed
 * out-of-band through the SMC_SCRATCH0 register before the message is
 * issued via the common r7xx path.
 */
static PPSMC_Result ni_send_msg_to_smc_with_parameter(struct radeon_device *rdev,
						      PPSMC_Msg msg, u32 parameter)
{
	WREG32(SMC_SCRATCH0, parameter);
	return rv770_send_msg_to_smc(rdev, msg);
}
1073 
1074 static int ni_restrict_performance_levels_before_switch(struct radeon_device *rdev)
1075 {
1076 	if (rv770_send_msg_to_smc(rdev, PPSMC_MSG_NoForcedLevel) != PPSMC_Result_OK)
1077 		return -EINVAL;
1078 
1079 	return (ni_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_SetEnabledLevels, 1) == PPSMC_Result_OK) ?
1080 		0 : -EINVAL;
1081 }
1082 
1083 int ni_dpm_force_performance_level(struct radeon_device *rdev,
1084 				   enum radeon_dpm_forced_level level)
1085 {
1086 	if (level == RADEON_DPM_FORCED_LEVEL_HIGH) {
1087 		if (ni_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_SetEnabledLevels, 0) != PPSMC_Result_OK)
1088 			return -EINVAL;
1089 
1090 		if (ni_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_SetForcedLevels, 1) != PPSMC_Result_OK)
1091 			return -EINVAL;
1092 	} else if (level == RADEON_DPM_FORCED_LEVEL_LOW) {
1093 		if (ni_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_SetForcedLevels, 0) != PPSMC_Result_OK)
1094 			return -EINVAL;
1095 
1096 		if (ni_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_SetEnabledLevels, 1) != PPSMC_Result_OK)
1097 			return -EINVAL;
1098 	} else if (level == RADEON_DPM_FORCED_LEVEL_AUTO) {
1099 		if (ni_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_SetForcedLevels, 0) != PPSMC_Result_OK)
1100 			return -EINVAL;
1101 
1102 		if (ni_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_SetEnabledLevels, 0) != PPSMC_Result_OK)
1103 			return -EINVAL;
1104 	}
1105 
1106 	rdev->pm.dpm.forced_level = level;
1107 
1108 	return 0;
1109 }
1110 
/*
 * Stop the SMC.  Wait up to rdev->usec_timeout microseconds for
 * LB_SYNC_RESET_SEL to leave state 1, give the hardware an extra 100us
 * to settle, then halt the SMC through the common r7xx path.
 */
static void ni_stop_smc(struct radeon_device *rdev)
{
	u32 tmp;
	int i;

	for (i = 0; i < rdev->usec_timeout; i++) {
		tmp = RREG32(LB_SYNC_RESET_SEL) & LB_SYNC_RESET_SEL_MASK;
		if (tmp != 1)
			break;
		udelay(1);
	}

	udelay(100);

	r7xx_stop_smc(rdev);
}
1127 
1128 static int ni_process_firmware_header(struct radeon_device *rdev)
1129 {
1130         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1131         struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
1132         struct ni_power_info *ni_pi = ni_get_pi(rdev);
1133 	u32 tmp;
1134 	int ret;
1135 
1136 	ret = rv770_read_smc_sram_dword(rdev,
1137 					NISLANDS_SMC_FIRMWARE_HEADER_LOCATION +
1138 					NISLANDS_SMC_FIRMWARE_HEADER_stateTable,
1139 					&tmp, pi->sram_end);
1140 
1141 	if (ret)
1142 		return ret;
1143 
1144 	pi->state_table_start = (u16)tmp;
1145 
1146 	ret = rv770_read_smc_sram_dword(rdev,
1147 					NISLANDS_SMC_FIRMWARE_HEADER_LOCATION +
1148 					NISLANDS_SMC_FIRMWARE_HEADER_softRegisters,
1149 					&tmp, pi->sram_end);
1150 
1151 	if (ret)
1152 		return ret;
1153 
1154 	pi->soft_regs_start = (u16)tmp;
1155 
1156 	ret = rv770_read_smc_sram_dword(rdev,
1157 					NISLANDS_SMC_FIRMWARE_HEADER_LOCATION +
1158 					NISLANDS_SMC_FIRMWARE_HEADER_mcRegisterTable,
1159 					&tmp, pi->sram_end);
1160 
1161 	if (ret)
1162 		return ret;
1163 
1164 	eg_pi->mc_reg_table_start = (u16)tmp;
1165 
1166 	ret = rv770_read_smc_sram_dword(rdev,
1167 					NISLANDS_SMC_FIRMWARE_HEADER_LOCATION +
1168 					NISLANDS_SMC_FIRMWARE_HEADER_fanTable,
1169 					&tmp, pi->sram_end);
1170 
1171 	if (ret)
1172 		return ret;
1173 
1174 	ni_pi->fan_table_start = (u16)tmp;
1175 
1176 	ret = rv770_read_smc_sram_dword(rdev,
1177 					NISLANDS_SMC_FIRMWARE_HEADER_LOCATION +
1178 					NISLANDS_SMC_FIRMWARE_HEADER_mcArbDramAutoRefreshTable,
1179 					&tmp, pi->sram_end);
1180 
1181 	if (ret)
1182 		return ret;
1183 
1184 	ni_pi->arb_table_start = (u16)tmp;
1185 
1186 	ret = rv770_read_smc_sram_dword(rdev,
1187 					NISLANDS_SMC_FIRMWARE_HEADER_LOCATION +
1188 					NISLANDS_SMC_FIRMWARE_HEADER_cacTable,
1189 					&tmp, pi->sram_end);
1190 
1191 	if (ret)
1192 		return ret;
1193 
1194 	ni_pi->cac_table_start = (u16)tmp;
1195 
1196 	ret = rv770_read_smc_sram_dword(rdev,
1197 					NISLANDS_SMC_FIRMWARE_HEADER_LOCATION +
1198 					NISLANDS_SMC_FIRMWARE_HEADER_spllTable,
1199 					&tmp, pi->sram_end);
1200 
1201 	if (ret)
1202 		return ret;
1203 
1204 	ni_pi->spll_table_start = (u16)tmp;
1205 
1206 
1207 	return ret;
1208 }
1209 
/*
 * Snapshot the boot-up SPLL/MPLL/MCLK clock registers into
 * ni_pi->clock_registers so later state-table construction can start
 * from the values programmed by the VBIOS.
 */
static void ni_read_clock_registers(struct radeon_device *rdev)
{
	struct ni_power_info *ni_pi = ni_get_pi(rdev);

	ni_pi->clock_registers.cg_spll_func_cntl = RREG32(CG_SPLL_FUNC_CNTL);
	ni_pi->clock_registers.cg_spll_func_cntl_2 = RREG32(CG_SPLL_FUNC_CNTL_2);
	ni_pi->clock_registers.cg_spll_func_cntl_3 = RREG32(CG_SPLL_FUNC_CNTL_3);
	ni_pi->clock_registers.cg_spll_func_cntl_4 = RREG32(CG_SPLL_FUNC_CNTL_4);
	ni_pi->clock_registers.cg_spll_spread_spectrum = RREG32(CG_SPLL_SPREAD_SPECTRUM);
	ni_pi->clock_registers.cg_spll_spread_spectrum_2 = RREG32(CG_SPLL_SPREAD_SPECTRUM_2);
	ni_pi->clock_registers.mpll_ad_func_cntl = RREG32(MPLL_AD_FUNC_CNTL);
	ni_pi->clock_registers.mpll_ad_func_cntl_2 = RREG32(MPLL_AD_FUNC_CNTL_2);
	ni_pi->clock_registers.mpll_dq_func_cntl = RREG32(MPLL_DQ_FUNC_CNTL);
	ni_pi->clock_registers.mpll_dq_func_cntl_2 = RREG32(MPLL_DQ_FUNC_CNTL_2);
	ni_pi->clock_registers.mclk_pwrmgt_cntl = RREG32(MCLK_PWRMGT_CNTL);
	ni_pi->clock_registers.dll_cntl = RREG32(DLL_CNTL);
	ni_pi->clock_registers.mpll_ss1 = RREG32(MPLL_SS1);
	ni_pi->clock_registers.mpll_ss2 = RREG32(MPLL_SS2);
}
1229 
#if 0
/*
 * Ask the SMC to switch to minimum power (ULP).  If GFX clock gating is
 * enabled, the GFX clock is briefly forced on first so the transition
 * does not race the gating logic.  Currently compiled out / unused.
 */
static int ni_enter_ulp_state(struct radeon_device *rdev)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);

	if (pi->gfx_clock_gating) {
                WREG32_P(SCLK_PWRMGT_CNTL, 0, ~DYN_GFX_CLK_OFF_EN);
		WREG32_P(SCLK_PWRMGT_CNTL, GFX_CLK_FORCE_ON, ~GFX_CLK_FORCE_ON);
                WREG32_P(SCLK_PWRMGT_CNTL, 0, ~GFX_CLK_FORCE_ON);
		/* posting read to flush the register writes */
		RREG32(GB_ADDR_CONFIG);
        }

	WREG32_P(SMC_MSG, HOST_SMC_MSG(PPSMC_MSG_SwitchToMinimumPower),
                 ~HOST_SMC_MSG_MASK);

	udelay(25000);

	return 0;
}
#endif
1250 
1251 static void ni_program_response_times(struct radeon_device *rdev)
1252 {
1253 	u32 voltage_response_time, backbias_response_time, acpi_delay_time, vbi_time_out;
1254 	u32 vddc_dly, bb_dly, acpi_dly, vbi_dly, mclk_switch_limit;
1255 	u32 reference_clock;
1256 
1257 	rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_mvdd_chg_time, 1);
1258 
1259 	voltage_response_time = (u32)rdev->pm.dpm.voltage_response_time;
1260 	backbias_response_time = (u32)rdev->pm.dpm.backbias_response_time;
1261 
1262 	if (voltage_response_time == 0)
1263 		voltage_response_time = 1000;
1264 
1265 	if (backbias_response_time == 0)
1266 		backbias_response_time = 1000;
1267 
1268 	acpi_delay_time = 15000;
1269 	vbi_time_out = 100000;
1270 
1271 	reference_clock = radeon_get_xclk(rdev);
1272 
1273 	vddc_dly = (voltage_response_time  * reference_clock) / 1600;
1274 	bb_dly   = (backbias_response_time * reference_clock) / 1600;
1275 	acpi_dly = (acpi_delay_time * reference_clock) / 1600;
1276 	vbi_dly  = (vbi_time_out * reference_clock) / 1600;
1277 
1278 	mclk_switch_limit = (460 * reference_clock) / 100;
1279 
1280 	rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_delay_vreg,  vddc_dly);
1281 	rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_delay_bbias, bb_dly);
1282 	rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_delay_acpi,  acpi_dly);
1283 	rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_mclk_chg_timeout, vbi_dly);
1284 	rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_mc_block_delay, 0xAA);
1285 	rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_mclk_switch_lim, mclk_switch_limit);
1286 }
1287 
1288 static void ni_populate_smc_voltage_table(struct radeon_device *rdev,
1289 					  struct atom_voltage_table *voltage_table,
1290 					  NISLANDS_SMC_STATETABLE *table)
1291 {
1292 	unsigned int i;
1293 
1294 	for (i = 0; i < voltage_table->count; i++) {
1295 		table->highSMIO[i] = 0;
1296 		table->lowSMIO[i] |= cpu_to_be32(voltage_table->entries[i].smio_low);
1297 	}
1298 }
1299 
1300 static void ni_populate_smc_voltage_tables(struct radeon_device *rdev,
1301 					   NISLANDS_SMC_STATETABLE *table)
1302 {
1303 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1304 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
1305 	unsigned char i;
1306 
1307 	if (eg_pi->vddc_voltage_table.count) {
1308 		ni_populate_smc_voltage_table(rdev, &eg_pi->vddc_voltage_table, table);
1309 		table->voltageMaskTable.highMask[NISLANDS_SMC_VOLTAGEMASK_VDDC] = 0;
1310 		table->voltageMaskTable.lowMask[NISLANDS_SMC_VOLTAGEMASK_VDDC] =
1311 			cpu_to_be32(eg_pi->vddc_voltage_table.mask_low);
1312 
1313 		for (i = 0; i < eg_pi->vddc_voltage_table.count; i++) {
1314 			if (pi->max_vddc_in_table <= eg_pi->vddc_voltage_table.entries[i].value) {
1315 				table->maxVDDCIndexInPPTable = i;
1316 				break;
1317 			}
1318 		}
1319 	}
1320 
1321 	if (eg_pi->vddci_voltage_table.count) {
1322 		ni_populate_smc_voltage_table(rdev, &eg_pi->vddci_voltage_table, table);
1323 
1324 		table->voltageMaskTable.highMask[NISLANDS_SMC_VOLTAGEMASK_VDDCI] = 0;
1325 		table->voltageMaskTable.lowMask[NISLANDS_SMC_VOLTAGEMASK_VDDCI] =
1326 			cpu_to_be32(eg_pi->vddc_voltage_table.mask_low);
1327 	}
1328 }
1329 
1330 static int ni_populate_voltage_value(struct radeon_device *rdev,
1331 				     struct atom_voltage_table *table,
1332 				     u16 value,
1333 				     NISLANDS_SMC_VOLTAGE_VALUE *voltage)
1334 {
1335 	unsigned int i;
1336 
1337 	for (i = 0; i < table->count; i++) {
1338 		if (value <= table->entries[i].value) {
1339 			voltage->index = (u8)i;
1340 			voltage->value = cpu_to_be16(table->entries[i].value);
1341 			break;
1342 		}
1343 	}
1344 
1345 	if (i >= table->count)
1346 		return -EINVAL;
1347 
1348 	return 0;
1349 }
1350 
1351 static void ni_populate_mvdd_value(struct radeon_device *rdev,
1352 				   u32 mclk,
1353 				   NISLANDS_SMC_VOLTAGE_VALUE *voltage)
1354 {
1355         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1356 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
1357 
1358 	if (!pi->mvdd_control) {
1359 		voltage->index = eg_pi->mvdd_high_index;
1360                 voltage->value = cpu_to_be16(MVDD_HIGH_VALUE);
1361 		return;
1362 	}
1363 
1364 	if (mclk <= pi->mvdd_split_frequency) {
1365 		voltage->index = eg_pi->mvdd_low_index;
1366 		voltage->value = cpu_to_be16(MVDD_LOW_VALUE);
1367 	} else {
1368 		voltage->index = eg_pi->mvdd_high_index;
1369 		voltage->value = cpu_to_be16(MVDD_HIGH_VALUE);
1370 	}
1371 }
1372 
1373 static int ni_get_std_voltage_value(struct radeon_device *rdev,
1374 				    NISLANDS_SMC_VOLTAGE_VALUE *voltage,
1375 				    u16 *std_voltage)
1376 {
1377 	if (rdev->pm.dpm.dyn_state.cac_leakage_table.entries &&
1378 	    ((u32)voltage->index < rdev->pm.dpm.dyn_state.cac_leakage_table.count))
1379 		*std_voltage = rdev->pm.dpm.dyn_state.cac_leakage_table.entries[voltage->index].vddc;
1380 	else
1381 		*std_voltage = be16_to_cpu(voltage->value);
1382 
1383 	return 0;
1384 }
1385 
/* Store a standard voltage (index + big-endian value) into an SMC entry. */
static void ni_populate_std_voltage_value(struct radeon_device *rdev,
					  u16 value, u8 index,
					  NISLANDS_SMC_VOLTAGE_VALUE *voltage)
{
	voltage->index = index;
	voltage->value = cpu_to_be16(value);
}
1393 
1394 static u32 ni_get_smc_power_scaling_factor(struct radeon_device *rdev)
1395 {
1396 	u32 xclk_period;
1397 	u32 xclk = radeon_get_xclk(rdev);
1398 	u32 tmp = RREG32(CG_CAC_CTRL) & TID_CNT_MASK;
1399 
1400 	xclk_period = (1000000000UL / xclk);
1401 	xclk_period /= 10000UL;
1402 
1403 	return tmp * xclk_period;
1404 }
1405 
/* Convert a power value in watts to the SMC's scaled unit (x4). */
static u32 ni_scale_power_for_smc(u32 power_in_watts, u32 scaling_factor)
{
	u32 scaled = power_in_watts * scaling_factor;

	return scaled * 4;
}
1410 
/*
 * Compute the power boost limit for DPM2 power containment:
 * near_tdp_limit scaled by (std_vddc_med / std_vddc_high)^2 * 0.9,
 * where "med" and "high" are the standard VDDC values of the second-
 * highest and highest performance levels.  Returns 0 (no boost) when
 * power containment/boost is disabled, the state has fewer than three
 * levels, any lookup fails, or the result overflows 32 bits.
 */
static u32 ni_calculate_power_boost_limit(struct radeon_device *rdev,
					  struct radeon_ps *radeon_state,
					  u32 near_tdp_limit)
{
	struct ni_ps *state = ni_get_ps(radeon_state);
	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
	struct ni_power_info *ni_pi = ni_get_pi(rdev);
	u32 power_boost_limit = 0;
	int ret;

	if (ni_pi->enable_power_containment &&
	    ni_pi->use_power_boost_limit) {
		NISLANDS_SMC_VOLTAGE_VALUE vddc;
		u16 std_vddc_med;
		u16 std_vddc_high;
		u64 tmp, n, d;

		/* need at least three levels to have a "med" level */
		if (state->performance_level_count < 3)
			return 0;

		ret = ni_populate_voltage_value(rdev, &eg_pi->vddc_voltage_table,
						state->performance_levels[state->performance_level_count - 2].vddc,
						&vddc);
		if (ret)
			return 0;

		ret = ni_get_std_voltage_value(rdev, &vddc, &std_vddc_med);
		if (ret)
			return 0;

		ret = ni_populate_voltage_value(rdev, &eg_pi->vddc_voltage_table,
						state->performance_levels[state->performance_level_count - 1].vddc,
						&vddc);
		if (ret)
			return 0;

		ret = ni_get_std_voltage_value(rdev, &vddc, &std_vddc_high);
		if (ret)
			return 0;

		/* limit * (v_med^2 * 90) / (v_high^2 * 100), in 64-bit math */
		n = ((u64)near_tdp_limit * ((u64)std_vddc_med * (u64)std_vddc_med) * 90);
		d = ((u64)std_vddc_high * (u64)std_vddc_high * 100);
		tmp = div64_u64(n, d);

		/* quotient must fit in 32 bits */
		if (tmp >> 32)
			return 0;
		power_boost_limit = (u32)tmp;
	}

	return power_boost_limit;
}
1462 
1463 static int ni_calculate_adjusted_tdp_limits(struct radeon_device *rdev,
1464 					    bool adjust_polarity,
1465 					    u32 tdp_adjustment,
1466 					    u32 *tdp_limit,
1467 					    u32 *near_tdp_limit)
1468 {
1469 	if (tdp_adjustment > (u32)rdev->pm.dpm.tdp_od_limit)
1470 		return -EINVAL;
1471 
1472 	if (adjust_polarity) {
1473 		*tdp_limit = ((100 + tdp_adjustment) * rdev->pm.dpm.tdp_limit) / 100;
1474 		*near_tdp_limit = rdev->pm.dpm.near_tdp_limit + (*tdp_limit - rdev->pm.dpm.tdp_limit);
1475 	} else {
1476 		*tdp_limit = ((100 - tdp_adjustment) * rdev->pm.dpm.tdp_limit) / 100;
1477 		*near_tdp_limit = rdev->pm.dpm.near_tdp_limit - (rdev->pm.dpm.tdp_limit - *tdp_limit);
1478 	}
1479 
1480 	return 0;
1481 }
1482 
/*
 * Compute the TDP / near-TDP / safe / boost power limits for DPM2 power
 * containment, scale them into SMC units, and upload them to the SMC's
 * dpm2Params block.  No-op (returns 0) when power containment is off.
 */
static int ni_populate_smc_tdp_limits(struct radeon_device *rdev,
				      struct radeon_ps *radeon_state)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	struct ni_power_info *ni_pi = ni_get_pi(rdev);

	if (ni_pi->enable_power_containment) {
		NISLANDS_SMC_STATETABLE *smc_table = &ni_pi->smc_statetable;
		u32 scaling_factor = ni_get_smc_power_scaling_factor(rdev);
		u32 tdp_limit;
		u32 near_tdp_limit;
		u32 power_boost_limit;
		int ret;

		/* a zero factor would make every scaled limit 0 */
		if (scaling_factor == 0)
			return -EINVAL;

		memset(smc_table, 0, sizeof(NISLANDS_SMC_STATETABLE));

		ret = ni_calculate_adjusted_tdp_limits(rdev,
						       false, /* ??? */
						       rdev->pm.dpm.tdp_adjustment,
						       &tdp_limit,
						       &near_tdp_limit);
		if (ret)
			return ret;

		power_boost_limit = ni_calculate_power_boost_limit(rdev, radeon_state,
								   near_tdp_limit);

		smc_table->dpm2Params.TDPLimit =
			cpu_to_be32(ni_scale_power_for_smc(tdp_limit, scaling_factor));
		smc_table->dpm2Params.NearTDPLimit =
			cpu_to_be32(ni_scale_power_for_smc(near_tdp_limit, scaling_factor));
		smc_table->dpm2Params.SafePowerLimit =
			cpu_to_be32(ni_scale_power_for_smc((near_tdp_limit * NISLANDS_DPM2_TDP_SAFE_LIMIT_PERCENT) / 100,
							   scaling_factor));
		smc_table->dpm2Params.PowerBoostLimit =
			cpu_to_be32(ni_scale_power_for_smc(power_boost_limit, scaling_factor));

		/* upload the four dwords just set (TDPLimit, NearTDPLimit,
		 * SafePowerLimit, PowerBoostLimit) in one copy — assumes they
		 * are laid out consecutively in PP_NIslands_DPM2Parameters */
		ret = rv770_copy_bytes_to_smc(rdev,
					      (u16)(pi->state_table_start + offsetof(NISLANDS_SMC_STATETABLE, dpm2Params) +
						    offsetof(PP_NIslands_DPM2Parameters, TDPLimit)),
					      (u8 *)(&smc_table->dpm2Params.TDPLimit),
					      sizeof(u32) * 4, pi->sram_end);
		if (ret)
			return ret;
	}

	return 0;
}
1534 
1535 int ni_copy_and_switch_arb_sets(struct radeon_device *rdev,
1536 				u32 arb_freq_src, u32 arb_freq_dest)
1537 {
1538 	u32 mc_arb_dram_timing;
1539 	u32 mc_arb_dram_timing2;
1540 	u32 burst_time;
1541 	u32 mc_cg_config;
1542 
1543 	switch (arb_freq_src) {
1544         case MC_CG_ARB_FREQ_F0:
1545 		mc_arb_dram_timing  = RREG32(MC_ARB_DRAM_TIMING);
1546 		mc_arb_dram_timing2 = RREG32(MC_ARB_DRAM_TIMING2);
1547 		burst_time = (RREG32(MC_ARB_BURST_TIME) & STATE0_MASK) >> STATE0_SHIFT;
1548 		break;
1549         case MC_CG_ARB_FREQ_F1:
1550 		mc_arb_dram_timing  = RREG32(MC_ARB_DRAM_TIMING_1);
1551 		mc_arb_dram_timing2 = RREG32(MC_ARB_DRAM_TIMING2_1);
1552 		burst_time = (RREG32(MC_ARB_BURST_TIME) & STATE1_MASK) >> STATE1_SHIFT;
1553 		break;
1554         case MC_CG_ARB_FREQ_F2:
1555 		mc_arb_dram_timing  = RREG32(MC_ARB_DRAM_TIMING_2);
1556 		mc_arb_dram_timing2 = RREG32(MC_ARB_DRAM_TIMING2_2);
1557 		burst_time = (RREG32(MC_ARB_BURST_TIME) & STATE2_MASK) >> STATE2_SHIFT;
1558 		break;
1559         case MC_CG_ARB_FREQ_F3:
1560 		mc_arb_dram_timing  = RREG32(MC_ARB_DRAM_TIMING_3);
1561 		mc_arb_dram_timing2 = RREG32(MC_ARB_DRAM_TIMING2_3);
1562 		burst_time = (RREG32(MC_ARB_BURST_TIME) & STATE3_MASK) >> STATE3_SHIFT;
1563 		break;
1564         default:
1565 		return -EINVAL;
1566 	}
1567 
1568 	switch (arb_freq_dest) {
1569         case MC_CG_ARB_FREQ_F0:
1570 		WREG32(MC_ARB_DRAM_TIMING, mc_arb_dram_timing);
1571 		WREG32(MC_ARB_DRAM_TIMING2, mc_arb_dram_timing2);
1572 		WREG32_P(MC_ARB_BURST_TIME, STATE0(burst_time), ~STATE0_MASK);
1573 		break;
1574         case MC_CG_ARB_FREQ_F1:
1575 		WREG32(MC_ARB_DRAM_TIMING_1, mc_arb_dram_timing);
1576 		WREG32(MC_ARB_DRAM_TIMING2_1, mc_arb_dram_timing2);
1577 		WREG32_P(MC_ARB_BURST_TIME, STATE1(burst_time), ~STATE1_MASK);
1578 		break;
1579         case MC_CG_ARB_FREQ_F2:
1580 		WREG32(MC_ARB_DRAM_TIMING_2, mc_arb_dram_timing);
1581 		WREG32(MC_ARB_DRAM_TIMING2_2, mc_arb_dram_timing2);
1582 		WREG32_P(MC_ARB_BURST_TIME, STATE2(burst_time), ~STATE2_MASK);
1583 		break;
1584         case MC_CG_ARB_FREQ_F3:
1585 		WREG32(MC_ARB_DRAM_TIMING_3, mc_arb_dram_timing);
1586 		WREG32(MC_ARB_DRAM_TIMING2_3, mc_arb_dram_timing2);
1587 		WREG32_P(MC_ARB_BURST_TIME, STATE3(burst_time), ~STATE3_MASK);
1588 		break;
1589 	default:
1590 		return -EINVAL;
1591 	}
1592 
1593 	mc_cg_config = RREG32(MC_CG_CONFIG) | 0x0000000F;
1594 	WREG32(MC_CG_CONFIG, mc_cg_config);
1595 	WREG32_P(MC_ARB_CG, CG_ARB_REQ(arb_freq_dest), ~CG_ARB_REQ_MASK);
1596 
1597 	return 0;
1598 }
1599 
1600 static int ni_init_arb_table_index(struct radeon_device *rdev)
1601 {
1602 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1603 	struct ni_power_info *ni_pi = ni_get_pi(rdev);
1604 	u32 tmp;
1605 	int ret;
1606 
1607 	ret = rv770_read_smc_sram_dword(rdev, ni_pi->arb_table_start,
1608 					&tmp, pi->sram_end);
1609 	if (ret)
1610 		return ret;
1611 
1612 	tmp &= 0x00FFFFFF;
1613 	tmp |= ((u32)MC_CG_ARB_FREQ_F1) << 24;
1614 
1615 	return rv770_write_smc_sram_dword(rdev, ni_pi->arb_table_start,
1616 					  tmp, pi->sram_end);
1617 }
1618 
/* Copy the boot-time F0 arbiter settings to F1 and switch to F1. */
static int ni_initial_switch_from_arb_f0_to_f1(struct radeon_device *rdev)
{
	return ni_copy_and_switch_arb_sets(rdev, MC_CG_ARB_FREQ_F0, MC_CG_ARB_FREQ_F1);
}
1623 
1624 static int ni_force_switch_to_arb_f0(struct radeon_device *rdev)
1625 {
1626 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1627 	struct ni_power_info *ni_pi = ni_get_pi(rdev);
1628 	u32 tmp;
1629 	int ret;
1630 
1631 	ret = rv770_read_smc_sram_dword(rdev, ni_pi->arb_table_start,
1632 					&tmp, pi->sram_end);
1633 	if (ret)
1634 		return ret;
1635 
1636 	tmp = (tmp >> 24) & 0xff;
1637 
1638 	if (tmp == MC_CG_ARB_FREQ_F0)
1639 		return 0;
1640 
1641 	return ni_copy_and_switch_arb_sets(rdev, tmp, MC_CG_ARB_FREQ_F0);
1642 }
1643 
1644 static int ni_populate_memory_timing_parameters(struct radeon_device *rdev,
1645 						struct rv7xx_pl *pl,
1646 						SMC_NIslands_MCArbDramTimingRegisterSet *arb_regs)
1647 {
1648 	u32 dram_timing;
1649 	u32 dram_timing2;
1650 
1651 	arb_regs->mc_arb_rfsh_rate =
1652 		(u8)rv770_calculate_memory_refresh_rate(rdev, pl->sclk);
1653 
1654 
1655 	radeon_atom_set_engine_dram_timings(rdev,
1656                                             pl->sclk,
1657                                             pl->mclk);
1658 
1659 	dram_timing = RREG32(MC_ARB_DRAM_TIMING);
1660 	dram_timing2 = RREG32(MC_ARB_DRAM_TIMING2);
1661 
1662 	arb_regs->mc_arb_dram_timing  = cpu_to_be32(dram_timing);
1663 	arb_regs->mc_arb_dram_timing2 = cpu_to_be32(dram_timing2);
1664 
1665 	return 0;
1666 }
1667 
1668 static int ni_do_program_memory_timing_parameters(struct radeon_device *rdev,
1669 						  struct radeon_ps *radeon_state,
1670 						  unsigned int first_arb_set)
1671 {
1672 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1673 	struct ni_power_info *ni_pi = ni_get_pi(rdev);
1674 	struct ni_ps *state = ni_get_ps(radeon_state);
1675 	SMC_NIslands_MCArbDramTimingRegisterSet arb_regs = { 0 };
1676 	int i, ret = 0;
1677 
1678 	for (i = 0; i < state->performance_level_count; i++) {
1679 		ret = ni_populate_memory_timing_parameters(rdev, &state->performance_levels[i], &arb_regs);
1680 		if (ret)
1681 			break;
1682 
1683 		ret = rv770_copy_bytes_to_smc(rdev,
1684 					      (u16)(ni_pi->arb_table_start +
1685 						    offsetof(SMC_NIslands_MCArbDramTimingRegisters, data) +
1686 						    sizeof(SMC_NIslands_MCArbDramTimingRegisterSet) * (first_arb_set + i)),
1687 					      (u8 *)&arb_regs,
1688 					      (u16)sizeof(SMC_NIslands_MCArbDramTimingRegisterSet),
1689 					      pi->sram_end);
1690 		if (ret)
1691 			break;
1692 	}
1693 	return ret;
1694 }
1695 
/* Upload ARB timing sets for the new state into the driver-state slots. */
static int ni_program_memory_timing_parameters(struct radeon_device *rdev,
					       struct radeon_ps *radeon_new_state)
{
	return ni_do_program_memory_timing_parameters(rdev, radeon_new_state,
						      NISLANDS_DRIVER_STATE_ARB_INDEX);
}
1702 
/* The initial (boot) state always uses the high MVDD level. */
static void ni_populate_initial_mvdd_value(struct radeon_device *rdev,
					   struct NISLANDS_SMC_VOLTAGE_VALUE *voltage)
{
	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);

	voltage->index = eg_pi->mvdd_high_index;
	voltage->value = cpu_to_be16(MVDD_HIGH_VALUE);
}
1711 
/*
 * Fill in the SMC state table's initialState from the boot-up power
 * state: cached PLL registers, clocks, voltages, PCIe gen, GDDR5
 * strobe/EDC settings, and DPM2 throttle defaults.
 */
static int ni_populate_smc_initial_state(struct radeon_device *rdev,
					 struct radeon_ps *radeon_initial_state,
					 NISLANDS_SMC_STATETABLE *table)
{
	struct ni_ps *initial_state = ni_get_ps(radeon_initial_state);
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
	struct ni_power_info *ni_pi = ni_get_pi(rdev);
	u32 reg;
	int ret;

	/* memory clock (MPLL) registers, from the boot-time snapshot */
	table->initialState.levels[0].mclk.vMPLL_AD_FUNC_CNTL =
		cpu_to_be32(ni_pi->clock_registers.mpll_ad_func_cntl);
	table->initialState.levels[0].mclk.vMPLL_AD_FUNC_CNTL_2 =
		cpu_to_be32(ni_pi->clock_registers.mpll_ad_func_cntl_2);
	table->initialState.levels[0].mclk.vMPLL_DQ_FUNC_CNTL =
		cpu_to_be32(ni_pi->clock_registers.mpll_dq_func_cntl);
	table->initialState.levels[0].mclk.vMPLL_DQ_FUNC_CNTL_2 =
		cpu_to_be32(ni_pi->clock_registers.mpll_dq_func_cntl_2);
	table->initialState.levels[0].mclk.vMCLK_PWRMGT_CNTL =
		cpu_to_be32(ni_pi->clock_registers.mclk_pwrmgt_cntl);
	table->initialState.levels[0].mclk.vDLL_CNTL =
		cpu_to_be32(ni_pi->clock_registers.dll_cntl);
	table->initialState.levels[0].mclk.vMPLL_SS =
		cpu_to_be32(ni_pi->clock_registers.mpll_ss1);
	table->initialState.levels[0].mclk.vMPLL_SS2 =
		cpu_to_be32(ni_pi->clock_registers.mpll_ss2);
	table->initialState.levels[0].mclk.mclk_value =
		cpu_to_be32(initial_state->performance_levels[0].mclk);

	/* engine clock (SPLL) registers */
	table->initialState.levels[0].sclk.vCG_SPLL_FUNC_CNTL =
		cpu_to_be32(ni_pi->clock_registers.cg_spll_func_cntl);
	table->initialState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_2 =
		cpu_to_be32(ni_pi->clock_registers.cg_spll_func_cntl_2);
	table->initialState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_3 =
		cpu_to_be32(ni_pi->clock_registers.cg_spll_func_cntl_3);
	table->initialState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_4 =
		cpu_to_be32(ni_pi->clock_registers.cg_spll_func_cntl_4);
	table->initialState.levels[0].sclk.vCG_SPLL_SPREAD_SPECTRUM =
		cpu_to_be32(ni_pi->clock_registers.cg_spll_spread_spectrum);
	table->initialState.levels[0].sclk.vCG_SPLL_SPREAD_SPECTRUM_2 =
		cpu_to_be32(ni_pi->clock_registers.cg_spll_spread_spectrum_2);
	table->initialState.levels[0].sclk.sclk_value =
		cpu_to_be32(initial_state->performance_levels[0].sclk);
	table->initialState.levels[0].arbRefreshState =
		NISLANDS_INITIAL_STATE_ARB_INDEX;

	table->initialState.levels[0].ACIndex = 0;

	/* VDDC (and derived "standard" VDDC) for the boot level */
	ret = ni_populate_voltage_value(rdev, &eg_pi->vddc_voltage_table,
					initial_state->performance_levels[0].vddc,
					&table->initialState.levels[0].vddc);
	if (!ret) {
		u16 std_vddc;

		ret = ni_get_std_voltage_value(rdev,
					       &table->initialState.levels[0].vddc,
					       &std_vddc);
		if (!ret)
			ni_populate_std_voltage_value(rdev, std_vddc,
						      table->initialState.levels[0].vddc.index,
						      &table->initialState.levels[0].std_vddc);
	}

	if (eg_pi->vddci_control)
		ni_populate_voltage_value(rdev,
					  &eg_pi->vddci_voltage_table,
					  initial_state->performance_levels[0].vddci,
					  &table->initialState.levels[0].vddci);

	ni_populate_initial_mvdd_value(rdev, &table->initialState.levels[0].mvdd);

	reg = CG_R(0xffff) | CG_L(0);
	table->initialState.levels[0].aT = cpu_to_be32(reg);

	table->initialState.levels[0].bSP = cpu_to_be32(pi->dsp);

	if (pi->boot_in_gen2)
		table->initialState.levels[0].gen2PCIE = 1;
	else
		table->initialState.levels[0].gen2PCIE = 0;

	/* GDDR5: strobe mode and EDC above the enable threshold */
	if (pi->mem_gddr5) {
		table->initialState.levels[0].strobeMode =
			cypress_get_strobe_mode_settings(rdev,
							 initial_state->performance_levels[0].mclk);

		if (initial_state->performance_levels[0].mclk > pi->mclk_edc_enable_threshold)
			table->initialState.levels[0].mcFlags = NISLANDS_SMC_MC_EDC_RD_FLAG | NISLANDS_SMC_MC_EDC_WR_FLAG;
		else
			table->initialState.levels[0].mcFlags =  0;
	}

	table->initialState.levelCount = 1;

	table->initialState.flags |= PPSMC_SWSTATE_FLAG_DC;

	/* DPM2 throttle parameters disabled for the boot state */
	table->initialState.levels[0].dpm2.MaxPS = 0;
	table->initialState.levels[0].dpm2.NearTDPDec = 0;
	table->initialState.levels[0].dpm2.AboveSafeInc = 0;
	table->initialState.levels[0].dpm2.BelowSafeInc = 0;

	reg = MIN_POWER_MASK | MAX_POWER_MASK;
	table->initialState.levels[0].SQPowerThrottle = cpu_to_be32(reg);

	reg = MAX_POWER_DELTA_MASK | STI_SIZE_MASK | LTI_RATIO_MASK;
	table->initialState.levels[0].SQPowerThrottle_2 = cpu_to_be32(reg);

	return 0;
}
1822 
/*
 * ni_populate_smc_acpi_state - fill in the ACPI (lowest-power) entry of
 * the SMC state table.
 *
 * The ACPI state is derived from a copy of the initial state: both clock
 * values are zeroed, the memory PLL output and memory DLLs are powered
 * down/bypassed, the engine clock mux is parked, and the lowest usable
 * voltages are selected.
 *
 * Returns 0; errors from the voltage helpers only suppress the optional
 * std_vddc programming.
 */
static int ni_populate_smc_acpi_state(struct radeon_device *rdev,
				      NISLANDS_SMC_STATETABLE *table)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
	struct ni_power_info *ni_pi = ni_get_pi(rdev);
	/* Start from the register values captured at driver init. */
	u32 mpll_ad_func_cntl   = ni_pi->clock_registers.mpll_ad_func_cntl;
	u32 mpll_ad_func_cntl_2 = ni_pi->clock_registers.mpll_ad_func_cntl_2;
	u32 mpll_dq_func_cntl   = ni_pi->clock_registers.mpll_dq_func_cntl;
	u32 mpll_dq_func_cntl_2 = ni_pi->clock_registers.mpll_dq_func_cntl_2;
	u32 spll_func_cntl      = ni_pi->clock_registers.cg_spll_func_cntl;
	u32 spll_func_cntl_2    = ni_pi->clock_registers.cg_spll_func_cntl_2;
	u32 spll_func_cntl_3    = ni_pi->clock_registers.cg_spll_func_cntl_3;
	u32 spll_func_cntl_4    = ni_pi->clock_registers.cg_spll_func_cntl_4;
	u32 mclk_pwrmgt_cntl    = ni_pi->clock_registers.mclk_pwrmgt_cntl;
	u32 dll_cntl            = ni_pi->clock_registers.dll_cntl;
	u32 reg;
	int ret;

	table->ACPIState = table->initialState;

	table->ACPIState.flags &= ~PPSMC_SWSTATE_FLAG_DC;

	if (pi->acpi_vddc) {
		/* VBIOS supplied a dedicated ACPI VDDC - use it. */
		ret = ni_populate_voltage_value(rdev,
						&eg_pi->vddc_voltage_table,
						pi->acpi_vddc, &table->ACPIState.levels[0].vddc);
		if (!ret) {
			u16 std_vddc;

			ret = ni_get_std_voltage_value(rdev,
						       &table->ACPIState.levels[0].vddc, &std_vddc);
			if (!ret)
				ni_populate_std_voltage_value(rdev, std_vddc,
							      table->ACPIState.levels[0].vddc.index,
							      &table->ACPIState.levels[0].std_vddc);
		}

		/* PCIE gen2 in ACPI only if both the asic and the ACPI
		 * state support it.
		 */
		if (pi->pcie_gen2) {
			if (pi->acpi_pcie_gen2)
				table->ACPIState.levels[0].gen2PCIE = 1;
			else
				table->ACPIState.levels[0].gen2PCIE = 0;
		} else {
			table->ACPIState.levels[0].gen2PCIE = 0;
		}
	} else {
		/* No ACPI VDDC: fall back to the minimum VDDC in the
		 * dependency table and force PCIE gen1.
		 */
		ret = ni_populate_voltage_value(rdev,
						&eg_pi->vddc_voltage_table,
						pi->min_vddc_in_table,
						&table->ACPIState.levels[0].vddc);
		if (!ret) {
			u16 std_vddc;

			ret = ni_get_std_voltage_value(rdev,
						       &table->ACPIState.levels[0].vddc,
						       &std_vddc);
			if (!ret)
				ni_populate_std_voltage_value(rdev, std_vddc,
							      table->ACPIState.levels[0].vddc.index,
							      &table->ACPIState.levels[0].std_vddc);
		}
		table->ACPIState.levels[0].gen2PCIE = 0;
	}

	if (eg_pi->acpi_vddci) {
		if (eg_pi->vddci_control)
			ni_populate_voltage_value(rdev,
						  &eg_pi->vddci_voltage_table,
						  eg_pi->acpi_vddci,
						  &table->ACPIState.levels[0].vddci);
	}

	/* Power down the memory PLL AD (and, for GDDR5, DQ) output. */
	mpll_ad_func_cntl &= ~PDNB;

	mpll_ad_func_cntl_2 |= BIAS_GEN_PDNB | RESET_EN;

        if (pi->mem_gddr5)
                mpll_dq_func_cntl &= ~PDNB;
        mpll_dq_func_cntl_2 |= BIAS_GEN_PDNB | RESET_EN | BYPASS;


	/* Hold every memory DLL in reset ... */
	mclk_pwrmgt_cntl |= (MRDCKA0_RESET |
			     MRDCKA1_RESET |
			     MRDCKB0_RESET |
			     MRDCKB1_RESET |
			     MRDCKC0_RESET |
			     MRDCKC1_RESET |
			     MRDCKD0_RESET |
			     MRDCKD1_RESET);

	/* ... and powered down ... */
	mclk_pwrmgt_cntl &= ~(MRDCKA0_PDNB |
			      MRDCKA1_PDNB |
			      MRDCKB0_PDNB |
			      MRDCKB1_PDNB |
			      MRDCKC0_PDNB |
			      MRDCKC1_PDNB |
			      MRDCKD0_PDNB |
			      MRDCKD1_PDNB);

	/* ... with all DLL outputs bypassed. */
	dll_cntl |= (MRDCKA0_BYPASS |
                     MRDCKA1_BYPASS |
                     MRDCKB0_BYPASS |
                     MRDCKB1_BYPASS |
                     MRDCKC0_BYPASS |
                     MRDCKC1_BYPASS |
                     MRDCKD0_BYPASS |
                     MRDCKD1_BYPASS);

	/* Park the engine clock mux (mux select 4) for the ACPI state. */
        spll_func_cntl_2 &= ~SCLK_MUX_SEL_MASK;
	spll_func_cntl_2 |= SCLK_MUX_SEL(4);

	/* The SMC expects all register images in big-endian. */
	table->ACPIState.levels[0].mclk.vMPLL_AD_FUNC_CNTL = cpu_to_be32(mpll_ad_func_cntl);
	table->ACPIState.levels[0].mclk.vMPLL_AD_FUNC_CNTL_2 = cpu_to_be32(mpll_ad_func_cntl_2);
	table->ACPIState.levels[0].mclk.vMPLL_DQ_FUNC_CNTL = cpu_to_be32(mpll_dq_func_cntl);
	table->ACPIState.levels[0].mclk.vMPLL_DQ_FUNC_CNTL_2 = cpu_to_be32(mpll_dq_func_cntl_2);
	table->ACPIState.levels[0].mclk.vMCLK_PWRMGT_CNTL = cpu_to_be32(mclk_pwrmgt_cntl);
	table->ACPIState.levels[0].mclk.vDLL_CNTL = cpu_to_be32(dll_cntl);

	table->ACPIState.levels[0].mclk.mclk_value = 0;

	table->ACPIState.levels[0].sclk.vCG_SPLL_FUNC_CNTL = cpu_to_be32(spll_func_cntl);
	table->ACPIState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_2 = cpu_to_be32(spll_func_cntl_2);
	table->ACPIState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_3 = cpu_to_be32(spll_func_cntl_3);
	table->ACPIState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_4 = cpu_to_be32(spll_func_cntl_4);

	table->ACPIState.levels[0].sclk.sclk_value = 0;

	ni_populate_mvdd_value(rdev, 0, &table->ACPIState.levels[0].mvdd);

	/* AC timing index 1 is reserved for the ACPI state when dynamic
	 * AC timing is in use.
	 */
	if (eg_pi->dynamic_ac_timing)
		table->ACPIState.levels[0].ACIndex = 1;

	/* No DPM2 pulse skipping in the ACPI state. */
	table->ACPIState.levels[0].dpm2.MaxPS = 0;
	table->ACPIState.levels[0].dpm2.NearTDPDec = 0;
	table->ACPIState.levels[0].dpm2.AboveSafeInc = 0;
	table->ACPIState.levels[0].dpm2.BelowSafeInc = 0;

	/* SQ ramping disabled: leave the throttle fields at their masks. */
	reg = MIN_POWER_MASK | MAX_POWER_MASK;
	table->ACPIState.levels[0].SQPowerThrottle = cpu_to_be32(reg);

	reg = MAX_POWER_DELTA_MASK | STI_SIZE_MASK | LTI_RATIO_MASK;
	table->ACPIState.levels[0].SQPowerThrottle_2 = cpu_to_be32(reg);

	return 0;
}
1970 
/*
 * ni_init_smc_table - build the complete SMC state table and upload it.
 *
 * Populates the voltage tables, system flags, the initial state (from the
 * boot power state), the ACPI state, and seeds the driver/ULV states from
 * the initial state, then copies the table into SMC SRAM.
 *
 * Returns 0 on success or a negative error code.
 */
static int ni_init_smc_table(struct radeon_device *rdev)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	struct ni_power_info *ni_pi = ni_get_pi(rdev);
	int ret;
	struct radeon_ps *radeon_boot_state = rdev->pm.dpm.boot_ps;
	NISLANDS_SMC_STATETABLE *table = &ni_pi->smc_statetable;

	memset(table, 0, sizeof(NISLANDS_SMC_STATETABLE));

	ni_populate_smc_voltage_tables(rdev, table);

	/* Tell the SMC who owns thermal protection. */
	switch (rdev->pm.int_thermal_type) {
	case THERMAL_TYPE_NI:
	case THERMAL_TYPE_EMC2103_WITH_INTERNAL:
		table->thermalProtectType = PPSMC_THERMAL_PROTECT_TYPE_INTERNAL;
		break;
	case THERMAL_TYPE_NONE:
		table->thermalProtectType = PPSMC_THERMAL_PROTECT_TYPE_NONE;
		break;
	default:
		table->thermalProtectType = PPSMC_THERMAL_PROTECT_TYPE_EXTERNAL;
		break;
	}

	/* Translate platform caps from the power play table. */
	if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_HARDWAREDC)
		table->systemFlags |= PPSMC_SYSTEMFLAG_GPIO_DC;

	if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_REGULATOR_HOT)
		table->systemFlags |= PPSMC_SYSTEMFLAG_REGULATOR_HOT;

	if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_STEPVDDC)
		table->systemFlags |= PPSMC_SYSTEMFLAG_STEPVDDC;

	if (pi->mem_gddr5)
		table->systemFlags |= PPSMC_SYSTEMFLAG_GDDR5;

	ret = ni_populate_smc_initial_state(rdev, radeon_boot_state, table);
	if (ret)
		return ret;

	ret = ni_populate_smc_acpi_state(rdev, table);
	if (ret)
		return ret;

	/* driver and ULV states start out as copies of the initial state;
	 * the driver state is overwritten later by ni_upload_sw_state().
	 */
	table->driverState = table->initialState;

	table->ULVState = table->initialState;

	ret = ni_do_program_memory_timing_parameters(rdev, radeon_boot_state,
						     NISLANDS_INITIAL_STATE_ARB_INDEX);
	if (ret)
		return ret;

	return rv770_copy_bytes_to_smc(rdev, pi->state_table_start, (u8 *)table,
				       sizeof(NISLANDS_SMC_STATETABLE), pi->sram_end);
}
2028 
2029 static int ni_calculate_sclk_params(struct radeon_device *rdev,
2030 				    u32 engine_clock,
2031 				    NISLANDS_SMC_SCLK_VALUE *sclk)
2032 {
2033 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
2034 	struct ni_power_info *ni_pi = ni_get_pi(rdev);
2035 	struct atom_clock_dividers dividers;
2036 	u32 spll_func_cntl = ni_pi->clock_registers.cg_spll_func_cntl;
2037 	u32 spll_func_cntl_2 = ni_pi->clock_registers.cg_spll_func_cntl_2;
2038 	u32 spll_func_cntl_3 = ni_pi->clock_registers.cg_spll_func_cntl_3;
2039 	u32 spll_func_cntl_4 = ni_pi->clock_registers.cg_spll_func_cntl_4;
2040 	u32 cg_spll_spread_spectrum = ni_pi->clock_registers.cg_spll_spread_spectrum;
2041 	u32 cg_spll_spread_spectrum_2 = ni_pi->clock_registers.cg_spll_spread_spectrum_2;
2042 	u64 tmp;
2043 	u32 reference_clock = rdev->clock.spll.reference_freq;
2044 	u32 reference_divider;
2045 	u32 fbdiv;
2046 	int ret;
2047 
2048 	ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM,
2049 					     engine_clock, false, &dividers);
2050 	if (ret)
2051 		return ret;
2052 
2053 	reference_divider = 1 + dividers.ref_div;
2054 
2055 
2056 	tmp = (u64) engine_clock * reference_divider * dividers.post_div * 16834;
2057 	do_div(tmp, reference_clock);
2058 	fbdiv = (u32) tmp;
2059 
2060 	spll_func_cntl &= ~(SPLL_PDIV_A_MASK | SPLL_REF_DIV_MASK);
2061 	spll_func_cntl |= SPLL_REF_DIV(dividers.ref_div);
2062 	spll_func_cntl |= SPLL_PDIV_A(dividers.post_div);
2063 
2064 	spll_func_cntl_2 &= ~SCLK_MUX_SEL_MASK;
2065 	spll_func_cntl_2 |= SCLK_MUX_SEL(2);
2066 
2067 	spll_func_cntl_3 &= ~SPLL_FB_DIV_MASK;
2068 	spll_func_cntl_3 |= SPLL_FB_DIV(fbdiv);
2069 	spll_func_cntl_3 |= SPLL_DITHEN;
2070 
2071 	if (pi->sclk_ss) {
2072 		struct radeon_atom_ss ss;
2073 		u32 vco_freq = engine_clock * dividers.post_div;
2074 
2075 		if (radeon_atombios_get_asic_ss_info(rdev, &ss,
2076 						     ASIC_INTERNAL_ENGINE_SS, vco_freq)) {
2077 			u32 clk_s = reference_clock * 5 / (reference_divider * ss.rate);
2078 			u32 clk_v = 4 * ss.percentage * fbdiv / (clk_s * 10000);
2079 
2080 			cg_spll_spread_spectrum &= ~CLK_S_MASK;
2081 			cg_spll_spread_spectrum |= CLK_S(clk_s);
2082 			cg_spll_spread_spectrum |= SSEN;
2083 
2084 			cg_spll_spread_spectrum_2 &= ~CLK_V_MASK;
2085 			cg_spll_spread_spectrum_2 |= CLK_V(clk_v);
2086 		}
2087 	}
2088 
2089 	sclk->sclk_value = engine_clock;
2090 	sclk->vCG_SPLL_FUNC_CNTL = spll_func_cntl;
2091 	sclk->vCG_SPLL_FUNC_CNTL_2 = spll_func_cntl_2;
2092 	sclk->vCG_SPLL_FUNC_CNTL_3 = spll_func_cntl_3;
2093 	sclk->vCG_SPLL_FUNC_CNTL_4 = spll_func_cntl_4;
2094 	sclk->vCG_SPLL_SPREAD_SPECTRUM = cg_spll_spread_spectrum;
2095 	sclk->vCG_SPLL_SPREAD_SPECTRUM_2 = cg_spll_spread_spectrum_2;
2096 
2097 	return 0;
2098 }
2099 
2100 static int ni_populate_sclk_value(struct radeon_device *rdev,
2101 				  u32 engine_clock,
2102 				  NISLANDS_SMC_SCLK_VALUE *sclk)
2103 {
2104 	NISLANDS_SMC_SCLK_VALUE sclk_tmp;
2105 	int ret;
2106 
2107 	ret = ni_calculate_sclk_params(rdev, engine_clock, &sclk_tmp);
2108 	if (!ret) {
2109 		sclk->sclk_value = cpu_to_be32(sclk_tmp.sclk_value);
2110 		sclk->vCG_SPLL_FUNC_CNTL = cpu_to_be32(sclk_tmp.vCG_SPLL_FUNC_CNTL);
2111 		sclk->vCG_SPLL_FUNC_CNTL_2 = cpu_to_be32(sclk_tmp.vCG_SPLL_FUNC_CNTL_2);
2112 		sclk->vCG_SPLL_FUNC_CNTL_3 = cpu_to_be32(sclk_tmp.vCG_SPLL_FUNC_CNTL_3);
2113 		sclk->vCG_SPLL_FUNC_CNTL_4 = cpu_to_be32(sclk_tmp.vCG_SPLL_FUNC_CNTL_4);
2114 		sclk->vCG_SPLL_SPREAD_SPECTRUM = cpu_to_be32(sclk_tmp.vCG_SPLL_SPREAD_SPECTRUM);
2115 		sclk->vCG_SPLL_SPREAD_SPECTRUM_2 = cpu_to_be32(sclk_tmp.vCG_SPLL_SPREAD_SPECTRUM_2);
2116 	}
2117 
2118 	return ret;
2119 }
2120 
2121 static int ni_init_smc_spll_table(struct radeon_device *rdev)
2122 {
2123         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
2124 	struct ni_power_info *ni_pi = ni_get_pi(rdev);
2125 	SMC_NISLANDS_SPLL_DIV_TABLE *spll_table;
2126 	NISLANDS_SMC_SCLK_VALUE sclk_params;
2127 	u32 fb_div;
2128 	u32 p_div;
2129 	u32 clk_s;
2130 	u32 clk_v;
2131 	u32 sclk = 0;
2132 	int i, ret;
2133 	u32 tmp;
2134 
2135 	if (ni_pi->spll_table_start == 0)
2136 		return -EINVAL;
2137 
2138 	spll_table = kzalloc(sizeof(SMC_NISLANDS_SPLL_DIV_TABLE), GFP_KERNEL);
2139 	if (spll_table == NULL)
2140 		return -ENOMEM;
2141 
2142 	for (i = 0; i < 256; i++) {
2143 		ret = ni_calculate_sclk_params(rdev, sclk, &sclk_params);
2144 		if (ret)
2145 			break;
2146 
2147 		p_div = (sclk_params.vCG_SPLL_FUNC_CNTL & SPLL_PDIV_A_MASK) >> SPLL_PDIV_A_SHIFT;
2148 		fb_div = (sclk_params.vCG_SPLL_FUNC_CNTL_3 & SPLL_FB_DIV_MASK) >> SPLL_FB_DIV_SHIFT;
2149 		clk_s = (sclk_params.vCG_SPLL_SPREAD_SPECTRUM & CLK_S_MASK) >> CLK_S_SHIFT;
2150 		clk_v = (sclk_params.vCG_SPLL_SPREAD_SPECTRUM_2 & CLK_V_MASK) >> CLK_V_SHIFT;
2151 
2152 		fb_div &= ~0x00001FFF;
2153 		fb_div >>= 1;
2154 		clk_v >>= 6;
2155 
2156 		if (p_div & ~(SMC_NISLANDS_SPLL_DIV_TABLE_PDIV_MASK >> SMC_NISLANDS_SPLL_DIV_TABLE_PDIV_SHIFT))
2157 			ret = -EINVAL;
2158 
2159 		if (clk_s & ~(SMC_NISLANDS_SPLL_DIV_TABLE_CLKS_MASK >> SMC_NISLANDS_SPLL_DIV_TABLE_CLKS_SHIFT))
2160 			ret = -EINVAL;
2161 
2162 		if (clk_s & ~(SMC_NISLANDS_SPLL_DIV_TABLE_CLKS_MASK >> SMC_NISLANDS_SPLL_DIV_TABLE_CLKS_SHIFT))
2163 			ret = -EINVAL;
2164 
2165 		if (clk_v & ~(SMC_NISLANDS_SPLL_DIV_TABLE_CLKV_MASK >> SMC_NISLANDS_SPLL_DIV_TABLE_CLKV_SHIFT))
2166 			ret = -EINVAL;
2167 
2168 		if (ret)
2169 			break;
2170 
2171 		tmp = ((fb_div << SMC_NISLANDS_SPLL_DIV_TABLE_FBDIV_SHIFT) & SMC_NISLANDS_SPLL_DIV_TABLE_FBDIV_MASK) |
2172 			((p_div << SMC_NISLANDS_SPLL_DIV_TABLE_PDIV_SHIFT) & SMC_NISLANDS_SPLL_DIV_TABLE_PDIV_MASK);
2173 		spll_table->freq[i] = cpu_to_be32(tmp);
2174 
2175 		tmp = ((clk_v << SMC_NISLANDS_SPLL_DIV_TABLE_CLKV_SHIFT) & SMC_NISLANDS_SPLL_DIV_TABLE_CLKV_MASK) |
2176 			((clk_s << SMC_NISLANDS_SPLL_DIV_TABLE_CLKS_SHIFT) & SMC_NISLANDS_SPLL_DIV_TABLE_CLKS_MASK);
2177 		spll_table->ss[i] = cpu_to_be32(tmp);
2178 
2179 		sclk += 512;
2180 	}
2181 
2182 	if (!ret)
2183 		ret = rv770_copy_bytes_to_smc(rdev, ni_pi->spll_table_start, (u8 *)spll_table,
2184 					      sizeof(SMC_NISLANDS_SPLL_DIV_TABLE), pi->sram_end);
2185 
2186 	kfree(spll_table);
2187 
2188 	return ret;
2189 }
2190 
/*
 * ni_populate_mclk_value - compute the memory PLL / DLL register values
 * for a given memory clock and store them (big-endian) in @mclk.
 *
 * @strobe_mode: program the DQ PLL for strobe mode (GDDR5 low-speed mode)
 * @dll_state_on: leave the memory DLLs powered up
 *
 * Returns 0 on success or the error from the atom divider lookup.
 */
static int ni_populate_mclk_value(struct radeon_device *rdev,
				  u32 engine_clock,
				  u32 memory_clock,
				  NISLANDS_SMC_MCLK_VALUE *mclk,
				  bool strobe_mode,
				  bool dll_state_on)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	struct ni_power_info *ni_pi = ni_get_pi(rdev);
	u32 mpll_ad_func_cntl = ni_pi->clock_registers.mpll_ad_func_cntl;
	u32 mpll_ad_func_cntl_2 = ni_pi->clock_registers.mpll_ad_func_cntl_2;
	u32 mpll_dq_func_cntl = ni_pi->clock_registers.mpll_dq_func_cntl;
	u32 mpll_dq_func_cntl_2 = ni_pi->clock_registers.mpll_dq_func_cntl_2;
	u32 mclk_pwrmgt_cntl = ni_pi->clock_registers.mclk_pwrmgt_cntl;
	u32 dll_cntl = ni_pi->clock_registers.dll_cntl;
	u32 mpll_ss1 = ni_pi->clock_registers.mpll_ss1;
	u32 mpll_ss2 = ni_pi->clock_registers.mpll_ss2;
	struct atom_clock_dividers dividers;
	u32 ibias;
	u32 dll_speed;
	int ret;
	u32 mc_seq_misc7;

	ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_MEMORY_PLL_PARAM,
					     memory_clock, strobe_mode, &dividers);
	if (ret)
		return ret;

	if (!strobe_mode) {
		/* NOTE(review): bit 27 of MC_SEQ_MISC7 forces post_div to 1
		 * outside strobe mode - presumably a memory-vendor quirk;
		 * confirm against the MC sequencer docs.
		 */
		mc_seq_misc7 = RREG32(MC_SEQ_MISC7);

		if (mc_seq_misc7 & 0x8000000)
			dividers.post_div = 1;
	}

	ibias = cypress_map_clkf_to_ibias(rdev, dividers.whole_fb_div);

	/* Program the AD-side memory PLL dividers. */
	mpll_ad_func_cntl &= ~(CLKR_MASK |
			       YCLK_POST_DIV_MASK |
			       CLKF_MASK |
			       CLKFRAC_MASK |
			       IBIAS_MASK);
	mpll_ad_func_cntl |= CLKR(dividers.ref_div);
	mpll_ad_func_cntl |= YCLK_POST_DIV(dividers.post_div);
	mpll_ad_func_cntl |= CLKF(dividers.whole_fb_div);
	mpll_ad_func_cntl |= CLKFRAC(dividers.frac_fb_div);
	mpll_ad_func_cntl |= IBIAS(ibias);

	if (dividers.vco_mode)
		mpll_ad_func_cntl_2 |= VCO_MODE;
	else
		mpll_ad_func_cntl_2 &= ~VCO_MODE;

	/* The DQ-side PLL only exists/matters for GDDR5. */
	if (pi->mem_gddr5) {
		mpll_dq_func_cntl &= ~(CLKR_MASK |
				       YCLK_POST_DIV_MASK |
				       CLKF_MASK |
				       CLKFRAC_MASK |
				       IBIAS_MASK);
		mpll_dq_func_cntl |= CLKR(dividers.ref_div);
		mpll_dq_func_cntl |= YCLK_POST_DIV(dividers.post_div);
		mpll_dq_func_cntl |= CLKF(dividers.whole_fb_div);
		mpll_dq_func_cntl |= CLKFRAC(dividers.frac_fb_div);
		mpll_dq_func_cntl |= IBIAS(ibias);

		if (strobe_mode)
			mpll_dq_func_cntl &= ~PDNB;
		else
			mpll_dq_func_cntl |= PDNB;

		if (dividers.vco_mode)
			mpll_dq_func_cntl_2 |= VCO_MODE;
		else
			mpll_dq_func_cntl_2 &= ~VCO_MODE;
	}

	/* Optional spread spectrum on the memory PLL. */
	if (pi->mclk_ss) {
		struct radeon_atom_ss ss;
		u32 vco_freq = memory_clock * dividers.post_div;

		if (radeon_atombios_get_asic_ss_info(rdev, &ss,
						     ASIC_INTERNAL_MEMORY_SS, vco_freq)) {
			u32 reference_clock = rdev->clock.mpll.reference_freq;
			u32 decoded_ref = rv740_get_decoded_reference_divider(dividers.ref_div);
			u32 clk_s = reference_clock * 5 / (decoded_ref * ss.rate);
			u32 clk_v = ss.percentage *
				(0x4000 * dividers.whole_fb_div + 0x800 * dividers.frac_fb_div) / (clk_s * 625);

			mpll_ss1 &= ~CLKV_MASK;
			mpll_ss1 |= CLKV(clk_v);

			mpll_ss2 &= ~CLKS_MASK;
			mpll_ss2 |= CLKS(clk_s);
		}
	}

	dll_speed = rv740_get_dll_speed(pi->mem_gddr5,
					memory_clock);

	/* Set DLL speed and power the memory DLLs up or down per
	 * @dll_state_on (PDNB = powered up, not in reset).
	 */
	mclk_pwrmgt_cntl &= ~DLL_SPEED_MASK;
	mclk_pwrmgt_cntl |= DLL_SPEED(dll_speed);
	if (dll_state_on)
		mclk_pwrmgt_cntl |= (MRDCKA0_PDNB |
				     MRDCKA1_PDNB |
				     MRDCKB0_PDNB |
				     MRDCKB1_PDNB |
				     MRDCKC0_PDNB |
				     MRDCKC1_PDNB |
				     MRDCKD0_PDNB |
				     MRDCKD1_PDNB);
	else
		mclk_pwrmgt_cntl &= ~(MRDCKA0_PDNB |
				      MRDCKA1_PDNB |
				      MRDCKB0_PDNB |
				      MRDCKB1_PDNB |
				      MRDCKC0_PDNB |
				      MRDCKC1_PDNB |
				      MRDCKD0_PDNB |
				      MRDCKD1_PDNB);


	/* The SMC expects all register images in big-endian. */
	mclk->mclk_value = cpu_to_be32(memory_clock);
	mclk->vMPLL_AD_FUNC_CNTL = cpu_to_be32(mpll_ad_func_cntl);
	mclk->vMPLL_AD_FUNC_CNTL_2 = cpu_to_be32(mpll_ad_func_cntl_2);
	mclk->vMPLL_DQ_FUNC_CNTL = cpu_to_be32(mpll_dq_func_cntl);
	mclk->vMPLL_DQ_FUNC_CNTL_2 = cpu_to_be32(mpll_dq_func_cntl_2);
	mclk->vMCLK_PWRMGT_CNTL = cpu_to_be32(mclk_pwrmgt_cntl);
	mclk->vDLL_CNTL = cpu_to_be32(dll_cntl);
	mclk->vMPLL_SS = cpu_to_be32(mpll_ss1);
	mclk->vMPLL_SS2 = cpu_to_be32(mpll_ss2);

	return 0;
}
2324 
2325 static void ni_populate_smc_sp(struct radeon_device *rdev,
2326 			       struct radeon_ps *radeon_state,
2327 			       NISLANDS_SMC_SWSTATE *smc_state)
2328 {
2329 	struct ni_ps *ps = ni_get_ps(radeon_state);
2330 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
2331 	int i;
2332 
2333 	for (i = 0; i < ps->performance_level_count - 1; i++)
2334 		smc_state->levels[i].bSP = cpu_to_be32(pi->dsp);
2335 
2336 	smc_state->levels[ps->performance_level_count - 1].bSP =
2337 		cpu_to_be32(pi->psp);
2338 }
2339 
/*
 * ni_convert_power_level_to_smc - translate one driver performance level
 * (@pl) into its SMC representation (@level): PCIE gen, sclk/mclk PLL
 * programming, memory flags (stutter/EDC/RTT/strobe) and voltages.
 *
 * Returns 0 on success or a negative error code from the clock/voltage
 * helpers.
 */
static int ni_convert_power_level_to_smc(struct radeon_device *rdev,
					 struct rv7xx_pl *pl,
					 NISLANDS_SMC_HW_PERFORMANCE_LEVEL *level)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
        struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
        struct ni_power_info *ni_pi = ni_get_pi(rdev);
	int ret;
	bool dll_state_on;
	u16 std_vddc;
	u32 tmp = RREG32(DC_STUTTER_CNTL);

	level->gen2PCIE = pi->pcie_gen2 ?
		((pl->flags & ATOM_PPLIB_R600_FLAGS_PCIEGEN2) ? 1 : 0) : 0;

	ret = ni_populate_sclk_value(rdev, pl->sclk, &level->sclk);
	if (ret)
		return ret;

	/* Memory self-refresh stutter is only allowed below the threshold,
	 * with UVD idle, and when the display controller enables it.
	 */
	level->mcFlags =  0;
	if (pi->mclk_stutter_mode_threshold &&
	    (pl->mclk <= pi->mclk_stutter_mode_threshold) &&
	    !eg_pi->uvd_enabled &&
	    (tmp & DC_STUTTER_ENABLE_A) &&
	    (tmp & DC_STUTTER_ENABLE_B))
		level->mcFlags |= NISLANDS_SMC_MC_STUTTER_EN;

	if (pi->mem_gddr5) {
		/* GDDR5: pick EDC read/write, strobe mode and DLL state
		 * based on the target mclk and the MC sequencer settings.
		 */
		if (pl->mclk > pi->mclk_edc_enable_threshold)
			level->mcFlags |= NISLANDS_SMC_MC_EDC_RD_FLAG;
		if (pl->mclk > eg_pi->mclk_edc_wr_enable_threshold)
			level->mcFlags |= NISLANDS_SMC_MC_EDC_WR_FLAG;

		level->strobeMode = cypress_get_strobe_mode_settings(rdev, pl->mclk);

		if (level->strobeMode & NISLANDS_SMC_STROBE_ENABLE) {
			/* NOTE(review): MC_SEQ_MISC5/6/7 bit meanings come
			 * from the MC sequencer; verify against the MC docs
			 * before changing this selection logic.
			 */
			if (cypress_get_mclk_frequency_ratio(rdev, pl->mclk, true) >=
			    ((RREG32(MC_SEQ_MISC7) >> 16) & 0xf))
				dll_state_on = ((RREG32(MC_SEQ_MISC5) >> 1) & 0x1) ? true : false;
			else
				dll_state_on = ((RREG32(MC_SEQ_MISC6) >> 1) & 0x1) ? true : false;
		} else {
			dll_state_on = false;
			if (pl->mclk > ni_pi->mclk_rtt_mode_threshold)
				level->mcFlags |= NISLANDS_SMC_MC_RTT_ENABLE;
		}

		ret = ni_populate_mclk_value(rdev, pl->sclk, pl->mclk,
					     &level->mclk,
					     (level->strobeMode & NISLANDS_SMC_STROBE_ENABLE) != 0,
					     dll_state_on);
	} else
		/* Non-GDDR5: strobe mode and DLLs always on. */
		ret = ni_populate_mclk_value(rdev, pl->sclk, pl->mclk, &level->mclk, 1, 1);

	if (ret)
		return ret;

	ret = ni_populate_voltage_value(rdev, &eg_pi->vddc_voltage_table,
					pl->vddc, &level->vddc);
	if (ret)
		return ret;

	ret = ni_get_std_voltage_value(rdev, &level->vddc, &std_vddc);
	if (ret)
		return ret;

	ni_populate_std_voltage_value(rdev, std_vddc,
				      level->vddc.index, &level->std_vddc);

	if (eg_pi->vddci_control) {
		ret = ni_populate_voltage_value(rdev, &eg_pi->vddci_voltage_table,
						pl->vddci, &level->vddci);
		if (ret)
			return ret;
	}

	ni_populate_mvdd_value(rdev, pl->mclk, &level->mvdd);

	return ret;
}
2420 
/*
 * ni_populate_smc_t - fill in the per-level aT (CG_R/CG_L switch
 * hysteresis) values for the SMC software state.
 *
 * For each pair of adjacent levels, r600_calculate_at() computes the low
 * (t_l) and high (t_h) thresholds from the two sclks; on failure a linear
 * fallback around (i+1)*1000 is used.  NOTE(review): the >= 9 limit looks
 * tied to the maximum SMC level count - confirm against
 * NISLANDS_MAX_SMC_PERFORMANCE_LEVELS_PER_SWSTATE.
 *
 * Returns 0 on success, -EINVAL if there are too many levels.
 */
static int ni_populate_smc_t(struct radeon_device *rdev,
			     struct radeon_ps *radeon_state,
			     NISLANDS_SMC_SWSTATE *smc_state)
{
        struct rv7xx_power_info *pi = rv770_get_pi(rdev);
        struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
	struct ni_ps *state = ni_get_ps(radeon_state);
	u32 a_t;
	u32 t_l, t_h;
	u32 high_bsp;
	int i, ret;

	if (state->performance_level_count >= 9)
		return -EINVAL;

	/* Single level: never switch down (CG_R = 0xffff). */
	if (state->performance_level_count < 2) {
		a_t = CG_R(0xffff) | CG_L(0);
		smc_state->levels[0].aT = cpu_to_be32(a_t);
		return 0;
	}

	smc_state->levels[0].aT = cpu_to_be32(0);

	for (i = 0; i <= state->performance_level_count - 2; i++) {
		/* UVD workloads use a different activity weighting. */
		if (eg_pi->uvd_enabled)
			ret = r600_calculate_at(
				1000 * (i * (eg_pi->smu_uvd_hs ? 2 : 8) + 2),
				100 * R600_AH_DFLT,
				state->performance_levels[i + 1].sclk,
				state->performance_levels[i].sclk,
				&t_l,
				&t_h);
		else
			ret = r600_calculate_at(
				1000 * (i + 1),
				100 * R600_AH_DFLT,
				state->performance_levels[i + 1].sclk,
				state->performance_levels[i].sclk,
				&t_l,
				&t_h);

		if (ret) {
			/* Fallback thresholds around the level boundary. */
			t_h = (i + 1) * 1000 - 50 * R600_AH_DFLT;
			t_l = (i + 1) * 1000 + 50 * R600_AH_DFLT;
		}

		/* Level i gets its down-switch (CG_R) threshold... */
		a_t = be32_to_cpu(smc_state->levels[i].aT) & ~CG_R_MASK;
		a_t |= CG_R(t_l * pi->bsp / 20000);
		smc_state->levels[i].aT = cpu_to_be32(a_t);

		high_bsp = (i == state->performance_level_count - 2) ?
			pi->pbsp : pi->bsp;

		/* ...and level i+1 gets its up-switch (CG_L) threshold. */
		a_t = CG_R(0xffff) | CG_L(t_h * high_bsp / 20000);
		smc_state->levels[i + 1].aT = cpu_to_be32(a_t);
	}

	return 0;
}
2480 
/*
 * ni_populate_power_containment_values - fill in the DPM2 power
 * containment (pulse skipping / TDP) parameters for each level.
 *
 * Level 0 never skips pulses; higher levels get a MaxPS derived from the
 * allowed sclk range and the DPM2 tuning constants.  The power boost
 * limit is also written to SMC SRAM here; if that write fails, boost is
 * simply disabled for this state.
 *
 * Returns 0 on success or -EINVAL on inconsistent level data.
 */
static int ni_populate_power_containment_values(struct radeon_device *rdev,
						struct radeon_ps *radeon_state,
						NISLANDS_SMC_SWSTATE *smc_state)
{
        struct rv7xx_power_info *pi = rv770_get_pi(rdev);
        struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
	struct ni_power_info *ni_pi = ni_get_pi(rdev);
	struct ni_ps *state = ni_get_ps(radeon_state);
	u32 prev_sclk;
	u32 max_sclk;
	u32 min_sclk;
	int i, ret;
	u32 tdp_limit;
	u32 near_tdp_limit;
	u32 power_boost_limit;
	u8 max_ps_percent;

	if (ni_pi->enable_power_containment == false)
		return 0;

	if (state->performance_level_count == 0)
		return -EINVAL;

	if (smc_state->levelCount != state->performance_level_count)
		return -EINVAL;

	ret = ni_calculate_adjusted_tdp_limits(rdev,
					       false, /* ??? */
					       rdev->pm.dpm.tdp_adjustment,
					       &tdp_limit,
					       &near_tdp_limit);
	if (ret)
		return ret;

	power_boost_limit = ni_calculate_power_boost_limit(rdev, radeon_state, near_tdp_limit);

	ret = rv770_write_smc_sram_dword(rdev,
					 pi->state_table_start +
					 offsetof(NISLANDS_SMC_STATETABLE, dpm2Params) +
					 offsetof(PP_NIslands_DPM2Parameters, PowerBoostLimit),
					 ni_scale_power_for_smc(power_boost_limit, ni_get_smc_power_scaling_factor(rdev)),
					 pi->sram_end);
	if (ret)
		power_boost_limit = 0;

	/* Lowest level: no pulse skipping, no TDP stepping. */
	smc_state->levels[0].dpm2.MaxPS = 0;
	smc_state->levels[0].dpm2.NearTDPDec = 0;
	smc_state->levels[0].dpm2.AboveSafeInc = 0;
	smc_state->levels[0].dpm2.BelowSafeInc = 0;
	smc_state->levels[0].stateFlags |= power_boost_limit ? PPSMC_STATEFLAG_POWERBOOST : 0;

	for (i = 1; i < state->performance_level_count; i++) {
		prev_sclk = state->performance_levels[i-1].sclk;
		max_sclk  = state->performance_levels[i].sclk;
		/* The top level may be throttled harder than the middle
		 * levels (different MAXPS percentage).
		 */
		max_ps_percent = (i != (state->performance_level_count - 1)) ?
			NISLANDS_DPM2_MAXPS_PERCENT_M : NISLANDS_DPM2_MAXPS_PERCENT_H;

		if (max_sclk < prev_sclk)
			return -EINVAL;

		if ((max_ps_percent == 0) || (prev_sclk == max_sclk) || eg_pi->uvd_enabled)
			min_sclk = max_sclk;
		else if (1 == i)
			min_sclk = prev_sclk;
		else
			min_sclk = (prev_sclk * (u32)max_ps_percent) / 100;

		/* Never throttle below the lowest level's sclk. */
		if (min_sclk < state->performance_levels[0].sclk)
			min_sclk = state->performance_levels[0].sclk;

		if (min_sclk == 0)
			return -EINVAL;

		/* MaxPS = fraction of pulses that may be skipped, scaled
		 * by the allowed sclk reduction for this level.
		 */
		smc_state->levels[i].dpm2.MaxPS =
			(u8)((NISLANDS_DPM2_MAX_PULSE_SKIP * (max_sclk - min_sclk)) / max_sclk);
		smc_state->levels[i].dpm2.NearTDPDec = NISLANDS_DPM2_NEAR_TDP_DEC;
		smc_state->levels[i].dpm2.AboveSafeInc = NISLANDS_DPM2_ABOVE_SAFE_INC;
		smc_state->levels[i].dpm2.BelowSafeInc = NISLANDS_DPM2_BELOW_SAFE_INC;
		smc_state->levels[i].stateFlags |=
			((i != (state->performance_level_count - 1)) && power_boost_limit) ?
			PPSMC_STATEFLAG_POWERBOOST : 0;
	}

	return 0;
}
2566 
/*
 * ni_populate_sq_ramping_values - fill in the per-level SQ (shader queue)
 * power-throttle ramping registers.
 *
 * SQ ramping is only applied to levels at or above the configured sclk
 * threshold, and only when every tuning constant fits its register
 * bitfield; otherwise the fields are left at their "disabled" mask
 * values.
 *
 * Returns 0 on success or -EINVAL on inconsistent level data or a zero
 * ramping threshold.
 */
static int ni_populate_sq_ramping_values(struct radeon_device *rdev,
					 struct radeon_ps *radeon_state,
					 NISLANDS_SMC_SWSTATE *smc_state)
{
	struct ni_power_info *ni_pi = ni_get_pi(rdev);
	struct ni_ps *state = ni_get_ps(radeon_state);
	u32 sq_power_throttle;
	u32 sq_power_throttle2;
	bool enable_sq_ramping = ni_pi->enable_sq_ramping;
	int i;

	if (state->performance_level_count == 0)
		return -EINVAL;

	if (smc_state->levelCount != state->performance_level_count)
		return -EINVAL;

	if (rdev->pm.dpm.sq_ramping_threshold == 0)
		return -EINVAL;

	/* Disable ramping if any tuning constant would overflow its
	 * register field.  (Compile-time constants; these all fold.)
	 */
	if (NISLANDS_DPM2_SQ_RAMP_MAX_POWER > (MAX_POWER_MASK >> MAX_POWER_SHIFT))
		enable_sq_ramping = false;

	if (NISLANDS_DPM2_SQ_RAMP_MIN_POWER > (MIN_POWER_MASK >> MIN_POWER_SHIFT))
		enable_sq_ramping = false;

	if (NISLANDS_DPM2_SQ_RAMP_MAX_POWER_DELTA > (MAX_POWER_DELTA_MASK >> MAX_POWER_DELTA_SHIFT))
		enable_sq_ramping = false;

	if (NISLANDS_DPM2_SQ_RAMP_STI_SIZE > (STI_SIZE_MASK >> STI_SIZE_SHIFT))
		enable_sq_ramping = false;

	/* NOTE(review): this comparison is <= where the others are > -
	 * presumably LTI_RATIO is valid only above the field range; confirm
	 * against the register spec.
	 */
	if (NISLANDS_DPM2_SQ_RAMP_LTI_RATIO <= (LTI_RATIO_MASK >> LTI_RATIO_SHIFT))
		enable_sq_ramping = false;

	for (i = 0; i < state->performance_level_count; i++) {
		sq_power_throttle  = 0;
		sq_power_throttle2 = 0;

		if ((state->performance_levels[i].sclk >= rdev->pm.dpm.sq_ramping_threshold) &&
		    enable_sq_ramping) {
			sq_power_throttle |= MAX_POWER(NISLANDS_DPM2_SQ_RAMP_MAX_POWER);
			sq_power_throttle |= MIN_POWER(NISLANDS_DPM2_SQ_RAMP_MIN_POWER);
			sq_power_throttle2 |= MAX_POWER_DELTA(NISLANDS_DPM2_SQ_RAMP_MAX_POWER_DELTA);
			sq_power_throttle2 |= STI_SIZE(NISLANDS_DPM2_SQ_RAMP_STI_SIZE);
			sq_power_throttle2 |= LTI_RATIO(NISLANDS_DPM2_SQ_RAMP_LTI_RATIO);
		} else {
			/* Full masks disable throttling for this level. */
			sq_power_throttle |= MAX_POWER_MASK | MIN_POWER_MASK;
			sq_power_throttle2 |= MAX_POWER_DELTA_MASK | STI_SIZE_MASK | LTI_RATIO_MASK;
		}

		smc_state->levels[i].SQPowerThrottle   = cpu_to_be32(sq_power_throttle);
		smc_state->levels[i].SQPowerThrottle_2 = cpu_to_be32(sq_power_throttle2);
	}

	return 0;
}
2624 
2625 static int ni_enable_power_containment(struct radeon_device *rdev,
2626 				       struct radeon_ps *radeon_new_state,
2627 				       bool enable)
2628 {
2629         struct ni_power_info *ni_pi = ni_get_pi(rdev);
2630 	PPSMC_Result smc_result;
2631 	int ret = 0;
2632 
2633 	if (ni_pi->enable_power_containment) {
2634 		if (enable) {
2635 			if (!r600_is_uvd_state(radeon_new_state->class, radeon_new_state->class2)) {
2636 				smc_result = rv770_send_msg_to_smc(rdev, PPSMC_TDPClampingActive);
2637 				if (smc_result != PPSMC_Result_OK) {
2638 					ret = -EINVAL;
2639 					ni_pi->pc_enabled = false;
2640 				} else {
2641 					ni_pi->pc_enabled = true;
2642 				}
2643 			}
2644 		} else {
2645 			smc_result = rv770_send_msg_to_smc(rdev, PPSMC_TDPClampingInactive);
2646 			if (smc_result != PPSMC_Result_OK)
2647 				ret = -EINVAL;
2648 			ni_pi->pc_enabled = false;
2649 		}
2650 	}
2651 
2652 	return ret;
2653 }
2654 
/*
 * ni_convert_power_state_to_smc - translate a full driver power state
 * into the SMC software-state structure.
 *
 * Converts each performance level, assigns arbitration/AC-timing indices
 * and display watermarks, programs the watermark threshold soft register,
 * and fills in switch periods, power containment and SQ ramping values.
 * Containment/ramping failures are non-fatal: the corresponding feature
 * is just disabled.
 *
 * Returns 0 on success or a negative error code.
 */
static int ni_convert_power_state_to_smc(struct radeon_device *rdev,
					 struct radeon_ps *radeon_state,
					 NISLANDS_SMC_SWSTATE *smc_state)
{
        struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
	struct ni_power_info *ni_pi = ni_get_pi(rdev);
	struct ni_ps *state = ni_get_ps(radeon_state);
	int i, ret;
	/* NOTE(review): the "* 100 / 100" is a no-op - presumably a
	 * placeholder for a tunable percentage of the top level's sclk.
	 */
	u32 threshold = state->performance_levels[state->performance_level_count - 1].sclk * 100 / 100;

	if (!(radeon_state->caps & ATOM_PPLIB_DISALLOW_ON_DC))
		smc_state->flags |= PPSMC_SWSTATE_FLAG_DC;

	smc_state->levelCount = 0;

	if (state->performance_level_count > NISLANDS_MAX_SMC_PERFORMANCE_LEVELS_PER_SWSTATE)
		return -EINVAL;

	for (i = 0; i < state->performance_level_count; i++) {
		ret = ni_convert_power_level_to_smc(rdev, &state->performance_levels[i],
						    &smc_state->levels[i]);
		smc_state->levels[i].arbRefreshState =
			(u8)(NISLANDS_DRIVER_STATE_ARB_INDEX + i);

		if (ret)
			return ret;

		if (ni_pi->enable_power_containment)
			smc_state->levels[i].displayWatermark =
				(state->performance_levels[i].sclk < threshold) ?
				PPSMC_DISPLAY_WATERMARK_LOW : PPSMC_DISPLAY_WATERMARK_HIGH;
		else
			smc_state->levels[i].displayWatermark = (i < 2) ?
				PPSMC_DISPLAY_WATERMARK_LOW : PPSMC_DISPLAY_WATERMARK_HIGH;

		if (eg_pi->dynamic_ac_timing)
			smc_state->levels[i].ACIndex = NISLANDS_MCREGISTERTABLE_FIRST_DRIVERSTATE_SLOT + i;
		else
			smc_state->levels[i].ACIndex = 0;

		smc_state->levelCount++;
	}

	rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_watermark_threshold,
				      cpu_to_be32(threshold / 512));

	ni_populate_smc_sp(rdev, radeon_state, smc_state);

	ret = ni_populate_power_containment_values(rdev, radeon_state, smc_state);
	if (ret)
		ni_pi->enable_power_containment = false;

	ret = ni_populate_sq_ramping_values(rdev, radeon_state, smc_state);
	if (ret)
		ni_pi->enable_sq_ramping = false;

	return ni_populate_smc_t(rdev, radeon_state, smc_state);
}
2713 
2714 static int ni_upload_sw_state(struct radeon_device *rdev,
2715 			      struct radeon_ps *radeon_new_state)
2716 {
2717 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
2718 	u16 address = pi->state_table_start +
2719 		offsetof(NISLANDS_SMC_STATETABLE, driverState);
2720 	u16 state_size = sizeof(NISLANDS_SMC_SWSTATE) +
2721 		((NISLANDS_MAX_SMC_PERFORMANCE_LEVELS_PER_SWSTATE - 1) * sizeof(NISLANDS_SMC_HW_PERFORMANCE_LEVEL));
2722 	int ret;
2723 	NISLANDS_SMC_SWSTATE *smc_state = kzalloc(state_size, GFP_KERNEL);
2724 
2725 	if (smc_state == NULL)
2726 		return -ENOMEM;
2727 
2728 	ret = ni_convert_power_state_to_smc(rdev, radeon_new_state, smc_state);
2729 	if (ret)
2730 		goto done;
2731 
2732 	ret = rv770_copy_bytes_to_smc(rdev, address, (u8 *)smc_state, state_size, pi->sram_end);
2733 
2734 done:
2735 	kfree(smc_state);
2736 
2737 	return ret;
2738 }
2739 
2740 static int ni_set_mc_special_registers(struct radeon_device *rdev,
2741 				       struct ni_mc_reg_table *table)
2742 {
2743 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
2744 	u8 i, j, k;
2745 	u32 temp_reg;
2746 
2747 	for (i = 0, j = table->last; i < table->last; i++) {
2748 		switch (table->mc_reg_address[i].s1) {
2749 		case MC_SEQ_MISC1 >> 2:
2750 			if (j >= SMC_NISLANDS_MC_REGISTER_ARRAY_SIZE)
2751 				return -EINVAL;
2752 			temp_reg = RREG32(MC_PMG_CMD_EMRS);
2753 			table->mc_reg_address[j].s1 = MC_PMG_CMD_EMRS >> 2;
2754 			table->mc_reg_address[j].s0 = MC_SEQ_PMG_CMD_EMRS_LP >> 2;
2755 			for (k = 0; k < table->num_entries; k++)
2756 				table->mc_reg_table_entry[k].mc_data[j] =
2757 					((temp_reg & 0xffff0000)) |
2758 					((table->mc_reg_table_entry[k].mc_data[i] & 0xffff0000) >> 16);
2759 			j++;
2760 			if (j >= SMC_NISLANDS_MC_REGISTER_ARRAY_SIZE)
2761 				return -EINVAL;
2762 
2763 			temp_reg = RREG32(MC_PMG_CMD_MRS);
2764 			table->mc_reg_address[j].s1 = MC_PMG_CMD_MRS >> 2;
2765 			table->mc_reg_address[j].s0 = MC_SEQ_PMG_CMD_MRS_LP >> 2;
2766 			for(k = 0; k < table->num_entries; k++) {
2767 				table->mc_reg_table_entry[k].mc_data[j] =
2768 					(temp_reg & 0xffff0000) |
2769 					(table->mc_reg_table_entry[k].mc_data[i] & 0x0000ffff);
2770 				if (!pi->mem_gddr5)
2771 					table->mc_reg_table_entry[k].mc_data[j] |= 0x100;
2772 			}
2773 			j++;
2774 			if (j > SMC_NISLANDS_MC_REGISTER_ARRAY_SIZE)
2775 				return -EINVAL;
2776 			break;
2777 		case MC_SEQ_RESERVE_M >> 2:
2778 			temp_reg = RREG32(MC_PMG_CMD_MRS1);
2779 			table->mc_reg_address[j].s1 = MC_PMG_CMD_MRS1 >> 2;
2780 			table->mc_reg_address[j].s0 = MC_SEQ_PMG_CMD_MRS1_LP >> 2;
2781 			for (k = 0; k < table->num_entries; k++)
2782 				table->mc_reg_table_entry[k].mc_data[j] =
2783 					(temp_reg & 0xffff0000) |
2784 					(table->mc_reg_table_entry[k].mc_data[i] & 0x0000ffff);
2785 			j++;
2786 			if (j > SMC_NISLANDS_MC_REGISTER_ARRAY_SIZE)
2787 				return -EINVAL;
2788 			break;
2789 		default:
2790 			break;
2791 		}
2792 	}
2793 
2794 	table->last = j;
2795 
2796 	return 0;
2797 }
2798 
2799 static bool ni_check_s0_mc_reg_index(u16 in_reg, u16 *out_reg)
2800 {
2801 	bool result = true;
2802 
2803 	switch (in_reg) {
2804         case  MC_SEQ_RAS_TIMING >> 2:
2805 		*out_reg = MC_SEQ_RAS_TIMING_LP >> 2;
2806 		break;
2807         case MC_SEQ_CAS_TIMING >> 2:
2808 		*out_reg = MC_SEQ_CAS_TIMING_LP >> 2;
2809 		break;
2810         case MC_SEQ_MISC_TIMING >> 2:
2811 		*out_reg = MC_SEQ_MISC_TIMING_LP >> 2;
2812 		break;
2813         case MC_SEQ_MISC_TIMING2 >> 2:
2814 		*out_reg = MC_SEQ_MISC_TIMING2_LP >> 2;
2815 		break;
2816         case MC_SEQ_RD_CTL_D0 >> 2:
2817 		*out_reg = MC_SEQ_RD_CTL_D0_LP >> 2;
2818 		break;
2819         case MC_SEQ_RD_CTL_D1 >> 2:
2820 		*out_reg = MC_SEQ_RD_CTL_D1_LP >> 2;
2821 		break;
2822         case MC_SEQ_WR_CTL_D0 >> 2:
2823 		*out_reg = MC_SEQ_WR_CTL_D0_LP >> 2;
2824 		break;
2825         case MC_SEQ_WR_CTL_D1 >> 2:
2826 		*out_reg = MC_SEQ_WR_CTL_D1_LP >> 2;
2827 		break;
2828         case MC_PMG_CMD_EMRS >> 2:
2829 		*out_reg = MC_SEQ_PMG_CMD_EMRS_LP >> 2;
2830 		break;
2831         case MC_PMG_CMD_MRS >> 2:
2832 		*out_reg = MC_SEQ_PMG_CMD_MRS_LP >> 2;
2833 		break;
2834         case MC_PMG_CMD_MRS1 >> 2:
2835 		*out_reg = MC_SEQ_PMG_CMD_MRS1_LP >> 2;
2836 		break;
2837         case MC_SEQ_PMG_TIMING >> 2:
2838 		*out_reg = MC_SEQ_PMG_TIMING_LP >> 2;
2839 		break;
2840         case MC_PMG_CMD_MRS2 >> 2:
2841 		*out_reg = MC_SEQ_PMG_CMD_MRS2_LP >> 2;
2842 		break;
2843         default:
2844 		result = false;
2845 		break;
2846 	}
2847 
2848 	return result;
2849 }
2850 
2851 static void ni_set_valid_flag(struct ni_mc_reg_table *table)
2852 {
2853 	u8 i, j;
2854 
2855 	for (i = 0; i < table->last; i++) {
2856 		for (j = 1; j < table->num_entries; j++) {
2857 			if (table->mc_reg_table_entry[j-1].mc_data[i] != table->mc_reg_table_entry[j].mc_data[i]) {
2858 				table->valid_flag |= 1 << i;
2859 				break;
2860 			}
2861 		}
2862 	}
2863 }
2864 
2865 static void ni_set_s0_mc_reg_index(struct ni_mc_reg_table *table)
2866 {
2867 	u32 i;
2868 	u16 address;
2869 
2870 	for (i = 0; i < table->last; i++)
2871 		table->mc_reg_address[i].s0 =
2872 			ni_check_s0_mc_reg_index(table->mc_reg_address[i].s1, &address) ?
2873 			address : table->mc_reg_address[i].s1;
2874 }
2875 
2876 static int ni_copy_vbios_mc_reg_table(struct atom_mc_reg_table *table,
2877 				      struct ni_mc_reg_table *ni_table)
2878 {
2879 	u8 i, j;
2880 
2881 	if (table->last > SMC_NISLANDS_MC_REGISTER_ARRAY_SIZE)
2882 		return -EINVAL;
2883 	if (table->num_entries > MAX_AC_TIMING_ENTRIES)
2884 		return -EINVAL;
2885 
2886 	for (i = 0; i < table->last; i++)
2887 		ni_table->mc_reg_address[i].s1 = table->mc_reg_address[i].s1;
2888 	ni_table->last = table->last;
2889 
2890 	for (i = 0; i < table->num_entries; i++) {
2891 		ni_table->mc_reg_table_entry[i].mclk_max =
2892 			table->mc_reg_table_entry[i].mclk_max;
2893 		for (j = 0; j < table->last; j++)
2894 			ni_table->mc_reg_table_entry[i].mc_data[j] =
2895 				table->mc_reg_table_entry[i].mc_data[j];
2896 	}
2897 	ni_table->num_entries = table->num_entries;
2898 
2899 	return 0;
2900 }
2901 
2902 static int ni_initialize_mc_reg_table(struct radeon_device *rdev)
2903 {
2904 	struct ni_power_info *ni_pi = ni_get_pi(rdev);
2905 	int ret;
2906 	struct atom_mc_reg_table *table;
2907 	struct ni_mc_reg_table *ni_table = &ni_pi->mc_reg_table;
2908 	u8 module_index = rv770_get_memory_module_index(rdev);
2909 
2910         table = kzalloc(sizeof(struct atom_mc_reg_table), GFP_KERNEL);
2911         if (!table)
2912                 return -ENOMEM;
2913 
2914 	WREG32(MC_SEQ_RAS_TIMING_LP, RREG32(MC_SEQ_RAS_TIMING));
2915 	WREG32(MC_SEQ_CAS_TIMING_LP, RREG32(MC_SEQ_CAS_TIMING));
2916 	WREG32(MC_SEQ_MISC_TIMING_LP, RREG32(MC_SEQ_MISC_TIMING));
2917 	WREG32(MC_SEQ_MISC_TIMING2_LP, RREG32(MC_SEQ_MISC_TIMING2));
2918 	WREG32(MC_SEQ_PMG_CMD_EMRS_LP, RREG32(MC_PMG_CMD_EMRS));
2919 	WREG32(MC_SEQ_PMG_CMD_MRS_LP, RREG32(MC_PMG_CMD_MRS));
2920 	WREG32(MC_SEQ_PMG_CMD_MRS1_LP, RREG32(MC_PMG_CMD_MRS1));
2921 	WREG32(MC_SEQ_WR_CTL_D0_LP, RREG32(MC_SEQ_WR_CTL_D0));
2922 	WREG32(MC_SEQ_WR_CTL_D1_LP, RREG32(MC_SEQ_WR_CTL_D1));
2923 	WREG32(MC_SEQ_RD_CTL_D0_LP, RREG32(MC_SEQ_RD_CTL_D0));
2924 	WREG32(MC_SEQ_RD_CTL_D1_LP, RREG32(MC_SEQ_RD_CTL_D1));
2925 	WREG32(MC_SEQ_PMG_TIMING_LP, RREG32(MC_SEQ_PMG_TIMING));
2926 	WREG32(MC_SEQ_PMG_CMD_MRS2_LP, RREG32(MC_PMG_CMD_MRS2));
2927 
2928 	ret = radeon_atom_init_mc_reg_table(rdev, module_index, table);
2929 
2930         if (ret)
2931                 goto init_mc_done;
2932 
2933 	ret = ni_copy_vbios_mc_reg_table(table, ni_table);
2934 
2935         if (ret)
2936                 goto init_mc_done;
2937 
2938 	ni_set_s0_mc_reg_index(ni_table);
2939 
2940 	ret = ni_set_mc_special_registers(rdev, ni_table);
2941 
2942         if (ret)
2943                 goto init_mc_done;
2944 
2945 	ni_set_valid_flag(ni_table);
2946 
2947 init_mc_done:
2948         kfree(table);
2949 
2950 	return ret;
2951 }
2952 
2953 static void ni_populate_mc_reg_addresses(struct radeon_device *rdev,
2954 					 SMC_NIslands_MCRegisters *mc_reg_table)
2955 {
2956 	struct ni_power_info *ni_pi = ni_get_pi(rdev);
2957 	u32 i, j;
2958 
2959 	for (i = 0, j = 0; j < ni_pi->mc_reg_table.last; j++) {
2960 		if (ni_pi->mc_reg_table.valid_flag & (1 << j)) {
2961 			if (i >= SMC_NISLANDS_MC_REGISTER_ARRAY_SIZE)
2962 				break;
2963 			mc_reg_table->address[i].s0 =
2964 				cpu_to_be16(ni_pi->mc_reg_table.mc_reg_address[j].s0);
2965 			mc_reg_table->address[i].s1 =
2966 				cpu_to_be16(ni_pi->mc_reg_table.mc_reg_address[j].s1);
2967 			i++;
2968 		}
2969 	}
2970 	mc_reg_table->last = (u8)i;
2971 }
2972 
2973 
2974 static void ni_convert_mc_registers(struct ni_mc_reg_entry *entry,
2975 				    SMC_NIslands_MCRegisterSet *data,
2976 				    u32 num_entries, u32 valid_flag)
2977 {
2978 	u32 i, j;
2979 
2980 	for (i = 0, j = 0; j < num_entries; j++) {
2981 		if (valid_flag & (1 << j)) {
2982 			data->value[i] = cpu_to_be32(entry->mc_data[j]);
2983 			i++;
2984 		}
2985 	}
2986 }
2987 
2988 static void ni_convert_mc_reg_table_entry_to_smc(struct radeon_device *rdev,
2989 						 struct rv7xx_pl *pl,
2990 						 SMC_NIslands_MCRegisterSet *mc_reg_table_data)
2991 {
2992 	struct ni_power_info *ni_pi = ni_get_pi(rdev);
2993 	u32 i = 0;
2994 
2995 	for (i = 0; i < ni_pi->mc_reg_table.num_entries; i++) {
2996 		if (pl->mclk <= ni_pi->mc_reg_table.mc_reg_table_entry[i].mclk_max)
2997 			break;
2998 	}
2999 
3000 	if ((i == ni_pi->mc_reg_table.num_entries) && (i > 0))
3001 		--i;
3002 
3003 	ni_convert_mc_registers(&ni_pi->mc_reg_table.mc_reg_table_entry[i],
3004 				mc_reg_table_data,
3005 				ni_pi->mc_reg_table.last,
3006 				ni_pi->mc_reg_table.valid_flag);
3007 }
3008 
3009 static void ni_convert_mc_reg_table_to_smc(struct radeon_device *rdev,
3010 					   struct radeon_ps *radeon_state,
3011 					   SMC_NIslands_MCRegisters *mc_reg_table)
3012 {
3013 	struct ni_ps *state = ni_get_ps(radeon_state);
3014 	int i;
3015 
3016 	for (i = 0; i < state->performance_level_count; i++) {
3017 		ni_convert_mc_reg_table_entry_to_smc(rdev,
3018 						     &state->performance_levels[i],
3019 						     &mc_reg_table->data[NISLANDS_MCREGISTERTABLE_FIRST_DRIVERSTATE_SLOT + i]);
3020 	}
3021 }
3022 
3023 static int ni_populate_mc_reg_table(struct radeon_device *rdev,
3024 				    struct radeon_ps *radeon_boot_state)
3025 {
3026 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
3027 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3028         struct ni_power_info *ni_pi = ni_get_pi(rdev);
3029 	struct ni_ps *boot_state = ni_get_ps(radeon_boot_state);
3030 	SMC_NIslands_MCRegisters *mc_reg_table = &ni_pi->smc_mc_reg_table;
3031 
3032 	memset(mc_reg_table, 0, sizeof(SMC_NIslands_MCRegisters));
3033 
3034 	rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_seq_index, 1);
3035 
3036 	ni_populate_mc_reg_addresses(rdev, mc_reg_table);
3037 
3038 	ni_convert_mc_reg_table_entry_to_smc(rdev, &boot_state->performance_levels[0],
3039 					     &mc_reg_table->data[0]);
3040 
3041 	ni_convert_mc_registers(&ni_pi->mc_reg_table.mc_reg_table_entry[0],
3042 				&mc_reg_table->data[1],
3043 				ni_pi->mc_reg_table.last,
3044 				ni_pi->mc_reg_table.valid_flag);
3045 
3046 	ni_convert_mc_reg_table_to_smc(rdev, radeon_boot_state, mc_reg_table);
3047 
3048 	return rv770_copy_bytes_to_smc(rdev, eg_pi->mc_reg_table_start,
3049 				       (u8 *)mc_reg_table,
3050 				       sizeof(SMC_NIslands_MCRegisters),
3051 				       pi->sram_end);
3052 }
3053 
3054 static int ni_upload_mc_reg_table(struct radeon_device *rdev,
3055 				  struct radeon_ps *radeon_new_state)
3056 {
3057 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
3058 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3059         struct ni_power_info *ni_pi = ni_get_pi(rdev);
3060 	struct ni_ps *ni_new_state = ni_get_ps(radeon_new_state);
3061 	SMC_NIslands_MCRegisters *mc_reg_table = &ni_pi->smc_mc_reg_table;
3062 	u16 address;
3063 
3064 	memset(mc_reg_table, 0, sizeof(SMC_NIslands_MCRegisters));
3065 
3066 	ni_convert_mc_reg_table_to_smc(rdev, radeon_new_state, mc_reg_table);
3067 
3068 	address = eg_pi->mc_reg_table_start +
3069 		(u16)offsetof(SMC_NIslands_MCRegisters, data[NISLANDS_MCREGISTERTABLE_FIRST_DRIVERSTATE_SLOT]);
3070 
3071 	return rv770_copy_bytes_to_smc(rdev, address,
3072 				       (u8 *)&mc_reg_table->data[NISLANDS_MCREGISTERTABLE_FIRST_DRIVERSTATE_SLOT],
3073 				       sizeof(SMC_NIslands_MCRegisterSet) * ni_new_state->performance_level_count,
3074 				       pi->sram_end);
3075 }
3076 
3077 static int ni_init_driver_calculated_leakage_table(struct radeon_device *rdev,
3078 						   PP_NIslands_CACTABLES *cac_tables)
3079 {
3080 	struct ni_power_info *ni_pi = ni_get_pi(rdev);
3081 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3082 	u32 leakage = 0;
3083 	unsigned int i, j, table_size;
3084 	s32 t;
3085 	u32 smc_leakage, max_leakage = 0;
3086 	u32 scaling_factor;
3087 
3088 	table_size = eg_pi->vddc_voltage_table.count;
3089 
3090 	if (SMC_NISLANDS_LKGE_LUT_NUM_OF_VOLT_ENTRIES < table_size)
3091 		table_size = SMC_NISLANDS_LKGE_LUT_NUM_OF_VOLT_ENTRIES;
3092 
3093 	scaling_factor = ni_get_smc_power_scaling_factor(rdev);
3094 
3095 	for (i = 0; i < SMC_NISLANDS_LKGE_LUT_NUM_OF_TEMP_ENTRIES; i++) {
3096 		for (j = 0; j < table_size; j++) {
3097 			t = (1000 * ((i + 1) * 8));
3098 
3099 			if (t < ni_pi->cac_data.leakage_minimum_temperature)
3100 				t = ni_pi->cac_data.leakage_minimum_temperature;
3101 
3102 			ni_calculate_leakage_for_v_and_t(rdev,
3103 							 &ni_pi->cac_data.leakage_coefficients,
3104 							 eg_pi->vddc_voltage_table.entries[j].value,
3105 							 t,
3106 							 ni_pi->cac_data.i_leakage,
3107 							 &leakage);
3108 
3109 			smc_leakage = ni_scale_power_for_smc(leakage, scaling_factor) / 1000;
3110 			if (smc_leakage > max_leakage)
3111 				max_leakage = smc_leakage;
3112 
3113 			cac_tables->cac_lkge_lut[i][j] = cpu_to_be32(smc_leakage);
3114 		}
3115 	}
3116 
3117 	for (j = table_size; j < SMC_NISLANDS_LKGE_LUT_NUM_OF_VOLT_ENTRIES; j++) {
3118 		for (i = 0; i < SMC_NISLANDS_LKGE_LUT_NUM_OF_TEMP_ENTRIES; i++)
3119 			cac_tables->cac_lkge_lut[i][j] = cpu_to_be32(max_leakage);
3120 	}
3121 	return 0;
3122 }
3123 
/*
 * Fill the SMC leakage LUT from the platform-provided per-voltage leakage
 * table.  Leakage here depends only on voltage, so each value is
 * replicated down the temperature axis; unused voltage columns are padded
 * with the worst observed leakage.
 *
 * Returns 0 on success, -EINVAL if no usable table entries exist.
 */
static int ni_init_simplified_leakage_table(struct radeon_device *rdev,
					    PP_NIslands_CACTABLES *cac_tables)
{
	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
	struct radeon_cac_leakage_table *leakage_table =
		&rdev->pm.dpm.dyn_state.cac_leakage_table;
	u32 i, j, table_size;
	u32 smc_leakage, max_leakage = 0;
	u32 scaling_factor;

	/* NOTE(review): leakage_table is the address of an embedded struct
	 * and can never be NULL; check kept as defensive boilerplate. */
	if (!leakage_table)
		return -EINVAL;

	table_size = leakage_table->count;

	/* use the smaller of the voltage-table and leakage-table sizes */
	if (eg_pi->vddc_voltage_table.count != table_size)
		table_size = (eg_pi->vddc_voltage_table.count < leakage_table->count) ?
			eg_pi->vddc_voltage_table.count : leakage_table->count;

	/* clamp to what the LUT can hold */
	if (SMC_NISLANDS_LKGE_LUT_NUM_OF_VOLT_ENTRIES < table_size)
		table_size = SMC_NISLANDS_LKGE_LUT_NUM_OF_VOLT_ENTRIES;

	if (table_size == 0)
		return -EINVAL;

	scaling_factor = ni_get_smc_power_scaling_factor(rdev);

	for (j = 0; j < table_size; j++) {
		smc_leakage = leakage_table->entries[j].leakage;

		if (smc_leakage > max_leakage)
			max_leakage = smc_leakage;

		/* same value for every temperature row of this voltage */
		for (i = 0; i < SMC_NISLANDS_LKGE_LUT_NUM_OF_TEMP_ENTRIES; i++)
			cac_tables->cac_lkge_lut[i][j] =
				cpu_to_be32(ni_scale_power_for_smc(smc_leakage, scaling_factor));
	}

	/* pad unused voltage columns with the worst-case leakage */
	for (j = table_size; j < SMC_NISLANDS_LKGE_LUT_NUM_OF_VOLT_ENTRIES; j++) {
		for (i = 0; i < SMC_NISLANDS_LKGE_LUT_NUM_OF_TEMP_ENTRIES; i++)
			cac_tables->cac_lkge_lut[i][j] =
				cpu_to_be32(ni_scale_power_for_smc(max_leakage, scaling_factor));
	}
	return 0;
}
3169 
/*
 * Program the CAC (capacitance/leakage) configuration: the CG_CAC_CTRL
 * TID fields, the driver-side DC CAC table, and the SMC-side CAC tables
 * (BIF LUT, leakage LUT, weights/constants), then upload them to SMC RAM.
 *
 * Failures are not fatal: on any error CAC and power containment are
 * simply disabled and 0 is returned.
 */
static int ni_initialize_smc_cac_tables(struct radeon_device *rdev)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	struct ni_power_info *ni_pi = ni_get_pi(rdev);
	PP_NIslands_CACTABLES *cac_tables = NULL;
	int i, ret;
        u32 reg;

	if (ni_pi->enable_cac == false)
		return 0;

	cac_tables = kzalloc(sizeof(PP_NIslands_CACTABLES), GFP_KERNEL);
	if (!cac_tables)
		return -ENOMEM;

	/* set the TID count/unit from the per-asic weight table */
	reg = RREG32(CG_CAC_CTRL) & ~(TID_CNT_MASK | TID_UNIT_MASK);
	reg |= (TID_CNT(ni_pi->cac_weights->tid_cnt) |
		TID_UNIT(ni_pi->cac_weights->tid_unit));
	WREG32(CG_CAC_CTRL, reg);

	for (i = 0; i < NISLANDS_DCCAC_MAX_LEVELS; i++)
		ni_pi->dc_cac_table[i] = ni_pi->cac_weights->dc_cac[i];

	for (i = 0; i < SMC_NISLANDS_BIF_LUT_NUM_OF_ENTRIES; i++)
		cac_tables->cac_bif_lut[i] = ni_pi->cac_weights->pcie_cac[i];

	ni_pi->cac_data.i_leakage = rdev->pm.dpm.cac_leakage;
	ni_pi->cac_data.pwr_const = 0;
	ni_pi->cac_data.dc_cac_value = ni_pi->dc_cac_table[NISLANDS_DCCAC_LEVEL_0];
	ni_pi->cac_data.bif_cac_value = 0;
	ni_pi->cac_data.mc_wr_weight = ni_pi->cac_weights->mc_write_weight;
	ni_pi->cac_data.mc_rd_weight = ni_pi->cac_weights->mc_read_weight;
	ni_pi->cac_data.allow_ovrflw = 0;
	ni_pi->cac_data.l2num_win_tdp = ni_pi->lta_window_size;
	ni_pi->cac_data.num_win_tdp = 0;
	ni_pi->cac_data.lts_truncate_n = ni_pi->lts_truncate;

	/* leakage LUT: either computed from coefficients or taken from the
	 * platform's simplified per-voltage table */
	if (ni_pi->driver_calculate_cac_leakage)
		ret = ni_init_driver_calculated_leakage_table(rdev, cac_tables);
	else
		ret = ni_init_simplified_leakage_table(rdev, cac_tables);

	if (ret)
		goto done_free;

	/* byte-swap the scalar fields for the big-endian SMC */
	cac_tables->pwr_const      = cpu_to_be32(ni_pi->cac_data.pwr_const);
	cac_tables->dc_cacValue    = cpu_to_be32(ni_pi->cac_data.dc_cac_value);
	cac_tables->bif_cacValue   = cpu_to_be32(ni_pi->cac_data.bif_cac_value);
	cac_tables->AllowOvrflw    = ni_pi->cac_data.allow_ovrflw;
	cac_tables->MCWrWeight     = ni_pi->cac_data.mc_wr_weight;
	cac_tables->MCRdWeight     = ni_pi->cac_data.mc_rd_weight;
	cac_tables->numWin_TDP     = ni_pi->cac_data.num_win_tdp;
	cac_tables->l2numWin_TDP   = ni_pi->cac_data.l2num_win_tdp;
	cac_tables->lts_truncate_n = ni_pi->cac_data.lts_truncate_n;

	ret = rv770_copy_bytes_to_smc(rdev, ni_pi->cac_table_start, (u8 *)cac_tables,
				      sizeof(PP_NIslands_CACTABLES), pi->sram_end);

done_free:
	/* degrade gracefully: disable CAC features instead of failing */
	if (ret) {
		ni_pi->enable_cac = false;
		ni_pi->enable_power_containment = false;
	}

	kfree(cac_tables);

	return 0;
}
3238 
/*
 * Program the hardware CAC manager: load the per-block signal weights
 * from the asic-specific weight table into the CG_CAC_REGION_* registers,
 * the SQ thresholds, and the MC read/write weights.
 *
 * The sequence is a fixed list of read-modify-write register updates;
 * each write clears the relevant fields and inserts the table values.
 *
 * Returns 0 on success (or when CAC configuration is not required),
 * -EINVAL if no weight table is installed.
 */
static int ni_initialize_hardware_cac_manager(struct radeon_device *rdev)
{
	struct ni_power_info *ni_pi = ni_get_pi(rdev);
	u32 reg;

	if (!ni_pi->enable_cac ||
	    !ni_pi->cac_configuration_required)
		return 0;

	if (ni_pi->cac_weights == NULL)
		return -EINVAL;

	/* region 1: texture cache / TA weights */
	reg = RREG32_CG(CG_CAC_REGION_1_WEIGHT_0) & ~(WEIGHT_TCP_SIG0_MASK |
						      WEIGHT_TCP_SIG1_MASK |
						      WEIGHT_TA_SIG_MASK);
	reg |= (WEIGHT_TCP_SIG0(ni_pi->cac_weights->weight_tcp_sig0) |
		WEIGHT_TCP_SIG1(ni_pi->cac_weights->weight_tcp_sig1) |
		WEIGHT_TA_SIG(ni_pi->cac_weights->weight_ta_sig));
	WREG32_CG(CG_CAC_REGION_1_WEIGHT_0, reg);

	reg = RREG32_CG(CG_CAC_REGION_1_WEIGHT_1) & ~(WEIGHT_TCC_EN0_MASK |
						      WEIGHT_TCC_EN1_MASK |
						      WEIGHT_TCC_EN2_MASK);
	reg |= (WEIGHT_TCC_EN0(ni_pi->cac_weights->weight_tcc_en0) |
		WEIGHT_TCC_EN1(ni_pi->cac_weights->weight_tcc_en1) |
		WEIGHT_TCC_EN2(ni_pi->cac_weights->weight_tcc_en2));
	WREG32_CG(CG_CAC_REGION_1_WEIGHT_1, reg);

	/* region 2: CB / DB / SX weights */
	reg = RREG32_CG(CG_CAC_REGION_2_WEIGHT_0) & ~(WEIGHT_CB_EN0_MASK |
						      WEIGHT_CB_EN1_MASK |
						      WEIGHT_CB_EN2_MASK |
						      WEIGHT_CB_EN3_MASK);
	reg |= (WEIGHT_CB_EN0(ni_pi->cac_weights->weight_cb_en0) |
		WEIGHT_CB_EN1(ni_pi->cac_weights->weight_cb_en1) |
		WEIGHT_CB_EN2(ni_pi->cac_weights->weight_cb_en2) |
		WEIGHT_CB_EN3(ni_pi->cac_weights->weight_cb_en3));
	WREG32_CG(CG_CAC_REGION_2_WEIGHT_0, reg);

	reg = RREG32_CG(CG_CAC_REGION_2_WEIGHT_1) & ~(WEIGHT_DB_SIG0_MASK |
						      WEIGHT_DB_SIG1_MASK |
						      WEIGHT_DB_SIG2_MASK |
						      WEIGHT_DB_SIG3_MASK);
	reg |= (WEIGHT_DB_SIG0(ni_pi->cac_weights->weight_db_sig0) |
		WEIGHT_DB_SIG1(ni_pi->cac_weights->weight_db_sig1) |
		WEIGHT_DB_SIG2(ni_pi->cac_weights->weight_db_sig2) |
		WEIGHT_DB_SIG3(ni_pi->cac_weights->weight_db_sig3));
	WREG32_CG(CG_CAC_REGION_2_WEIGHT_1, reg);

	reg = RREG32_CG(CG_CAC_REGION_2_WEIGHT_2) & ~(WEIGHT_SXM_SIG0_MASK |
						      WEIGHT_SXM_SIG1_MASK |
						      WEIGHT_SXM_SIG2_MASK |
						      WEIGHT_SXS_SIG0_MASK |
						      WEIGHT_SXS_SIG1_MASK);
	reg |= (WEIGHT_SXM_SIG0(ni_pi->cac_weights->weight_sxm_sig0) |
		WEIGHT_SXM_SIG1(ni_pi->cac_weights->weight_sxm_sig1) |
		WEIGHT_SXM_SIG2(ni_pi->cac_weights->weight_sxm_sig2) |
		WEIGHT_SXS_SIG0(ni_pi->cac_weights->weight_sxs_sig0) |
		WEIGHT_SXS_SIG1(ni_pi->cac_weights->weight_sxs_sig1));
	WREG32_CG(CG_CAC_REGION_2_WEIGHT_2, reg);

	/* region 3: crossbar / SPI weights */
	reg = RREG32_CG(CG_CAC_REGION_3_WEIGHT_0) & ~(WEIGHT_XBR_0_MASK |
						      WEIGHT_XBR_1_MASK |
						      WEIGHT_XBR_2_MASK |
						      WEIGHT_SPI_SIG0_MASK);
	reg |= (WEIGHT_XBR_0(ni_pi->cac_weights->weight_xbr_0) |
		WEIGHT_XBR_1(ni_pi->cac_weights->weight_xbr_1) |
		WEIGHT_XBR_2(ni_pi->cac_weights->weight_xbr_2) |
		WEIGHT_SPI_SIG0(ni_pi->cac_weights->weight_spi_sig0));
	WREG32_CG(CG_CAC_REGION_3_WEIGHT_0, reg);

	reg = RREG32_CG(CG_CAC_REGION_3_WEIGHT_1) & ~(WEIGHT_SPI_SIG1_MASK |
						      WEIGHT_SPI_SIG2_MASK |
						      WEIGHT_SPI_SIG3_MASK |
						      WEIGHT_SPI_SIG4_MASK |
						      WEIGHT_SPI_SIG5_MASK);
	reg |= (WEIGHT_SPI_SIG1(ni_pi->cac_weights->weight_spi_sig1) |
		WEIGHT_SPI_SIG2(ni_pi->cac_weights->weight_spi_sig2) |
		WEIGHT_SPI_SIG3(ni_pi->cac_weights->weight_spi_sig3) |
		WEIGHT_SPI_SIG4(ni_pi->cac_weights->weight_spi_sig4) |
		WEIGHT_SPI_SIG5(ni_pi->cac_weights->weight_spi_sig5));
	WREG32_CG(CG_CAC_REGION_3_WEIGHT_1, reg);

	/* region 4: LDS / SC / BIF / CP / PA / VGT / DC / UVD weights */
	reg = RREG32_CG(CG_CAC_REGION_4_WEIGHT_0) & ~(WEIGHT_LDS_SIG0_MASK |
						      WEIGHT_LDS_SIG1_MASK |
						      WEIGHT_SC_MASK);
	reg |= (WEIGHT_LDS_SIG0(ni_pi->cac_weights->weight_lds_sig0) |
		WEIGHT_LDS_SIG1(ni_pi->cac_weights->weight_lds_sig1) |
		WEIGHT_SC(ni_pi->cac_weights->weight_sc));
	WREG32_CG(CG_CAC_REGION_4_WEIGHT_0, reg);

	reg = RREG32_CG(CG_CAC_REGION_4_WEIGHT_1) & ~(WEIGHT_BIF_MASK |
						      WEIGHT_CP_MASK |
						      WEIGHT_PA_SIG0_MASK |
						      WEIGHT_PA_SIG1_MASK |
						      WEIGHT_VGT_SIG0_MASK);
	reg |= (WEIGHT_BIF(ni_pi->cac_weights->weight_bif) |
		WEIGHT_CP(ni_pi->cac_weights->weight_cp) |
		WEIGHT_PA_SIG0(ni_pi->cac_weights->weight_pa_sig0) |
		WEIGHT_PA_SIG1(ni_pi->cac_weights->weight_pa_sig1) |
		WEIGHT_VGT_SIG0(ni_pi->cac_weights->weight_vgt_sig0));
	WREG32_CG(CG_CAC_REGION_4_WEIGHT_1, reg);

	reg = RREG32_CG(CG_CAC_REGION_4_WEIGHT_2) & ~(WEIGHT_VGT_SIG1_MASK |
						      WEIGHT_VGT_SIG2_MASK |
						      WEIGHT_DC_SIG0_MASK |
						      WEIGHT_DC_SIG1_MASK |
						      WEIGHT_DC_SIG2_MASK);
	reg |= (WEIGHT_VGT_SIG1(ni_pi->cac_weights->weight_vgt_sig1) |
		WEIGHT_VGT_SIG2(ni_pi->cac_weights->weight_vgt_sig2) |
		WEIGHT_DC_SIG0(ni_pi->cac_weights->weight_dc_sig0) |
		WEIGHT_DC_SIG1(ni_pi->cac_weights->weight_dc_sig1) |
		WEIGHT_DC_SIG2(ni_pi->cac_weights->weight_dc_sig2));
	WREG32_CG(CG_CAC_REGION_4_WEIGHT_2, reg);

	reg = RREG32_CG(CG_CAC_REGION_4_WEIGHT_3) & ~(WEIGHT_DC_SIG3_MASK |
						      WEIGHT_UVD_SIG0_MASK |
						      WEIGHT_UVD_SIG1_MASK |
						      WEIGHT_SPARE0_MASK |
						      WEIGHT_SPARE1_MASK);
	reg |= (WEIGHT_DC_SIG3(ni_pi->cac_weights->weight_dc_sig3) |
		WEIGHT_UVD_SIG0(ni_pi->cac_weights->weight_uvd_sig0) |
		WEIGHT_UVD_SIG1(ni_pi->cac_weights->weight_uvd_sig1) |
		WEIGHT_SPARE0(ni_pi->cac_weights->weight_spare0) |
		WEIGHT_SPARE1(ni_pi->cac_weights->weight_spare1));
	WREG32_CG(CG_CAC_REGION_4_WEIGHT_3, reg);

	/* region 5: SQ weights */
	reg = RREG32_CG(CG_CAC_REGION_5_WEIGHT_0) & ~(WEIGHT_SQ_VSP_MASK |
						      WEIGHT_SQ_VSP0_MASK);
	reg |= (WEIGHT_SQ_VSP(ni_pi->cac_weights->weight_sq_vsp) |
		WEIGHT_SQ_VSP0(ni_pi->cac_weights->weight_sq_vsp0));
	WREG32_CG(CG_CAC_REGION_5_WEIGHT_0, reg);

	reg = RREG32_CG(CG_CAC_REGION_5_WEIGHT_1) & ~(WEIGHT_SQ_GPR_MASK);
	reg |= WEIGHT_SQ_GPR(ni_pi->cac_weights->weight_sq_gpr);
	WREG32_CG(CG_CAC_REGION_5_WEIGHT_1, reg);

	/* region 4 spare-field overrides */
	reg = RREG32_CG(CG_CAC_REGION_4_OVERRIDE_4) & ~(OVR_MODE_SPARE_0_MASK |
							OVR_VAL_SPARE_0_MASK |
							OVR_MODE_SPARE_1_MASK |
							OVR_VAL_SPARE_1_MASK);
	reg |= (OVR_MODE_SPARE_0(ni_pi->cac_weights->ovr_mode_spare_0) |
		OVR_VAL_SPARE_0(ni_pi->cac_weights->ovr_val_spare_0) |
		OVR_MODE_SPARE_1(ni_pi->cac_weights->ovr_mode_spare_1) |
		OVR_VAL_SPARE_1(ni_pi->cac_weights->ovr_val_spare_1));
	WREG32_CG(CG_CAC_REGION_4_OVERRIDE_4, reg);

	/* SQ CAC thresholds */
	reg = RREG32(SQ_CAC_THRESHOLD) & ~(VSP_MASK |
					   VSP0_MASK |
					   GPR_MASK);
	reg |= (VSP(ni_pi->cac_weights->vsp) |
		VSP0(ni_pi->cac_weights->vsp0) |
		GPR(ni_pi->cac_weights->gpr));
	WREG32(SQ_CAC_THRESHOLD, reg);

	/* MC read/write weights, written through the MC CG dataport */
	reg = (MCDW_WR_ENABLE |
	       MCDX_WR_ENABLE |
	       MCDY_WR_ENABLE |
	       MCDZ_WR_ENABLE |
	       INDEX(0x09D4));
	WREG32(MC_CG_CONFIG, reg);

	reg = (READ_WEIGHT(ni_pi->cac_weights->mc_read_weight) |
	       WRITE_WEIGHT(ni_pi->cac_weights->mc_write_weight) |
	       ALLOW_OVERFLOW);
	WREG32(MC_CG_DATAPORT, reg);

	return 0;
}
3407 
3408 static int ni_enable_smc_cac(struct radeon_device *rdev,
3409 			     struct radeon_ps *radeon_new_state,
3410 			     bool enable)
3411 {
3412 	struct ni_power_info *ni_pi = ni_get_pi(rdev);
3413 	int ret = 0;
3414 	PPSMC_Result smc_result;
3415 
3416 	if (ni_pi->enable_cac) {
3417 		if (enable) {
3418 			if (!r600_is_uvd_state(radeon_new_state->class, radeon_new_state->class2)) {
3419 				smc_result = rv770_send_msg_to_smc(rdev, PPSMC_MSG_CollectCAC_PowerCorreln);
3420 
3421 				if (ni_pi->support_cac_long_term_average) {
3422 					smc_result = rv770_send_msg_to_smc(rdev, PPSMC_CACLongTermAvgEnable);
3423 					if (PPSMC_Result_OK != smc_result)
3424 						ni_pi->support_cac_long_term_average = false;
3425 				}
3426 
3427 				smc_result = rv770_send_msg_to_smc(rdev, PPSMC_MSG_EnableCac);
3428 				if (PPSMC_Result_OK != smc_result)
3429 					ret = -EINVAL;
3430 
3431 				ni_pi->cac_enabled = (PPSMC_Result_OK == smc_result) ? true : false;
3432 			}
3433 		} else if (ni_pi->cac_enabled) {
3434 			smc_result = rv770_send_msg_to_smc(rdev, PPSMC_MSG_DisableCac);
3435 
3436 			ni_pi->cac_enabled = false;
3437 
3438 			if (ni_pi->support_cac_long_term_average) {
3439 				smc_result = rv770_send_msg_to_smc(rdev, PPSMC_CACLongTermAvgDisable);
3440 				if (PPSMC_Result_OK != smc_result)
3441 					ni_pi->support_cac_long_term_average = false;
3442 			}
3443 		}
3444 	}
3445 
3446 	return ret;
3447 }
3448 
3449 static int ni_pcie_performance_request(struct radeon_device *rdev,
3450 				       u8 perf_req, bool advertise)
3451 {
3452 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3453 
3454 #if defined(CONFIG_ACPI)
3455 	if ((perf_req == PCIE_PERF_REQ_PECI_GEN1) ||
3456             (perf_req == PCIE_PERF_REQ_PECI_GEN2)) {
3457 		if (eg_pi->pcie_performance_request_registered == false)
3458 			radeon_acpi_pcie_notify_device_ready(rdev);
3459 		eg_pi->pcie_performance_request_registered = true;
3460 		return radeon_acpi_pcie_performance_request(rdev, perf_req, advertise);
3461 	} else if ((perf_req == PCIE_PERF_REQ_REMOVE_REGISTRY) &&
3462                    eg_pi->pcie_performance_request_registered) {
3463 		eg_pi->pcie_performance_request_registered = false;
3464 		return radeon_acpi_pcie_performance_request(rdev, perf_req, advertise);
3465 	}
3466 #endif
3467 	return 0;
3468 }
3469 
3470 static int ni_advertise_gen2_capability(struct radeon_device *rdev)
3471 {
3472 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
3473 	u32 tmp;
3474 
3475         tmp = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
3476 
3477         if ((tmp & LC_OTHER_SIDE_EVER_SENT_GEN2) &&
3478             (tmp & LC_OTHER_SIDE_SUPPORTS_GEN2))
3479                 pi->pcie_gen2 = true;
3480         else
3481 		pi->pcie_gen2 = false;
3482 
3483 	if (!pi->pcie_gen2)
3484 		ni_pcie_performance_request(rdev, PCIE_PERF_REQ_PECI_GEN2, true);
3485 
3486 	return 0;
3487 }
3488 
/*
 * Enable/disable hardware-controlled dynamic PCIe gen2 switching in the
 * BIF.  Only acts when the link partner both sent and supports gen2.
 * The strap/clear-counter write ordering (including the udelay between
 * the two enable-path writes) is deliberate; do not reorder.
 */
static void ni_enable_bif_dynamic_pcie_gen2(struct radeon_device *rdev,
					    bool enable)
{
        struct rv7xx_power_info *pi = rv770_get_pi(rdev);
        u32 tmp, bif;

	tmp = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);

	if ((tmp & LC_OTHER_SIDE_EVER_SENT_GEN2) &&
	    (tmp & LC_OTHER_SIDE_SUPPORTS_GEN2)) {
		if (enable) {
			/* if we booted at gen1, reprogram the BIF client request */
			if (!pi->boot_in_gen2) {
				bif = RREG32(CG_BIF_REQ_AND_RSP) & ~CG_CLIENT_REQ_MASK;
				bif |= CG_CLIENT_REQ(0xd);
				WREG32(CG_BIF_REQ_AND_RSP, bif);
			}
			tmp &= ~LC_HW_VOLTAGE_IF_CONTROL_MASK;
			tmp |= LC_HW_VOLTAGE_IF_CONTROL(1);
			tmp |= LC_GEN2_EN_STRAP;

			/* pulse the failed-speed-change counter clear */
			tmp |= LC_CLR_FAILED_SPD_CHANGE_CNT;
			WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, tmp);
			udelay(10);
			tmp &= ~LC_CLR_FAILED_SPD_CHANGE_CNT;
			WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, tmp);
		} else {
			if (!pi->boot_in_gen2) {
				bif = RREG32(CG_BIF_REQ_AND_RSP) & ~CG_CLIENT_REQ_MASK;
				bif |= CG_CLIENT_REQ(0xd);
				WREG32(CG_BIF_REQ_AND_RSP, bif);

				tmp &= ~LC_HW_VOLTAGE_IF_CONTROL_MASK;
				tmp &= ~LC_GEN2_EN_STRAP;
			}
			WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, tmp);
		}
	}
}
3527 
3528 static void ni_enable_dynamic_pcie_gen2(struct radeon_device *rdev,
3529 					bool enable)
3530 {
3531 	ni_enable_bif_dynamic_pcie_gen2(rdev, enable);
3532 
3533 	if (enable)
3534 		WREG32_P(GENERAL_PWRMGT, ENABLE_GEN2PCIE, ~ENABLE_GEN2PCIE);
3535 	else
3536                 WREG32_P(GENERAL_PWRMGT, 0, ~ENABLE_GEN2PCIE);
3537 }
3538 
3539 void ni_set_uvd_clock_before_set_eng_clock(struct radeon_device *rdev,
3540 					   struct radeon_ps *new_ps,
3541 					   struct radeon_ps *old_ps)
3542 {
3543 	struct ni_ps *new_state = ni_get_ps(new_ps);
3544 	struct ni_ps *current_state = ni_get_ps(old_ps);
3545 
3546 	if ((new_ps->vclk == old_ps->vclk) &&
3547 	    (new_ps->dclk == old_ps->dclk))
3548 		return;
3549 
3550 	if (new_state->performance_levels[new_state->performance_level_count - 1].sclk >=
3551 	    current_state->performance_levels[current_state->performance_level_count - 1].sclk)
3552 		return;
3553 
3554 	radeon_set_uvd_clocks(rdev, new_ps->vclk, new_ps->dclk);
3555 }
3556 
3557 void ni_set_uvd_clock_after_set_eng_clock(struct radeon_device *rdev,
3558 					  struct radeon_ps *new_ps,
3559 					  struct radeon_ps *old_ps)
3560 {
3561 	struct ni_ps *new_state = ni_get_ps(new_ps);
3562 	struct ni_ps *current_state = ni_get_ps(old_ps);
3563 
3564 	if ((new_ps->vclk == old_ps->vclk) &&
3565 	    (new_ps->dclk == old_ps->dclk))
3566 		return;
3567 
3568 	if (new_state->performance_levels[new_state->performance_level_count - 1].sclk <
3569 	    current_state->performance_levels[current_state->performance_level_count - 1].sclk)
3570 		return;
3571 
3572 	radeon_set_uvd_clocks(rdev, new_ps->vclk, new_ps->dclk);
3573 }
3574 
/*
 * One-time ASIC-side dpm setup: cache clock and arbiter registers, detect
 * the memory type, optionally advertise PCIe gen2 to the platform, read
 * the current gen2 link status, and enable ACPI power management.
 */
void ni_dpm_setup_asic(struct radeon_device *rdev)
{
	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);

	ni_read_clock_registers(rdev);
	btc_read_arb_registers(rdev);
	rv770_get_memory_type(rdev);
	if (eg_pi->pcie_performance_request)
		ni_advertise_gen2_capability(rdev);
	rv770_get_pcie_gen2_status(rdev);
	rv770_enable_acpi_pm(rdev);
}
3587 
3588 void ni_update_current_ps(struct radeon_device *rdev,
3589 			  struct radeon_ps *rps)
3590 {
3591 	struct ni_ps *new_ps = ni_get_ps(rps);
3592 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3593         struct ni_power_info *ni_pi = ni_get_pi(rdev);
3594 
3595 	eg_pi->current_rps = *rps;
3596 	ni_pi->current_ps = *new_ps;
3597 	eg_pi->current_rps.ps_priv = &ni_pi->current_ps;
3598 }
3599 
3600 void ni_update_requested_ps(struct radeon_device *rdev,
3601 			    struct radeon_ps *rps)
3602 {
3603 	struct ni_ps *new_ps = ni_get_ps(rps);
3604 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3605         struct ni_power_info *ni_pi = ni_get_pi(rdev);
3606 
3607 	eg_pi->requested_rps = *rps;
3608 	ni_pi->requested_ps = *new_ps;
3609 	eg_pi->requested_rps.ps_priv = &ni_pi->requested_ps;
3610 }
3611 
/**
 * ni_dpm_enable - bring up dynamic power management on cayman
 * @rdev: radeon_device pointer
 *
 * Programs the voltage, spread spectrum and timing setup, uploads the
 * SMC firmware tables, starts the SMC and dpm, enables the configured
 * clock gating modes, and (when an internal thermal sensor and irq
 * support are present) arms the thermal interrupt.  On success the boot
 * state becomes the cached current power state.
 *
 * Returns 0 on success, -EINVAL if dpm is already running, or the error
 * from the first table-setup/firmware step that fails.  NOTE(review):
 * hardware state programmed before a failing step is not rolled back
 * here — presumably the caller disables dpm on error; confirm.
 */
int ni_dpm_enable(struct radeon_device *rdev)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
	struct radeon_ps *boot_ps = rdev->pm.dpm.boot_ps;
	int ret;

	/* apply default clock gating setup, then bail if dpm is already on */
	if (pi->gfx_clock_gating)
		ni_cg_clockgating_default(rdev);
        if (btc_dpm_enabled(rdev))
                return -EINVAL;
	if (pi->mg_clock_gating)
		ni_mg_clockgating_default(rdev);
	if (eg_pi->ls_clock_gating)
		ni_ls_clockgating_default(rdev);
	if (pi->voltage_control) {
		rv770_enable_voltage_control(rdev, true);
		ret = cypress_construct_voltage_tables(rdev);
		if (ret) {
			DRM_ERROR("cypress_construct_voltage_tables failed\n");
			return ret;
		}
	}
	/* dynamic ac timing is best-effort: fall back silently if the mc
	 * register table cannot be initialized */
	if (eg_pi->dynamic_ac_timing) {
		ret = ni_initialize_mc_reg_table(rdev);
		if (ret)
			eg_pi->dynamic_ac_timing = false;
	}
	if (pi->dynamic_ss)
		cypress_enable_spread_spectrum(rdev, true);
	if (pi->thermal_protection)
		rv770_enable_thermal_protection(rdev, true);
	rv770_setup_bsp(rdev);
	rv770_program_git(rdev);
	rv770_program_tp(rdev);
	rv770_program_tpp(rdev);
	rv770_program_sstp(rdev);
	cypress_enable_display_gap(rdev);
	rv770_program_vc(rdev);
	if (pi->dynamic_pcie_gen2)
		ni_enable_dynamic_pcie_gen2(rdev, true);
	/* upload firmware and populate the SMC tables before starting it */
	ret = rv770_upload_firmware(rdev);
	if (ret) {
		DRM_ERROR("rv770_upload_firmware failed\n");
		return ret;
	}
	ret = ni_process_firmware_header(rdev);
	if (ret) {
		DRM_ERROR("ni_process_firmware_header failed\n");
		return ret;
	}
	ret = ni_initial_switch_from_arb_f0_to_f1(rdev);
	if (ret) {
		DRM_ERROR("ni_initial_switch_from_arb_f0_to_f1 failed\n");
		return ret;
	}
	ret = ni_init_smc_table(rdev);
	if (ret) {
		DRM_ERROR("ni_init_smc_table failed\n");
		return ret;
	}
	ret = ni_init_smc_spll_table(rdev);
	if (ret) {
		DRM_ERROR("ni_init_smc_spll_table failed\n");
		return ret;
	}
	ret = ni_init_arb_table_index(rdev);
	if (ret) {
		DRM_ERROR("ni_init_arb_table_index failed\n");
		return ret;
	}
	if (eg_pi->dynamic_ac_timing) {
		ret = ni_populate_mc_reg_table(rdev, boot_ps);
		if (ret) {
			DRM_ERROR("ni_populate_mc_reg_table failed\n");
			return ret;
		}
	}
	ret = ni_initialize_smc_cac_tables(rdev);
	if (ret) {
		DRM_ERROR("ni_initialize_smc_cac_tables failed\n");
		return ret;
	}
	ret = ni_initialize_hardware_cac_manager(rdev);
	if (ret) {
		DRM_ERROR("ni_initialize_hardware_cac_manager failed\n");
		return ret;
	}
	ret = ni_populate_smc_tdp_limits(rdev, boot_ps);
	if (ret) {
		DRM_ERROR("ni_populate_smc_tdp_limits failed\n");
		return ret;
	}
	ni_program_response_times(rdev);
	/* tables are in place: start the SMC and hand over clock control */
	r7xx_start_smc(rdev);
	ret = cypress_notify_smc_display_change(rdev, false);
	if (ret) {
		DRM_ERROR("cypress_notify_smc_display_change failed\n");
		return ret;
	}
	cypress_enable_sclk_control(rdev, true);
	if (eg_pi->memory_transition)
		cypress_enable_mclk_control(rdev, true);
	cypress_start_dpm(rdev);
	if (pi->gfx_clock_gating)
		ni_gfx_clockgating_enable(rdev, true);
	if (pi->mg_clock_gating)
		ni_mg_clockgating_enable(rdev, true);
	if (eg_pi->ls_clock_gating)
		ni_ls_clockgating_enable(rdev, true);

	/* arm the thermal interrupt when an internal sensor is available */
	if (rdev->irq.installed &&
	    r600_is_internal_thermal_sensor(rdev->pm.int_thermal_type)) {
		PPSMC_Result result;

		ret = rv770_set_thermal_temperature_range(rdev, R600_TEMP_RANGE_MIN, 0xff * 1000);
		if (ret)
			return ret;
		rdev->irq.dpm_thermal = true;
		radeon_irq_set(rdev);
		result = rv770_send_msg_to_smc(rdev, PPSMC_MSG_EnableThermalInterrupt);

		/* non-fatal: dpm still works without the interrupt */
		if (result != PPSMC_Result_OK)
			DRM_DEBUG_KMS("Could not enable thermal interrupts.\n");
	}

	rv770_enable_auto_throttle_source(rdev, RADEON_DPM_AUTO_THROTTLE_SRC_THERMAL, true);

	ni_update_current_ps(rdev, boot_ps);

	return 0;
}
3744 
/**
 * ni_dpm_disable - tear down dynamic power management on cayman
 * @rdev: radeon_device pointer
 *
 * Reverses ni_dpm_enable(): disables power containment, cac, spread
 * spectrum, thermal throttling/interrupts and clock gating, stops dpm
 * and the SMC, and resets the chip to its default/boot configuration.
 * The boot state becomes the cached current power state.  No-op if dpm
 * is not running.
 */
void ni_dpm_disable(struct radeon_device *rdev)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
	struct radeon_ps *boot_ps = rdev->pm.dpm.boot_ps;

	if (!btc_dpm_enabled(rdev))
		return;
	rv770_clear_vc(rdev);
	if (pi->thermal_protection)
		rv770_enable_thermal_protection(rdev, false);
	ni_enable_power_containment(rdev, boot_ps, false);
	ni_enable_smc_cac(rdev, boot_ps, false);
	cypress_enable_spread_spectrum(rdev, false);
	rv770_enable_auto_throttle_source(rdev, RADEON_DPM_AUTO_THROTTLE_SRC_THERMAL, false);
	if (pi->dynamic_pcie_gen2)
		ni_enable_dynamic_pcie_gen2(rdev, false);

	/* disarm the thermal interrupt if it was enabled */
	if (rdev->irq.installed &&
	    r600_is_internal_thermal_sensor(rdev->pm.int_thermal_type)) {
		rdev->irq.dpm_thermal = false;
		radeon_irq_set(rdev);
	}

	if (pi->gfx_clock_gating)
		ni_gfx_clockgating_enable(rdev, false);
	if (pi->mg_clock_gating)
		ni_mg_clockgating_enable(rdev, false);
	if (eg_pi->ls_clock_gating)
		ni_ls_clockgating_enable(rdev, false);
	ni_stop_dpm(rdev);
	btc_reset_to_default(rdev);
	ni_stop_smc(rdev);
	ni_force_switch_to_arb_f0(rdev);

	ni_update_current_ps(rdev, boot_ps);
}
3782 
3783 static int ni_power_control_set_level(struct radeon_device *rdev)
3784 {
3785 	struct radeon_ps *new_ps = rdev->pm.dpm.requested_ps;
3786 	int ret;
3787 
3788 	ret = ni_restrict_performance_levels_before_switch(rdev);
3789 	if (ret)
3790 		return ret;
3791 	ret = rv770_halt_smc(rdev);
3792 	if (ret)
3793 		return ret;
3794 	ret = ni_populate_smc_tdp_limits(rdev, new_ps);
3795 	if (ret)
3796 		return ret;
3797 	ret = rv770_resume_smc(rdev);
3798 	if (ret)
3799 		return ret;
3800 	ret = rv770_set_sw_state(rdev);
3801 	if (ret)
3802 		return ret;
3803 
3804 	return 0;
3805 }
3806 
3807 int ni_dpm_pre_set_power_state(struct radeon_device *rdev)
3808 {
3809 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3810 	struct radeon_ps requested_ps = *rdev->pm.dpm.requested_ps;
3811 	struct radeon_ps *new_ps = &requested_ps;
3812 
3813 	ni_update_requested_ps(rdev, new_ps);
3814 
3815 	ni_apply_state_adjust_rules(rdev, &eg_pi->requested_rps);
3816 
3817 	return 0;
3818 }
3819 
/**
 * ni_dpm_set_power_state - switch the hw to the staged power state
 * @rdev: radeon_device pointer
 *
 * Performs the full state switch from the cached current state to the
 * state staged by ni_dpm_pre_set_power_state(): cac and power
 * containment are disabled around the switch, the SMC is halted while
 * the new state and mc registers are uploaded, and the UVD clocks are
 * adjusted before or after the engine clock change depending on its
 * direction.  Returns 0 on success or the error of the failing step
 * (no rollback of earlier steps is attempted).
 */
int ni_dpm_set_power_state(struct radeon_device *rdev)
{
	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
	struct radeon_ps *new_ps = &eg_pi->requested_rps;
	struct radeon_ps *old_ps = &eg_pi->current_rps;
	int ret;

	ret = ni_restrict_performance_levels_before_switch(rdev);
	if (ret) {
		DRM_ERROR("ni_restrict_performance_levels_before_switch failed\n");
		return ret;
	}
	ni_set_uvd_clock_before_set_eng_clock(rdev, new_ps, old_ps);
	/* turn cac/power containment off while the state is switched */
	ret = ni_enable_power_containment(rdev, new_ps, false);
	if (ret) {
		DRM_ERROR("ni_enable_power_containment failed\n");
		return ret;
	}
	ret = ni_enable_smc_cac(rdev, new_ps, false);
	if (ret) {
		DRM_ERROR("ni_enable_smc_cac failed\n");
		return ret;
	}
	ret = rv770_halt_smc(rdev);
	if (ret) {
		DRM_ERROR("rv770_halt_smc failed\n");
		return ret;
	}
	if (eg_pi->smu_uvd_hs)
		btc_notify_uvd_to_smc(rdev, new_ps);
	ret = ni_upload_sw_state(rdev, new_ps);
	if (ret) {
		DRM_ERROR("ni_upload_sw_state failed\n");
		return ret;
	}
	if (eg_pi->dynamic_ac_timing) {
		ret = ni_upload_mc_reg_table(rdev, new_ps);
		if (ret) {
			DRM_ERROR("ni_upload_mc_reg_table failed\n");
			return ret;
		}
	}
	ret = ni_program_memory_timing_parameters(rdev, new_ps);
	if (ret) {
		DRM_ERROR("ni_program_memory_timing_parameters failed\n");
		return ret;
	}
	ret = rv770_resume_smc(rdev);
	if (ret) {
		DRM_ERROR("rv770_resume_smc failed\n");
		return ret;
	}
	ret = rv770_set_sw_state(rdev);
	if (ret) {
		DRM_ERROR("rv770_set_sw_state failed\n");
		return ret;
	}
	ni_set_uvd_clock_after_set_eng_clock(rdev, new_ps, old_ps);
	/* re-enable cac/power containment for the new state */
	ret = ni_enable_smc_cac(rdev, new_ps, true);
	if (ret) {
		DRM_ERROR("ni_enable_smc_cac failed\n");
		return ret;
	}
	ret = ni_enable_power_containment(rdev, new_ps, true);
	if (ret) {
		DRM_ERROR("ni_enable_power_containment failed\n");
		return ret;
	}

	/* update tdp */
	ret = ni_power_control_set_level(rdev);
	if (ret) {
		DRM_ERROR("ni_power_control_set_level failed\n");
		return ret;
	}

	return 0;
}
3898 
3899 void ni_dpm_post_set_power_state(struct radeon_device *rdev)
3900 {
3901 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3902 	struct radeon_ps *new_ps = &eg_pi->requested_rps;
3903 
3904 	ni_update_current_ps(rdev, new_ps);
3905 }
3906 
/**
 * ni_dpm_reset_asic - return the asic to a known dpm state
 * @rdev: radeon_device pointer
 *
 * Limits the allowed performance levels (see
 * ni_restrict_performance_levels_before_switch()) and re-selects the
 * boot state in the SMC.  Return values of both steps are ignored.
 */
void ni_dpm_reset_asic(struct radeon_device *rdev)
{
	ni_restrict_performance_levels_before_switch(rdev);
	rv770_set_boot_state(rdev);
}
3912 
/* Overlay of the possible vbios PowerPlay info table layouts; which
 * member is valid depends on the table revision reported by
 * atom_parse_data_header().  This file only reads the pplib layout. */
union power_info {
	struct _ATOM_POWERPLAY_INFO info;
	struct _ATOM_POWERPLAY_INFO_V2 info_2;
	struct _ATOM_POWERPLAY_INFO_V3 info_3;
	struct _ATOM_PPLIB_POWERPLAYTABLE pplib;
	struct _ATOM_PPLIB_POWERPLAYTABLE2 pplib2;
	struct _ATOM_PPLIB_POWERPLAYTABLE3 pplib3;
};

/* Per-performance-level clock info overlay; the asic family selects the
 * member.  Cayman parsing below reads the evergreen layout. */
union pplib_clock_info {
	struct _ATOM_PPLIB_R600_CLOCK_INFO r600;
	struct _ATOM_PPLIB_RS780_CLOCK_INFO rs780;
	struct _ATOM_PPLIB_EVERGREEN_CLOCK_INFO evergreen;
	struct _ATOM_PPLIB_SUMO_CLOCK_INFO sumo;
};

/* Power state table entry overlay; only the v1 layout is accessed in
 * this file. */
union pplib_power_state {
	struct _ATOM_PPLIB_STATE v1;
	struct _ATOM_PPLIB_STATE_V2 v2;
};
3933 
3934 static void ni_parse_pplib_non_clock_info(struct radeon_device *rdev,
3935 					  struct radeon_ps *rps,
3936 					  struct _ATOM_PPLIB_NONCLOCK_INFO *non_clock_info,
3937 					  u8 table_rev)
3938 {
3939 	rps->caps = le32_to_cpu(non_clock_info->ulCapsAndSettings);
3940 	rps->class = le16_to_cpu(non_clock_info->usClassification);
3941 	rps->class2 = le16_to_cpu(non_clock_info->usClassification2);
3942 
3943 	if (ATOM_PPLIB_NONCLOCKINFO_VER1 < table_rev) {
3944 		rps->vclk = le32_to_cpu(non_clock_info->ulVCLK);
3945 		rps->dclk = le32_to_cpu(non_clock_info->ulDCLK);
3946 	} else if (r600_is_uvd_state(rps->class, rps->class2)) {
3947 		rps->vclk = RV770_DEFAULT_VCLK_FREQ;
3948 		rps->dclk = RV770_DEFAULT_DCLK_FREQ;
3949 	} else {
3950 		rps->vclk = 0;
3951 		rps->dclk = 0;
3952 	}
3953 
3954 	if (rps->class & ATOM_PPLIB_CLASSIFICATION_BOOT)
3955 		rdev->pm.dpm.boot_ps = rps;
3956 	if (rps->class & ATOM_PPLIB_CLASSIFICATION_UVDSTATE)
3957 		rdev->pm.dpm.uvd_ps = rps;
3958 }
3959 
/*
 * Parse one performance level (evergreen pplib layout) into
 * ps->performance_levels[index], patching up special cases:
 * the 0xff01 vddc sentinel, the ACPI and ULV classified states, the
 * min/max vddc bookkeeping, the boot state (which gets the default
 * clocks/voltages instead of the table values), and the max-on-AC
 * limits taken from the "performance" UI state.
 */
static void ni_parse_pplib_clock_info(struct radeon_device *rdev,
				      struct radeon_ps *rps, int index,
				      union pplib_clock_info *clock_info)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
	struct ni_ps *ps = ni_get_ps(rps);
	u16 vddc;
	struct rv7xx_pl *pl = &ps->performance_levels[index];

	/* levels are parsed in order, so index + 1 is the running count */
	ps->performance_level_count = index + 1;

	/* clocks are split into a 16-bit low and 8-bit high part */
	pl->sclk = le16_to_cpu(clock_info->evergreen.usEngineClockLow);
	pl->sclk |= clock_info->evergreen.ucEngineClockHigh << 16;
	pl->mclk = le16_to_cpu(clock_info->evergreen.usMemoryClockLow);
	pl->mclk |= clock_info->evergreen.ucMemoryClockHigh << 16;

	pl->vddc = le16_to_cpu(clock_info->evergreen.usVDDC);
	pl->vddci = le16_to_cpu(clock_info->evergreen.usVDDCI);
	pl->flags = le32_to_cpu(clock_info->evergreen.ulFlags);

	/* patch up vddc if necessary */
	/* 0xff01 is a sentinel: substitute the max vddc from the vbios */
	if (pl->vddc == 0xff01) {
		if (radeon_atom_get_max_vddc(rdev, 0, 0, &vddc) == 0)
			pl->vddc = vddc;
	}

	/* remember the ACPI level's voltages and pcie gen2 capability */
	if (rps->class & ATOM_PPLIB_CLASSIFICATION_ACPI) {
		pi->acpi_vddc = pl->vddc;
		eg_pi->acpi_vddci = pl->vddci;
		if (ps->performance_levels[0].flags & ATOM_PPLIB_R600_FLAGS_PCIEGEN2)
			pi->acpi_pcie_gen2 = true;
		else
			pi->acpi_pcie_gen2 = false;
	}

	if (rps->class2 & ATOM_PPLIB_CLASSIFICATION2_ULV) {
		eg_pi->ulv.supported = true;
		eg_pi->ulv.pl = pl;
	}

	if (pi->min_vddc_in_table > pl->vddc)
		pi->min_vddc_in_table = pl->vddc;

	if (pi->max_vddc_in_table < pl->vddc)
		pi->max_vddc_in_table = pl->vddc;

	/* patch up boot state */
	/* the boot level uses the firmware default clocks and voltages,
	 * not the table values */
	if (rps->class & ATOM_PPLIB_CLASSIFICATION_BOOT) {
		u16 vddc, vddci, mvdd;
		radeon_atombios_get_default_voltages(rdev, &vddc, &vddci, &mvdd);
		pl->mclk = rdev->clock.default_mclk;
		pl->sclk = rdev->clock.default_sclk;
		pl->vddc = vddc;
		pl->vddci = vddci;
	}

	/* the "performance" UI state defines the max clocks/voltages on AC */
	if ((rps->class & ATOM_PPLIB_CLASSIFICATION_UI_MASK) ==
	    ATOM_PPLIB_CLASSIFICATION_UI_PERFORMANCE) {
		rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac.sclk = pl->sclk;
		rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac.mclk = pl->mclk;
		rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac.vddc = pl->vddc;
		rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac.vddci = pl->vddci;
	}
}
4025 
4026 static int ni_parse_power_table(struct radeon_device *rdev)
4027 {
4028 	struct radeon_mode_info *mode_info = &rdev->mode_info;
4029 	struct _ATOM_PPLIB_NONCLOCK_INFO *non_clock_info;
4030 	union pplib_power_state *power_state;
4031 	int i, j;
4032 	union pplib_clock_info *clock_info;
4033 	union power_info *power_info;
4034 	int index = GetIndexIntoMasterTable(DATA, PowerPlayInfo);
4035         u16 data_offset;
4036 	u8 frev, crev;
4037 	struct ni_ps *ps;
4038 
4039 	if (!atom_parse_data_header(mode_info->atom_context, index, NULL,
4040 				   &frev, &crev, &data_offset))
4041 		return -EINVAL;
4042 	power_info = (union power_info *)((uint8_t*)mode_info->atom_context->bios + data_offset);
4043 
4044 	rdev->pm.dpm.ps = kzalloc(sizeof(struct radeon_ps) *
4045 				  power_info->pplib.ucNumStates, GFP_KERNEL);
4046 	if (!rdev->pm.dpm.ps)
4047 		return -ENOMEM;
4048 	rdev->pm.dpm.platform_caps = le32_to_cpu(power_info->pplib.ulPlatformCaps);
4049 	rdev->pm.dpm.backbias_response_time = le16_to_cpu(power_info->pplib.usBackbiasTime);
4050 	rdev->pm.dpm.voltage_response_time = le16_to_cpu(power_info->pplib.usVoltageTime);
4051 
4052 	for (i = 0; i < power_info->pplib.ucNumStates; i++) {
4053 		power_state = (union pplib_power_state *)
4054 			((uint8_t*)mode_info->atom_context->bios + data_offset +
4055 			 le16_to_cpu(power_info->pplib.usStateArrayOffset) +
4056 			 i * power_info->pplib.ucStateEntrySize);
4057 		non_clock_info = (struct _ATOM_PPLIB_NONCLOCK_INFO *)
4058 			((uint8_t*)mode_info->atom_context->bios + data_offset +
4059 			 le16_to_cpu(power_info->pplib.usNonClockInfoArrayOffset) +
4060 			 (power_state->v1.ucNonClockStateIndex *
4061 			  power_info->pplib.ucNonClockSize));
4062 		if (power_info->pplib.ucStateEntrySize - 1) {
4063 			u8 *idx;
4064 			ps = kzalloc(sizeof(struct ni_ps), GFP_KERNEL);
4065 			if (ps == NULL) {
4066 				kfree(rdev->pm.dpm.ps);
4067 				return -ENOMEM;
4068 			}
4069 			rdev->pm.dpm.ps[i].ps_priv = ps;
4070 			ni_parse_pplib_non_clock_info(rdev, &rdev->pm.dpm.ps[i],
4071 							 non_clock_info,
4072 							 power_info->pplib.ucNonClockSize);
4073 			idx = (u8 *)&power_state->v1.ucClockStateIndices[0];
4074 			for (j = 0; j < (power_info->pplib.ucStateEntrySize - 1); j++) {
4075 				clock_info = (union pplib_clock_info *)
4076 					((uint8_t*)mode_info->atom_context->bios + data_offset +
4077 					 le16_to_cpu(power_info->pplib.usClockInfoArrayOffset) +
4078 					 (idx[j] * power_info->pplib.ucClockInfoSize));
4079 				ni_parse_pplib_clock_info(rdev,
4080 							  &rdev->pm.dpm.ps[i], j,
4081 							  clock_info);
4082 			}
4083 		}
4084 	}
4085 	rdev->pm.dpm.num_ps = power_info->pplib.ucNumStates;
4086 	return 0;
4087 }
4088 
4089 int ni_dpm_init(struct radeon_device *rdev)
4090 {
4091 	struct rv7xx_power_info *pi;
4092 	struct evergreen_power_info *eg_pi;
4093 	struct ni_power_info *ni_pi;
4094 	struct atom_clock_dividers dividers;
4095 	int ret;
4096 
4097 	ni_pi = kzalloc(sizeof(struct ni_power_info), GFP_KERNEL);
4098 	if (ni_pi == NULL)
4099 		return -ENOMEM;
4100 	rdev->pm.dpm.priv = ni_pi;
4101 	eg_pi = &ni_pi->eg;
4102 	pi = &eg_pi->rv7xx;
4103 
4104 	rv770_get_max_vddc(rdev);
4105 
4106 	eg_pi->ulv.supported = false;
4107 	pi->acpi_vddc = 0;
4108 	eg_pi->acpi_vddci = 0;
4109 	pi->min_vddc_in_table = 0;
4110 	pi->max_vddc_in_table = 0;
4111 
4112 	ret = ni_parse_power_table(rdev);
4113 	if (ret)
4114 		return ret;
4115 	ret = r600_parse_extended_power_table(rdev);
4116 	if (ret)
4117 		return ret;
4118 
4119 	rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries =
4120 		kzalloc(4 * sizeof(struct radeon_clock_voltage_dependency_entry), GFP_KERNEL);
4121 	if (!rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries) {
4122 		r600_free_extended_power_table(rdev);
4123 		return -ENOMEM;
4124 	}
4125 	rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.count = 4;
4126 	rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[0].clk = 0;
4127 	rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[0].v = 0;
4128 	rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[1].clk = 36000;
4129 	rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[1].v = 720;
4130 	rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[2].clk = 54000;
4131 	rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[2].v = 810;
4132 	rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[3].clk = 72000;
4133 	rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[3].v = 900;
4134 
4135 	ni_patch_dependency_tables_based_on_leakage(rdev);
4136 
4137 	if (rdev->pm.dpm.voltage_response_time == 0)
4138 		rdev->pm.dpm.voltage_response_time = R600_VOLTAGERESPONSETIME_DFLT;
4139 	if (rdev->pm.dpm.backbias_response_time == 0)
4140 		rdev->pm.dpm.backbias_response_time = R600_BACKBIASRESPONSETIME_DFLT;
4141 
4142 	ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM,
4143 					     0, false, &dividers);
4144 	if (ret)
4145 		pi->ref_div = dividers.ref_div + 1;
4146 	else
4147 		pi->ref_div = R600_REFERENCEDIVIDER_DFLT;
4148 
4149 	pi->rlp = RV770_RLP_DFLT;
4150 	pi->rmp = RV770_RMP_DFLT;
4151 	pi->lhp = RV770_LHP_DFLT;
4152 	pi->lmp = RV770_LMP_DFLT;
4153 
4154 	eg_pi->ats[0].rlp = RV770_RLP_DFLT;
4155 	eg_pi->ats[0].rmp = RV770_RMP_DFLT;
4156 	eg_pi->ats[0].lhp = RV770_LHP_DFLT;
4157 	eg_pi->ats[0].lmp = RV770_LMP_DFLT;
4158 
4159 	eg_pi->ats[1].rlp = BTC_RLP_UVD_DFLT;
4160 	eg_pi->ats[1].rmp = BTC_RMP_UVD_DFLT;
4161 	eg_pi->ats[1].lhp = BTC_LHP_UVD_DFLT;
4162 	eg_pi->ats[1].lmp = BTC_LMP_UVD_DFLT;
4163 
4164 	eg_pi->smu_uvd_hs = true;
4165 
4166 	if (rdev->ddev->pci_device == 0x6707) {
4167 		pi->mclk_strobe_mode_threshold = 55000;
4168 		pi->mclk_edc_enable_threshold = 55000;
4169 		eg_pi->mclk_edc_wr_enable_threshold = 55000;
4170 	} else {
4171 		pi->mclk_strobe_mode_threshold = 40000;
4172 		pi->mclk_edc_enable_threshold = 40000;
4173 		eg_pi->mclk_edc_wr_enable_threshold = 40000;
4174 	}
4175 	ni_pi->mclk_rtt_mode_threshold = eg_pi->mclk_edc_wr_enable_threshold;
4176 
4177 	pi->voltage_control =
4178 		radeon_atom_is_voltage_gpio(rdev, SET_VOLTAGE_TYPE_ASIC_VDDC, 0);
4179 
4180 	pi->mvdd_control =
4181 		radeon_atom_is_voltage_gpio(rdev, SET_VOLTAGE_TYPE_ASIC_MVDDC, 0);
4182 
4183 	eg_pi->vddci_control =
4184 		radeon_atom_is_voltage_gpio(rdev, SET_VOLTAGE_TYPE_ASIC_VDDCI, 0);
4185 
4186 	rv770_get_engine_memory_ss(rdev);
4187 
4188 	pi->asi = RV770_ASI_DFLT;
4189 	pi->pasi = CYPRESS_HASI_DFLT;
4190 	pi->vrc = CYPRESS_VRC_DFLT;
4191 
4192 	pi->power_gating = false;
4193 
4194 	pi->gfx_clock_gating = true;
4195 
4196 	pi->mg_clock_gating = true;
4197 	pi->mgcgtssm = true;
4198 	eg_pi->ls_clock_gating = false;
4199 	eg_pi->sclk_deep_sleep = false;
4200 
4201 	pi->dynamic_pcie_gen2 = true;
4202 
4203 	if (rdev->pm.int_thermal_type != THERMAL_TYPE_NONE)
4204 		pi->thermal_protection = true;
4205 	else
4206 		pi->thermal_protection = false;
4207 
4208 	pi->display_gap = true;
4209 
4210 	pi->dcodt = true;
4211 
4212 	pi->ulps = true;
4213 
4214 	eg_pi->dynamic_ac_timing = true;
4215 	eg_pi->abm = true;
4216 	eg_pi->mcls = true;
4217 	eg_pi->light_sleep = true;
4218 	eg_pi->memory_transition = true;
4219 #if defined(CONFIG_ACPI)
4220 	eg_pi->pcie_performance_request =
4221 		radeon_acpi_is_pcie_performance_request_supported(rdev);
4222 #else
4223 	eg_pi->pcie_performance_request = false;
4224 #endif
4225 
4226 	eg_pi->dll_default_on = false;
4227 
4228 	eg_pi->sclk_deep_sleep = false;
4229 
4230 	pi->mclk_stutter_mode_threshold = 0;
4231 
4232 	pi->sram_end = SMC_RAM_END;
4233 
4234 	rdev->pm.dpm.dyn_state.mclk_sclk_ratio = 3;
4235 	rdev->pm.dpm.dyn_state.vddc_vddci_delta = 200;
4236 	rdev->pm.dpm.dyn_state.min_vddc_for_pcie_gen2 = 900;
4237 	rdev->pm.dpm.dyn_state.valid_sclk_values.count = ARRAY_SIZE(btc_valid_sclk);
4238 	rdev->pm.dpm.dyn_state.valid_sclk_values.values = btc_valid_sclk;
4239 	rdev->pm.dpm.dyn_state.valid_mclk_values.count = 0;
4240 	rdev->pm.dpm.dyn_state.valid_mclk_values.values = NULL;
4241 	rdev->pm.dpm.dyn_state.sclk_mclk_delta = 12500;
4242 
4243 	ni_pi->cac_data.leakage_coefficients.at = 516;
4244 	ni_pi->cac_data.leakage_coefficients.bt = 18;
4245 	ni_pi->cac_data.leakage_coefficients.av = 51;
4246 	ni_pi->cac_data.leakage_coefficients.bv = 2957;
4247 
4248 	switch (rdev->ddev->pci_device) {
4249 	case 0x6700:
4250 	case 0x6701:
4251 	case 0x6702:
4252 	case 0x6703:
4253 	case 0x6718:
4254 		ni_pi->cac_weights = &cac_weights_cayman_xt;
4255 		break;
4256 	case 0x6705:
4257 	case 0x6719:
4258 	case 0x671D:
4259 	case 0x671C:
4260 	default:
4261 		ni_pi->cac_weights = &cac_weights_cayman_pro;
4262 		break;
4263 	case 0x6704:
4264 	case 0x6706:
4265 	case 0x6707:
4266 	case 0x6708:
4267 	case 0x6709:
4268 		ni_pi->cac_weights = &cac_weights_cayman_le;
4269 		break;
4270 	}
4271 
4272 	if (ni_pi->cac_weights->enable_power_containment_by_default) {
4273 		ni_pi->enable_power_containment = true;
4274 		ni_pi->enable_cac = true;
4275 		ni_pi->enable_sq_ramping = true;
4276 	} else {
4277 		ni_pi->enable_power_containment = false;
4278 		ni_pi->enable_cac = false;
4279 		ni_pi->enable_sq_ramping = false;
4280 	}
4281 
4282 	ni_pi->driver_calculate_cac_leakage = false;
4283 	ni_pi->cac_configuration_required = true;
4284 
4285 	if (ni_pi->cac_configuration_required) {
4286 		ni_pi->support_cac_long_term_average = true;
4287 		ni_pi->lta_window_size = ni_pi->cac_weights->l2_lta_window_size;
4288 		ni_pi->lts_truncate = ni_pi->cac_weights->lts_truncate;
4289 	} else {
4290 		ni_pi->support_cac_long_term_average = false;
4291 		ni_pi->lta_window_size = 0;
4292 		ni_pi->lts_truncate = 0;
4293 	}
4294 
4295 	ni_pi->use_power_boost_limit = true;
4296 
4297 	/* make sure dc limits are valid */
4298 	if ((rdev->pm.dpm.dyn_state.max_clock_voltage_on_dc.sclk == 0) ||
4299 	    (rdev->pm.dpm.dyn_state.max_clock_voltage_on_dc.mclk == 0))
4300 		rdev->pm.dpm.dyn_state.max_clock_voltage_on_dc =
4301 			rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac;
4302 
4303 	return 0;
4304 }
4305 
4306 void ni_dpm_fini(struct radeon_device *rdev)
4307 {
4308 	int i;
4309 
4310 	for (i = 0; i < rdev->pm.dpm.num_ps; i++) {
4311 		kfree(rdev->pm.dpm.ps[i].ps_priv);
4312 	}
4313 	kfree(rdev->pm.dpm.ps);
4314 	kfree(rdev->pm.dpm.priv);
4315 	kfree(rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries);
4316 	r600_free_extended_power_table(rdev);
4317 }
4318 
/**
 * ni_dpm_print_power_state - dump a power state to the kernel log
 * @rdev: radeon_device pointer
 * @rps: power state to print
 *
 * Prints the class/caps info, the UVD clocks and every performance
 * level.  The pcie gen field is only meaningful (and only printed) for
 * CHIP_TAHITI and newer, since this helper is shared with later asics.
 */
void ni_dpm_print_power_state(struct radeon_device *rdev,
			      struct radeon_ps *rps)
{
	struct ni_ps *ps = ni_get_ps(rps);
	struct rv7xx_pl *pl;
	int i;

	r600_dpm_print_class_info(rps->class, rps->class2);
	r600_dpm_print_cap_info(rps->caps);
	printk("\tuvd    vclk: %d dclk: %d\n", rps->vclk, rps->dclk);
	for (i = 0; i < ps->performance_level_count; i++) {
		pl = &ps->performance_levels[i];
		if (rdev->family >= CHIP_TAHITI)
			printk("\t\tpower level %d    sclk: %u mclk: %u vddc: %u vddci: %u pcie gen: %u\n",
			       i, pl->sclk, pl->mclk, pl->vddc, pl->vddci, pl->pcie_gen + 1);
		else
			printk("\t\tpower level %d    sclk: %u mclk: %u vddc: %u vddci: %u\n",
			       i, pl->sclk, pl->mclk, pl->vddc, pl->vddci);
	}
	r600_dpm_print_ps_status(rdev, rps);
}
4340 
/**
 * ni_dpm_debugfs_print_current_performance_level - debugfs dump helper
 * @rdev: radeon_device pointer
 * @m: seq_file to print into
 *
 * Reads the currently selected dpm profile index from the hardware and
 * prints that level's clocks and voltages from the cached current
 * power state.  Reports an invalid index instead of indexing out of
 * bounds when the hw value exceeds the state's level count.
 */
void ni_dpm_debugfs_print_current_performance_level(struct radeon_device *rdev,
						    struct seq_file *m)
{
	struct radeon_ps *rps = rdev->pm.dpm.current_ps;
	struct ni_ps *ps = ni_get_ps(rps);
	struct rv7xx_pl *pl;
	u32 current_index =
		(RREG32(TARGET_AND_CURRENT_PROFILE_INDEX) & CURRENT_STATE_INDEX_MASK) >>
		CURRENT_STATE_INDEX_SHIFT;

	if (current_index >= ps->performance_level_count) {
		seq_printf(m, "invalid dpm profile %d\n", current_index);
	} else {
		pl = &ps->performance_levels[current_index];
		seq_printf(m, "uvd    vclk: %d dclk: %d\n", rps->vclk, rps->dclk);
		seq_printf(m, "power level %d    sclk: %u mclk: %u vddc: %u vddci: %u\n",
			   current_index, pl->sclk, pl->mclk, pl->vddc, pl->vddci);
	}
}
4360 
4361 u32 ni_dpm_get_sclk(struct radeon_device *rdev, bool low)
4362 {
4363 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
4364 	struct ni_ps *requested_state = ni_get_ps(&eg_pi->requested_rps);
4365 
4366 	if (low)
4367 		return requested_state->performance_levels[0].sclk;
4368 	else
4369 		return requested_state->performance_levels[requested_state->performance_level_count - 1].sclk;
4370 }
4371 
4372 u32 ni_dpm_get_mclk(struct radeon_device *rdev, bool low)
4373 {
4374 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
4375 	struct ni_ps *requested_state = ni_get_ps(&eg_pi->requested_rps);
4376 
4377 	if (low)
4378 		return requested_state->performance_levels[0].mclk;
4379 	else
4380 		return requested_state->performance_levels[requested_state->performance_level_count - 1].mclk;
4381 }
4382