1 /* $NetBSD: amdgpu_dce_virtual.c,v 1.2 2021/12/18 23:44:58 riastradh Exp $ */
2
3 /*
4 * Copyright 2014 Advanced Micro Devices, Inc.
5 *
6 * Permission is hereby granted, free of charge, to any person obtaining a
7 * copy of this software and associated documentation files (the "Software"),
8 * to deal in the Software without restriction, including without limitation
9 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
10 * and/or sell copies of the Software, and to permit persons to whom the
11 * Software is furnished to do so, subject to the following conditions:
12 *
13 * The above copyright notice and this permission notice shall be included in
14 * all copies or substantial portions of the Software.
15 *
16 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
19 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
20 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
21 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
22 * OTHER DEALINGS IN THE SOFTWARE.
23 *
24 */
25
26 #include <sys/cdefs.h>
27 __KERNEL_RCSID(0, "$NetBSD: amdgpu_dce_virtual.c,v 1.2 2021/12/18 23:44:58 riastradh Exp $");
28
29 #include <drm/drm_vblank.h>
30
31 #include "amdgpu.h"
32 #include "amdgpu_pm.h"
33 #include "amdgpu_i2c.h"
34 #include "atom.h"
35 #include "amdgpu_pll.h"
36 #include "amdgpu_connectors.h"
37 #ifdef CONFIG_DRM_AMDGPU_SI
38 #include "dce_v6_0.h"
39 #endif
40 #ifdef CONFIG_DRM_AMDGPU_CIK
41 #include "dce_v8_0.h"
42 #endif
43 #include "dce_v10_0.h"
44 #include "dce_v11_0.h"
45 #include "dce_virtual.h"
46 #include "ivsrcid/ivsrcid_vislands30.h"
47
48 #define DCE_VIRTUAL_VBLANK_PERIOD 16666666
49
50
51 static void dce_virtual_set_display_funcs(struct amdgpu_device *adev);
52 static void dce_virtual_set_irq_funcs(struct amdgpu_device *adev);
53 static int dce_virtual_connector_encoder_init(struct amdgpu_device *adev,
54 int index);
55 static void dce_virtual_set_crtc_vblank_interrupt_state(struct amdgpu_device *adev,
56 int crtc,
57 enum amdgpu_interrupt_state state);
58
/*
 * dce_virtual_vblank_get_counter - read the vblank counter
 *
 * There is no hardware counter on a virtual display, so always report 0.
 * sw_init sets max_vblank_count = 0 so the DRM core does not rely on this.
 */
static u32 dce_virtual_vblank_get_counter(struct amdgpu_device *adev, int crtc)
{
	return 0;
}
63
/* Nothing to program: a virtual display has no flip base registers. */
static void dce_virtual_page_flip(struct amdgpu_device *adev,
				  int crtc_id, u64 crtc_base, bool async)
{
}
69
/*
 * No scanout hardware: zero both outputs and return -EINVAL so callers
 * fall back to timestamp-based vblank handling.
 */
static int dce_virtual_crtc_get_scanoutpos(struct amdgpu_device *adev, int crtc,
					   u32 *vbl, u32 *position)
{
	*vbl = 0;
	*position = 0;

	return -EINVAL;
}
78
/* The virtual connector is always reported as connected. */
static bool dce_virtual_hpd_sense(struct amdgpu_device *adev,
				  enum amdgpu_hpd_id hpd)
{
	return true;
}
84
/* No hot-plug-detect hardware, so there is no polarity to program. */
static void dce_virtual_hpd_set_polarity(struct amdgpu_device *adev,
					 enum amdgpu_hpd_id hpd)
{
}
90
/* No HPD GPIO register exists for the virtual display; report 0. */
static u32 dce_virtual_hpd_get_gpio_reg(struct amdgpu_device *adev)
{
	return 0;
}
95
96 /**
97 * dce_virtual_bandwidth_update - program display watermarks
98 *
99 * @adev: amdgpu_device pointer
100 *
101 * Calculate and program the display watermarks and line
102 * buffer allocation (CIK).
103 */
static void dce_virtual_bandwidth_update(struct amdgpu_device *adev)
{
	/* No watermarks or line buffer to program on a virtual display. */
}
108
/* Gamma LUT programming is a no-op without hardware; always succeeds. */
static int dce_virtual_crtc_gamma_set(struct drm_crtc *crtc, u16 *red,
				      u16 *green, u16 *blue, uint32_t size,
				      struct drm_modeset_acquire_ctx *ctx)
{
	return 0;
}
115
/* Unregister the CRTC from the DRM core and free its containing object. */
static void dce_virtual_crtc_destroy(struct drm_crtc *crtc)
{
	struct amdgpu_crtc *acrtc = to_amdgpu_crtc(crtc);

	drm_crtc_cleanup(crtc);
	kfree(acrtc);
}
123
/* DRM core CRTC callbacks for the virtual display; no cursor support. */
static const struct drm_crtc_funcs dce_virtual_crtc_funcs = {
	.cursor_set2 = NULL,
	.cursor_move = NULL,
	.gamma_set = dce_virtual_crtc_gamma_set,
	.set_config = amdgpu_display_crtc_set_config,
	.destroy = dce_virtual_crtc_destroy,
	.page_flip_target = amdgpu_display_crtc_page_flip_target,
};
132
/*
 * dce_virtual_crtc_dpms - software DPMS for the virtual CRTC
 *
 * On DPMS_ON, mark the CRTC enabled and re-arm its vblank interrupt
 * source before telling the DRM core vblanks are live; on any off
 * state, quiesce vblanks first and then mark it disabled.  Under
 * SR-IOV the host owns the display, so nothing is done.
 */
static void dce_virtual_crtc_dpms(struct drm_crtc *crtc, int mode)
{
	struct drm_device *dev = crtc->dev;
	struct amdgpu_device *adev = dev->dev_private;
	struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(crtc);
	unsigned type;

	if (amdgpu_sriov_vf(adev))
		return;

	switch (mode) {
	case DRM_MODE_DPMS_ON:
		amdgpu_crtc->enabled = true;
		/* Make sure VBLANK interrupts are still enabled */
		type = amdgpu_display_crtc_idx_to_irq_type(adev,
						amdgpu_crtc->crtc_id);
		amdgpu_irq_update(adev, &adev->crtc_irq, type);
		drm_crtc_vblank_on(crtc);
		break;
	case DRM_MODE_DPMS_STANDBY:
	case DRM_MODE_DPMS_SUSPEND:
	case DRM_MODE_DPMS_OFF:
		drm_crtc_vblank_off(crtc);
		amdgpu_crtc->enabled = false;
		break;
	}
}
160
161
/* Helper prepare hook: power the CRTC down before a mode set. */
static void dce_virtual_crtc_prepare(struct drm_crtc *crtc)
{
	dce_virtual_crtc_dpms(crtc, DRM_MODE_DPMS_OFF);
}
166
/* Helper commit hook: power the CRTC back up after a mode set. */
static void dce_virtual_crtc_commit(struct drm_crtc *crtc)
{
	dce_virtual_crtc_dpms(crtc, DRM_MODE_DPMS_ON);
}
171
dce_virtual_crtc_disable(struct drm_crtc * crtc)172 static void dce_virtual_crtc_disable(struct drm_crtc *crtc)
173 {
174 struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(crtc);
175
176 dce_virtual_crtc_dpms(crtc, DRM_MODE_DPMS_OFF);
177
178 amdgpu_crtc->pll_id = ATOM_PPLL_INVALID;
179 amdgpu_crtc->encoder = NULL;
180 amdgpu_crtc->connector = NULL;
181 }
182
/*
 * Record the adjusted mode; there is no hardware to program, but dpm
 * code reads hw_mode, so keep it current.
 */
static int dce_virtual_crtc_mode_set(struct drm_crtc *crtc,
				     struct drm_display_mode *mode,
				     struct drm_display_mode *adjusted_mode,
				     int x, int y, struct drm_framebuffer *old_fb)
{
	struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(crtc);

	/* update the hw version for dpm */
	amdgpu_crtc->hw_mode = *adjusted_mode;

	return 0;
}
195
/* Every mode is acceptable as-is on a virtual CRTC. */
static bool dce_virtual_crtc_mode_fixup(struct drm_crtc *crtc,
					const struct drm_display_mode *mode,
					struct drm_display_mode *adjusted_mode)
{
	return true;
}
202
203
/* No scanout base register to update; always succeeds. */
static int dce_virtual_crtc_set_base(struct drm_crtc *crtc, int x, int y,
				     struct drm_framebuffer *old_fb)
{
	return 0;
}
209
/* Atomic variant of set_base; equally a no-op on virtual hardware. */
static int dce_virtual_crtc_set_base_atomic(struct drm_crtc *crtc,
					    struct drm_framebuffer *fb,
					    int x, int y, enum mode_set_atomic state)
{
	return 0;
}
216
/* Legacy (non-atomic) CRTC helper callbacks for the virtual display. */
static const struct drm_crtc_helper_funcs dce_virtual_crtc_helper_funcs = {
	.dpms = dce_virtual_crtc_dpms,
	.mode_fixup = dce_virtual_crtc_mode_fixup,
	.mode_set = dce_virtual_crtc_mode_set,
	.mode_set_base = dce_virtual_crtc_set_base,
	.mode_set_base_atomic = dce_virtual_crtc_set_base_atomic,
	.prepare = dce_virtual_crtc_prepare,
	.commit = dce_virtual_crtc_commit,
	.disable = dce_virtual_crtc_disable,
};
227
dce_virtual_crtc_init(struct amdgpu_device * adev,int index)228 static int dce_virtual_crtc_init(struct amdgpu_device *adev, int index)
229 {
230 struct amdgpu_crtc *amdgpu_crtc;
231
232 amdgpu_crtc = kzalloc(sizeof(struct amdgpu_crtc) +
233 (AMDGPUFB_CONN_LIMIT * sizeof(struct drm_connector *)), GFP_KERNEL);
234 if (amdgpu_crtc == NULL)
235 return -ENOMEM;
236
237 drm_crtc_init(adev->ddev, &amdgpu_crtc->base, &dce_virtual_crtc_funcs);
238
239 drm_mode_crtc_set_gamma_size(&amdgpu_crtc->base, 256);
240 amdgpu_crtc->crtc_id = index;
241 adev->mode_info.crtcs[index] = amdgpu_crtc;
242
243 amdgpu_crtc->pll_id = ATOM_PPLL_INVALID;
244 amdgpu_crtc->encoder = NULL;
245 amdgpu_crtc->connector = NULL;
246 amdgpu_crtc->vsync_timer_enabled = AMDGPU_IRQ_STATE_DISABLE;
247 drm_crtc_helper_add(&amdgpu_crtc->base, &dce_virtual_crtc_helper_funcs);
248
249 return 0;
250 }
251
/*
 * IP early init: install the display and IRQ function tables and
 * advertise a single hpd/dig, which is all the virtual path needs.
 */
static int dce_virtual_early_init(void *handle)
{
	struct amdgpu_device *adev = (struct amdgpu_device *)handle;

	dce_virtual_set_display_funcs(adev);
	dce_virtual_set_irq_funcs(adev);

	adev->mode_info.num_hpd = 1;
	adev->mode_info.num_dig = 1;
	return 0;
}
263
/*
 * best_encoder hook: prefer a DRM_MODE_ENCODER_VIRTUAL encoder for the
 * connector, otherwise fall back to the first possible encoder (the
 * second loop returns on its first iteration).
 */
static struct drm_encoder *
dce_virtual_encoder(struct drm_connector *connector)
{
	struct drm_encoder *encoder;

	drm_connector_for_each_possible_encoder(connector, encoder) {
		if (encoder->encoder_type == DRM_MODE_ENCODER_VIRTUAL)
			return encoder;
	}

	/* pick the first one */
	drm_connector_for_each_possible_encoder(connector, encoder)
		return encoder;

	return NULL;
}
280
dce_virtual_get_modes(struct drm_connector * connector)281 static int dce_virtual_get_modes(struct drm_connector *connector)
282 {
283 struct drm_device *dev = connector->dev;
284 struct drm_display_mode *mode = NULL;
285 unsigned i;
286 static const struct mode_size {
287 int w;
288 int h;
289 } common_modes[17] = {
290 { 640, 480},
291 { 720, 480},
292 { 800, 600},
293 { 848, 480},
294 {1024, 768},
295 {1152, 768},
296 {1280, 720},
297 {1280, 800},
298 {1280, 854},
299 {1280, 960},
300 {1280, 1024},
301 {1440, 900},
302 {1400, 1050},
303 {1680, 1050},
304 {1600, 1200},
305 {1920, 1080},
306 {1920, 1200}
307 };
308
309 for (i = 0; i < 17; i++) {
310 mode = drm_cvt_mode(dev, common_modes[i].w, common_modes[i].h, 60, false, false, false);
311 drm_mode_probed_add(connector, mode);
312 }
313
314 return 0;
315 }
316
/* Every mode is valid on the virtual connector. */
static enum drm_mode_status dce_virtual_mode_valid(struct drm_connector *connector,
						   struct drm_display_mode *mode)
{
	return MODE_OK;
}
322
/* Connector DPMS is a no-op; power state is handled at the CRTC. */
static int
dce_virtual_dpms(struct drm_connector *connector, int mode)
{
	return 0;
}
328
/* No connector properties are backed by hardware; accept and ignore. */
static int
dce_virtual_set_property(struct drm_connector *connector,
			 struct drm_property *property,
			 uint64_t val)
{
	return 0;
}
336
/* Unregister and tear down the virtual connector, then free it. */
static void dce_virtual_destroy(struct drm_connector *connector)
{
	drm_connector_unregister(connector);
	drm_connector_cleanup(connector);
	kfree(connector);
}
343
/* Nothing to force: connection state is fixed for the virtual connector. */
static void dce_virtual_force(struct drm_connector *connector)
{
}
348
/* Connector helper callbacks (probing and encoder selection). */
static const struct drm_connector_helper_funcs dce_virtual_connector_helper_funcs = {
	.get_modes = dce_virtual_get_modes,
	.mode_valid = dce_virtual_mode_valid,
	.best_encoder = dce_virtual_encoder,
};
354
/* DRM core connector callbacks for the virtual connector. */
static const struct drm_connector_funcs dce_virtual_connector_funcs = {
	.dpms = dce_virtual_dpms,
	.fill_modes = drm_helper_probe_single_connector_modes,
	.set_property = dce_virtual_set_property,
	.destroy = dce_virtual_destroy,
	.force = dce_virtual_force,
};
362
dce_virtual_sw_init(void * handle)363 static int dce_virtual_sw_init(void *handle)
364 {
365 int r, i;
366 struct amdgpu_device *adev = (struct amdgpu_device *)handle;
367
368 r = amdgpu_irq_add_id(adev, AMDGPU_IRQ_CLIENTID_LEGACY, VISLANDS30_IV_SRCID_SMU_DISP_TIMER2_TRIGGER, &adev->crtc_irq);
369 if (r)
370 return r;
371
372 adev->ddev->max_vblank_count = 0;
373
374 adev->ddev->mode_config.funcs = &amdgpu_mode_funcs;
375
376 adev->ddev->mode_config.max_width = 16384;
377 adev->ddev->mode_config.max_height = 16384;
378
379 adev->ddev->mode_config.preferred_depth = 24;
380 adev->ddev->mode_config.prefer_shadow = 1;
381
382 adev->ddev->mode_config.fb_base = adev->gmc.aper_base;
383
384 r = amdgpu_display_modeset_create_props(adev);
385 if (r)
386 return r;
387
388 adev->ddev->mode_config.max_width = 16384;
389 adev->ddev->mode_config.max_height = 16384;
390
391 /* allocate crtcs, encoders, connectors */
392 for (i = 0; i < adev->mode_info.num_crtc; i++) {
393 r = dce_virtual_crtc_init(adev, i);
394 if (r)
395 return r;
396 r = dce_virtual_connector_encoder_init(adev, i);
397 if (r)
398 return r;
399 }
400
401 drm_kms_helper_poll_init(adev->ddev);
402
403 adev->mode_info.mode_config_initialized = true;
404 return 0;
405 }
406
/*
 * Software teardown: stop probing, destroy mode_config (which destroys
 * the CRTCs/encoders/connectors), and clear the crtcs pointer table so
 * late IRQ handling cannot touch freed memory.
 */
static int dce_virtual_sw_fini(void *handle)
{
	struct amdgpu_device *adev = (struct amdgpu_device *)handle;

	kfree(adev->mode_info.bios_hardcoded_edid);

	drm_kms_helper_poll_fini(adev->ddev);

	drm_mode_config_cleanup(adev->ddev);
	/* clear crtcs pointer to avoid dce irq finish routine access freed data */
	memset(adev->mode_info.crtcs, 0, sizeof(adev->mode_info.crtcs[0]) * AMDGPU_MAX_CRTCS);
	adev->mode_info.mode_config_initialized = false;
	return 0;
}
421
/*
 * Hardware init: there is no virtual hardware to bring up, but any real
 * DCE block present on the ASIC must be disabled so it does not scan
 * out while the virtual path is in use.  Dispatch to the matching
 * generation's disable routine; TOPAZ/HAINAN have no DCE at all.
 */
static int dce_virtual_hw_init(void *handle)
{
	struct amdgpu_device *adev = (struct amdgpu_device *)handle;

	switch (adev->asic_type) {
#ifdef CONFIG_DRM_AMDGPU_SI
	case CHIP_TAHITI:
	case CHIP_PITCAIRN:
	case CHIP_VERDE:
	case CHIP_OLAND:
		dce_v6_0_disable_dce(adev);
		break;
#endif
#ifdef CONFIG_DRM_AMDGPU_CIK
	case CHIP_BONAIRE:
	case CHIP_HAWAII:
	case CHIP_KAVERI:
	case CHIP_KABINI:
	case CHIP_MULLINS:
		dce_v8_0_disable_dce(adev);
		break;
#endif
	case CHIP_FIJI:
	case CHIP_TONGA:
		dce_v10_0_disable_dce(adev);
		break;
	case CHIP_CARRIZO:
	case CHIP_STONEY:
	case CHIP_POLARIS10:
	case CHIP_POLARIS11:
	case CHIP_VEGAM:
		dce_v11_0_disable_dce(adev);
		break;
	case CHIP_TOPAZ:
#ifdef CONFIG_DRM_AMDGPU_SI
	case CHIP_HAINAN:
#endif
		/* no DCE */
		break;
	default:
		break;
	}
	return 0;
}
466
dce_virtual_hw_fini(void * handle)467 static int dce_virtual_hw_fini(void *handle)
468 {
469 struct amdgpu_device *adev = (struct amdgpu_device *)handle;
470 int i = 0;
471
472 for (i = 0; i<adev->mode_info.num_crtc; i++)
473 if (adev->mode_info.crtcs[i])
474 dce_virtual_set_crtc_vblank_interrupt_state(adev, i, AMDGPU_IRQ_STATE_DISABLE);
475
476 return 0;
477 }
478
/* Suspend is identical to hardware teardown for the virtual display. */
static int dce_virtual_suspend(void *handle)
{
	return dce_virtual_hw_fini(handle);
}
483
/* Resume is identical to hardware init for the virtual display. */
static int dce_virtual_resume(void *handle)
{
	return dce_virtual_hw_init(handle);
}
488
/* The virtual display block has no busy state; always idle. */
static bool dce_virtual_is_idle(void *handle)
{
	return true;
}
493
/* Nothing to wait for; the block is always idle. */
static int dce_virtual_wait_for_idle(void *handle)
{
	return 0;
}
498
/* No hardware to reset. */
static int dce_virtual_soft_reset(void *handle)
{
	return 0;
}
503
/* No clocks to gate on the virtual display block. */
static int dce_virtual_set_clockgating_state(void *handle,
					     enum amd_clockgating_state state)
{
	return 0;
}
509
/* No power domains to gate on the virtual display block. */
static int dce_virtual_set_powergating_state(void *handle,
					     enum amd_powergating_state state)
{
	return 0;
}
515
/* amdgpu IP-block callbacks for the virtual display. */
static const struct amd_ip_funcs dce_virtual_ip_funcs = {
	.name = "dce_virtual",
	.early_init = dce_virtual_early_init,
	.late_init = NULL,
	.sw_init = dce_virtual_sw_init,
	.sw_fini = dce_virtual_sw_fini,
	.hw_init = dce_virtual_hw_init,
	.hw_fini = dce_virtual_hw_fini,
	.suspend = dce_virtual_suspend,
	.resume = dce_virtual_resume,
	.is_idle = dce_virtual_is_idle,
	.wait_for_idle = dce_virtual_wait_for_idle,
	.soft_reset = dce_virtual_soft_reset,
	.set_clockgating_state = dce_virtual_set_clockgating_state,
	.set_powergating_state = dce_virtual_set_powergating_state,
};
532
533 /* these are handled by the primary encoders */
static void dce_virtual_encoder_prepare(struct drm_encoder *encoder)
{
	/* Nothing to prepare on the virtual encoder. */
}
538
static void dce_virtual_encoder_commit(struct drm_encoder *encoder)
{
	/* Nothing to commit on the virtual encoder. */
}
543
static void
dce_virtual_encoder_mode_set(struct drm_encoder *encoder,
			     struct drm_display_mode *mode,
			     struct drm_display_mode *adjusted_mode)
{
	/* No encoder registers to program for a mode change. */
}
551
static void dce_virtual_encoder_disable(struct drm_encoder *encoder)
{
	/* Nothing to disable on the virtual encoder. */
}
556
static void
dce_virtual_encoder_dpms(struct drm_encoder *encoder, int mode)
{
	/* Encoder power state is a no-op on virtual hardware. */
}
562
/* Every mode is acceptable as-is on the virtual encoder. */
static bool dce_virtual_encoder_mode_fixup(struct drm_encoder *encoder,
					   const struct drm_display_mode *mode,
					   struct drm_display_mode *adjusted_mode)
{
	return true;
}
569
/* Encoder helper callbacks; all stubs for the virtual encoder. */
static const struct drm_encoder_helper_funcs dce_virtual_encoder_helper_funcs = {
	.dpms = dce_virtual_encoder_dpms,
	.mode_fixup = dce_virtual_encoder_mode_fixup,
	.prepare = dce_virtual_encoder_prepare,
	.mode_set = dce_virtual_encoder_mode_set,
	.commit = dce_virtual_encoder_commit,
	.disable = dce_virtual_encoder_disable,
};
578
/* Unregister the encoder from the DRM core and free it. */
static void dce_virtual_encoder_destroy(struct drm_encoder *encoder)
{
	drm_encoder_cleanup(encoder);
	kfree(encoder);
}
584
/* DRM core encoder callbacks; only destruction is needed. */
static const struct drm_encoder_funcs dce_virtual_encoder_funcs = {
	.destroy = dce_virtual_encoder_destroy,
};
588
dce_virtual_connector_encoder_init(struct amdgpu_device * adev,int index)589 static int dce_virtual_connector_encoder_init(struct amdgpu_device *adev,
590 int index)
591 {
592 struct drm_encoder *encoder;
593 struct drm_connector *connector;
594
595 /* add a new encoder */
596 encoder = kzalloc(sizeof(struct drm_encoder), GFP_KERNEL);
597 if (!encoder)
598 return -ENOMEM;
599 encoder->possible_crtcs = 1 << index;
600 drm_encoder_init(adev->ddev, encoder, &dce_virtual_encoder_funcs,
601 DRM_MODE_ENCODER_VIRTUAL, NULL);
602 drm_encoder_helper_add(encoder, &dce_virtual_encoder_helper_funcs);
603
604 connector = kzalloc(sizeof(struct drm_connector), GFP_KERNEL);
605 if (!connector) {
606 kfree(encoder);
607 return -ENOMEM;
608 }
609
610 /* add a new connector */
611 drm_connector_init(adev->ddev, connector, &dce_virtual_connector_funcs,
612 DRM_MODE_CONNECTOR_VIRTUAL);
613 drm_connector_helper_add(connector, &dce_virtual_connector_helper_funcs);
614 connector->display_info.subpixel_order = SubPixelHorizontalRGB;
615 connector->interlace_allowed = false;
616 connector->doublescan_allowed = false;
617 drm_connector_register(connector);
618
619 /* link them */
620 drm_connector_attach_encoder(connector, encoder);
621
622 return 0;
623 }
624
/* amdgpu display callback table; backlight and BIOS hooks are unused. */
static const struct amdgpu_display_funcs dce_virtual_display_funcs = {
	.bandwidth_update = &dce_virtual_bandwidth_update,
	.vblank_get_counter = &dce_virtual_vblank_get_counter,
	.backlight_set_level = NULL,
	.backlight_get_level = NULL,
	.hpd_sense = &dce_virtual_hpd_sense,
	.hpd_set_polarity = &dce_virtual_hpd_set_polarity,
	.hpd_get_gpio_reg = &dce_virtual_hpd_get_gpio_reg,
	.page_flip = &dce_virtual_page_flip,
	.page_flip_get_scanoutpos = &dce_virtual_crtc_get_scanoutpos,
	.add_encoder = NULL,
	.add_connector = NULL,
};
638
/* Install the virtual display function table on the device. */
static void dce_virtual_set_display_funcs(struct amdgpu_device *adev)
{
	adev->mode_info.funcs = &dce_virtual_display_funcs;
}
643
dce_virtual_pageflip(struct amdgpu_device * adev,unsigned crtc_id)644 static int dce_virtual_pageflip(struct amdgpu_device *adev,
645 unsigned crtc_id)
646 {
647 unsigned long flags;
648 struct amdgpu_crtc *amdgpu_crtc;
649 struct amdgpu_flip_work *works;
650
651 amdgpu_crtc = adev->mode_info.crtcs[crtc_id];
652
653 if (crtc_id >= adev->mode_info.num_crtc) {
654 DRM_ERROR("invalid pageflip crtc %d\n", crtc_id);
655 return -EINVAL;
656 }
657
658 /* IRQ could occur when in initial stage */
659 if (amdgpu_crtc == NULL)
660 return 0;
661
662 spin_lock_irqsave(&adev->ddev->event_lock, flags);
663 works = amdgpu_crtc->pflip_works;
664 if (amdgpu_crtc->pflip_status != AMDGPU_FLIP_SUBMITTED) {
665 DRM_DEBUG_DRIVER("amdgpu_crtc->pflip_status = %d != "
666 "AMDGPU_FLIP_SUBMITTED(%d)\n",
667 amdgpu_crtc->pflip_status,
668 AMDGPU_FLIP_SUBMITTED);
669 spin_unlock_irqrestore(&adev->ddev->event_lock, flags);
670 return 0;
671 }
672
673 /* page flip completed. clean up */
674 amdgpu_crtc->pflip_status = AMDGPU_FLIP_NONE;
675 amdgpu_crtc->pflip_works = NULL;
676
677 /* wakeup usersapce */
678 if (works->event)
679 drm_crtc_send_vblank_event(&amdgpu_crtc->base, works->event);
680
681 spin_unlock_irqrestore(&adev->ddev->event_lock, flags);
682
683 drm_crtc_vblank_put(&amdgpu_crtc->base);
684 amdgpu_bo_unref(&works->old_abo);
685 kfree(works->shared);
686 kfree(works);
687
688 return 0;
689 }
690
/*
 * Software vsync timer callback: deliver a vblank to the DRM core,
 * complete any pending flip, then re-arm the timer manually with
 * hrtimer_start() and return HRTIMER_NORESTART (re-arming by hand
 * instead of returning HRTIMER_RESTART keeps the period relative to
 * the end of this handler).
 */
static enum hrtimer_restart dce_virtual_vblank_timer_handle(struct hrtimer *vblank_timer)
{
	struct amdgpu_crtc *amdgpu_crtc = container_of(vblank_timer,
						       struct amdgpu_crtc, vblank_timer);
	struct drm_device *ddev = amdgpu_crtc->base.dev;
	struct amdgpu_device *adev = ddev->dev_private;

	drm_handle_vblank(ddev, amdgpu_crtc->crtc_id);
	dce_virtual_pageflip(adev, amdgpu_crtc->crtc_id);
	hrtimer_start(vblank_timer, DCE_VIRTUAL_VBLANK_PERIOD,
		      HRTIMER_MODE_REL);

	return HRTIMER_NORESTART;
}
705
/*
 * Enable or disable the per-CRTC software vsync timer that emulates the
 * vblank interrupt.  The timer is (re)initialized on every enable and
 * cancelled on disable; vsync_timer_enabled tracks the current state so
 * redundant transitions are no-ops.
 */
static void dce_virtual_set_crtc_vblank_interrupt_state(struct amdgpu_device *adev,
							int crtc,
							enum amdgpu_interrupt_state state)
{
	if (crtc >= adev->mode_info.num_crtc || !adev->mode_info.crtcs[crtc]) {
		DRM_DEBUG("invalid crtc %d\n", crtc);
		return;
	}

	if (state && !adev->mode_info.crtcs[crtc]->vsync_timer_enabled) {
		DRM_DEBUG("Enable software vsync timer\n");
		hrtimer_init(&adev->mode_info.crtcs[crtc]->vblank_timer,
			     CLOCK_MONOTONIC, HRTIMER_MODE_REL);
		hrtimer_set_expires(&adev->mode_info.crtcs[crtc]->vblank_timer,
				    DCE_VIRTUAL_VBLANK_PERIOD);
		/* Handler must be set before the first start. */
		adev->mode_info.crtcs[crtc]->vblank_timer.function =
			dce_virtual_vblank_timer_handle;
		hrtimer_start(&adev->mode_info.crtcs[crtc]->vblank_timer,
			      DCE_VIRTUAL_VBLANK_PERIOD, HRTIMER_MODE_REL);
	} else if (!state && adev->mode_info.crtcs[crtc]->vsync_timer_enabled) {
		DRM_DEBUG("Disable software vsync timer\n");
		hrtimer_cancel(&adev->mode_info.crtcs[crtc]->vblank_timer);
	}

	adev->mode_info.crtcs[crtc]->vsync_timer_enabled = state;
	DRM_DEBUG("[FM]set crtc %d vblank interrupt state %d\n", crtc, state);
}
733
734
/*
 * IRQ source .set hook: map the vblank irq type (one per CRTC) onto the
 * software vsync timer after range-checking it.
 */
static int dce_virtual_set_crtc_irq_state(struct amdgpu_device *adev,
					  struct amdgpu_irq_src *source,
					  unsigned type,
					  enum amdgpu_interrupt_state state)
{
	if (type > AMDGPU_CRTC_IRQ_VBLANK6)
		return -EINVAL;

	dce_virtual_set_crtc_vblank_interrupt_state(adev, type, state);

	return 0;
}
747
/* CRTC IRQ source callbacks; no .process since vblanks are timer-driven. */
static const struct amdgpu_irq_src_funcs dce_virtual_crtc_irq_funcs = {
	.set = dce_virtual_set_crtc_irq_state,
	.process = NULL,
};
752
/* Install the CRTC IRQ source, one vblank type per possible CRTC. */
static void dce_virtual_set_irq_funcs(struct amdgpu_device *adev)
{
	adev->crtc_irq.num_types = AMDGPU_CRTC_IRQ_VBLANK6 + 1;
	adev->crtc_irq.funcs = &dce_virtual_crtc_irq_funcs;
}
758
/* Exported IP-block descriptor used when registering the virtual DCE. */
const struct amdgpu_ip_block_version dce_virtual_ip_block =
{
	.type = AMD_IP_BLOCK_TYPE_DCE,
	.major = 1,
	.minor = 0,
	.rev = 0,
	.funcs = &dce_virtual_ip_funcs,
};
767