/* $NetBSD: mga_state.c,v 1.3 2021/12/18 23:45:32 riastradh Exp $ */

/* mga_state.c -- State support for MGA G200/G400 -*- linux-c -*-
 * Created: Thu Jan 27 02:53:43 2000 by jhartmann@precisioninsight.com
 *
 * Copyright 1999 Precision Insight, Inc., Cedar Park, Texas.
 * Copyright 2000 VA Linux Systems, Inc., Sunnyvale, California.
 * All Rights Reserved.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * VA LINUX SYSTEMS AND/OR ITS SUPPLIERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors:
 *    Jeff Hartmann <jhartmann@valinux.com>
 *    Keith Whitwell <keith@tungstengraphics.com>
 *
 * Rewritten by:
 *    Gareth Hughes <gareth@valinux.com>
 */

#include <sys/cdefs.h>
__KERNEL_RCSID(0, "$NetBSD: mga_state.c,v 1.3 2021/12/18 23:45:32 riastradh Exp $");

#include "mga_drv.h"

/* ================================================================
 * DMA hardware state programming functions
 */

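/* Emit the clipping registers (CXBNDRY, YTOP, YBOT) for a single
 * cliprect.  On G400-class hardware DWGCTL is rewritten first to clear
 * the clip disable bit.
 */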
static void mga_emit_clip_rect(drm_mga_private_t *dev_priv,
			       struct drm_clip_rect *box)
{
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_mga_context_regs_t *ctx = &sarea_priv->context_state;
	unsigned int pitch = dev_priv->front_pitch;
	DMA_LOCALS;

	BEGIN_DMA(2);

	/* Force reset of DWGCTL on G400 (eliminates clip disable bit).
	 */
	if (dev_priv->chipset >= MGA_CARD_TYPE_G400) {
		DMA_BLOCK(MGA_DWGCTL, ctx->dwgctl,
			  MGA_LEN + MGA_EXEC, 0x80000000,
			  MGA_DWGCTL, ctx->dwgctl,
			  MGA_LEN + MGA_EXEC, 0x80000000);
	}
	DMA_BLOCK(MGA_DMAPAD, 0x00000000,
		  MGA_CXBNDRY, ((box->x2 - 1) << 16) | box->x1,
		  MGA_YTOP, box->y1 * pitch, MGA_YBOT, (box->y2 - 1) * pitch);

	ADVANCE_DMA();
}

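/* Emit the drawing-context registers (destination origin, plane write
 * mask, DWGCTL, fog, alpha, stencil, ...) from the SAREA context state.
 * Separate G200 and G400 variants cover the registers each chip has.
 */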
static __inline__ void mga_g200_emit_context(drm_mga_private_t *dev_priv)
{
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_mga_context_regs_t *ctx = &sarea_priv->context_state;
	DMA_LOCALS;

	BEGIN_DMA(3);

	DMA_BLOCK(MGA_DSTORG, ctx->dstorg,
		  MGA_MACCESS, ctx->maccess,
		  MGA_PLNWT, ctx->plnwt, MGA_DWGCTL, ctx->dwgctl);

	DMA_BLOCK(MGA_ALPHACTRL, ctx->alphactrl,
		  MGA_FOGCOL, ctx->fogcolor,
		  MGA_WFLAG, ctx->wflag, MGA_ZORG, dev_priv->depth_offset);

	DMA_BLOCK(MGA_FCOL, ctx->fcol,
		  MGA_DMAPAD, 0x00000000,
		  MGA_DMAPAD, 0x00000000, MGA_DMAPAD, 0x00000000);

	ADVANCE_DMA();
}

static __inline__ void mga_g400_emit_context(drm_mga_private_t *dev_priv)
{
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_mga_context_regs_t *ctx = &sarea_priv->context_state;
	DMA_LOCALS;

	BEGIN_DMA(4);

	DMA_BLOCK(MGA_DSTORG, ctx->dstorg,
		  MGA_MACCESS, ctx->maccess,
		  MGA_PLNWT, ctx->plnwt, MGA_DWGCTL, ctx->dwgctl);

	DMA_BLOCK(MGA_ALPHACTRL, ctx->alphactrl,
		  MGA_FOGCOL, ctx->fogcolor,
		  MGA_WFLAG, ctx->wflag, MGA_ZORG, dev_priv->depth_offset);

	DMA_BLOCK(MGA_WFLAG1, ctx->wflag,
		  MGA_TDUALSTAGE0, ctx->tdualstage0,
		  MGA_TDUALSTAGE1, ctx->tdualstage1, MGA_FCOL, ctx->fcol);

	DMA_BLOCK(MGA_STENCIL, ctx->stencil,
		  MGA_STENCILCTL, ctx->stencilctl,
		  MGA_DMAPAD, 0x00000000, MGA_DMAPAD, 0x00000000);

	ADVANCE_DMA();
}

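/* Emit the texture unit registers from the SAREA texture state.  The
 * G400 variants also program the WARP WR registers that shadow the
 * texture width and height.
 */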
static __inline__ void mga_g200_emit_tex0(drm_mga_private_t *dev_priv)
{
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_mga_texture_regs_t *tex = &sarea_priv->tex_state[0];
	DMA_LOCALS;

	BEGIN_DMA(4);

	DMA_BLOCK(MGA_TEXCTL2, tex->texctl2,
		  MGA_TEXCTL, tex->texctl,
		  MGA_TEXFILTER, tex->texfilter,
		  MGA_TEXBORDERCOL, tex->texbordercol);

	DMA_BLOCK(MGA_TEXORG, tex->texorg,
		  MGA_TEXORG1, tex->texorg1,
		  MGA_TEXORG2, tex->texorg2, MGA_TEXORG3, tex->texorg3);

	DMA_BLOCK(MGA_TEXORG4, tex->texorg4,
		  MGA_TEXWIDTH, tex->texwidth,
		  MGA_TEXHEIGHT, tex->texheight, MGA_WR24, tex->texwidth);

	DMA_BLOCK(MGA_WR34, tex->texheight,
		  MGA_TEXTRANS, 0x0000ffff,
		  MGA_TEXTRANSHIGH, 0x0000ffff, MGA_DMAPAD, 0x00000000);

	ADVANCE_DMA();
}

static __inline__ void mga_g400_emit_tex0(drm_mga_private_t *dev_priv)
{
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_mga_texture_regs_t *tex = &sarea_priv->tex_state[0];
	DMA_LOCALS;

	/* printk("mga_g400_emit_tex0 %x %x %x\n", tex->texorg, */
	/*        tex->texctl, tex->texctl2); */

	BEGIN_DMA(6);

	DMA_BLOCK(MGA_TEXCTL2, tex->texctl2 | MGA_G400_TC2_MAGIC,
		  MGA_TEXCTL, tex->texctl,
		  MGA_TEXFILTER, tex->texfilter,
		  MGA_TEXBORDERCOL, tex->texbordercol);

	DMA_BLOCK(MGA_TEXORG, tex->texorg,
		  MGA_TEXORG1, tex->texorg1,
		  MGA_TEXORG2, tex->texorg2, MGA_TEXORG3, tex->texorg3);

	DMA_BLOCK(MGA_TEXORG4, tex->texorg4,
		  MGA_TEXWIDTH, tex->texwidth,
		  MGA_TEXHEIGHT, tex->texheight, MGA_WR49, 0x00000000);

	DMA_BLOCK(MGA_WR57, 0x00000000,
		  MGA_WR53, 0x00000000,
		  MGA_WR61, 0x00000000, MGA_WR52, MGA_G400_WR_MAGIC);

	DMA_BLOCK(MGA_WR60, MGA_G400_WR_MAGIC,
		  MGA_WR54, tex->texwidth | MGA_G400_WR_MAGIC,
		  MGA_WR62, tex->texheight | MGA_G400_WR_MAGIC,
		  MGA_DMAPAD, 0x00000000);

	DMA_BLOCK(MGA_DMAPAD, 0x00000000,
		  MGA_DMAPAD, 0x00000000,
		  MGA_TEXTRANS, 0x0000ffff, MGA_TEXTRANSHIGH, 0x0000ffff);

	ADVANCE_DMA();
}

static __inline__ void mga_g400_emit_tex1(drm_mga_private_t *dev_priv)
{
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_mga_texture_regs_t *tex = &sarea_priv->tex_state[1];
	DMA_LOCALS;

	/* printk("mga_g400_emit_tex1 %x %x %x\n", tex->texorg, */
	/*        tex->texctl, tex->texctl2); */

	BEGIN_DMA(5);

	DMA_BLOCK(MGA_TEXCTL2, (tex->texctl2 |
				MGA_MAP1_ENABLE |
				MGA_G400_TC2_MAGIC),
		  MGA_TEXCTL, tex->texctl,
		  MGA_TEXFILTER, tex->texfilter,
		  MGA_TEXBORDERCOL, tex->texbordercol);

	DMA_BLOCK(MGA_TEXORG, tex->texorg,
		  MGA_TEXORG1, tex->texorg1,
		  MGA_TEXORG2, tex->texorg2, MGA_TEXORG3, tex->texorg3);

	DMA_BLOCK(MGA_TEXORG4, tex->texorg4,
		  MGA_TEXWIDTH, tex->texwidth,
		  MGA_TEXHEIGHT, tex->texheight, MGA_WR49, 0x00000000);

	DMA_BLOCK(MGA_WR57, 0x00000000,
		  MGA_WR53, 0x00000000,
		  MGA_WR61, 0x00000000,
		  MGA_WR52, tex->texwidth | MGA_G400_WR_MAGIC);

	DMA_BLOCK(MGA_WR60, tex->texheight | MGA_G400_WR_MAGIC,
		  MGA_TEXTRANS, 0x0000ffff,
		  MGA_TEXTRANSHIGH, 0x0000ffff,
		  MGA_TEXCTL2, tex->texctl2 | MGA_G400_TC2_MAGIC);

	ADVANCE_DMA();
}

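/* Load a new WARP pipe: suspend the WARP engine, program the vertex
 * size and accept-sequence registers for the requested pipe, then
 * restart it at the physical address of the selected microcode.
 */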
static __inline__ void mga_g200_emit_pipe(drm_mga_private_t *dev_priv)
{
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	unsigned int pipe = sarea_priv->warp_pipe;
	DMA_LOCALS;

	BEGIN_DMA(3);

	DMA_BLOCK(MGA_WIADDR, MGA_WMODE_SUSPEND,
		  MGA_WVRTXSZ, 0x00000007,
		  MGA_WFLAG, 0x00000000, MGA_WR24, 0x00000000);

	DMA_BLOCK(MGA_WR25, 0x00000100,
		  MGA_WR34, 0x00000000,
		  MGA_WR42, 0x0000ffff, MGA_WR60, 0x0000ffff);

	/* Padding required due to hardware bug.
	 */
	DMA_BLOCK(MGA_DMAPAD, 0xffffffff,
		  MGA_DMAPAD, 0xffffffff,
		  MGA_DMAPAD, 0xffffffff,
		  MGA_WIADDR, (dev_priv->warp_pipe_phys[pipe] |
			       MGA_WMODE_START | dev_priv->wagp_enable));

	ADVANCE_DMA();
}

static __inline__ void mga_g400_emit_pipe(drm_mga_private_t *dev_priv)
{
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	unsigned int pipe = sarea_priv->warp_pipe;
	DMA_LOCALS;

	/* printk("mga_g400_emit_pipe %x\n", pipe); */

	BEGIN_DMA(10);

	DMA_BLOCK(MGA_WIADDR2, MGA_WMODE_SUSPEND,
		  MGA_DMAPAD, 0x00000000,
		  MGA_DMAPAD, 0x00000000, MGA_DMAPAD, 0x00000000);

	if (pipe & MGA_T2) {
		DMA_BLOCK(MGA_WVRTXSZ, 0x00001e09,
			  MGA_DMAPAD, 0x00000000,
			  MGA_DMAPAD, 0x00000000, MGA_DMAPAD, 0x00000000);

		DMA_BLOCK(MGA_WACCEPTSEQ, 0x00000000,
			  MGA_WACCEPTSEQ, 0x00000000,
			  MGA_WACCEPTSEQ, 0x00000000,
			  MGA_WACCEPTSEQ, 0x1e000000);
	} else {
		if (dev_priv->warp_pipe & MGA_T2) {
			/* Flush the WARP pipe */
			DMA_BLOCK(MGA_YDST, 0x00000000,
				  MGA_FXLEFT, 0x00000000,
				  MGA_FXRIGHT, 0x00000001,
				  MGA_DWGCTL, MGA_DWGCTL_FLUSH);

			DMA_BLOCK(MGA_LEN + MGA_EXEC, 0x00000001,
				  MGA_DWGSYNC, 0x00007000,
				  MGA_TEXCTL2, MGA_G400_TC2_MAGIC,
				  MGA_LEN + MGA_EXEC, 0x00000000);

			DMA_BLOCK(MGA_TEXCTL2, (MGA_DUALTEX |
						MGA_G400_TC2_MAGIC),
				  MGA_LEN + MGA_EXEC, 0x00000000,
				  MGA_TEXCTL2, MGA_G400_TC2_MAGIC,
				  MGA_DMAPAD, 0x00000000);
		}

		DMA_BLOCK(MGA_WVRTXSZ, 0x00001807,
			  MGA_DMAPAD, 0x00000000,
			  MGA_DMAPAD, 0x00000000, MGA_DMAPAD, 0x00000000);

		DMA_BLOCK(MGA_WACCEPTSEQ, 0x00000000,
			  MGA_WACCEPTSEQ, 0x00000000,
			  MGA_WACCEPTSEQ, 0x00000000,
			  MGA_WACCEPTSEQ, 0x18000000);
	}

	DMA_BLOCK(MGA_WFLAG, 0x00000000,
		  MGA_WFLAG1, 0x00000000,
		  MGA_WR56, MGA_G400_WR56_MAGIC, MGA_DMAPAD, 0x00000000);

	DMA_BLOCK(MGA_WR49, 0x00000000,		/* tex0        */
		  MGA_WR57, 0x00000000,		/* tex0        */
		  MGA_WR53, 0x00000000,		/* tex1        */
		  MGA_WR61, 0x00000000);	/* tex1        */

	DMA_BLOCK(MGA_WR54, MGA_G400_WR_MAGIC,	/* tex0 width  */
		  MGA_WR62, MGA_G400_WR_MAGIC,	/* tex0 height */
		  MGA_WR52, MGA_G400_WR_MAGIC,	/* tex1 width  */
		  MGA_WR60, MGA_G400_WR_MAGIC);	/* tex1 height */

	/* Padding required due to hardware bug */
	DMA_BLOCK(MGA_DMAPAD, 0xffffffff,
		  MGA_DMAPAD, 0xffffffff,
		  MGA_DMAPAD, 0xffffffff,
		  MGA_WIADDR2, (dev_priv->warp_pipe_phys[pipe] |
				MGA_WMODE_START | dev_priv->wagp_enable));

	ADVANCE_DMA();
}

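/* Emit all state the SAREA has flagged as dirty, switching WARP pipes
 * first if the requested pipe differs from the one currently loaded.
 * Each dirty bit is cleared once the corresponding registers have been
 * emitted.
 */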
static void mga_g200_emit_state(drm_mga_private_t *dev_priv)
{
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	unsigned int dirty = sarea_priv->dirty;

	if (sarea_priv->warp_pipe != dev_priv->warp_pipe) {
		mga_g200_emit_pipe(dev_priv);
		dev_priv->warp_pipe = sarea_priv->warp_pipe;
	}

	if (dirty & MGA_UPLOAD_CONTEXT) {
		mga_g200_emit_context(dev_priv);
		sarea_priv->dirty &= ~MGA_UPLOAD_CONTEXT;
	}

	if (dirty & MGA_UPLOAD_TEX0) {
		mga_g200_emit_tex0(dev_priv);
		sarea_priv->dirty &= ~MGA_UPLOAD_TEX0;
	}
}

static void mga_g400_emit_state(drm_mga_private_t *dev_priv)
{
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	unsigned int dirty = sarea_priv->dirty;
	int multitex = sarea_priv->warp_pipe & MGA_T2;

	if (sarea_priv->warp_pipe != dev_priv->warp_pipe) {
		mga_g400_emit_pipe(dev_priv);
		dev_priv->warp_pipe = sarea_priv->warp_pipe;
	}

	if (dirty & MGA_UPLOAD_CONTEXT) {
		mga_g400_emit_context(dev_priv);
		sarea_priv->dirty &= ~MGA_UPLOAD_CONTEXT;
	}

	if (dirty & MGA_UPLOAD_TEX0) {
		mga_g400_emit_tex0(dev_priv);
		sarea_priv->dirty &= ~MGA_UPLOAD_TEX0;
	}

	if ((dirty & MGA_UPLOAD_TEX1) && multitex) {
		mga_g400_emit_tex1(dev_priv);
		sarea_priv->dirty &= ~MGA_UPLOAD_TEX1;
	}
}

/* ================================================================
 * SAREA state verification
 */

/* Disallow all write destinations except the front and backbuffer.
 */
static int mga_verify_context(drm_mga_private_t *dev_priv)
{
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_mga_context_regs_t *ctx = &sarea_priv->context_state;

	if (ctx->dstorg != dev_priv->front_offset &&
	    ctx->dstorg != dev_priv->back_offset) {
		DRM_ERROR("*** bad DSTORG: %x (front %x, back %x)\n\n",
			  ctx->dstorg, dev_priv->front_offset,
			  dev_priv->back_offset);
		ctx->dstorg = 0;
		return -EINVAL;
	}

	return 0;
}

/* Disallow texture reads from PCI space.
 */
static int mga_verify_tex(drm_mga_private_t *dev_priv, int unit)
{
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_mga_texture_regs_t *tex = &sarea_priv->tex_state[unit];
	unsigned int org;

	org = tex->texorg & (MGA_TEXORGMAP_MASK | MGA_TEXORGACC_MASK);

	if (org == (MGA_TEXORGMAP_SYSMEM | MGA_TEXORGACC_PCI)) {
		DRM_ERROR("*** bad TEXORG: 0x%x, unit %d\n", tex->texorg, unit);
		tex->texorg = 0;
		return -EINVAL;
	}

	return 0;
}

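/* Verify all state flagged as dirty before it is emitted.  Note the
 * inverted convention: this returns nonzero when the state is
 * acceptable and 0 when any check failed, so callers test
 * !mga_verify_state().
 */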
static int mga_verify_state(drm_mga_private_t *dev_priv)
{
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	unsigned int dirty = sarea_priv->dirty;
	int ret = 0;

	if (sarea_priv->nbox > MGA_NR_SAREA_CLIPRECTS)
		sarea_priv->nbox = MGA_NR_SAREA_CLIPRECTS;

	if (dirty & MGA_UPLOAD_CONTEXT)
		ret |= mga_verify_context(dev_priv);

	if (dirty & MGA_UPLOAD_TEX0)
		ret |= mga_verify_tex(dev_priv, 0);

	if (dev_priv->chipset >= MGA_CARD_TYPE_G400) {
		if (dirty & MGA_UPLOAD_TEX1)
			ret |= mga_verify_tex(dev_priv, 1);

		if (dirty & MGA_UPLOAD_PIPE)
			ret |= (sarea_priv->warp_pipe > MGA_MAX_G400_PIPES);
	} else {
		if (dirty & MGA_UPLOAD_PIPE)
			ret |= (sarea_priv->warp_pipe > MGA_MAX_G200_PIPES);
	}

	return (ret == 0);
}

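/* Reject image loads that fall outside the texture heap or whose
 * length is not a multiple of the hardware transfer granularity.
 */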
static int mga_verify_iload(drm_mga_private_t *dev_priv,
			    unsigned int dstorg, unsigned int length)
{
	if (dstorg < dev_priv->texture_offset ||
	    dstorg + length > (dev_priv->texture_offset +
			       dev_priv->texture_size)) {
		DRM_ERROR("*** bad iload DSTORG: 0x%x\n", dstorg);
		return -EINVAL;
	}

	if (length & MGA_ILOAD_MASK) {
		DRM_ERROR("*** bad iload length: 0x%x\n",
			  length & MGA_ILOAD_MASK);
		return -EINVAL;
	}

	return 0;
}

static int mga_verify_blit(drm_mga_private_t *dev_priv,
			   unsigned int srcorg, unsigned int dstorg)
{
	if ((srcorg & 0x3) == (MGA_SRCACC_PCI | MGA_SRCMAP_SYSMEM) ||
	    (dstorg & 0x3) == (MGA_SRCACC_PCI | MGA_SRCMAP_SYSMEM)) {
		DRM_ERROR("*** bad blit: src=0x%x dst=0x%x\n", srcorg, dstorg);
		return -EINVAL;
	}
	return 0;
}

/* ================================================================
 *
 */

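/* Clear the front, back and/or depth buffers for every cliprect in the
 * SAREA, using the precomputed clear command and restoring PLNWT and
 * DWGCTL afterwards.
 */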
static void mga_dma_dispatch_clear(struct drm_device *dev, drm_mga_clear_t *clear)
{
	drm_mga_private_t *dev_priv = dev->dev_private;
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_mga_context_regs_t *ctx = &sarea_priv->context_state;
	struct drm_clip_rect *pbox = sarea_priv->boxes;
	int nbox = sarea_priv->nbox;
	int i;
	DMA_LOCALS;
	DRM_DEBUG("\n");

	BEGIN_DMA(1);

	DMA_BLOCK(MGA_DMAPAD, 0x00000000,
		  MGA_DMAPAD, 0x00000000,
		  MGA_DWGSYNC, 0x00007100, MGA_DWGSYNC, 0x00007000);

	ADVANCE_DMA();

	for (i = 0; i < nbox; i++) {
		struct drm_clip_rect *box = &pbox[i];
		u32 height = box->y2 - box->y1;

		DRM_DEBUG("   from=%d,%d to=%d,%d\n",
			  box->x1, box->y1, box->x2, box->y2);

		if (clear->flags & MGA_FRONT) {
			BEGIN_DMA(2);

			DMA_BLOCK(MGA_DMAPAD, 0x00000000,
				  MGA_PLNWT, clear->color_mask,
				  MGA_YDSTLEN, (box->y1 << 16) | height,
				  MGA_FXBNDRY, (box->x2 << 16) | box->x1);

			DMA_BLOCK(MGA_DMAPAD, 0x00000000,
				  MGA_FCOL, clear->clear_color,
				  MGA_DSTORG, dev_priv->front_offset,
				  MGA_DWGCTL + MGA_EXEC, dev_priv->clear_cmd);

			ADVANCE_DMA();
		}

		if (clear->flags & MGA_BACK) {
			BEGIN_DMA(2);

			DMA_BLOCK(MGA_DMAPAD, 0x00000000,
				  MGA_PLNWT, clear->color_mask,
				  MGA_YDSTLEN, (box->y1 << 16) | height,
				  MGA_FXBNDRY, (box->x2 << 16) | box->x1);

			DMA_BLOCK(MGA_DMAPAD, 0x00000000,
				  MGA_FCOL, clear->clear_color,
				  MGA_DSTORG, dev_priv->back_offset,
				  MGA_DWGCTL + MGA_EXEC, dev_priv->clear_cmd);

			ADVANCE_DMA();
		}

		if (clear->flags & MGA_DEPTH) {
			BEGIN_DMA(2);

			DMA_BLOCK(MGA_DMAPAD, 0x00000000,
				  MGA_PLNWT, clear->depth_mask,
				  MGA_YDSTLEN, (box->y1 << 16) | height,
				  MGA_FXBNDRY, (box->x2 << 16) | box->x1);

			DMA_BLOCK(MGA_DMAPAD, 0x00000000,
				  MGA_FCOL, clear->clear_depth,
				  MGA_DSTORG, dev_priv->depth_offset,
				  MGA_DWGCTL + MGA_EXEC, dev_priv->clear_cmd);

			ADVANCE_DMA();
		}

	}

	BEGIN_DMA(1);

	/* Force reset of DWGCTL */
	DMA_BLOCK(MGA_DMAPAD, 0x00000000,
		  MGA_DMAPAD, 0x00000000,
		  MGA_PLNWT, ctx->plnwt, MGA_DWGCTL, ctx->dwgctl);

	ADVANCE_DMA();

	FLUSH_DMA();
}

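/* Swap buffers by blitting the back buffer to the front buffer for
 * every cliprect, recording the primary DMA position of this frame so
 * userspace can wait for the swap to complete.
 */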
static void mga_dma_dispatch_swap(struct drm_device *dev)
{
	drm_mga_private_t *dev_priv = dev->dev_private;
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_mga_context_regs_t *ctx = &sarea_priv->context_state;
	struct drm_clip_rect *pbox = sarea_priv->boxes;
	int nbox = sarea_priv->nbox;
	int i;
	DMA_LOCALS;
	DRM_DEBUG("\n");

	sarea_priv->last_frame.head = dev_priv->prim.tail;
	sarea_priv->last_frame.wrap = dev_priv->prim.last_wrap;

	BEGIN_DMA(4 + nbox);

	DMA_BLOCK(MGA_DMAPAD, 0x00000000,
		  MGA_DMAPAD, 0x00000000,
		  MGA_DWGSYNC, 0x00007100, MGA_DWGSYNC, 0x00007000);

	DMA_BLOCK(MGA_DSTORG, dev_priv->front_offset,
		  MGA_MACCESS, dev_priv->maccess,
		  MGA_SRCORG, dev_priv->back_offset,
		  MGA_AR5, dev_priv->front_pitch);

	DMA_BLOCK(MGA_DMAPAD, 0x00000000,
		  MGA_DMAPAD, 0x00000000,
		  MGA_PLNWT, 0xffffffff, MGA_DWGCTL, MGA_DWGCTL_COPY);

	for (i = 0; i < nbox; i++) {
		struct drm_clip_rect *box = &pbox[i];
		u32 height = box->y2 - box->y1;
		u32 start = box->y1 * dev_priv->front_pitch;

		DRM_DEBUG("   from=%d,%d to=%d,%d\n",
			  box->x1, box->y1, box->x2, box->y2);

		DMA_BLOCK(MGA_AR0, start + box->x2 - 1,
			  MGA_AR3, start + box->x1,
			  MGA_FXBNDRY, ((box->x2 - 1) << 16) | box->x1,
			  MGA_YDSTLEN + MGA_EXEC, (box->y1 << 16) | height);
	}

	DMA_BLOCK(MGA_DMAPAD, 0x00000000,
		  MGA_PLNWT, ctx->plnwt,
		  MGA_SRCORG, dev_priv->front_offset, MGA_DWGCTL, ctx->dwgctl);

	ADVANCE_DMA();

	FLUSH_DMA();

	DRM_DEBUG("... done.\n");
}

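/* Dispatch a client vertex buffer (or, below, an indexed setup range)
 * once per cliprect, emitting any dirty state first.  A buffer marked
 * discard is aged and returned to the freelist after dispatch.
 */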
static void mga_dma_dispatch_vertex(struct drm_device *dev, struct drm_buf *buf)
{
	drm_mga_private_t *dev_priv = dev->dev_private;
	drm_mga_buf_priv_t *buf_priv = buf->dev_private;
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	u32 address = (u32) buf->bus_address;
	u32 length = (u32) buf->used;
	int i = 0;
	DMA_LOCALS;
	DRM_DEBUG("buf=%d used=%d\n", buf->idx, buf->used);

	if (buf->used) {
		buf_priv->dispatched = 1;

		MGA_EMIT_STATE(dev_priv, sarea_priv->dirty);

		do {
			if (i < sarea_priv->nbox) {
				mga_emit_clip_rect(dev_priv,
						   &sarea_priv->boxes[i]);
			}

			BEGIN_DMA(1);

			DMA_BLOCK(MGA_DMAPAD, 0x00000000,
				  MGA_DMAPAD, 0x00000000,
				  MGA_SECADDRESS, (address |
						   MGA_DMA_VERTEX),
				  MGA_SECEND, ((address + length) |
					       dev_priv->dma_access));

			ADVANCE_DMA();
		} while (++i < sarea_priv->nbox);
	}

	if (buf_priv->discard) {
		AGE_BUFFER(buf_priv);
		buf->pending = 0;
		buf->used = 0;
		buf_priv->dispatched = 0;

		mga_freelist_put(dev, buf);
	}

	FLUSH_DMA();
}

static void mga_dma_dispatch_indices(struct drm_device *dev, struct drm_buf *buf,
				     unsigned int start, unsigned int end)
{
	drm_mga_private_t *dev_priv = dev->dev_private;
	drm_mga_buf_priv_t *buf_priv = buf->dev_private;
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	u32 address = (u32) buf->bus_address;
	int i = 0;
	DMA_LOCALS;
	DRM_DEBUG("buf=%d start=%d end=%d\n", buf->idx, start, end);

	if (start != end) {
		buf_priv->dispatched = 1;

		MGA_EMIT_STATE(dev_priv, sarea_priv->dirty);

		do {
			if (i < sarea_priv->nbox) {
				mga_emit_clip_rect(dev_priv,
						   &sarea_priv->boxes[i]);
			}

			BEGIN_DMA(1);

			DMA_BLOCK(MGA_DMAPAD, 0x00000000,
				  MGA_DMAPAD, 0x00000000,
				  MGA_SETUPADDRESS, address + start,
				  MGA_SETUPEND, ((address + end) |
						 dev_priv->dma_access));

			ADVANCE_DMA();
		} while (++i < sarea_priv->nbox);
	}

	if (buf_priv->discard) {
		AGE_BUFFER(buf_priv);
		buf->pending = 0;
		buf->used = 0;
		buf_priv->dispatched = 0;

		mga_freelist_put(dev, buf);
	}

	FLUSH_DMA();
}

/* This copies a 64 byte aligned agp region to the framebuffer with a
 * standard blit; the ioctl needs to do the checking.
 */
static void mga_dma_dispatch_iload(struct drm_device *dev, struct drm_buf *buf,
				   unsigned int dstorg, unsigned int length)
{
	drm_mga_private_t *dev_priv = dev->dev_private;
	drm_mga_buf_priv_t *buf_priv = buf->dev_private;
	drm_mga_context_regs_t *ctx = &dev_priv->sarea_priv->context_state;
	u32 srcorg =
	    buf->bus_address | dev_priv->dma_access | MGA_SRCMAP_SYSMEM;
	u32 y2;
	DMA_LOCALS;
	DRM_DEBUG("buf=%d used=%d\n", buf->idx, buf->used);

	y2 = length / 64;

	BEGIN_DMA(5);

	DMA_BLOCK(MGA_DMAPAD, 0x00000000,
		  MGA_DMAPAD, 0x00000000,
		  MGA_DWGSYNC, 0x00007100, MGA_DWGSYNC, 0x00007000);

	DMA_BLOCK(MGA_DSTORG, dstorg,
		  MGA_MACCESS, 0x00000000, MGA_SRCORG, srcorg, MGA_AR5, 64);

	DMA_BLOCK(MGA_PITCH, 64,
		  MGA_PLNWT, 0xffffffff,
		  MGA_DMAPAD, 0x00000000, MGA_DWGCTL, MGA_DWGCTL_COPY);

	DMA_BLOCK(MGA_AR0, 63,
		  MGA_AR3, 0,
		  MGA_FXBNDRY, (63 << 16) | 0, MGA_YDSTLEN + MGA_EXEC, y2);

	DMA_BLOCK(MGA_PLNWT, ctx->plnwt,
		  MGA_SRCORG, dev_priv->front_offset,
		  MGA_PITCH, dev_priv->front_pitch, MGA_DWGSYNC, 0x00007000);

	ADVANCE_DMA();

	AGE_BUFFER(buf_priv);

	buf->pending = 0;
	buf->used = 0;
	buf_priv->dispatched = 0;

	mga_freelist_put(dev, buf);

	FLUSH_DMA();
}

static void mga_dma_dispatch_blit(struct drm_device *dev, drm_mga_blit_t *blit)
{
	drm_mga_private_t *dev_priv = dev->dev_private;
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_mga_context_regs_t *ctx = &sarea_priv->context_state;
	struct drm_clip_rect *pbox = sarea_priv->boxes;
	int nbox = sarea_priv->nbox;
	u32 scandir = 0, i;
	DMA_LOCALS;
	DRM_DEBUG("\n");

	BEGIN_DMA(4 + nbox);

	DMA_BLOCK(MGA_DMAPAD, 0x00000000,
		  MGA_DMAPAD, 0x00000000,
		  MGA_DWGSYNC, 0x00007100, MGA_DWGSYNC, 0x00007000);

	DMA_BLOCK(MGA_DWGCTL, MGA_DWGCTL_COPY,
		  MGA_PLNWT, blit->planemask,
		  MGA_SRCORG, blit->srcorg, MGA_DSTORG, blit->dstorg);

	DMA_BLOCK(MGA_SGN, scandir,
		  MGA_MACCESS, dev_priv->maccess,
		  MGA_AR5, blit->ydir * blit->src_pitch,
		  MGA_PITCH, blit->dst_pitch);

	for (i = 0; i < nbox; i++) {
		int srcx = pbox[i].x1 + blit->delta_sx;
		int srcy = pbox[i].y1 + blit->delta_sy;
		int dstx = pbox[i].x1 + blit->delta_dx;
		int dsty = pbox[i].y1 + blit->delta_dy;
		int h = pbox[i].y2 - pbox[i].y1;
		int w = pbox[i].x2 - pbox[i].x1 - 1;
		int start;

		if (blit->ydir == -1)
			srcy = blit->height - srcy - 1;

		start = srcy * blit->src_pitch + srcx;

		DMA_BLOCK(MGA_AR0, start + w,
			  MGA_AR3, start,
			  MGA_FXBNDRY, ((dstx + w) << 16) | (dstx & 0xffff),
			  MGA_YDSTLEN + MGA_EXEC, (dsty << 16) | h);
	}

	/* Do something to flush AGP?
	 */

	/* Force reset of DWGCTL */
	DMA_BLOCK(MGA_DMAPAD, 0x00000000,
		  MGA_PLNWT, ctx->plnwt,
		  MGA_PITCH, dev_priv->front_pitch, MGA_DWGCTL, ctx->dwgctl);

	ADVANCE_DMA();
}

/* ================================================================
 *
 */

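/* Rendering ioctl handlers.  Each one checks that the caller holds the
 * hardware lock, verifies the request against the SAREA state where
 * needed, tests for a primary DMA wrap, and then calls the matching
 * dispatch routine above.
 */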
static int mga_dma_clear(struct drm_device *dev, void *data, struct drm_file *file_priv)
{
	drm_mga_private_t *dev_priv = dev->dev_private;
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_mga_clear_t *clear = data;

	LOCK_TEST_WITH_RETURN(dev, file_priv);

	if (sarea_priv->nbox > MGA_NR_SAREA_CLIPRECTS)
		sarea_priv->nbox = MGA_NR_SAREA_CLIPRECTS;

	WRAP_TEST_WITH_RETURN(dev_priv);

	mga_dma_dispatch_clear(dev, clear);

	/* Make sure we restore the 3D state next time.
	 */
	dev_priv->sarea_priv->dirty |= MGA_UPLOAD_CONTEXT;

	return 0;
}

static int mga_dma_swap(struct drm_device *dev, void *data, struct drm_file *file_priv)
{
	drm_mga_private_t *dev_priv = dev->dev_private;
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;

	LOCK_TEST_WITH_RETURN(dev, file_priv);

	if (sarea_priv->nbox > MGA_NR_SAREA_CLIPRECTS)
		sarea_priv->nbox = MGA_NR_SAREA_CLIPRECTS;

	WRAP_TEST_WITH_RETURN(dev_priv);

	mga_dma_dispatch_swap(dev);

	/* Make sure we restore the 3D state next time.
	 */
	dev_priv->sarea_priv->dirty |= MGA_UPLOAD_CONTEXT;

	return 0;
}

static int mga_dma_vertex(struct drm_device *dev, void *data, struct drm_file *file_priv)
{
	drm_mga_private_t *dev_priv = dev->dev_private;
	struct drm_device_dma *dma = dev->dma;
	struct drm_buf *buf;
	drm_mga_buf_priv_t *buf_priv;
	drm_mga_vertex_t *vertex = data;

	LOCK_TEST_WITH_RETURN(dev, file_priv);

	if (vertex->idx < 0 || vertex->idx > dma->buf_count)
		return -EINVAL;
	buf = dma->buflist[vertex->idx];
	buf_priv = buf->dev_private;

	buf->used = vertex->used;
	buf_priv->discard = vertex->discard;

	if (!mga_verify_state(dev_priv)) {
		if (vertex->discard) {
			if (buf_priv->dispatched == 1)
				AGE_BUFFER(buf_priv);
			buf_priv->dispatched = 0;
			mga_freelist_put(dev, buf);
		}
		return -EINVAL;
	}

	WRAP_TEST_WITH_RETURN(dev_priv);

	mga_dma_dispatch_vertex(dev, buf);

	return 0;
}

static int mga_dma_indices(struct drm_device *dev, void *data, struct drm_file *file_priv)
{
	drm_mga_private_t *dev_priv = dev->dev_private;
	struct drm_device_dma *dma = dev->dma;
	struct drm_buf *buf;
	drm_mga_buf_priv_t *buf_priv;
	drm_mga_indices_t *indices = data;

	LOCK_TEST_WITH_RETURN(dev, file_priv);

	if (indices->idx < 0 || indices->idx > dma->buf_count)
		return -EINVAL;

	buf = dma->buflist[indices->idx];
	buf_priv = buf->dev_private;

	buf_priv->discard = indices->discard;

	if (!mga_verify_state(dev_priv)) {
		if (indices->discard) {
			if (buf_priv->dispatched == 1)
				AGE_BUFFER(buf_priv);
			buf_priv->dispatched = 0;
			mga_freelist_put(dev, buf);
		}
		return -EINVAL;
	}

	WRAP_TEST_WITH_RETURN(dev_priv);

	mga_dma_dispatch_indices(dev, buf, indices->start, indices->end);

	return 0;
}

static int mga_dma_iload(struct drm_device *dev, void *data, struct drm_file *file_priv)
{
	struct drm_device_dma *dma = dev->dma;
	drm_mga_private_t *dev_priv = dev->dev_private;
	struct drm_buf *buf;
	drm_mga_buf_priv_t *buf_priv;
	drm_mga_iload_t *iload = data;
	DRM_DEBUG("\n");

	LOCK_TEST_WITH_RETURN(dev, file_priv);

#if 0
	if (mga_do_wait_for_idle(dev_priv) < 0) {
		if (MGA_DMA_DEBUG)
			DRM_INFO("-EBUSY\n");
		return -EBUSY;
	}
#endif
	if (iload->idx < 0 || iload->idx > dma->buf_count)
		return -EINVAL;

	buf = dma->buflist[iload->idx];
	buf_priv = buf->dev_private;

	if (mga_verify_iload(dev_priv, iload->dstorg, iload->length)) {
		mga_freelist_put(dev, buf);
		return -EINVAL;
	}

	WRAP_TEST_WITH_RETURN(dev_priv);

	mga_dma_dispatch_iload(dev, buf, iload->dstorg, iload->length);

	/* Make sure we restore the 3D state next time.
	 */
	dev_priv->sarea_priv->dirty |= MGA_UPLOAD_CONTEXT;

	return 0;
}

static int mga_dma_blit(struct drm_device *dev, void *data, struct drm_file *file_priv)
{
	drm_mga_private_t *dev_priv = dev->dev_private;
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_mga_blit_t *blit = data;
	DRM_DEBUG("\n");

	LOCK_TEST_WITH_RETURN(dev, file_priv);

	if (sarea_priv->nbox > MGA_NR_SAREA_CLIPRECTS)
		sarea_priv->nbox = MGA_NR_SAREA_CLIPRECTS;

	if (mga_verify_blit(dev_priv, blit->srcorg, blit->dstorg))
		return -EINVAL;

	WRAP_TEST_WITH_RETURN(dev_priv);

	mga_dma_dispatch_blit(dev, blit);

	/* Make sure we restore the 3D state next time.
	 */
	dev_priv->sarea_priv->dirty |= MGA_UPLOAD_CONTEXT;

	return 0;
}

int mga_getparam(struct drm_device *dev, void *data, struct drm_file *file_priv)
{
	drm_mga_private_t *dev_priv = dev->dev_private;
	drm_mga_getparam_t *param = data;
	int value;

	if (!dev_priv) {
		DRM_ERROR("called with no initialization\n");
		return -EINVAL;
	}

	DRM_DEBUG("pid=%d\n", task_pid_nr(current));

	switch (param->param) {
	case MGA_PARAM_IRQ_NR:
		value = dev->pdev->irq;
		break;
	case MGA_PARAM_CARD_TYPE:
		value = dev_priv->chipset;
		break;
	default:
		return -EINVAL;
	}

	if (copy_to_user(param->value, &value, sizeof(int))) {
		DRM_ERROR("copy_to_user\n");
		return -EFAULT;
	}

	return 0;
}

static int mga_set_fence(struct drm_device *dev, void *data, struct drm_file *file_priv)
{
	drm_mga_private_t *dev_priv = dev->dev_private;
	u32 *fence = data;
	DMA_LOCALS;

	if (!dev_priv) {
		DRM_ERROR("called with no initialization\n");
		return -EINVAL;
	}

	DRM_DEBUG("pid=%d\n", task_pid_nr(current));

	/* I would normally do this assignment in the declaration of fence,
	 * but dev_priv may be NULL.
	 */

	*fence = dev_priv->next_fence_to_post;
	dev_priv->next_fence_to_post++;

	BEGIN_DMA(1);
	DMA_BLOCK(MGA_DMAPAD, 0x00000000,
		  MGA_DMAPAD, 0x00000000,
		  MGA_DMAPAD, 0x00000000, MGA_SOFTRAP, 0x00000000);
	ADVANCE_DMA();

	return 0;
}

static int mga_wait_fence(struct drm_device *dev, void *data,
			  struct drm_file *file_priv)
{
	drm_mga_private_t *dev_priv = dev->dev_private;
	u32 *fence = data;

	if (!dev_priv) {
		DRM_ERROR("called with no initialization\n");
		return -EINVAL;
	}

	DRM_DEBUG("pid=%d\n", task_pid_nr(current));

	mga_driver_fence_wait(dev, fence);
	return 0;
}

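/* Ioctl dispatch table.  All entries require DRM authentication; the
 * init and bootstrap ioctls are additionally restricted to the DRM
 * master and to root.
 */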
const struct drm_ioctl_desc mga_ioctls[] = {
	DRM_IOCTL_DEF_DRV(MGA_INIT, mga_dma_init, DRM_AUTH|DRM_MASTER|DRM_ROOT_ONLY),
	DRM_IOCTL_DEF_DRV(MGA_FLUSH, mga_dma_flush, DRM_AUTH),
	DRM_IOCTL_DEF_DRV(MGA_RESET, mga_dma_reset, DRM_AUTH),
	DRM_IOCTL_DEF_DRV(MGA_SWAP, mga_dma_swap, DRM_AUTH),
	DRM_IOCTL_DEF_DRV(MGA_CLEAR, mga_dma_clear, DRM_AUTH),
	DRM_IOCTL_DEF_DRV(MGA_VERTEX, mga_dma_vertex, DRM_AUTH),
	DRM_IOCTL_DEF_DRV(MGA_INDICES, mga_dma_indices, DRM_AUTH),
	DRM_IOCTL_DEF_DRV(MGA_ILOAD, mga_dma_iload, DRM_AUTH),
	DRM_IOCTL_DEF_DRV(MGA_BLIT, mga_dma_blit, DRM_AUTH),
	DRM_IOCTL_DEF_DRV(MGA_GETPARAM, mga_getparam, DRM_AUTH),
	DRM_IOCTL_DEF_DRV(MGA_SET_FENCE, mga_set_fence, DRM_AUTH),
	DRM_IOCTL_DEF_DRV(MGA_WAIT_FENCE, mga_wait_fence, DRM_AUTH),
	DRM_IOCTL_DEF_DRV(MGA_DMA_BOOTSTRAP, mga_dma_bootstrap, DRM_AUTH|DRM_MASTER|DRM_ROOT_ONLY),
};

int mga_max_ioctl = ARRAY_SIZE(mga_ioctls);