/* $NetBSD: mga_state.c,v 1.2 2018/08/27 04:58:24 riastradh Exp $ */

/* mga_state.c -- State support for MGA G200/G400 -*- linux-c -*-
 * Created: Thu Jan 27 02:53:43 2000 by jhartmann@precisioninsight.com
 *
 * Copyright 1999 Precision Insight, Inc., Cedar Park, Texas.
 * Copyright 2000 VA Linux Systems, Inc., Sunnyvale, California.
 * All Rights Reserved.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * VA LINUX SYSTEMS AND/OR ITS SUPPLIERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors:
 *    Jeff Hartmann <jhartmann@valinux.com>
 *    Keith Whitwell <keith@tungstengraphics.com>
 *
 * Rewritten by:
 *    Gareth Hughes <gareth@valinux.com>
 */

#include <sys/cdefs.h>
__KERNEL_RCSID(0, "$NetBSD: mga_state.c,v 1.2 2018/08/27 04:58:24 riastradh Exp $");

#include <drm/drmP.h>
#include <drm/mga_drm.h>
#include "mga_drv.h"

/* ================================================================
 * DMA hardware state programming functions
 */

static void mga_emit_clip_rect(drm_mga_private_t *dev_priv,
                               struct drm_clip_rect *box)
{
        drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
        drm_mga_context_regs_t *ctx = &sarea_priv->context_state;
        unsigned int pitch = dev_priv->front_pitch;
        DMA_LOCALS;

        BEGIN_DMA(2);

        /* Force reset of DWGCTL on G400 (eliminates clip disable bit).
         */
        if (dev_priv->chipset >= MGA_CARD_TYPE_G400) {
                DMA_BLOCK(MGA_DWGCTL, ctx->dwgctl,
                          MGA_LEN + MGA_EXEC, 0x80000000,
                          MGA_DWGCTL, ctx->dwgctl,
                          MGA_LEN + MGA_EXEC, 0x80000000);
        }
        DMA_BLOCK(MGA_DMAPAD, 0x00000000,
                  MGA_CXBNDRY, ((box->x2 - 1) << 16) | box->x1,
                  MGA_YTOP, box->y1 * pitch, MGA_YBOT, (box->y2 - 1) * pitch);

        ADVANCE_DMA();
}

static __inline__ void mga_g200_emit_context(drm_mga_private_t *dev_priv)
{
        drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
        drm_mga_context_regs_t *ctx = &sarea_priv->context_state;
        DMA_LOCALS;

        BEGIN_DMA(3);

        DMA_BLOCK(MGA_DSTORG, ctx->dstorg,
                  MGA_MACCESS, ctx->maccess,
                  MGA_PLNWT, ctx->plnwt, MGA_DWGCTL, ctx->dwgctl);

        DMA_BLOCK(MGA_ALPHACTRL, ctx->alphactrl,
                  MGA_FOGCOL, ctx->fogcolor,
                  MGA_WFLAG, ctx->wflag, MGA_ZORG, dev_priv->depth_offset);

        DMA_BLOCK(MGA_FCOL, ctx->fcol,
                  MGA_DMAPAD, 0x00000000,
                  MGA_DMAPAD, 0x00000000, MGA_DMAPAD, 0x00000000);

        ADVANCE_DMA();
}

static __inline__ void mga_g400_emit_context(drm_mga_private_t *dev_priv)
{
        drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
        drm_mga_context_regs_t *ctx = &sarea_priv->context_state;
        DMA_LOCALS;

        BEGIN_DMA(4);

        DMA_BLOCK(MGA_DSTORG, ctx->dstorg,
                  MGA_MACCESS, ctx->maccess,
                  MGA_PLNWT, ctx->plnwt, MGA_DWGCTL, ctx->dwgctl);

        DMA_BLOCK(MGA_ALPHACTRL, ctx->alphactrl,
                  MGA_FOGCOL, ctx->fogcolor,
                  MGA_WFLAG, ctx->wflag, MGA_ZORG, dev_priv->depth_offset);

        DMA_BLOCK(MGA_WFLAG1, ctx->wflag,
                  MGA_TDUALSTAGE0, ctx->tdualstage0,
                  MGA_TDUALSTAGE1, ctx->tdualstage1, MGA_FCOL, ctx->fcol);

        DMA_BLOCK(MGA_STENCIL, ctx->stencil,
                  MGA_STENCILCTL, ctx->stencilctl,
                  MGA_DMAPAD, 0x00000000, MGA_DMAPAD, 0x00000000);

        ADVANCE_DMA();
}

static __inline__ void mga_g200_emit_tex0(drm_mga_private_t *dev_priv)
{
        drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
        drm_mga_texture_regs_t *tex = &sarea_priv->tex_state[0];
        DMA_LOCALS;

        BEGIN_DMA(4);

        DMA_BLOCK(MGA_TEXCTL2, tex->texctl2,
                  MGA_TEXCTL, tex->texctl,
                  MGA_TEXFILTER, tex->texfilter,
                  MGA_TEXBORDERCOL, tex->texbordercol);

        DMA_BLOCK(MGA_TEXORG, tex->texorg,
                  MGA_TEXORG1, tex->texorg1,
                  MGA_TEXORG2, tex->texorg2, MGA_TEXORG3, tex->texorg3);

        DMA_BLOCK(MGA_TEXORG4, tex->texorg4,
                  MGA_TEXWIDTH, tex->texwidth,
                  MGA_TEXHEIGHT, tex->texheight, MGA_WR24, tex->texwidth);

        DMA_BLOCK(MGA_WR34, tex->texheight,
                  MGA_TEXTRANS, 0x0000ffff,
                  MGA_TEXTRANSHIGH, 0x0000ffff, MGA_DMAPAD, 0x00000000);

        ADVANCE_DMA();
}

static __inline__ void mga_g400_emit_tex0(drm_mga_private_t *dev_priv)
{
        drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
        drm_mga_texture_regs_t *tex = &sarea_priv->tex_state[0];
        DMA_LOCALS;

/*      printk("mga_g400_emit_tex0 %x %x %x\n", tex->texorg, */
/*             tex->texctl, tex->texctl2); */

        BEGIN_DMA(6);

        DMA_BLOCK(MGA_TEXCTL2, tex->texctl2 | MGA_G400_TC2_MAGIC,
                  MGA_TEXCTL, tex->texctl,
                  MGA_TEXFILTER, tex->texfilter,
                  MGA_TEXBORDERCOL, tex->texbordercol);

        DMA_BLOCK(MGA_TEXORG, tex->texorg,
                  MGA_TEXORG1, tex->texorg1,
                  MGA_TEXORG2, tex->texorg2, MGA_TEXORG3, tex->texorg3);

        DMA_BLOCK(MGA_TEXORG4, tex->texorg4,
                  MGA_TEXWIDTH, tex->texwidth,
                  MGA_TEXHEIGHT, tex->texheight, MGA_WR49, 0x00000000);

        DMA_BLOCK(MGA_WR57, 0x00000000,
                  MGA_WR53, 0x00000000,
                  MGA_WR61, 0x00000000, MGA_WR52, MGA_G400_WR_MAGIC);

        DMA_BLOCK(MGA_WR60, MGA_G400_WR_MAGIC,
                  MGA_WR54, tex->texwidth | MGA_G400_WR_MAGIC,
                  MGA_WR62, tex->texheight | MGA_G400_WR_MAGIC,
                  MGA_DMAPAD, 0x00000000);

        DMA_BLOCK(MGA_DMAPAD, 0x00000000,
                  MGA_DMAPAD, 0x00000000,
                  MGA_TEXTRANS, 0x0000ffff, MGA_TEXTRANSHIGH, 0x0000ffff);

        ADVANCE_DMA();
}

static __inline__ void mga_g400_emit_tex1(drm_mga_private_t *dev_priv)
{
        drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
        drm_mga_texture_regs_t *tex = &sarea_priv->tex_state[1];
        DMA_LOCALS;

/*      printk("mga_g400_emit_tex1 %x %x %x\n", tex->texorg, */
/*             tex->texctl, tex->texctl2); */

        BEGIN_DMA(5);

        DMA_BLOCK(MGA_TEXCTL2, (tex->texctl2 |
                                MGA_MAP1_ENABLE |
                                MGA_G400_TC2_MAGIC),
                  MGA_TEXCTL, tex->texctl,
                  MGA_TEXFILTER, tex->texfilter,
                  MGA_TEXBORDERCOL, tex->texbordercol);

        DMA_BLOCK(MGA_TEXORG, tex->texorg,
                  MGA_TEXORG1, tex->texorg1,
                  MGA_TEXORG2, tex->texorg2, MGA_TEXORG3, tex->texorg3);

        DMA_BLOCK(MGA_TEXORG4, tex->texorg4,
                  MGA_TEXWIDTH, tex->texwidth,
                  MGA_TEXHEIGHT, tex->texheight, MGA_WR49, 0x00000000);

        DMA_BLOCK(MGA_WR57, 0x00000000,
                  MGA_WR53, 0x00000000,
                  MGA_WR61, 0x00000000,
                  MGA_WR52, tex->texwidth | MGA_G400_WR_MAGIC);

        DMA_BLOCK(MGA_WR60, tex->texheight | MGA_G400_WR_MAGIC,
                  MGA_TEXTRANS, 0x0000ffff,
                  MGA_TEXTRANSHIGH, 0x0000ffff,
                  MGA_TEXCTL2, tex->texctl2 | MGA_G400_TC2_MAGIC);

        ADVANCE_DMA();
}

static __inline__ void mga_g200_emit_pipe(drm_mga_private_t *dev_priv)
{
        drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
        unsigned int pipe = sarea_priv->warp_pipe;
        DMA_LOCALS;

        BEGIN_DMA(3);

        DMA_BLOCK(MGA_WIADDR, MGA_WMODE_SUSPEND,
                  MGA_WVRTXSZ, 0x00000007,
                  MGA_WFLAG, 0x00000000, MGA_WR24, 0x00000000);

        DMA_BLOCK(MGA_WR25, 0x00000100,
                  MGA_WR34, 0x00000000,
                  MGA_WR42, 0x0000ffff, MGA_WR60, 0x0000ffff);

        /* Padding required due to hardware bug.
         */
        DMA_BLOCK(MGA_DMAPAD, 0xffffffff,
                  MGA_DMAPAD, 0xffffffff,
                  MGA_DMAPAD, 0xffffffff,
                  MGA_WIADDR, (dev_priv->warp_pipe_phys[pipe] |
                               MGA_WMODE_START | dev_priv->wagp_enable));

        ADVANCE_DMA();
}

static __inline__ void mga_g400_emit_pipe(drm_mga_private_t *dev_priv)
{
        drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
        unsigned int pipe = sarea_priv->warp_pipe;
        DMA_LOCALS;

/*      printk("mga_g400_emit_pipe %x\n", pipe); */

        BEGIN_DMA(10);

        DMA_BLOCK(MGA_WIADDR2, MGA_WMODE_SUSPEND,
                  MGA_DMAPAD, 0x00000000,
                  MGA_DMAPAD, 0x00000000, MGA_DMAPAD, 0x00000000);

        if (pipe & MGA_T2) {
                DMA_BLOCK(MGA_WVRTXSZ, 0x00001e09,
                          MGA_DMAPAD, 0x00000000,
                          MGA_DMAPAD, 0x00000000, MGA_DMAPAD, 0x00000000);

                DMA_BLOCK(MGA_WACCEPTSEQ, 0x00000000,
                          MGA_WACCEPTSEQ, 0x00000000,
                          MGA_WACCEPTSEQ, 0x00000000,
                          MGA_WACCEPTSEQ, 0x1e000000);
        } else {
                if (dev_priv->warp_pipe & MGA_T2) {
                        /* Flush the WARP pipe */
                        DMA_BLOCK(MGA_YDST, 0x00000000,
                                  MGA_FXLEFT, 0x00000000,
                                  MGA_FXRIGHT, 0x00000001,
                                  MGA_DWGCTL, MGA_DWGCTL_FLUSH);

                        DMA_BLOCK(MGA_LEN + MGA_EXEC, 0x00000001,
                                  MGA_DWGSYNC, 0x00007000,
                                  MGA_TEXCTL2, MGA_G400_TC2_MAGIC,
                                  MGA_LEN + MGA_EXEC, 0x00000000);

                        DMA_BLOCK(MGA_TEXCTL2, (MGA_DUALTEX |
                                                MGA_G400_TC2_MAGIC),
                                  MGA_LEN + MGA_EXEC, 0x00000000,
                                  MGA_TEXCTL2, MGA_G400_TC2_MAGIC,
                                  MGA_DMAPAD, 0x00000000);
                }

                DMA_BLOCK(MGA_WVRTXSZ, 0x00001807,
                          MGA_DMAPAD, 0x00000000,
                          MGA_DMAPAD, 0x00000000, MGA_DMAPAD, 0x00000000);

                DMA_BLOCK(MGA_WACCEPTSEQ, 0x00000000,
                          MGA_WACCEPTSEQ, 0x00000000,
                          MGA_WACCEPTSEQ, 0x00000000,
                          MGA_WACCEPTSEQ, 0x18000000);
        }

        DMA_BLOCK(MGA_WFLAG, 0x00000000,
                  MGA_WFLAG1, 0x00000000,
                  MGA_WR56, MGA_G400_WR56_MAGIC, MGA_DMAPAD, 0x00000000);

        DMA_BLOCK(MGA_WR49, 0x00000000,         /* tex0 */
                  MGA_WR57, 0x00000000,         /* tex0 */
                  MGA_WR53, 0x00000000,         /* tex1 */
                  MGA_WR61, 0x00000000);        /* tex1 */

        DMA_BLOCK(MGA_WR54, MGA_G400_WR_MAGIC,  /* tex0 width */
                  MGA_WR62, MGA_G400_WR_MAGIC,  /* tex0 height */
                  MGA_WR52, MGA_G400_WR_MAGIC,  /* tex1 width */
                  MGA_WR60, MGA_G400_WR_MAGIC); /* tex1 height */

        /* Padding required due to hardware bug */
        DMA_BLOCK(MGA_DMAPAD, 0xffffffff,
                  MGA_DMAPAD, 0xffffffff,
                  MGA_DMAPAD, 0xffffffff,
                  MGA_WIADDR2, (dev_priv->warp_pipe_phys[pipe] |
                                MGA_WMODE_START | dev_priv->wagp_enable));

        ADVANCE_DMA();
}

static void mga_g200_emit_state(drm_mga_private_t *dev_priv)
{
        drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
        unsigned int dirty = sarea_priv->dirty;

        if (sarea_priv->warp_pipe != dev_priv->warp_pipe) {
                mga_g200_emit_pipe(dev_priv);
                dev_priv->warp_pipe = sarea_priv->warp_pipe;
        }

        if (dirty & MGA_UPLOAD_CONTEXT) {
                mga_g200_emit_context(dev_priv);
                sarea_priv->dirty &= ~MGA_UPLOAD_CONTEXT;
        }

        if (dirty & MGA_UPLOAD_TEX0) {
                mga_g200_emit_tex0(dev_priv);
                sarea_priv->dirty &= ~MGA_UPLOAD_TEX0;
        }
}

static void mga_g400_emit_state(drm_mga_private_t *dev_priv)
{
        drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
        unsigned int dirty = sarea_priv->dirty;
        int multitex = sarea_priv->warp_pipe & MGA_T2;

        if (sarea_priv->warp_pipe != dev_priv->warp_pipe) {
                mga_g400_emit_pipe(dev_priv);
                dev_priv->warp_pipe = sarea_priv->warp_pipe;
        }

        if (dirty & MGA_UPLOAD_CONTEXT) {
                mga_g400_emit_context(dev_priv);
                sarea_priv->dirty &= ~MGA_UPLOAD_CONTEXT;
        }

        if (dirty & MGA_UPLOAD_TEX0) {
                mga_g400_emit_tex0(dev_priv);
                sarea_priv->dirty &= ~MGA_UPLOAD_TEX0;
        }

        if ((dirty & MGA_UPLOAD_TEX1) && multitex) {
                mga_g400_emit_tex1(dev_priv);
                sarea_priv->dirty &= ~MGA_UPLOAD_TEX1;
        }
}

/* ================================================================
 * SAREA state verification
 */

/* Disallow all write destinations except the front and backbuffer.
 */
static int mga_verify_context(drm_mga_private_t *dev_priv)
{
        drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
        drm_mga_context_regs_t *ctx = &sarea_priv->context_state;

        if (ctx->dstorg != dev_priv->front_offset &&
            ctx->dstorg != dev_priv->back_offset) {
                DRM_ERROR("*** bad DSTORG: %x (front %x, back %x)\n\n",
                          ctx->dstorg, dev_priv->front_offset,
                          dev_priv->back_offset);
                ctx->dstorg = 0;
                return -EINVAL;
        }

        return 0;
}

/* Disallow texture reads from PCI space.
 */
static int mga_verify_tex(drm_mga_private_t *dev_priv, int unit)
{
        drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
        drm_mga_texture_regs_t *tex = &sarea_priv->tex_state[unit];
        unsigned int org;

        org = tex->texorg & (MGA_TEXORGMAP_MASK | MGA_TEXORGACC_MASK);

        if (org == (MGA_TEXORGMAP_SYSMEM | MGA_TEXORGACC_PCI)) {
                DRM_ERROR("*** bad TEXORG: 0x%x, unit %d\n", tex->texorg, unit);
                tex->texorg = 0;
                return -EINVAL;
        }

        return 0;
}

static int mga_verify_state(drm_mga_private_t *dev_priv)
{
        drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
        unsigned int dirty = sarea_priv->dirty;
        int ret = 0;

        if (sarea_priv->nbox > MGA_NR_SAREA_CLIPRECTS)
                sarea_priv->nbox = MGA_NR_SAREA_CLIPRECTS;

        if (dirty & MGA_UPLOAD_CONTEXT)
                ret |= mga_verify_context(dev_priv);

        if (dirty & MGA_UPLOAD_TEX0)
                ret |= mga_verify_tex(dev_priv, 0);

        if (dev_priv->chipset >= MGA_CARD_TYPE_G400) {
                if (dirty & MGA_UPLOAD_TEX1)
                        ret |= mga_verify_tex(dev_priv, 1);

                if (dirty & MGA_UPLOAD_PIPE)
                        ret |= (sarea_priv->warp_pipe > MGA_MAX_G400_PIPES);
        } else {
                if (dirty & MGA_UPLOAD_PIPE)
                        ret |= (sarea_priv->warp_pipe > MGA_MAX_G200_PIPES);
        }

        return (ret == 0);
}

static int mga_verify_iload(drm_mga_private_t *dev_priv,
                            unsigned int dstorg, unsigned int length)
{
        if (dstorg < dev_priv->texture_offset ||
            dstorg + length > (dev_priv->texture_offset +
                               dev_priv->texture_size)) {
                DRM_ERROR("*** bad iload DSTORG: 0x%x\n", dstorg);
                return -EINVAL;
        }

        if (length & MGA_ILOAD_MASK) {
                DRM_ERROR("*** bad iload length: 0x%x\n",
                          length & MGA_ILOAD_MASK);
                return -EINVAL;
        }

        return 0;
}

static int mga_verify_blit(drm_mga_private_t *dev_priv,
                           unsigned int srcorg, unsigned int dstorg)
{
        if ((srcorg & 0x3) == (MGA_SRCACC_PCI | MGA_SRCMAP_SYSMEM) ||
            (dstorg & 0x3) == (MGA_SRCACC_PCI | MGA_SRCMAP_SYSMEM)) {
                DRM_ERROR("*** bad blit: src=0x%x dst=0x%x\n", srcorg, dstorg);
                return -EINVAL;
        }
        return 0;
}

/* ================================================================
 *
 */

static void mga_dma_dispatch_clear(struct drm_device *dev, drm_mga_clear_t *clear)
{
        drm_mga_private_t *dev_priv = dev->dev_private;
        drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
        drm_mga_context_regs_t *ctx = &sarea_priv->context_state;
        struct drm_clip_rect *pbox = sarea_priv->boxes;
        int nbox = sarea_priv->nbox;
        int i;
        DMA_LOCALS;
        DRM_DEBUG("\n");

        BEGIN_DMA(1);

        DMA_BLOCK(MGA_DMAPAD, 0x00000000,
                  MGA_DMAPAD, 0x00000000,
                  MGA_DWGSYNC, 0x00007100, MGA_DWGSYNC, 0x00007000);

        ADVANCE_DMA();

        for (i = 0; i < nbox; i++) {
                struct drm_clip_rect *box = &pbox[i];
                u32 height = box->y2 - box->y1;

                DRM_DEBUG("   from=%d,%d to=%d,%d\n",
                          box->x1, box->y1, box->x2, box->y2);

                if (clear->flags & MGA_FRONT) {
                        BEGIN_DMA(2);

                        DMA_BLOCK(MGA_DMAPAD, 0x00000000,
                                  MGA_PLNWT, clear->color_mask,
                                  MGA_YDSTLEN, (box->y1 << 16) | height,
                                  MGA_FXBNDRY, (box->x2 << 16) | box->x1);

                        DMA_BLOCK(MGA_DMAPAD, 0x00000000,
                                  MGA_FCOL, clear->clear_color,
                                  MGA_DSTORG, dev_priv->front_offset,
                                  MGA_DWGCTL + MGA_EXEC, dev_priv->clear_cmd);

                        ADVANCE_DMA();
                }

                if (clear->flags & MGA_BACK) {
                        BEGIN_DMA(2);

                        DMA_BLOCK(MGA_DMAPAD, 0x00000000,
                                  MGA_PLNWT, clear->color_mask,
                                  MGA_YDSTLEN, (box->y1 << 16) | height,
                                  MGA_FXBNDRY, (box->x2 << 16) | box->x1);

                        DMA_BLOCK(MGA_DMAPAD, 0x00000000,
                                  MGA_FCOL, clear->clear_color,
                                  MGA_DSTORG, dev_priv->back_offset,
                                  MGA_DWGCTL + MGA_EXEC, dev_priv->clear_cmd);

                        ADVANCE_DMA();
                }

                if (clear->flags & MGA_DEPTH) {
                        BEGIN_DMA(2);

                        DMA_BLOCK(MGA_DMAPAD, 0x00000000,
                                  MGA_PLNWT, clear->depth_mask,
                                  MGA_YDSTLEN, (box->y1 << 16) | height,
                                  MGA_FXBNDRY, (box->x2 << 16) | box->x1);

                        DMA_BLOCK(MGA_DMAPAD, 0x00000000,
                                  MGA_FCOL, clear->clear_depth,
                                  MGA_DSTORG, dev_priv->depth_offset,
                                  MGA_DWGCTL + MGA_EXEC, dev_priv->clear_cmd);

                        ADVANCE_DMA();
                }

        }

        BEGIN_DMA(1);

        /* Force reset of DWGCTL */
        DMA_BLOCK(MGA_DMAPAD, 0x00000000,
                  MGA_DMAPAD, 0x00000000,
                  MGA_PLNWT, ctx->plnwt, MGA_DWGCTL, ctx->dwgctl);

        ADVANCE_DMA();

        FLUSH_DMA();
}

static void mga_dma_dispatch_swap(struct drm_device *dev)
{
        drm_mga_private_t *dev_priv = dev->dev_private;
        drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
        drm_mga_context_regs_t *ctx = &sarea_priv->context_state;
        struct drm_clip_rect *pbox = sarea_priv->boxes;
        int nbox = sarea_priv->nbox;
        int i;
        DMA_LOCALS;
        DRM_DEBUG("\n");

        sarea_priv->last_frame.head = dev_priv->prim.tail;
        sarea_priv->last_frame.wrap = dev_priv->prim.last_wrap;

        BEGIN_DMA(4 + nbox);

        DMA_BLOCK(MGA_DMAPAD, 0x00000000,
                  MGA_DMAPAD, 0x00000000,
                  MGA_DWGSYNC, 0x00007100, MGA_DWGSYNC, 0x00007000);

        DMA_BLOCK(MGA_DSTORG, dev_priv->front_offset,
                  MGA_MACCESS, dev_priv->maccess,
                  MGA_SRCORG, dev_priv->back_offset,
                  MGA_AR5, dev_priv->front_pitch);

        DMA_BLOCK(MGA_DMAPAD, 0x00000000,
                  MGA_DMAPAD, 0x00000000,
                  MGA_PLNWT, 0xffffffff, MGA_DWGCTL, MGA_DWGCTL_COPY);

        for (i = 0; i < nbox; i++) {
                struct drm_clip_rect *box = &pbox[i];
                u32 height = box->y2 - box->y1;
                u32 start = box->y1 * dev_priv->front_pitch;

                DRM_DEBUG("   from=%d,%d to=%d,%d\n",
                          box->x1, box->y1, box->x2, box->y2);

                DMA_BLOCK(MGA_AR0, start + box->x2 - 1,
                          MGA_AR3, start + box->x1,
                          MGA_FXBNDRY, ((box->x2 - 1) << 16) | box->x1,
                          MGA_YDSTLEN + MGA_EXEC, (box->y1 << 16) | height);
        }

        DMA_BLOCK(MGA_DMAPAD, 0x00000000,
                  MGA_PLNWT, ctx->plnwt,
                  MGA_SRCORG, dev_priv->front_offset, MGA_DWGCTL, ctx->dwgctl);

        ADVANCE_DMA();

        FLUSH_DMA();

        DRM_DEBUG("... done.\n");
}

static void mga_dma_dispatch_vertex(struct drm_device *dev, struct drm_buf *buf)
{
        drm_mga_private_t *dev_priv = dev->dev_private;
        drm_mga_buf_priv_t *buf_priv = buf->dev_private;
        drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
        u32 address = (u32) buf->bus_address;
        u32 length = (u32) buf->used;
        int i = 0;
        DMA_LOCALS;
        DRM_DEBUG("buf=%d used=%d\n", buf->idx, buf->used);

        if (buf->used) {
                buf_priv->dispatched = 1;

                MGA_EMIT_STATE(dev_priv, sarea_priv->dirty);

                do {
                        if (i < sarea_priv->nbox) {
                                mga_emit_clip_rect(dev_priv,
                                                   &sarea_priv->boxes[i]);
                        }

                        BEGIN_DMA(1);

                        DMA_BLOCK(MGA_DMAPAD, 0x00000000,
                                  MGA_DMAPAD, 0x00000000,
                                  MGA_SECADDRESS, (address |
                                                   MGA_DMA_VERTEX),
                                  MGA_SECEND, ((address + length) |
                                               dev_priv->dma_access));

                        ADVANCE_DMA();
                } while (++i < sarea_priv->nbox);
        }

        if (buf_priv->discard) {
                AGE_BUFFER(buf_priv);
                buf->pending = 0;
                buf->used = 0;
                buf_priv->dispatched = 0;

                mga_freelist_put(dev, buf);
        }

        FLUSH_DMA();
}

static void mga_dma_dispatch_indices(struct drm_device *dev, struct drm_buf *buf,
                                     unsigned int start, unsigned int end)
{
        drm_mga_private_t *dev_priv = dev->dev_private;
        drm_mga_buf_priv_t *buf_priv = buf->dev_private;
        drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
        u32 address = (u32) buf->bus_address;
        int i = 0;
        DMA_LOCALS;
        DRM_DEBUG("buf=%d start=%d end=%d\n", buf->idx, start, end);

        if (start != end) {
                buf_priv->dispatched = 1;

                MGA_EMIT_STATE(dev_priv, sarea_priv->dirty);

                do {
                        if (i < sarea_priv->nbox) {
                                mga_emit_clip_rect(dev_priv,
                                                   &sarea_priv->boxes[i]);
                        }

                        BEGIN_DMA(1);

                        DMA_BLOCK(MGA_DMAPAD, 0x00000000,
                                  MGA_DMAPAD, 0x00000000,
                                  MGA_SETUPADDRESS, address + start,
                                  MGA_SETUPEND, ((address + end) |
                                                 dev_priv->dma_access));

                        ADVANCE_DMA();
                } while (++i < sarea_priv->nbox);
        }

        if (buf_priv->discard) {
                AGE_BUFFER(buf_priv);
                buf->pending = 0;
                buf->used = 0;
                buf_priv->dispatched = 0;

                mga_freelist_put(dev, buf);
        }

        FLUSH_DMA();
}

/* This copies a 64 byte aligned AGP region to the framebuffer with a
 * standard blit; the ioctl needs to do checking.
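 *
 * A note on the layout, as read from the blit below: the region is
 * copied as a 64-pixel-wide rectangle of length / 64 rows (MGA_PITCH
 * is set to 64 and the FXBNDRY span covers x = 0..63), which is why
 * mga_verify_iload() checks the length against MGA_ILOAD_MASK above.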
 */
static void mga_dma_dispatch_iload(struct drm_device *dev, struct drm_buf *buf,
                                   unsigned int dstorg, unsigned int length)
{
        drm_mga_private_t *dev_priv = dev->dev_private;
        drm_mga_buf_priv_t *buf_priv = buf->dev_private;
        drm_mga_context_regs_t *ctx = &dev_priv->sarea_priv->context_state;
        u32 srcorg =
            buf->bus_address | dev_priv->dma_access | MGA_SRCMAP_SYSMEM;
        u32 y2;
        DMA_LOCALS;
        DRM_DEBUG("buf=%d used=%d\n", buf->idx, buf->used);

        y2 = length / 64;

        BEGIN_DMA(5);

        DMA_BLOCK(MGA_DMAPAD, 0x00000000,
                  MGA_DMAPAD, 0x00000000,
                  MGA_DWGSYNC, 0x00007100, MGA_DWGSYNC, 0x00007000);

        DMA_BLOCK(MGA_DSTORG, dstorg,
                  MGA_MACCESS, 0x00000000, MGA_SRCORG, srcorg, MGA_AR5, 64);

        DMA_BLOCK(MGA_PITCH, 64,
                  MGA_PLNWT, 0xffffffff,
                  MGA_DMAPAD, 0x00000000, MGA_DWGCTL, MGA_DWGCTL_COPY);

        DMA_BLOCK(MGA_AR0, 63,
                  MGA_AR3, 0,
                  MGA_FXBNDRY, (63 << 16) | 0, MGA_YDSTLEN + MGA_EXEC, y2);

        DMA_BLOCK(MGA_PLNWT, ctx->plnwt,
                  MGA_SRCORG, dev_priv->front_offset,
                  MGA_PITCH, dev_priv->front_pitch, MGA_DWGSYNC, 0x00007000);

        ADVANCE_DMA();

        AGE_BUFFER(buf_priv);

        buf->pending = 0;
        buf->used = 0;
        buf_priv->dispatched = 0;

        mga_freelist_put(dev, buf);

        FLUSH_DMA();
}

static void mga_dma_dispatch_blit(struct drm_device *dev, drm_mga_blit_t *blit)
{
        drm_mga_private_t *dev_priv = dev->dev_private;
        drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
        drm_mga_context_regs_t *ctx = &sarea_priv->context_state;
        struct drm_clip_rect *pbox = sarea_priv->boxes;
        int nbox = sarea_priv->nbox;
        u32 scandir = 0, i;
        DMA_LOCALS;
        DRM_DEBUG("\n");

        BEGIN_DMA(4 + nbox);

        DMA_BLOCK(MGA_DMAPAD, 0x00000000,
                  MGA_DMAPAD, 0x00000000,
                  MGA_DWGSYNC, 0x00007100, MGA_DWGSYNC, 0x00007000);

        DMA_BLOCK(MGA_DWGCTL, MGA_DWGCTL_COPY,
                  MGA_PLNWT, blit->planemask,
                  MGA_SRCORG, blit->srcorg, MGA_DSTORG, blit->dstorg);

        DMA_BLOCK(MGA_SGN, scandir,
                  MGA_MACCESS, dev_priv->maccess,
                  MGA_AR5, blit->ydir * blit->src_pitch,
                  MGA_PITCH, blit->dst_pitch);

        for (i = 0; i < nbox; i++) {
                int srcx = pbox[i].x1 + blit->delta_sx;
                int srcy = pbox[i].y1 + blit->delta_sy;
                int dstx = pbox[i].x1 + blit->delta_dx;
                int dsty = pbox[i].y1 + blit->delta_dy;
                int h = pbox[i].y2 - pbox[i].y1;
                int w = pbox[i].x2 - pbox[i].x1 - 1;
                int start;

                if (blit->ydir == -1)
                        srcy = blit->height - srcy - 1;

                start = srcy * blit->src_pitch + srcx;

                DMA_BLOCK(MGA_AR0, start + w,
                          MGA_AR3, start,
                          MGA_FXBNDRY, ((dstx + w) << 16) | (dstx & 0xffff),
                          MGA_YDSTLEN + MGA_EXEC, (dsty << 16) | h);
        }

        /* Do something to flush AGP?
         */

        /* Force reset of DWGCTL */
        DMA_BLOCK(MGA_DMAPAD, 0x00000000,
                  MGA_PLNWT, ctx->plnwt,
                  MGA_PITCH, dev_priv->front_pitch, MGA_DWGCTL, ctx->dwgctl);

        ADVANCE_DMA();
}

/* ================================================================
 *
 */

static int mga_dma_clear(struct drm_device *dev, void *data, struct drm_file *file_priv)
{
        drm_mga_private_t *dev_priv = dev->dev_private;
        drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
        drm_mga_clear_t *clear = data;

        LOCK_TEST_WITH_RETURN(dev, file_priv);

        if (sarea_priv->nbox > MGA_NR_SAREA_CLIPRECTS)
                sarea_priv->nbox = MGA_NR_SAREA_CLIPRECTS;

        WRAP_TEST_WITH_RETURN(dev_priv);

        mga_dma_dispatch_clear(dev, clear);

        /* Make sure we restore the 3D state next time.
         */
        dev_priv->sarea_priv->dirty |= MGA_UPLOAD_CONTEXT;

        return 0;
}

static int mga_dma_swap(struct drm_device *dev, void *data, struct drm_file *file_priv)
{
        drm_mga_private_t *dev_priv = dev->dev_private;
        drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;

        LOCK_TEST_WITH_RETURN(dev, file_priv);

        if (sarea_priv->nbox > MGA_NR_SAREA_CLIPRECTS)
                sarea_priv->nbox = MGA_NR_SAREA_CLIPRECTS;

        WRAP_TEST_WITH_RETURN(dev_priv);

        mga_dma_dispatch_swap(dev);

        /* Make sure we restore the 3D state next time.
         */
        dev_priv->sarea_priv->dirty |= MGA_UPLOAD_CONTEXT;

        return 0;
}

static int mga_dma_vertex(struct drm_device *dev, void *data, struct drm_file *file_priv)
{
        drm_mga_private_t *dev_priv = dev->dev_private;
        struct drm_device_dma *dma = dev->dma;
        struct drm_buf *buf;
        drm_mga_buf_priv_t *buf_priv;
        drm_mga_vertex_t *vertex = data;

        LOCK_TEST_WITH_RETURN(dev, file_priv);

        if (vertex->idx < 0 || vertex->idx > dma->buf_count)
                return -EINVAL;
        buf = dma->buflist[vertex->idx];
        buf_priv = buf->dev_private;

        buf->used = vertex->used;
        buf_priv->discard = vertex->discard;

        if (!mga_verify_state(dev_priv)) {
                if (vertex->discard) {
                        if (buf_priv->dispatched == 1)
                                AGE_BUFFER(buf_priv);
                        buf_priv->dispatched = 0;
                        mga_freelist_put(dev, buf);
                }
                return -EINVAL;
        }

        WRAP_TEST_WITH_RETURN(dev_priv);

        mga_dma_dispatch_vertex(dev, buf);

        return 0;
}

static int mga_dma_indices(struct drm_device *dev, void *data, struct drm_file *file_priv)
{
        drm_mga_private_t *dev_priv = dev->dev_private;
        struct drm_device_dma *dma = dev->dma;
        struct drm_buf *buf;
        drm_mga_buf_priv_t *buf_priv;
        drm_mga_indices_t *indices = data;

        LOCK_TEST_WITH_RETURN(dev, file_priv);

        if (indices->idx < 0 || indices->idx > dma->buf_count)
                return -EINVAL;

        buf = dma->buflist[indices->idx];
        buf_priv = buf->dev_private;

        buf_priv->discard = indices->discard;

        if (!mga_verify_state(dev_priv)) {
                if (indices->discard) {
                        if (buf_priv->dispatched == 1)
                                AGE_BUFFER(buf_priv);
                        buf_priv->dispatched = 0;
                        mga_freelist_put(dev, buf);
                }
                return -EINVAL;
        }

        WRAP_TEST_WITH_RETURN(dev_priv);

        mga_dma_dispatch_indices(dev, buf, indices->start, indices->end);

        return 0;
}

static int mga_dma_iload(struct drm_device *dev, void *data, struct drm_file *file_priv)
{
        struct drm_device_dma *dma = dev->dma;
        drm_mga_private_t *dev_priv = dev->dev_private;
        struct drm_buf *buf;
        drm_mga_buf_priv_t *buf_priv;
        drm_mga_iload_t *iload = data;
        DRM_DEBUG("\n");

        LOCK_TEST_WITH_RETURN(dev, file_priv);

#if 0
        if (mga_do_wait_for_idle(dev_priv) < 0) {
                if (MGA_DMA_DEBUG)
                        DRM_INFO("-EBUSY\n");
                return -EBUSY;
        }
#endif
        if (iload->idx < 0 || iload->idx > dma->buf_count)
                return -EINVAL;

        buf = dma->buflist[iload->idx];
        buf_priv = buf->dev_private;

        if (mga_verify_iload(dev_priv, iload->dstorg, iload->length)) {
                mga_freelist_put(dev, buf);
                return -EINVAL;
        }

        WRAP_TEST_WITH_RETURN(dev_priv);

        mga_dma_dispatch_iload(dev, buf, iload->dstorg, iload->length);

        /* Make sure we restore the 3D state next time.
         */
        dev_priv->sarea_priv->dirty |= MGA_UPLOAD_CONTEXT;

        return 0;
}

static int mga_dma_blit(struct drm_device *dev, void *data, struct drm_file *file_priv)
{
        drm_mga_private_t *dev_priv = dev->dev_private;
        drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
        drm_mga_blit_t *blit = data;
        DRM_DEBUG("\n");

        LOCK_TEST_WITH_RETURN(dev, file_priv);

        if (sarea_priv->nbox > MGA_NR_SAREA_CLIPRECTS)
                sarea_priv->nbox = MGA_NR_SAREA_CLIPRECTS;

        if (mga_verify_blit(dev_priv, blit->srcorg, blit->dstorg))
                return -EINVAL;

        WRAP_TEST_WITH_RETURN(dev_priv);

        mga_dma_dispatch_blit(dev, blit);

        /* Make sure we restore the 3D state next time.
         */
        dev_priv->sarea_priv->dirty |= MGA_UPLOAD_CONTEXT;

        return 0;
}

static int mga_getparam(struct drm_device *dev, void *data, struct drm_file *file_priv)
{
        drm_mga_private_t *dev_priv = dev->dev_private;
        drm_mga_getparam_t *param = data;
        int value;

        if (!dev_priv) {
                DRM_ERROR("called with no initialization\n");
                return -EINVAL;
        }

        DRM_DEBUG("pid=%d\n", DRM_CURRENTPID);

        switch (param->param) {
        case MGA_PARAM_IRQ_NR:
                value = dev->pdev->irq;
                break;
        case MGA_PARAM_CARD_TYPE:
                value = dev_priv->chipset;
                break;
        default:
                return -EINVAL;
        }

        if (copy_to_user(param->value, &value, sizeof(int))) {
                DRM_ERROR("copy_to_user\n");
                return -EFAULT;
        }

        return 0;
}

static int mga_set_fence(struct drm_device *dev, void *data, struct drm_file *file_priv)
{
        drm_mga_private_t *dev_priv = dev->dev_private;
        u32 *fence = data;
        DMA_LOCALS;

        if (!dev_priv) {
                DRM_ERROR("called with no initialization\n");
                return -EINVAL;
        }

        DRM_DEBUG("pid=%d\n", DRM_CURRENTPID);

        /* I would normally do this assignment in the declaration of fence,
         * but dev_priv may be NULL.
         */

        *fence = dev_priv->next_fence_to_post;
        dev_priv->next_fence_to_post++;

        BEGIN_DMA(1);
        DMA_BLOCK(MGA_DMAPAD, 0x00000000,
                  MGA_DMAPAD, 0x00000000,
                  MGA_DMAPAD, 0x00000000, MGA_SOFTRAP, 0x00000000);
        ADVANCE_DMA();

        return 0;
}

static int mga_wait_fence(struct drm_device *dev, void *data,
                          struct drm_file *file_priv)
{
        drm_mga_private_t *dev_priv = dev->dev_private;
        u32 *fence = data;

        if (!dev_priv) {
                DRM_ERROR("called with no initialization\n");
                return -EINVAL;
        }

        DRM_DEBUG("pid=%d\n", DRM_CURRENTPID);

        mga_driver_fence_wait(dev, fence);
        return 0;
}

const struct drm_ioctl_desc mga_ioctls[] = {
        DRM_IOCTL_DEF_DRV(MGA_INIT, mga_dma_init, DRM_AUTH|DRM_MASTER|DRM_ROOT_ONLY),
        DRM_IOCTL_DEF_DRV(MGA_FLUSH, mga_dma_flush, DRM_AUTH),
        DRM_IOCTL_DEF_DRV(MGA_RESET, mga_dma_reset, DRM_AUTH),
        DRM_IOCTL_DEF_DRV(MGA_SWAP, mga_dma_swap, DRM_AUTH),
        DRM_IOCTL_DEF_DRV(MGA_CLEAR, mga_dma_clear, DRM_AUTH),
        DRM_IOCTL_DEF_DRV(MGA_VERTEX, mga_dma_vertex, DRM_AUTH),
        DRM_IOCTL_DEF_DRV(MGA_INDICES, mga_dma_indices, DRM_AUTH),
        DRM_IOCTL_DEF_DRV(MGA_ILOAD, mga_dma_iload, DRM_AUTH),
        DRM_IOCTL_DEF_DRV(MGA_BLIT, mga_dma_blit, DRM_AUTH),
        DRM_IOCTL_DEF_DRV(MGA_GETPARAM, mga_getparam, DRM_AUTH),
        DRM_IOCTL_DEF_DRV(MGA_SET_FENCE, mga_set_fence, DRM_AUTH),
        DRM_IOCTL_DEF_DRV(MGA_WAIT_FENCE, mga_wait_fence, DRM_AUTH),
        DRM_IOCTL_DEF_DRV(MGA_DMA_BOOTSTRAP, mga_dma_bootstrap, DRM_AUTH|DRM_MASTER|DRM_ROOT_ONLY),
};

int mga_max_ioctl = ARRAY_SIZE(mga_ioctls);