/* mga_state.c -- State support for MGA G200/G400 -*- linux-c -*-
 * Created: Thu Jan 27 02:53:43 2000 by jhartmann@precisioninsight.com
 */
/*-
 * Copyright 1999 Precision Insight, Inc., Cedar Park, Texas.
 * Copyright 2000 VA Linux Systems, Inc., Sunnyvale, California.
 * All Rights Reserved.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * VA LINUX SYSTEMS AND/OR ITS SUPPLIERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors:
 *    Jeff Hartmann <jhartmann@valinux.com>
 *    Keith Whitwell <keith@tungstengraphics.com>
 *
 * Rewritten by:
 *    Gareth Hughes <gareth@valinux.com>
 */

#include "dev/drm/drmP.h"
#include "dev/drm/drm.h"
#include "dev/drm/mga_drm.h"
#include "dev/drm/mga_drv.h"
/* ================================================================
 * DMA hardware state programming functions
 */
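
/* Each DMA_BLOCK() below emits four register/value pairs into the
 * primary DMA space reserved by BEGIN_DMA(n); ADVANCE_DMA() commits
 * the blocks and FLUSH_DMA() hands the stream off to the hardware.
 * See the macro definitions in mga_drv.h.
 */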

static void mga_emit_clip_rect(drm_mga_private_t * dev_priv,
			       struct drm_clip_rect * box)
{
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_mga_context_regs_t *ctx = &sarea_priv->context_state;
	unsigned int pitch = dev_priv->front_pitch;
	DMA_LOCALS;

	BEGIN_DMA(2);

	/* Force reset of DWGCTL on G400 (eliminates clip disable bit).
	 */
	if (dev_priv->chipset >= MGA_CARD_TYPE_G400) {
		DMA_BLOCK(MGA_DWGCTL, ctx->dwgctl,
			  MGA_LEN + MGA_EXEC, 0x80000000,
			  MGA_DWGCTL, ctx->dwgctl,
			  MGA_LEN + MGA_EXEC, 0x80000000);
	}
	DMA_BLOCK(MGA_DMAPAD, 0x00000000,
		  MGA_CXBNDRY, ((box->x2 - 1) << 16) | box->x1,
		  MGA_YTOP, box->y1 * pitch, MGA_YBOT, (box->y2 - 1) * pitch);

	ADVANCE_DMA();
}

static __inline__ void mga_g200_emit_context(drm_mga_private_t * dev_priv)
{
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_mga_context_regs_t *ctx = &sarea_priv->context_state;
	DMA_LOCALS;

	BEGIN_DMA(3);

	DMA_BLOCK(MGA_DSTORG, ctx->dstorg,
		  MGA_MACCESS, ctx->maccess,
		  MGA_PLNWT, ctx->plnwt, MGA_DWGCTL, ctx->dwgctl);

	DMA_BLOCK(MGA_ALPHACTRL, ctx->alphactrl,
		  MGA_FOGCOL, ctx->fogcolor,
		  MGA_WFLAG, ctx->wflag, MGA_ZORG, dev_priv->depth_offset);

	DMA_BLOCK(MGA_FCOL, ctx->fcol,
		  MGA_DMAPAD, 0x00000000,
		  MGA_DMAPAD, 0x00000000, MGA_DMAPAD, 0x00000000);

	ADVANCE_DMA();
}

static __inline__ void mga_g400_emit_context(drm_mga_private_t * dev_priv)
{
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_mga_context_regs_t *ctx = &sarea_priv->context_state;
	DMA_LOCALS;

	BEGIN_DMA(4);

	DMA_BLOCK(MGA_DSTORG, ctx->dstorg,
		  MGA_MACCESS, ctx->maccess,
		  MGA_PLNWT, ctx->plnwt,
		  MGA_DWGCTL, ctx->dwgctl);

	DMA_BLOCK(MGA_ALPHACTRL, ctx->alphactrl,
		  MGA_FOGCOL, ctx->fogcolor,
		  MGA_WFLAG, ctx->wflag,
		  MGA_ZORG, dev_priv->depth_offset);

	DMA_BLOCK(MGA_WFLAG1, ctx->wflag,
		  MGA_TDUALSTAGE0, ctx->tdualstage0,
		  MGA_TDUALSTAGE1, ctx->tdualstage1,
		  MGA_FCOL, ctx->fcol);

	DMA_BLOCK(MGA_STENCIL, ctx->stencil,
		  MGA_STENCILCTL, ctx->stencilctl,
		  MGA_DMAPAD, 0x00000000,
		  MGA_DMAPAD, 0x00000000);

	ADVANCE_DMA();
}

static __inline__ void mga_g200_emit_tex0(drm_mga_private_t * dev_priv)
{
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_mga_texture_regs_t *tex = &sarea_priv->tex_state[0];
	DMA_LOCALS;

	BEGIN_DMA(4);

	DMA_BLOCK(MGA_TEXCTL2, tex->texctl2,
		  MGA_TEXCTL, tex->texctl,
		  MGA_TEXFILTER, tex->texfilter,
		  MGA_TEXBORDERCOL, tex->texbordercol);

	DMA_BLOCK(MGA_TEXORG, tex->texorg,
		  MGA_TEXORG1, tex->texorg1,
		  MGA_TEXORG2, tex->texorg2,
		  MGA_TEXORG3, tex->texorg3);

	DMA_BLOCK(MGA_TEXORG4, tex->texorg4,
		  MGA_TEXWIDTH, tex->texwidth,
		  MGA_TEXHEIGHT, tex->texheight,
		  MGA_WR24, tex->texwidth);

	DMA_BLOCK(MGA_WR34, tex->texheight,
		  MGA_TEXTRANS, 0x0000ffff,
		  MGA_TEXTRANSHIGH, 0x0000ffff,
		  MGA_DMAPAD, 0x00000000);

	ADVANCE_DMA();
}

static __inline__ void mga_g400_emit_tex0(drm_mga_private_t * dev_priv)
{
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_mga_texture_regs_t *tex = &sarea_priv->tex_state[0];
	DMA_LOCALS;

	/* printk("mga_g400_emit_tex0 %x %x %x\n", tex->texorg, */
	/*	  tex->texctl, tex->texctl2); */

	BEGIN_DMA(6);

	DMA_BLOCK(MGA_TEXCTL2, tex->texctl2 | MGA_G400_TC2_MAGIC,
		  MGA_TEXCTL, tex->texctl,
		  MGA_TEXFILTER, tex->texfilter,
		  MGA_TEXBORDERCOL, tex->texbordercol);

	DMA_BLOCK(MGA_TEXORG, tex->texorg,
		  MGA_TEXORG1, tex->texorg1,
		  MGA_TEXORG2, tex->texorg2,
		  MGA_TEXORG3, tex->texorg3);

	DMA_BLOCK(MGA_TEXORG4, tex->texorg4,
		  MGA_TEXWIDTH, tex->texwidth,
		  MGA_TEXHEIGHT, tex->texheight,
		  MGA_WR49, 0x00000000);

	DMA_BLOCK(MGA_WR57, 0x00000000,
		  MGA_WR53, 0x00000000,
		  MGA_WR61, 0x00000000,
		  MGA_WR52, MGA_G400_WR_MAGIC);

	DMA_BLOCK(MGA_WR60, MGA_G400_WR_MAGIC,
		  MGA_WR54, tex->texwidth | MGA_G400_WR_MAGIC,
		  MGA_WR62, tex->texheight | MGA_G400_WR_MAGIC,
		  MGA_DMAPAD, 0x00000000);

	DMA_BLOCK(MGA_DMAPAD, 0x00000000,
		  MGA_DMAPAD, 0x00000000,
		  MGA_TEXTRANS, 0x0000ffff,
		  MGA_TEXTRANSHIGH, 0x0000ffff);

	ADVANCE_DMA();
}

static __inline__ void mga_g400_emit_tex1(drm_mga_private_t * dev_priv)
{
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_mga_texture_regs_t *tex = &sarea_priv->tex_state[1];
	DMA_LOCALS;

	/* printk("mga_g400_emit_tex1 %x %x %x\n", tex->texorg, */
	/*	  tex->texctl, tex->texctl2); */

	BEGIN_DMA(5);

	DMA_BLOCK(MGA_TEXCTL2, (tex->texctl2 |
				MGA_MAP1_ENABLE |
				MGA_G400_TC2_MAGIC),
		  MGA_TEXCTL, tex->texctl,
		  MGA_TEXFILTER, tex->texfilter,
		  MGA_TEXBORDERCOL, tex->texbordercol);

	DMA_BLOCK(MGA_TEXORG, tex->texorg,
		  MGA_TEXORG1, tex->texorg1,
		  MGA_TEXORG2, tex->texorg2,
		  MGA_TEXORG3, tex->texorg3);

	DMA_BLOCK(MGA_TEXORG4, tex->texorg4,
		  MGA_TEXWIDTH, tex->texwidth,
		  MGA_TEXHEIGHT, tex->texheight,
		  MGA_WR49, 0x00000000);

	DMA_BLOCK(MGA_WR57, 0x00000000,
		  MGA_WR53, 0x00000000,
		  MGA_WR61, 0x00000000,
		  MGA_WR52, tex->texwidth | MGA_G400_WR_MAGIC);

	DMA_BLOCK(MGA_WR60, tex->texheight | MGA_G400_WR_MAGIC,
		  MGA_TEXTRANS, 0x0000ffff,
		  MGA_TEXTRANSHIGH, 0x0000ffff,
		  MGA_TEXCTL2, tex->texctl2 | MGA_G400_TC2_MAGIC);

	ADVANCE_DMA();
}

static __inline__ void mga_g200_emit_pipe(drm_mga_private_t * dev_priv)
{
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	unsigned int pipe = sarea_priv->warp_pipe;
	DMA_LOCALS;

	BEGIN_DMA(3);

	DMA_BLOCK(MGA_WIADDR, MGA_WMODE_SUSPEND,
		  MGA_WVRTXSZ, 0x00000007,
		  MGA_WFLAG, 0x00000000,
		  MGA_WR24, 0x00000000);

	DMA_BLOCK(MGA_WR25, 0x00000100,
		  MGA_WR34, 0x00000000,
		  MGA_WR42, 0x0000ffff,
		  MGA_WR60, 0x0000ffff);

	/* Padding required due to hardware bug.
	 */
	DMA_BLOCK(MGA_DMAPAD, 0xffffffff,
		  MGA_DMAPAD, 0xffffffff,
		  MGA_DMAPAD, 0xffffffff,
		  MGA_WIADDR, (dev_priv->warp_pipe_phys[pipe] |
			       MGA_WMODE_START | dev_priv->wagp_enable));

	ADVANCE_DMA();
}

static __inline__ void mga_g400_emit_pipe(drm_mga_private_t * dev_priv)
{
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	unsigned int pipe = sarea_priv->warp_pipe;
	DMA_LOCALS;

	/* printk("mga_g400_emit_pipe %x\n", pipe); */

	BEGIN_DMA(10);

	DMA_BLOCK(MGA_WIADDR2, MGA_WMODE_SUSPEND,
		  MGA_DMAPAD, 0x00000000,
		  MGA_DMAPAD, 0x00000000,
		  MGA_DMAPAD, 0x00000000);

	if (pipe & MGA_T2) {
		DMA_BLOCK(MGA_WVRTXSZ, 0x00001e09,
			  MGA_DMAPAD, 0x00000000,
			  MGA_DMAPAD, 0x00000000,
			  MGA_DMAPAD, 0x00000000);

		DMA_BLOCK(MGA_WACCEPTSEQ, 0x00000000,
			  MGA_WACCEPTSEQ, 0x00000000,
			  MGA_WACCEPTSEQ, 0x00000000,
			  MGA_WACCEPTSEQ, 0x1e000000);
	} else {
		if (dev_priv->warp_pipe & MGA_T2) {
			/* Flush the WARP pipe */
			DMA_BLOCK(MGA_YDST, 0x00000000,
				  MGA_FXLEFT, 0x00000000,
				  MGA_FXRIGHT, 0x00000001,
				  MGA_DWGCTL, MGA_DWGCTL_FLUSH);

			DMA_BLOCK(MGA_LEN + MGA_EXEC, 0x00000001,
				  MGA_DWGSYNC, 0x00007000,
				  MGA_TEXCTL2, MGA_G400_TC2_MAGIC,
				  MGA_LEN + MGA_EXEC, 0x00000000);

			DMA_BLOCK(MGA_TEXCTL2, (MGA_DUALTEX |
						MGA_G400_TC2_MAGIC),
				  MGA_LEN + MGA_EXEC, 0x00000000,
				  MGA_TEXCTL2, MGA_G400_TC2_MAGIC,
				  MGA_DMAPAD, 0x00000000);
		}

		DMA_BLOCK(MGA_WVRTXSZ, 0x00001807,
			  MGA_DMAPAD, 0x00000000,
			  MGA_DMAPAD, 0x00000000,
			  MGA_DMAPAD, 0x00000000);

		DMA_BLOCK(MGA_WACCEPTSEQ, 0x00000000,
			  MGA_WACCEPTSEQ, 0x00000000,
			  MGA_WACCEPTSEQ, 0x00000000,
			  MGA_WACCEPTSEQ, 0x18000000);
	}

	DMA_BLOCK(MGA_WFLAG, 0x00000000,
		  MGA_WFLAG1, 0x00000000,
		  MGA_WR56, MGA_G400_WR56_MAGIC,
		  MGA_DMAPAD, 0x00000000);

	DMA_BLOCK(MGA_WR49, 0x00000000,		/* tex0 */
		  MGA_WR57, 0x00000000,		/* tex0 */
		  MGA_WR53, 0x00000000,		/* tex1 */
		  MGA_WR61, 0x00000000);	/* tex1 */

	DMA_BLOCK(MGA_WR54, MGA_G400_WR_MAGIC,	/* tex0 width */
		  MGA_WR62, MGA_G400_WR_MAGIC,	/* tex0 height */
		  MGA_WR52, MGA_G400_WR_MAGIC,	/* tex1 width */
		  MGA_WR60, MGA_G400_WR_MAGIC);	/* tex1 height */

	/* Padding required due to hardware bug */
	DMA_BLOCK(MGA_DMAPAD, 0xffffffff,
		  MGA_DMAPAD, 0xffffffff,
		  MGA_DMAPAD, 0xffffffff,
		  MGA_WIADDR2, (dev_priv->warp_pipe_phys[pipe] |
				MGA_WMODE_START | dev_priv->wagp_enable));

	ADVANCE_DMA();
}

static void mga_g200_emit_state(drm_mga_private_t * dev_priv)
{
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	unsigned int dirty = sarea_priv->dirty;

	if (sarea_priv->warp_pipe != dev_priv->warp_pipe) {
		mga_g200_emit_pipe(dev_priv);
		dev_priv->warp_pipe = sarea_priv->warp_pipe;
	}

	if (dirty & MGA_UPLOAD_CONTEXT) {
		mga_g200_emit_context(dev_priv);
		sarea_priv->dirty &= ~MGA_UPLOAD_CONTEXT;
	}

	if (dirty & MGA_UPLOAD_TEX0) {
		mga_g200_emit_tex0(dev_priv);
		sarea_priv->dirty &= ~MGA_UPLOAD_TEX0;
	}
}

static void mga_g400_emit_state(drm_mga_private_t * dev_priv)
{
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	unsigned int dirty = sarea_priv->dirty;
	int multitex = sarea_priv->warp_pipe & MGA_T2;

	if (sarea_priv->warp_pipe != dev_priv->warp_pipe) {
		mga_g400_emit_pipe(dev_priv);
		dev_priv->warp_pipe = sarea_priv->warp_pipe;
	}

	if (dirty & MGA_UPLOAD_CONTEXT) {
		mga_g400_emit_context(dev_priv);
		sarea_priv->dirty &= ~MGA_UPLOAD_CONTEXT;
	}

	if (dirty & MGA_UPLOAD_TEX0) {
		mga_g400_emit_tex0(dev_priv);
		sarea_priv->dirty &= ~MGA_UPLOAD_TEX0;
	}

	if ((dirty & MGA_UPLOAD_TEX1) && multitex) {
		mga_g400_emit_tex1(dev_priv);
		sarea_priv->dirty &= ~MGA_UPLOAD_TEX1;
	}
}

/* ================================================================
 * SAREA state verification
 */
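
/* These checks keep an untrusted client from pointing the drawing
 * engine at memory it does not own: every dirty piece of SAREA state
 * is validated before it is emitted to the hardware.
 */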

/* Disallow all write destinations except the front and back buffers.
 */
static int mga_verify_context(drm_mga_private_t * dev_priv)
{
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_mga_context_regs_t *ctx = &sarea_priv->context_state;

	if (ctx->dstorg != dev_priv->front_offset &&
	    ctx->dstorg != dev_priv->back_offset) {
		DRM_ERROR("*** bad DSTORG: %x (front %x, back %x)\n\n",
			  ctx->dstorg, dev_priv->front_offset,
			  dev_priv->back_offset);
		ctx->dstorg = 0;
		return -EINVAL;
	}

	return 0;
}

/* Disallow texture reads from PCI space.
 */
static int mga_verify_tex(drm_mga_private_t * dev_priv, int unit)
{
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_mga_texture_regs_t *tex = &sarea_priv->tex_state[unit];
	unsigned int org;

	org = tex->texorg & (MGA_TEXORGMAP_MASK | MGA_TEXORGACC_MASK);

	if (org == (MGA_TEXORGMAP_SYSMEM | MGA_TEXORGACC_PCI)) {
		DRM_ERROR("*** bad TEXORG: 0x%x, unit %d\n", tex->texorg, unit);
		tex->texorg = 0;
		return -EINVAL;
	}

	return 0;
}

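/* Returns nonzero (true) only when every piece of dirty state passed
 * verification; callers reject the request when this returns zero.
 */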
static int mga_verify_state(drm_mga_private_t * dev_priv)
{
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	unsigned int dirty = sarea_priv->dirty;
	int ret = 0;

	if (sarea_priv->nbox > MGA_NR_SAREA_CLIPRECTS)
		sarea_priv->nbox = MGA_NR_SAREA_CLIPRECTS;

	if (dirty & MGA_UPLOAD_CONTEXT)
		ret |= mga_verify_context(dev_priv);

	if (dirty & MGA_UPLOAD_TEX0)
		ret |= mga_verify_tex(dev_priv, 0);

	if (dev_priv->chipset >= MGA_CARD_TYPE_G400) {
		if (dirty & MGA_UPLOAD_TEX1)
			ret |= mga_verify_tex(dev_priv, 1);

		if (dirty & MGA_UPLOAD_PIPE)
			ret |= (sarea_priv->warp_pipe > MGA_MAX_G400_PIPES);
	} else {
		if (dirty & MGA_UPLOAD_PIPE)
			ret |= (sarea_priv->warp_pipe > MGA_MAX_G200_PIPES);
	}

	return (ret == 0);
}

static int mga_verify_iload(drm_mga_private_t * dev_priv,
			    unsigned int dstorg, unsigned int length)
{
	if (dstorg < dev_priv->texture_offset ||
	    dstorg + length > (dev_priv->texture_offset +
			       dev_priv->texture_size)) {
		DRM_ERROR("*** bad iload DSTORG: 0x%x\n", dstorg);
		return -EINVAL;
	}

	if (length & MGA_ILOAD_MASK) {
		DRM_ERROR("*** bad iload length: 0x%x\n",
			  length & MGA_ILOAD_MASK);
		return -EINVAL;
	}

	return 0;
}

static int mga_verify_blit(drm_mga_private_t * dev_priv,
			   unsigned int srcorg, unsigned int dstorg)
{
	if ((srcorg & 0x3) == (MGA_SRCACC_PCI | MGA_SRCMAP_SYSMEM) ||
	    (dstorg & 0x3) == (MGA_SRCACC_PCI | MGA_SRCMAP_SYSMEM)) {
		DRM_ERROR("*** bad blit: src=0x%x dst=0x%x\n", srcorg, dstorg);
		return -EINVAL;
	}
	return 0;
}

/* ================================================================
 *
 */

static void mga_dma_dispatch_clear(struct drm_device * dev, drm_mga_clear_t * clear)
{
	drm_mga_private_t *dev_priv = dev->dev_private;
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_mga_context_regs_t *ctx = &sarea_priv->context_state;
	struct drm_clip_rect *pbox = sarea_priv->boxes;
	int nbox = sarea_priv->nbox;
	int i;
	DMA_LOCALS;
	DRM_DEBUG("\n");

	BEGIN_DMA(1);

	DMA_BLOCK(MGA_DMAPAD, 0x00000000,
		  MGA_DMAPAD, 0x00000000,
		  MGA_DWGSYNC, 0x00007100,
		  MGA_DWGSYNC, 0x00007000);

	ADVANCE_DMA();

	for (i = 0; i < nbox; i++) {
		struct drm_clip_rect *box = &pbox[i];
		u32 height = box->y2 - box->y1;

		DRM_DEBUG("   from=%d,%d to=%d,%d\n",
			  box->x1, box->y1, box->x2, box->y2);

		if (clear->flags & MGA_FRONT) {
			BEGIN_DMA(2);

			DMA_BLOCK(MGA_DMAPAD, 0x00000000,
				  MGA_PLNWT, clear->color_mask,
				  MGA_YDSTLEN, (box->y1 << 16) | height,
				  MGA_FXBNDRY, (box->x2 << 16) | box->x1);

			DMA_BLOCK(MGA_DMAPAD, 0x00000000,
				  MGA_FCOL, clear->clear_color,
				  MGA_DSTORG, dev_priv->front_offset,
				  MGA_DWGCTL + MGA_EXEC, dev_priv->clear_cmd);

			ADVANCE_DMA();
		}

		if (clear->flags & MGA_BACK) {
			BEGIN_DMA(2);

			DMA_BLOCK(MGA_DMAPAD, 0x00000000,
				  MGA_PLNWT, clear->color_mask,
				  MGA_YDSTLEN, (box->y1 << 16) | height,
				  MGA_FXBNDRY, (box->x2 << 16) | box->x1);

			DMA_BLOCK(MGA_DMAPAD, 0x00000000,
				  MGA_FCOL, clear->clear_color,
				  MGA_DSTORG, dev_priv->back_offset,
				  MGA_DWGCTL + MGA_EXEC, dev_priv->clear_cmd);

			ADVANCE_DMA();
		}

		if (clear->flags & MGA_DEPTH) {
			BEGIN_DMA(2);

			DMA_BLOCK(MGA_DMAPAD, 0x00000000,
				  MGA_PLNWT, clear->depth_mask,
				  MGA_YDSTLEN, (box->y1 << 16) | height,
				  MGA_FXBNDRY, (box->x2 << 16) | box->x1);

			DMA_BLOCK(MGA_DMAPAD, 0x00000000,
				  MGA_FCOL, clear->clear_depth,
				  MGA_DSTORG, dev_priv->depth_offset,
				  MGA_DWGCTL + MGA_EXEC, dev_priv->clear_cmd);

			ADVANCE_DMA();
		}
	}

	BEGIN_DMA(1);

	/* Force reset of DWGCTL */
	DMA_BLOCK(MGA_DMAPAD, 0x00000000,
		  MGA_DMAPAD, 0x00000000,
		  MGA_PLNWT, ctx->plnwt,
		  MGA_DWGCTL, ctx->dwgctl);

	ADVANCE_DMA();

	FLUSH_DMA();
}

static void mga_dma_dispatch_swap(struct drm_device * dev)
{
	drm_mga_private_t *dev_priv = dev->dev_private;
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_mga_context_regs_t *ctx = &sarea_priv->context_state;
	struct drm_clip_rect *pbox = sarea_priv->boxes;
	int nbox = sarea_priv->nbox;
	int i;
	DMA_LOCALS;
	DRM_DEBUG("\n");

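	/* Record where this swap is queued in the primary DMA stream so
	 * that its completion can be detected later.
	 */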
	sarea_priv->last_frame.head = dev_priv->prim.tail;
	sarea_priv->last_frame.wrap = dev_priv->prim.last_wrap;

	BEGIN_DMA(4 + nbox);

	DMA_BLOCK(MGA_DMAPAD, 0x00000000,
		  MGA_DMAPAD, 0x00000000,
		  MGA_DWGSYNC, 0x00007100,
		  MGA_DWGSYNC, 0x00007000);

	DMA_BLOCK(MGA_DSTORG, dev_priv->front_offset,
		  MGA_MACCESS, dev_priv->maccess,
		  MGA_SRCORG, dev_priv->back_offset,
		  MGA_AR5, dev_priv->front_pitch);

	DMA_BLOCK(MGA_DMAPAD, 0x00000000,
		  MGA_DMAPAD, 0x00000000,
		  MGA_PLNWT, 0xffffffff,
		  MGA_DWGCTL, MGA_DWGCTL_COPY);

	for (i = 0; i < nbox; i++) {
		struct drm_clip_rect *box = &pbox[i];
		u32 height = box->y2 - box->y1;
		u32 start = box->y1 * dev_priv->front_pitch;

		DRM_DEBUG("   from=%d,%d to=%d,%d\n",
			  box->x1, box->y1, box->x2, box->y2);

		DMA_BLOCK(MGA_AR0, start + box->x2 - 1,
			  MGA_AR3, start + box->x1,
			  MGA_FXBNDRY, ((box->x2 - 1) << 16) | box->x1,
			  MGA_YDSTLEN + MGA_EXEC, (box->y1 << 16) | height);
	}

	DMA_BLOCK(MGA_DMAPAD, 0x00000000,
		  MGA_PLNWT, ctx->plnwt,
		  MGA_SRCORG, dev_priv->front_offset,
		  MGA_DWGCTL, ctx->dwgctl);

	ADVANCE_DMA();

	FLUSH_DMA();

	DRM_DEBUG("... done.\n");
}

static void mga_dma_dispatch_vertex(struct drm_device * dev, struct drm_buf * buf)
{
	drm_mga_private_t *dev_priv = dev->dev_private;
	drm_mga_buf_priv_t *buf_priv = buf->dev_private;
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	u32 address = (u32) buf->bus_address;
	u32 length = (u32) buf->used;
	int i = 0;
	DMA_LOCALS;
	DRM_DEBUG("buf=%d used=%d\n", buf->idx, buf->used);

	if (buf->used) {
		buf_priv->dispatched = 1;

		MGA_EMIT_STATE(dev_priv, sarea_priv->dirty);

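		/* Re-emit the vertex buffer once per cliprect; the
		 * do/while below still makes a single pass when no
		 * cliprects are set.
		 */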
		do {
			if (i < sarea_priv->nbox) {
				mga_emit_clip_rect(dev_priv,
						   &sarea_priv->boxes[i]);
			}

			BEGIN_DMA(1);

			DMA_BLOCK(MGA_DMAPAD, 0x00000000,
				  MGA_DMAPAD, 0x00000000,
				  MGA_SECADDRESS, (address |
						   MGA_DMA_VERTEX),
				  MGA_SECEND, ((address + length) |
					       dev_priv->dma_access));

			ADVANCE_DMA();
		} while (++i < sarea_priv->nbox);
	}

	if (buf_priv->discard) {
		AGE_BUFFER(buf_priv);
		buf->pending = 0;
		buf->used = 0;
		buf_priv->dispatched = 0;

		mga_freelist_put(dev, buf);
	}

	FLUSH_DMA();
}

static void mga_dma_dispatch_indices(struct drm_device * dev, struct drm_buf * buf,
				     unsigned int start, unsigned int end)
{
	drm_mga_private_t *dev_priv = dev->dev_private;
	drm_mga_buf_priv_t *buf_priv = buf->dev_private;
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	u32 address = (u32) buf->bus_address;
	int i = 0;
	DMA_LOCALS;
	DRM_DEBUG("buf=%d start=%d end=%d\n", buf->idx, start, end);

	if (start != end) {
		buf_priv->dispatched = 1;

		MGA_EMIT_STATE(dev_priv, sarea_priv->dirty);

		do {
			if (i < sarea_priv->nbox) {
				mga_emit_clip_rect(dev_priv,
						   &sarea_priv->boxes[i]);
			}

			BEGIN_DMA(1);

			DMA_BLOCK(MGA_DMAPAD, 0x00000000,
				  MGA_DMAPAD, 0x00000000,
				  MGA_SETUPADDRESS, address + start,
				  MGA_SETUPEND, ((address + end) |
						 dev_priv->dma_access));

			ADVANCE_DMA();
		} while (++i < sarea_priv->nbox);
	}

	if (buf_priv->discard) {
		AGE_BUFFER(buf_priv);
		buf->pending = 0;
		buf->used = 0;
		buf_priv->dispatched = 0;

		mga_freelist_put(dev, buf);
	}

	FLUSH_DMA();
}

/* This copies a 64 byte aligned agp region to the framebuffer with a
 * standard blit; the calling ioctl is responsible for validating the
 * request.
 */
static void mga_dma_dispatch_iload(struct drm_device * dev, struct drm_buf * buf,
				   unsigned int dstorg, unsigned int length)
{
	drm_mga_private_t *dev_priv = dev->dev_private;
	drm_mga_buf_priv_t *buf_priv = buf->dev_private;
	drm_mga_context_regs_t *ctx = &dev_priv->sarea_priv->context_state;
	u32 srcorg = buf->bus_address | dev_priv->dma_access | MGA_SRCMAP_SYSMEM;
	u32 y2;
	DMA_LOCALS;
	DRM_DEBUG("buf=%d used=%d\n", buf->idx, buf->used);

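	/* The data is copied as a 64-byte-wide blit (64 pixels at 8 bpp,
	 * pitch 64), so length / 64 gives the number of rows to transfer.
	 */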
	y2 = length / 64;

	BEGIN_DMA(5);

	DMA_BLOCK(MGA_DMAPAD, 0x00000000,
		  MGA_DMAPAD, 0x00000000,
		  MGA_DWGSYNC, 0x00007100,
		  MGA_DWGSYNC, 0x00007000);

	DMA_BLOCK(MGA_DSTORG, dstorg,
		  MGA_MACCESS, 0x00000000,
		  MGA_SRCORG, srcorg,
		  MGA_AR5, 64);

	DMA_BLOCK(MGA_PITCH, 64,
		  MGA_PLNWT, 0xffffffff,
		  MGA_DMAPAD, 0x00000000,
		  MGA_DWGCTL, MGA_DWGCTL_COPY);

	DMA_BLOCK(MGA_AR0, 63,
		  MGA_AR3, 0,
		  MGA_FXBNDRY, (63 << 16) | 0,
		  MGA_YDSTLEN + MGA_EXEC, y2);

	DMA_BLOCK(MGA_PLNWT, ctx->plnwt,
		  MGA_SRCORG, dev_priv->front_offset,
		  MGA_PITCH, dev_priv->front_pitch,
		  MGA_DWGSYNC, 0x00007000);

	ADVANCE_DMA();

	AGE_BUFFER(buf_priv);

	buf->pending = 0;
	buf->used = 0;
	buf_priv->dispatched = 0;

	mga_freelist_put(dev, buf);

	FLUSH_DMA();
}

static void mga_dma_dispatch_blit(struct drm_device * dev, drm_mga_blit_t * blit)
{
	drm_mga_private_t *dev_priv = dev->dev_private;
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_mga_context_regs_t *ctx = &sarea_priv->context_state;
	struct drm_clip_rect *pbox = sarea_priv->boxes;
	int nbox = sarea_priv->nbox;
	u32 scandir = 0, i;
	DMA_LOCALS;
	DRM_DEBUG("\n");

	BEGIN_DMA(4 + nbox);

	DMA_BLOCK(MGA_DMAPAD, 0x00000000,
		  MGA_DMAPAD, 0x00000000,
		  MGA_DWGSYNC, 0x00007100,
		  MGA_DWGSYNC, 0x00007000);

	DMA_BLOCK(MGA_DWGCTL, MGA_DWGCTL_COPY,
		  MGA_PLNWT, blit->planemask,
		  MGA_SRCORG, blit->srcorg,
		  MGA_DSTORG, blit->dstorg);

	DMA_BLOCK(MGA_SGN, scandir,
		  MGA_MACCESS, dev_priv->maccess,
		  MGA_AR5, blit->ydir * blit->src_pitch,
		  MGA_PITCH, blit->dst_pitch);

	for (i = 0; i < nbox; i++) {
		int srcx = pbox[i].x1 + blit->delta_sx;
		int srcy = pbox[i].y1 + blit->delta_sy;
		int dstx = pbox[i].x1 + blit->delta_dx;
		int dsty = pbox[i].y1 + blit->delta_dy;
		int h = pbox[i].y2 - pbox[i].y1;
		int w = pbox[i].x2 - pbox[i].x1 - 1;
		int start;

		if (blit->ydir == -1) {
			srcy = blit->height - srcy - 1;
		}

		start = srcy * blit->src_pitch + srcx;

		DMA_BLOCK(MGA_AR0, start + w,
			  MGA_AR3, start,
			  MGA_FXBNDRY, ((dstx + w) << 16) | (dstx & 0xffff),
			  MGA_YDSTLEN + MGA_EXEC, (dsty << 16) | h);
	}

	/* Do something to flush AGP?
	 */

	/* Force reset of DWGCTL */
	DMA_BLOCK(MGA_DMAPAD, 0x00000000,
		  MGA_PLNWT, ctx->plnwt,
		  MGA_PITCH, dev_priv->front_pitch,
		  MGA_DWGCTL, ctx->dwgctl);

	ADVANCE_DMA();
}

/* ================================================================
 *
 */

static int mga_dma_clear(struct drm_device *dev, void *data, struct drm_file *file_priv)
{
	drm_mga_private_t *dev_priv = dev->dev_private;
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_mga_clear_t *clear = data;

	LOCK_TEST_WITH_RETURN(dev, file_priv);

	if (sarea_priv->nbox > MGA_NR_SAREA_CLIPRECTS)
		sarea_priv->nbox = MGA_NR_SAREA_CLIPRECTS;

	WRAP_TEST_WITH_RETURN(dev_priv);

	mga_dma_dispatch_clear(dev, clear);

	/* Make sure we restore the 3D state next time.
	 */
	dev_priv->sarea_priv->dirty |= MGA_UPLOAD_CONTEXT;

	return 0;
}

static int mga_dma_swap(struct drm_device *dev, void *data, struct drm_file *file_priv)
{
	drm_mga_private_t *dev_priv = dev->dev_private;
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;

	LOCK_TEST_WITH_RETURN(dev, file_priv);

	if (sarea_priv->nbox > MGA_NR_SAREA_CLIPRECTS)
		sarea_priv->nbox = MGA_NR_SAREA_CLIPRECTS;

	WRAP_TEST_WITH_RETURN(dev_priv);

	mga_dma_dispatch_swap(dev);

	/* Make sure we restore the 3D state next time.
	 */
	dev_priv->sarea_priv->dirty |= MGA_UPLOAD_CONTEXT;

	return 0;
}

static int mga_dma_vertex(struct drm_device *dev, void *data, struct drm_file *file_priv)
{
	drm_mga_private_t *dev_priv = dev->dev_private;
	struct drm_device_dma *dma = dev->dma;
	struct drm_buf *buf;
	drm_mga_buf_priv_t *buf_priv;
	drm_mga_vertex_t *vertex = data;

	LOCK_TEST_WITH_RETURN(dev, file_priv);

	if (vertex->idx < 0 || vertex->idx >= dma->buf_count)
		return -EINVAL;
	buf = dma->buflist[vertex->idx];
	buf_priv = buf->dev_private;

	buf->used = vertex->used;
	buf_priv->discard = vertex->discard;

	if (!mga_verify_state(dev_priv)) {
		if (vertex->discard) {
			if (buf_priv->dispatched == 1)
				AGE_BUFFER(buf_priv);
			buf_priv->dispatched = 0;
			mga_freelist_put(dev, buf);
		}
		return -EINVAL;
	}

	WRAP_TEST_WITH_RETURN(dev_priv);

	mga_dma_dispatch_vertex(dev, buf);

	return 0;
}

static int mga_dma_indices(struct drm_device *dev, void *data, struct drm_file *file_priv)
{
	drm_mga_private_t *dev_priv = dev->dev_private;
	struct drm_device_dma *dma = dev->dma;
	struct drm_buf *buf;
	drm_mga_buf_priv_t *buf_priv;
	drm_mga_indices_t *indices = data;

	LOCK_TEST_WITH_RETURN(dev, file_priv);

	if (indices->idx < 0 || indices->idx >= dma->buf_count)
		return -EINVAL;

	buf = dma->buflist[indices->idx];
	buf_priv = buf->dev_private;

	buf_priv->discard = indices->discard;

	if (!mga_verify_state(dev_priv)) {
		if (indices->discard) {
			if (buf_priv->dispatched == 1)
				AGE_BUFFER(buf_priv);
			buf_priv->dispatched = 0;
			mga_freelist_put(dev, buf);
		}
		return -EINVAL;
	}

	WRAP_TEST_WITH_RETURN(dev_priv);

	mga_dma_dispatch_indices(dev, buf, indices->start, indices->end);

	return 0;
}

static int mga_dma_iload(struct drm_device *dev, void *data, struct drm_file *file_priv)
{
	struct drm_device_dma *dma = dev->dma;
	drm_mga_private_t *dev_priv = dev->dev_private;
	struct drm_buf *buf;
	drm_mga_buf_priv_t *buf_priv;
	drm_mga_iload_t *iload = data;
	DRM_DEBUG("\n");

	LOCK_TEST_WITH_RETURN(dev, file_priv);

#if 0
	if (mga_do_wait_for_idle(dev_priv) < 0) {
		if (MGA_DMA_DEBUG)
			DRM_INFO("-EBUSY\n");
		return -EBUSY;
	}
#endif
	if (iload->idx < 0 || iload->idx >= dma->buf_count)
		return -EINVAL;

	buf = dma->buflist[iload->idx];
	buf_priv = buf->dev_private;

	if (mga_verify_iload(dev_priv, iload->dstorg, iload->length)) {
		mga_freelist_put(dev, buf);
		return -EINVAL;
	}

	WRAP_TEST_WITH_RETURN(dev_priv);

	mga_dma_dispatch_iload(dev, buf, iload->dstorg, iload->length);

	/* Make sure we restore the 3D state next time.
	 */
	dev_priv->sarea_priv->dirty |= MGA_UPLOAD_CONTEXT;

	return 0;
}

static int mga_dma_blit(struct drm_device *dev, void *data, struct drm_file *file_priv)
{
	drm_mga_private_t *dev_priv = dev->dev_private;
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_mga_blit_t *blit = data;
	DRM_DEBUG("\n");

	LOCK_TEST_WITH_RETURN(dev, file_priv);

	if (sarea_priv->nbox > MGA_NR_SAREA_CLIPRECTS)
		sarea_priv->nbox = MGA_NR_SAREA_CLIPRECTS;

	if (mga_verify_blit(dev_priv, blit->srcorg, blit->dstorg))
		return -EINVAL;

	WRAP_TEST_WITH_RETURN(dev_priv);

	mga_dma_dispatch_blit(dev, blit);

	/* Make sure we restore the 3D state next time.
	 */
	dev_priv->sarea_priv->dirty |= MGA_UPLOAD_CONTEXT;

	return 0;
}

static int mga_getparam(struct drm_device *dev, void *data, struct drm_file *file_priv)
{
	drm_mga_private_t *dev_priv = dev->dev_private;
	drm_mga_getparam_t *param = data;
	int value;

	if (!dev_priv) {
		DRM_ERROR("called with no initialization\n");
		return -EINVAL;
	}

	DRM_DEBUG("pid=%d\n", DRM_CURRENTPID);

	switch (param->param) {
	case MGA_PARAM_IRQ_NR:
		value = dev->irq;
		break;
	case MGA_PARAM_CARD_TYPE:
		value = dev_priv->chipset;
		break;
	default:
		return -EINVAL;
	}

	if (DRM_COPY_TO_USER(param->value, &value, sizeof(int))) {
		DRM_ERROR("copy_to_user\n");
		return -EFAULT;
	}

	return 0;
}

static int mga_set_fence(struct drm_device *dev, void *data, struct drm_file *file_priv)
{
	drm_mga_private_t *dev_priv = dev->dev_private;
	u32 *fence = data;
	DMA_LOCALS;

	if (!dev_priv) {
		DRM_ERROR("called with no initialization\n");
		return -EINVAL;
	}

	DRM_DEBUG("pid=%d\n", DRM_CURRENTPID);

	/* I would normally do this assignment in the declaration of fence,
	 * but dev_priv may be NULL.
	 */
	*fence = dev_priv->next_fence_to_post;
	dev_priv->next_fence_to_post++;

	BEGIN_DMA(1);
	DMA_BLOCK(MGA_DMAPAD, 0x00000000,
		  MGA_DMAPAD, 0x00000000,
		  MGA_DMAPAD, 0x00000000,
		  MGA_SOFTRAP, 0x00000000);
	ADVANCE_DMA();

	return 0;
}

static int mga_wait_fence(struct drm_device *dev, void *data, struct drm_file *file_priv)
{
	drm_mga_private_t *dev_priv = dev->dev_private;
	u32 *fence = data;

	if (!dev_priv) {
		DRM_ERROR("called with no initialization\n");
		return -EINVAL;
	}

	DRM_DEBUG("pid=%d\n", DRM_CURRENTPID);

	mga_driver_fence_wait(dev, fence);

	return 0;
}

struct drm_ioctl_desc mga_ioctls[] = {
	DRM_IOCTL_DEF(DRM_MGA_INIT, mga_dma_init, DRM_AUTH|DRM_MASTER|DRM_ROOT_ONLY),
	DRM_IOCTL_DEF(DRM_MGA_FLUSH, mga_dma_flush, DRM_AUTH),
	DRM_IOCTL_DEF(DRM_MGA_RESET, mga_dma_reset, DRM_AUTH),
	DRM_IOCTL_DEF(DRM_MGA_SWAP, mga_dma_swap, DRM_AUTH),
	DRM_IOCTL_DEF(DRM_MGA_CLEAR, mga_dma_clear, DRM_AUTH),
	DRM_IOCTL_DEF(DRM_MGA_VERTEX, mga_dma_vertex, DRM_AUTH),
	DRM_IOCTL_DEF(DRM_MGA_INDICES, mga_dma_indices, DRM_AUTH),
	DRM_IOCTL_DEF(DRM_MGA_ILOAD, mga_dma_iload, DRM_AUTH),
	DRM_IOCTL_DEF(DRM_MGA_BLIT, mga_dma_blit, DRM_AUTH),
	DRM_IOCTL_DEF(DRM_MGA_GETPARAM, mga_getparam, DRM_AUTH),
	DRM_IOCTL_DEF(DRM_MGA_SET_FENCE, mga_set_fence, DRM_AUTH),
	DRM_IOCTL_DEF(DRM_MGA_WAIT_FENCE, mga_wait_fence, DRM_AUTH),
	DRM_IOCTL_DEF(DRM_MGA_DMA_BOOTSTRAP, mga_dma_bootstrap, DRM_AUTH|DRM_MASTER|DRM_ROOT_ONLY),
};

int mga_max_ioctl = DRM_ARRAY_SIZE(mga_ioctls);