/* mga_state.c -- State support for MGA G200/G400 -*- linux-c -*-
 * Created: Thu Jan 27 02:53:43 2000 by jhartmann@precisioninsight.com
 */
/*
 * Copyright 1999 Precision Insight, Inc., Cedar Park, Texas.
 * Copyright 2000 VA Linux Systems, Inc., Sunnyvale, California.
 * All Rights Reserved.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * VA LINUX SYSTEMS AND/OR ITS SUPPLIERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors:
 *    Jeff Hartmann <jhartmann@valinux.com>
 *    Keith Whitwell <keith@tungstengraphics.com>
 *
 * Rewritten by:
 *    Gareth Hughes <gareth@valinux.com>
 *
 * $DragonFly: src/sys/dev/drm/mga_state.c,v 1.1 2008/04/05 18:12:29 hasso Exp $
 */

#include "drmP.h"
#include "drm.h"
#include "mga_drm.h"
#include "mga_drv.h"
/* ================================================================
 * DMA hardware state programming functions
 */

static void mga_emit_clip_rect(drm_mga_private_t * dev_priv,
			       struct drm_clip_rect * box)
{
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_mga_context_regs_t *ctx = &sarea_priv->context_state;
	unsigned int pitch = dev_priv->front_pitch;
	DMA_LOCALS;

	BEGIN_DMA(2);

	/* Force reset of DWGCTL on G400 (eliminates clip disable bit).
	 */
	if (dev_priv->chipset >= MGA_CARD_TYPE_G400) {
		DMA_BLOCK(MGA_DWGCTL, ctx->dwgctl,
			  MGA_LEN + MGA_EXEC, 0x80000000,
			  MGA_DWGCTL, ctx->dwgctl,
			  MGA_LEN + MGA_EXEC, 0x80000000);
	}
	DMA_BLOCK(MGA_DMAPAD, 0x00000000,
		  MGA_CXBNDRY, ((box->x2 - 1) << 16) | box->x1,
		  MGA_YTOP, box->y1 * pitch, MGA_YBOT, (box->y2 - 1) * pitch);

	ADVANCE_DMA();
}
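
/* Emit the G200 drawing-engine context (destination origin, plane write
 * mask, DWGCTL, alpha/fog state) from the values cached in the SAREA.
 */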
static __inline__ void mga_g200_emit_context(drm_mga_private_t * dev_priv)
{
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_mga_context_regs_t *ctx = &sarea_priv->context_state;
	DMA_LOCALS;

	BEGIN_DMA(3);

	DMA_BLOCK(MGA_DSTORG, ctx->dstorg,
		  MGA_MACCESS, ctx->maccess,
		  MGA_PLNWT, ctx->plnwt, MGA_DWGCTL, ctx->dwgctl);

	DMA_BLOCK(MGA_ALPHACTRL, ctx->alphactrl,
		  MGA_FOGCOL, ctx->fogcolor,
		  MGA_WFLAG, ctx->wflag, MGA_ZORG, dev_priv->depth_offset);

	DMA_BLOCK(MGA_FCOL, ctx->fcol,
		  MGA_DMAPAD, 0x00000000,
		  MGA_DMAPAD, 0x00000000, MGA_DMAPAD, 0x00000000);

	ADVANCE_DMA();
}
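
/* G400 variant of the context upload: also programs the second WARP flag
 * register, the dual-texture stages and the stencil state.
 */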
static __inline__ void mga_g400_emit_context(drm_mga_private_t * dev_priv)
{
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_mga_context_regs_t *ctx = &sarea_priv->context_state;
	DMA_LOCALS;

	BEGIN_DMA(4);

	DMA_BLOCK(MGA_DSTORG, ctx->dstorg,
		  MGA_MACCESS, ctx->maccess,
		  MGA_PLNWT, ctx->plnwt,
		  MGA_DWGCTL, ctx->dwgctl);

	DMA_BLOCK(MGA_ALPHACTRL, ctx->alphactrl,
		  MGA_FOGCOL, ctx->fogcolor,
		  MGA_WFLAG, ctx->wflag,
		  MGA_ZORG, dev_priv->depth_offset);

	DMA_BLOCK(MGA_WFLAG1, ctx->wflag,
		  MGA_TDUALSTAGE0, ctx->tdualstage0,
		  MGA_TDUALSTAGE1, ctx->tdualstage1,
		  MGA_FCOL, ctx->fcol);

	DMA_BLOCK(MGA_STENCIL, ctx->stencil,
		  MGA_STENCILCTL, ctx->stencilctl,
		  MGA_DMAPAD, 0x00000000,
		  MGA_DMAPAD, 0x00000000);

	ADVANCE_DMA();
}
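
/* Upload the texture unit 0 state cached in the SAREA for the G200. */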
static __inline__ void mga_g200_emit_tex0(drm_mga_private_t * dev_priv)
{
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_mga_texture_regs_t *tex = &sarea_priv->tex_state[0];
	DMA_LOCALS;

	BEGIN_DMA(4);

	DMA_BLOCK(MGA_TEXCTL2, tex->texctl2,
		  MGA_TEXCTL, tex->texctl,
		  MGA_TEXFILTER, tex->texfilter,
		  MGA_TEXBORDERCOL, tex->texbordercol);

	DMA_BLOCK(MGA_TEXORG, tex->texorg,
		  MGA_TEXORG1, tex->texorg1,
		  MGA_TEXORG2, tex->texorg2,
		  MGA_TEXORG3, tex->texorg3);

	DMA_BLOCK(MGA_TEXORG4, tex->texorg4,
		  MGA_TEXWIDTH, tex->texwidth,
		  MGA_TEXHEIGHT, tex->texheight,
		  MGA_WR24, tex->texwidth);

	DMA_BLOCK(MGA_WR34, tex->texheight,
		  MGA_TEXTRANS, 0x0000ffff,
		  MGA_TEXTRANSHIGH, 0x0000ffff,
		  MGA_DMAPAD, 0x00000000);

	ADVANCE_DMA();
}
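
/* Upload texture unit 0 state for the G400; the extra WARP register (WR)
 * writes use the MGA_G400_WR_MAGIC values.
 */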
static __inline__ void mga_g400_emit_tex0(drm_mga_private_t * dev_priv)
{
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_mga_texture_regs_t *tex = &sarea_priv->tex_state[0];
	DMA_LOCALS;

/*	printk("mga_g400_emit_tex0 %x %x %x\n", tex->texorg, */
/*	       tex->texctl, tex->texctl2); */

	BEGIN_DMA(6);

	DMA_BLOCK(MGA_TEXCTL2, tex->texctl2 | MGA_G400_TC2_MAGIC,
		  MGA_TEXCTL, tex->texctl,
		  MGA_TEXFILTER, tex->texfilter,
		  MGA_TEXBORDERCOL, tex->texbordercol);

	DMA_BLOCK(MGA_TEXORG, tex->texorg,
		  MGA_TEXORG1, tex->texorg1,
		  MGA_TEXORG2, tex->texorg2,
		  MGA_TEXORG3, tex->texorg3);

	DMA_BLOCK(MGA_TEXORG4, tex->texorg4,
		  MGA_TEXWIDTH, tex->texwidth,
		  MGA_TEXHEIGHT, tex->texheight,
		  MGA_WR49, 0x00000000);

	DMA_BLOCK(MGA_WR57, 0x00000000,
		  MGA_WR53, 0x00000000,
		  MGA_WR61, 0x00000000,
		  MGA_WR52, MGA_G400_WR_MAGIC);

	DMA_BLOCK(MGA_WR60, MGA_G400_WR_MAGIC,
		  MGA_WR54, tex->texwidth | MGA_G400_WR_MAGIC,
		  MGA_WR62, tex->texheight | MGA_G400_WR_MAGIC,
		  MGA_DMAPAD, 0x00000000);

	DMA_BLOCK(MGA_DMAPAD, 0x00000000,
		  MGA_DMAPAD, 0x00000000,
		  MGA_TEXTRANS, 0x0000ffff,
		  MGA_TEXTRANSHIGH, 0x0000ffff);

	ADVANCE_DMA();
}
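
/* Upload texture unit 1 state for the G400 and enable the second texture
 * map (MGA_MAP1_ENABLE).
 */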
static __inline__ void mga_g400_emit_tex1(drm_mga_private_t * dev_priv)
{
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_mga_texture_regs_t *tex = &sarea_priv->tex_state[1];
	DMA_LOCALS;

/*	printk("mga_g400_emit_tex1 %x %x %x\n", tex->texorg, */
/*	       tex->texctl, tex->texctl2); */

	BEGIN_DMA(5);

	DMA_BLOCK(MGA_TEXCTL2, (tex->texctl2 |
				MGA_MAP1_ENABLE |
				MGA_G400_TC2_MAGIC),
		  MGA_TEXCTL, tex->texctl,
		  MGA_TEXFILTER, tex->texfilter,
		  MGA_TEXBORDERCOL, tex->texbordercol);

	DMA_BLOCK(MGA_TEXORG, tex->texorg,
		  MGA_TEXORG1, tex->texorg1,
		  MGA_TEXORG2, tex->texorg2,
		  MGA_TEXORG3, tex->texorg3);

	DMA_BLOCK(MGA_TEXORG4, tex->texorg4,
		  MGA_TEXWIDTH, tex->texwidth,
		  MGA_TEXHEIGHT, tex->texheight,
		  MGA_WR49, 0x00000000);

	DMA_BLOCK(MGA_WR57, 0x00000000,
		  MGA_WR53, 0x00000000,
		  MGA_WR61, 0x00000000,
		  MGA_WR52, tex->texwidth | MGA_G400_WR_MAGIC);

	DMA_BLOCK(MGA_WR60, tex->texheight | MGA_G400_WR_MAGIC,
		  MGA_TEXTRANS, 0x0000ffff,
		  MGA_TEXTRANSHIGH, 0x0000ffff,
		  MGA_TEXCTL2, tex->texctl2 | MGA_G400_TC2_MAGIC);

	ADVANCE_DMA();
}
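
/* Suspend the WARP engine, load the vertex layout and scratch registers
 * for the current pipe, then restart it at warp_pipe_phys[pipe].
 */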
static __inline__ void mga_g200_emit_pipe(drm_mga_private_t * dev_priv)
{
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	unsigned int pipe = sarea_priv->warp_pipe;
	DMA_LOCALS;

	BEGIN_DMA(3);

	DMA_BLOCK(MGA_WIADDR, MGA_WMODE_SUSPEND,
		  MGA_WVRTXSZ, 0x00000007,
		  MGA_WFLAG, 0x00000000,
		  MGA_WR24, 0x00000000);

	DMA_BLOCK(MGA_WR25, 0x00000100,
		  MGA_WR34, 0x00000000,
		  MGA_WR42, 0x0000ffff,
		  MGA_WR60, 0x0000ffff);

	/* Padding required due to hardware bug.
	 */
	DMA_BLOCK(MGA_DMAPAD, 0xffffffff,
		  MGA_DMAPAD, 0xffffffff,
		  MGA_DMAPAD, 0xffffffff,
		  MGA_WIADDR, (dev_priv->warp_pipe_phys[pipe] |
			       MGA_WMODE_START | dev_priv->wagp_enable));

	ADVANCE_DMA();
}
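
/* G400 version of the pipe switch.  The vertex size and accept sequence
 * differ between the single- and dual-texture (MGA_T2) pipes, and leaving
 * a T2 pipe requires an explicit flush of the WARP pipeline first.
 */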
static __inline__ void mga_g400_emit_pipe(drm_mga_private_t * dev_priv)
{
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	unsigned int pipe = sarea_priv->warp_pipe;
	DMA_LOCALS;

/*	printk("mga_g400_emit_pipe %x\n", pipe); */

	BEGIN_DMA(10);

	DMA_BLOCK(MGA_WIADDR2, MGA_WMODE_SUSPEND,
		  MGA_DMAPAD, 0x00000000,
		  MGA_DMAPAD, 0x00000000,
		  MGA_DMAPAD, 0x00000000);

	if (pipe & MGA_T2) {
		DMA_BLOCK(MGA_WVRTXSZ, 0x00001e09,
			  MGA_DMAPAD, 0x00000000,
			  MGA_DMAPAD, 0x00000000,
			  MGA_DMAPAD, 0x00000000);

		DMA_BLOCK(MGA_WACCEPTSEQ, 0x00000000,
			  MGA_WACCEPTSEQ, 0x00000000,
			  MGA_WACCEPTSEQ, 0x00000000,
			  MGA_WACCEPTSEQ, 0x1e000000);
	} else {
		if (dev_priv->warp_pipe & MGA_T2) {
			/* Flush the WARP pipe */
			DMA_BLOCK(MGA_YDST, 0x00000000,
				  MGA_FXLEFT, 0x00000000,
				  MGA_FXRIGHT, 0x00000001,
				  MGA_DWGCTL, MGA_DWGCTL_FLUSH);

			DMA_BLOCK(MGA_LEN + MGA_EXEC, 0x00000001,
				  MGA_DWGSYNC, 0x00007000,
				  MGA_TEXCTL2, MGA_G400_TC2_MAGIC,
				  MGA_LEN + MGA_EXEC, 0x00000000);

			DMA_BLOCK(MGA_TEXCTL2, (MGA_DUALTEX |
						MGA_G400_TC2_MAGIC),
				  MGA_LEN + MGA_EXEC, 0x00000000,
				  MGA_TEXCTL2, MGA_G400_TC2_MAGIC,
				  MGA_DMAPAD, 0x00000000);
		}

		DMA_BLOCK(MGA_WVRTXSZ, 0x00001807,
			  MGA_DMAPAD, 0x00000000,
			  MGA_DMAPAD, 0x00000000,
			  MGA_DMAPAD, 0x00000000);

		DMA_BLOCK(MGA_WACCEPTSEQ, 0x00000000,
			  MGA_WACCEPTSEQ, 0x00000000,
			  MGA_WACCEPTSEQ, 0x00000000,
			  MGA_WACCEPTSEQ, 0x18000000);
	}

	DMA_BLOCK(MGA_WFLAG, 0x00000000,
		  MGA_WFLAG1, 0x00000000,
		  MGA_WR56, MGA_G400_WR56_MAGIC,
		  MGA_DMAPAD, 0x00000000);

	DMA_BLOCK(MGA_WR49, 0x00000000,	/* tex0 */
		  MGA_WR57, 0x00000000,	/* tex0 */
		  MGA_WR53, 0x00000000,	/* tex1 */
		  MGA_WR61, 0x00000000);	/* tex1 */

	DMA_BLOCK(MGA_WR54, MGA_G400_WR_MAGIC,	/* tex0 width */
		  MGA_WR62, MGA_G400_WR_MAGIC,	/* tex0 height */
		  MGA_WR52, MGA_G400_WR_MAGIC,	/* tex1 width */
		  MGA_WR60, MGA_G400_WR_MAGIC);	/* tex1 height */

	/* Padding required due to hardware bug */
	DMA_BLOCK(MGA_DMAPAD, 0xffffffff,
		  MGA_DMAPAD, 0xffffffff,
		  MGA_DMAPAD, 0xffffffff,
		  MGA_WIADDR2, (dev_priv->warp_pipe_phys[pipe] |
				MGA_WMODE_START | dev_priv->wagp_enable));

	ADVANCE_DMA();
}
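
/* Flush dirty SAREA state to the hardware.  The pipe is switched first if
 * the client selected a different WARP pipe; context and texture state
 * follow according to the MGA_UPLOAD_* dirty bits.
 */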
static void mga_g200_emit_state(drm_mga_private_t * dev_priv)
{
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	unsigned int dirty = sarea_priv->dirty;

	if (sarea_priv->warp_pipe != dev_priv->warp_pipe) {
		mga_g200_emit_pipe(dev_priv);
		dev_priv->warp_pipe = sarea_priv->warp_pipe;
	}

	if (dirty & MGA_UPLOAD_CONTEXT) {
		mga_g200_emit_context(dev_priv);
		sarea_priv->dirty &= ~MGA_UPLOAD_CONTEXT;
	}

	if (dirty & MGA_UPLOAD_TEX0) {
		mga_g200_emit_tex0(dev_priv);
		sarea_priv->dirty &= ~MGA_UPLOAD_TEX0;
	}
}
static void mga_g400_emit_state(drm_mga_private_t * dev_priv)
{
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	unsigned int dirty = sarea_priv->dirty;
	int multitex = sarea_priv->warp_pipe & MGA_T2;

	if (sarea_priv->warp_pipe != dev_priv->warp_pipe) {
		mga_g400_emit_pipe(dev_priv);
		dev_priv->warp_pipe = sarea_priv->warp_pipe;
	}

	if (dirty & MGA_UPLOAD_CONTEXT) {
		mga_g400_emit_context(dev_priv);
		sarea_priv->dirty &= ~MGA_UPLOAD_CONTEXT;
	}

	if (dirty & MGA_UPLOAD_TEX0) {
		mga_g400_emit_tex0(dev_priv);
		sarea_priv->dirty &= ~MGA_UPLOAD_TEX0;
	}

	if ((dirty & MGA_UPLOAD_TEX1) && multitex) {
		mga_g400_emit_tex1(dev_priv);
		sarea_priv->dirty &= ~MGA_UPLOAD_TEX1;
	}
}

/* ================================================================
 * SAREA state verification
 */

/* Disallow all write destinations except the front and backbuffer.
 */
static int mga_verify_context(drm_mga_private_t * dev_priv)
{
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_mga_context_regs_t *ctx = &sarea_priv->context_state;

	if (ctx->dstorg != dev_priv->front_offset &&
	    ctx->dstorg != dev_priv->back_offset) {
		DRM_ERROR("*** bad DSTORG: %x (front %x, back %x)\n\n",
			  ctx->dstorg, dev_priv->front_offset,
			  dev_priv->back_offset);
		ctx->dstorg = 0;
		return -EINVAL;
	}

	return 0;
}
/* Disallow texture reads from PCI space.
 */
static int mga_verify_tex(drm_mga_private_t * dev_priv, int unit)
{
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_mga_texture_regs_t *tex = &sarea_priv->tex_state[unit];
	unsigned int org;

	org = tex->texorg & (MGA_TEXORGMAP_MASK | MGA_TEXORGACC_MASK);

	if (org == (MGA_TEXORGMAP_SYSMEM | MGA_TEXORGACC_PCI)) {
		DRM_ERROR("*** bad TEXORG: 0x%x, unit %d\n", tex->texorg, unit);
		tex->texorg = 0;
		return -EINVAL;
	}

	return 0;
}
static int mga_verify_state(drm_mga_private_t * dev_priv)
{
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	unsigned int dirty = sarea_priv->dirty;
	int ret = 0;

	if (sarea_priv->nbox > MGA_NR_SAREA_CLIPRECTS)
		sarea_priv->nbox = MGA_NR_SAREA_CLIPRECTS;

	if (dirty & MGA_UPLOAD_CONTEXT)
		ret |= mga_verify_context(dev_priv);

	if (dirty & MGA_UPLOAD_TEX0)
		ret |= mga_verify_tex(dev_priv, 0);

	if (dev_priv->chipset >= MGA_CARD_TYPE_G400) {
		if (dirty & MGA_UPLOAD_TEX1)
			ret |= mga_verify_tex(dev_priv, 1);

		if (dirty & MGA_UPLOAD_PIPE)
			ret |= (sarea_priv->warp_pipe > MGA_MAX_G400_PIPES);
	} else {
		if (dirty & MGA_UPLOAD_PIPE)
			ret |= (sarea_priv->warp_pipe > MGA_MAX_G200_PIPES);
	}

	return (ret == 0);
}
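
/* Reject ILOAD requests that fall outside the texture heap or whose length
 * is not aligned to the ILOAD granularity (MGA_ILOAD_MASK).
 */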
static int mga_verify_iload(drm_mga_private_t * dev_priv,
			    unsigned int dstorg, unsigned int length)
{
	if (dstorg < dev_priv->texture_offset ||
	    dstorg + length > (dev_priv->texture_offset +
			       dev_priv->texture_size)) {
		DRM_ERROR("*** bad iload DSTORG: 0x%x\n", dstorg);
		return -EINVAL;
	}

	if (length & MGA_ILOAD_MASK) {
		DRM_ERROR("*** bad iload length: 0x%x\n",
			  length & MGA_ILOAD_MASK);
		return -EINVAL;
	}

	return 0;
}
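
/* Reject blits whose source or destination resolves to PCI system memory. */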
static int mga_verify_blit(drm_mga_private_t * dev_priv,
			   unsigned int srcorg, unsigned int dstorg)
{
	if ((srcorg & 0x3) == (MGA_SRCACC_PCI | MGA_SRCMAP_SYSMEM) ||
	    (dstorg & 0x3) == (MGA_SRCACC_PCI | MGA_SRCMAP_SYSMEM)) {
		DRM_ERROR("*** bad blit: src=0x%x dst=0x%x\n", srcorg, dstorg);
		return -EINVAL;
	}
	return 0;
}

/* ================================================================
 *
 */
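
/* Clear the front, back and/or depth buffers, one cliprect at a time,
 * according to clear->flags.  DWGCTL and PLNWT are restored afterwards.
 */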
static void mga_dma_dispatch_clear(struct drm_device * dev, drm_mga_clear_t * clear)
{
	drm_mga_private_t *dev_priv = dev->dev_private;
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_mga_context_regs_t *ctx = &sarea_priv->context_state;
	struct drm_clip_rect *pbox = sarea_priv->boxes;
	int nbox = sarea_priv->nbox;
	int i;
	DMA_LOCALS;
	DRM_DEBUG("\n");

	BEGIN_DMA(1);

	DMA_BLOCK(MGA_DMAPAD, 0x00000000,
		  MGA_DMAPAD, 0x00000000,
		  MGA_DWGSYNC, 0x00007100,
		  MGA_DWGSYNC, 0x00007000);

	ADVANCE_DMA();

	for (i = 0; i < nbox; i++) {
		struct drm_clip_rect *box = &pbox[i];
		u32 height = box->y2 - box->y1;

		DRM_DEBUG(" from=%d,%d to=%d,%d\n",
			  box->x1, box->y1, box->x2, box->y2);

		if (clear->flags & MGA_FRONT) {
			BEGIN_DMA(2);

			DMA_BLOCK(MGA_DMAPAD, 0x00000000,
				  MGA_PLNWT, clear->color_mask,
				  MGA_YDSTLEN, (box->y1 << 16) | height,
				  MGA_FXBNDRY, (box->x2 << 16) | box->x1);

			DMA_BLOCK(MGA_DMAPAD, 0x00000000,
				  MGA_FCOL, clear->clear_color,
				  MGA_DSTORG, dev_priv->front_offset,
				  MGA_DWGCTL + MGA_EXEC, dev_priv->clear_cmd);

			ADVANCE_DMA();
		}

		if (clear->flags & MGA_BACK) {
			BEGIN_DMA(2);

			DMA_BLOCK(MGA_DMAPAD, 0x00000000,
				  MGA_PLNWT, clear->color_mask,
				  MGA_YDSTLEN, (box->y1 << 16) | height,
				  MGA_FXBNDRY, (box->x2 << 16) | box->x1);

			DMA_BLOCK(MGA_DMAPAD, 0x00000000,
				  MGA_FCOL, clear->clear_color,
				  MGA_DSTORG, dev_priv->back_offset,
				  MGA_DWGCTL + MGA_EXEC, dev_priv->clear_cmd);

			ADVANCE_DMA();
		}

		if (clear->flags & MGA_DEPTH) {
			BEGIN_DMA(2);

			DMA_BLOCK(MGA_DMAPAD, 0x00000000,
				  MGA_PLNWT, clear->depth_mask,
				  MGA_YDSTLEN, (box->y1 << 16) | height,
				  MGA_FXBNDRY, (box->x2 << 16) | box->x1);

			DMA_BLOCK(MGA_DMAPAD, 0x00000000,
				  MGA_FCOL, clear->clear_depth,
				  MGA_DSTORG, dev_priv->depth_offset,
				  MGA_DWGCTL + MGA_EXEC, dev_priv->clear_cmd);

			ADVANCE_DMA();
		}
	}

	BEGIN_DMA(1);

	/* Force reset of DWGCTL */
	DMA_BLOCK(MGA_DMAPAD, 0x00000000,
		  MGA_DMAPAD, 0x00000000,
		  MGA_PLNWT, ctx->plnwt,
		  MGA_DWGCTL, ctx->dwgctl);

	ADVANCE_DMA();

	FLUSH_DMA();
}
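
/* Copy the back buffer to the front buffer for every cliprect.  The primary
 * DMA head and wrap are recorded in sarea_priv->last_frame before the blit
 * is queued.
 */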
static void mga_dma_dispatch_swap(struct drm_device * dev)
{
	drm_mga_private_t *dev_priv = dev->dev_private;
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_mga_context_regs_t *ctx = &sarea_priv->context_state;
	struct drm_clip_rect *pbox = sarea_priv->boxes;
	int nbox = sarea_priv->nbox;
	int i;
	DMA_LOCALS;
	DRM_DEBUG("\n");

	sarea_priv->last_frame.head = dev_priv->prim.tail;
	sarea_priv->last_frame.wrap = dev_priv->prim.last_wrap;

	BEGIN_DMA(4 + nbox);

	DMA_BLOCK(MGA_DMAPAD, 0x00000000,
		  MGA_DMAPAD, 0x00000000,
		  MGA_DWGSYNC, 0x00007100,
		  MGA_DWGSYNC, 0x00007000);

	DMA_BLOCK(MGA_DSTORG, dev_priv->front_offset,
		  MGA_MACCESS, dev_priv->maccess,
		  MGA_SRCORG, dev_priv->back_offset,
		  MGA_AR5, dev_priv->front_pitch);

	DMA_BLOCK(MGA_DMAPAD, 0x00000000,
		  MGA_DMAPAD, 0x00000000,
		  MGA_PLNWT, 0xffffffff,
		  MGA_DWGCTL, MGA_DWGCTL_COPY);

	for (i = 0; i < nbox; i++) {
		struct drm_clip_rect *box = &pbox[i];
		u32 height = box->y2 - box->y1;
		u32 start = box->y1 * dev_priv->front_pitch;

		DRM_DEBUG(" from=%d,%d to=%d,%d\n",
			  box->x1, box->y1, box->x2, box->y2);

		DMA_BLOCK(MGA_AR0, start + box->x2 - 1,
			  MGA_AR3, start + box->x1,
			  MGA_FXBNDRY, ((box->x2 - 1) << 16) | box->x1,
			  MGA_YDSTLEN + MGA_EXEC, (box->y1 << 16) | height);
	}

	DMA_BLOCK(MGA_DMAPAD, 0x00000000,
		  MGA_PLNWT, ctx->plnwt,
		  MGA_SRCORG, dev_priv->front_offset,
		  MGA_DWGCTL, ctx->dwgctl);

	ADVANCE_DMA();

	FLUSH_DMA();

	DRM_DEBUG("... done.\n");
}
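
/* Kick off a client vertex buffer.  Dirty state is emitted first, and the
 * buffer is replayed once per cliprect with the clip rectangle updated in
 * between.
 */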
static void mga_dma_dispatch_vertex(struct drm_device * dev, struct drm_buf * buf)
{
	drm_mga_private_t *dev_priv = dev->dev_private;
	drm_mga_buf_priv_t *buf_priv = buf->dev_private;
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	u32 address = (u32) buf->bus_address;
	u32 length = (u32) buf->used;
	int i = 0;
	DMA_LOCALS;
	DRM_DEBUG("buf=%d used=%d\n", buf->idx, buf->used);

	if (buf->used) {
		buf_priv->dispatched = 1;

		MGA_EMIT_STATE(dev_priv, sarea_priv->dirty);

		do {
			if (i < sarea_priv->nbox) {
				mga_emit_clip_rect(dev_priv,
						   &sarea_priv->boxes[i]);
			}

			BEGIN_DMA(1);

			DMA_BLOCK(MGA_DMAPAD, 0x00000000,
				  MGA_DMAPAD, 0x00000000,
				  MGA_SECADDRESS, (address |
						   MGA_DMA_VERTEX),
				  MGA_SECEND, ((address + length) |
					       dev_priv->dma_access));

			ADVANCE_DMA();
		} while (++i < sarea_priv->nbox);
	}

	if (buf_priv->discard) {
		AGE_BUFFER(buf_priv);
		buf->pending = 0;
		buf->used = 0;
		buf_priv->dispatched = 0;

		mga_freelist_put(dev, buf);
	}

	FLUSH_DMA();
}
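
/* Same as the vertex path, but only the [start, end) range of the buffer
 * is executed, via SETUPADDRESS/SETUPEND.
 */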
static void mga_dma_dispatch_indices(struct drm_device * dev, struct drm_buf * buf,
				     unsigned int start, unsigned int end)
{
	drm_mga_private_t *dev_priv = dev->dev_private;
	drm_mga_buf_priv_t *buf_priv = buf->dev_private;
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	u32 address = (u32) buf->bus_address;
	int i = 0;
	DMA_LOCALS;
	DRM_DEBUG("buf=%d start=%d end=%d\n", buf->idx, start, end);

	if (start != end) {
		buf_priv->dispatched = 1;

		MGA_EMIT_STATE(dev_priv, sarea_priv->dirty);

		do {
			if (i < sarea_priv->nbox) {
				mga_emit_clip_rect(dev_priv,
						   &sarea_priv->boxes[i]);
			}

			BEGIN_DMA(1);

			DMA_BLOCK(MGA_DMAPAD, 0x00000000,
				  MGA_DMAPAD, 0x00000000,
				  MGA_SETUPADDRESS, address + start,
				  MGA_SETUPEND, ((address + end) |
						 dev_priv->dma_access));

			ADVANCE_DMA();
		} while (++i < sarea_priv->nbox);
	}

	if (buf_priv->discard) {
		AGE_BUFFER(buf_priv);
		buf->pending = 0;
		buf->used = 0;
		buf_priv->dispatched = 0;

		mga_freelist_put(dev, buf);
	}

	FLUSH_DMA();
}
/* This copies a 64 byte aligned AGP region to the framebuffer with a
 * standard blit; the ioctl needs to do the checking.
 */
static void mga_dma_dispatch_iload(struct drm_device * dev, struct drm_buf * buf,
				   unsigned int dstorg, unsigned int length)
{
	drm_mga_private_t *dev_priv = dev->dev_private;
	drm_mga_buf_priv_t *buf_priv = buf->dev_private;
	drm_mga_context_regs_t *ctx = &dev_priv->sarea_priv->context_state;
	u32 srcorg = buf->bus_address | dev_priv->dma_access | MGA_SRCMAP_SYSMEM;
	u32 y2;
	DMA_LOCALS;
	DRM_DEBUG("buf=%d used=%d\n", buf->idx, buf->used);

	y2 = length / 64;

	BEGIN_DMA(5);

	DMA_BLOCK(MGA_DMAPAD, 0x00000000,
		  MGA_DMAPAD, 0x00000000,
		  MGA_DWGSYNC, 0x00007100,
		  MGA_DWGSYNC, 0x00007000);

	DMA_BLOCK(MGA_DSTORG, dstorg,
		  MGA_MACCESS, 0x00000000,
		  MGA_SRCORG, srcorg,
		  MGA_AR5, 64);

	DMA_BLOCK(MGA_PITCH, 64,
		  MGA_PLNWT, 0xffffffff,
		  MGA_DMAPAD, 0x00000000,
		  MGA_DWGCTL, MGA_DWGCTL_COPY);

	DMA_BLOCK(MGA_AR0, 63,
		  MGA_AR3, 0,
		  MGA_FXBNDRY, (63 << 16) | 0,
		  MGA_YDSTLEN + MGA_EXEC, y2);

	DMA_BLOCK(MGA_PLNWT, ctx->plnwt,
		  MGA_SRCORG, dev_priv->front_offset,
		  MGA_PITCH, dev_priv->front_pitch,
		  MGA_DWGSYNC, 0x00007000);

	ADVANCE_DMA();

	AGE_BUFFER(buf_priv);

	buf->pending = 0;
	buf->used = 0;
	buf_priv->dispatched = 0;

	mga_freelist_put(dev, buf);

	FLUSH_DMA();
}
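
/* Generic screen-to-screen blit used by the MGA_BLIT ioctl; each cliprect
 * is translated by the client-supplied deltas before being copied.
 */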
static void mga_dma_dispatch_blit(struct drm_device * dev, drm_mga_blit_t * blit)
{
	drm_mga_private_t *dev_priv = dev->dev_private;
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_mga_context_regs_t *ctx = &sarea_priv->context_state;
	struct drm_clip_rect *pbox = sarea_priv->boxes;
	int nbox = sarea_priv->nbox;
	u32 scandir = 0, i;
	DMA_LOCALS;
	DRM_DEBUG("\n");

	BEGIN_DMA(4 + nbox);

	DMA_BLOCK(MGA_DMAPAD, 0x00000000,
		  MGA_DMAPAD, 0x00000000,
		  MGA_DWGSYNC, 0x00007100,
		  MGA_DWGSYNC, 0x00007000);

	DMA_BLOCK(MGA_DWGCTL, MGA_DWGCTL_COPY,
		  MGA_PLNWT, blit->planemask,
		  MGA_SRCORG, blit->srcorg,
		  MGA_DSTORG, blit->dstorg);

	DMA_BLOCK(MGA_SGN, scandir,
		  MGA_MACCESS, dev_priv->maccess,
		  MGA_AR5, blit->ydir * blit->src_pitch,
		  MGA_PITCH, blit->dst_pitch);

	for (i = 0; i < nbox; i++) {
		int srcx = pbox[i].x1 + blit->delta_sx;
		int srcy = pbox[i].y1 + blit->delta_sy;
		int dstx = pbox[i].x1 + blit->delta_dx;
		int dsty = pbox[i].y1 + blit->delta_dy;
		int h = pbox[i].y2 - pbox[i].y1;
		int w = pbox[i].x2 - pbox[i].x1 - 1;
		int start;

		if (blit->ydir == -1) {
			srcy = blit->height - srcy - 1;
		}

		start = srcy * blit->src_pitch + srcx;

		DMA_BLOCK(MGA_AR0, start + w,
			  MGA_AR3, start,
			  MGA_FXBNDRY, ((dstx + w) << 16) | (dstx & 0xffff),
			  MGA_YDSTLEN + MGA_EXEC, (dsty << 16) | h);
	}

	/* Do something to flush AGP?
	 */

	/* Force reset of DWGCTL */
	DMA_BLOCK(MGA_DMAPAD, 0x00000000,
		  MGA_PLNWT, ctx->plnwt,
		  MGA_PITCH, dev_priv->front_pitch,
		  MGA_DWGCTL, ctx->dwgctl);

	ADVANCE_DMA();
}

/* ================================================================
 *
 */
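
/* ioctl handlers.  The rendering ioctls check that the caller holds the
 * hardware lock, clamp the cliprect count, verify the SAREA state and then
 * dispatch through the primary DMA stream.
 */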
static int mga_dma_clear(struct drm_device *dev, void *data, struct drm_file *file_priv)
{
	drm_mga_private_t *dev_priv = dev->dev_private;
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_mga_clear_t *clear = data;

	LOCK_TEST_WITH_RETURN(dev, file_priv);

	if (sarea_priv->nbox > MGA_NR_SAREA_CLIPRECTS)
		sarea_priv->nbox = MGA_NR_SAREA_CLIPRECTS;

	WRAP_TEST_WITH_RETURN(dev_priv);

	mga_dma_dispatch_clear(dev, clear);

	/* Make sure we restore the 3D state next time.
	 */
	dev_priv->sarea_priv->dirty |= MGA_UPLOAD_CONTEXT;

	return 0;
}
static int mga_dma_swap(struct drm_device *dev, void *data, struct drm_file *file_priv)
{
	drm_mga_private_t *dev_priv = dev->dev_private;
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;

	LOCK_TEST_WITH_RETURN(dev, file_priv);

	if (sarea_priv->nbox > MGA_NR_SAREA_CLIPRECTS)
		sarea_priv->nbox = MGA_NR_SAREA_CLIPRECTS;

	WRAP_TEST_WITH_RETURN(dev_priv);

	mga_dma_dispatch_swap(dev);

	/* Make sure we restore the 3D state next time.
	 */
	dev_priv->sarea_priv->dirty |= MGA_UPLOAD_CONTEXT;

	return 0;
}
static int mga_dma_vertex(struct drm_device *dev, void *data, struct drm_file *file_priv)
{
	drm_mga_private_t *dev_priv = dev->dev_private;
	struct drm_device_dma *dma = dev->dma;
	struct drm_buf *buf;
	drm_mga_buf_priv_t *buf_priv;
	drm_mga_vertex_t *vertex = data;

	LOCK_TEST_WITH_RETURN(dev, file_priv);

	if (vertex->idx < 0 || vertex->idx > dma->buf_count)
		return -EINVAL;
	buf = dma->buflist[vertex->idx];
	buf_priv = buf->dev_private;

	buf->used = vertex->used;
	buf_priv->discard = vertex->discard;

	if (!mga_verify_state(dev_priv)) {
		if (vertex->discard) {
			if (buf_priv->dispatched == 1)
				AGE_BUFFER(buf_priv);
			buf_priv->dispatched = 0;
			mga_freelist_put(dev, buf);
		}
		return -EINVAL;
	}

	WRAP_TEST_WITH_RETURN(dev_priv);

	mga_dma_dispatch_vertex(dev, buf);

	return 0;
}
static int mga_dma_indices(struct drm_device *dev, void *data, struct drm_file *file_priv)
{
	drm_mga_private_t *dev_priv = dev->dev_private;
	struct drm_device_dma *dma = dev->dma;
	struct drm_buf *buf;
	drm_mga_buf_priv_t *buf_priv;
	drm_mga_indices_t *indices = data;

	LOCK_TEST_WITH_RETURN(dev, file_priv);

	if (indices->idx < 0 || indices->idx > dma->buf_count)
		return -EINVAL;

	buf = dma->buflist[indices->idx];
	buf_priv = buf->dev_private;

	buf_priv->discard = indices->discard;

	if (!mga_verify_state(dev_priv)) {
		if (indices->discard) {
			if (buf_priv->dispatched == 1)
				AGE_BUFFER(buf_priv);
			buf_priv->dispatched = 0;
			mga_freelist_put(dev, buf);
		}
		return -EINVAL;
	}

	WRAP_TEST_WITH_RETURN(dev_priv);

	mga_dma_dispatch_indices(dev, buf, indices->start, indices->end);

	return 0;
}
static int mga_dma_iload(struct drm_device *dev, void *data, struct drm_file *file_priv)
{
	struct drm_device_dma *dma = dev->dma;
	drm_mga_private_t *dev_priv = dev->dev_private;
	struct drm_buf *buf;
	drm_mga_buf_priv_t *buf_priv;
	drm_mga_iload_t *iload = data;
	DRM_DEBUG("\n");

	LOCK_TEST_WITH_RETURN(dev, file_priv);

#if 0
	if (mga_do_wait_for_idle(dev_priv) < 0) {
		if (MGA_DMA_DEBUG)
			DRM_INFO("-EBUSY\n");
		return -EBUSY;
	}
#endif
	if (iload->idx < 0 || iload->idx > dma->buf_count)
		return -EINVAL;

	buf = dma->buflist[iload->idx];
	buf_priv = buf->dev_private;

	if (mga_verify_iload(dev_priv, iload->dstorg, iload->length)) {
		mga_freelist_put(dev, buf);
		return -EINVAL;
	}

	WRAP_TEST_WITH_RETURN(dev_priv);

	mga_dma_dispatch_iload(dev, buf, iload->dstorg, iload->length);

	/* Make sure we restore the 3D state next time.
	 */
	dev_priv->sarea_priv->dirty |= MGA_UPLOAD_CONTEXT;

	return 0;
}
static int mga_dma_blit(struct drm_device *dev, void *data, struct drm_file *file_priv)
{
	drm_mga_private_t *dev_priv = dev->dev_private;
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_mga_blit_t *blit = data;
	DRM_DEBUG("\n");

	LOCK_TEST_WITH_RETURN(dev, file_priv);

	if (sarea_priv->nbox > MGA_NR_SAREA_CLIPRECTS)
		sarea_priv->nbox = MGA_NR_SAREA_CLIPRECTS;

	if (mga_verify_blit(dev_priv, blit->srcorg, blit->dstorg))
		return -EINVAL;

	WRAP_TEST_WITH_RETURN(dev_priv);

	mga_dma_dispatch_blit(dev, blit);

	/* Make sure we restore the 3D state next time.
	 */
	dev_priv->sarea_priv->dirty |= MGA_UPLOAD_CONTEXT;

	return 0;
}
static int mga_getparam(struct drm_device *dev, void *data, struct drm_file *file_priv)
{
	drm_mga_private_t *dev_priv = dev->dev_private;
	drm_mga_getparam_t *param = data;
	int value;

	if (!dev_priv) {
		DRM_ERROR("called with no initialization\n");
		return -EINVAL;
	}

	DRM_DEBUG("pid=%d\n", DRM_CURRENTPID);

	switch (param->param) {
	case MGA_PARAM_IRQ_NR:
		value = dev->irq;
		break;
	case MGA_PARAM_CARD_TYPE:
		value = dev_priv->chipset;
		break;
	default:
		return -EINVAL;
	}

	if (DRM_COPY_TO_USER(param->value, &value, sizeof(int))) {
		DRM_ERROR("copy_to_user\n");
		return -EFAULT;
	}

	return 0;
}
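
/* Post a fence by emitting a SOFTRAP at the current end of the primary DMA
 * stream; the driver's SOFTRAP interrupt path retires it, and mga_wait_fence
 * below blocks until the requested value has been reached.
 */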
static int mga_set_fence(struct drm_device *dev, void *data, struct drm_file *file_priv)
{
	drm_mga_private_t *dev_priv = dev->dev_private;
	u32 *fence = data;
	DMA_LOCALS;

	if (!dev_priv) {
		DRM_ERROR("called with no initialization\n");
		return -EINVAL;
	}

	DRM_DEBUG("pid=%d\n", DRM_CURRENTPID);

	/* I would normally do this assignment in the declaration of fence,
	 * but dev_priv may be NULL.
	 */

	*fence = dev_priv->next_fence_to_post;
	dev_priv->next_fence_to_post++;

	BEGIN_DMA(1);
	DMA_BLOCK(MGA_DMAPAD, 0x00000000,
		  MGA_DMAPAD, 0x00000000,
		  MGA_DMAPAD, 0x00000000,
		  MGA_SOFTRAP, 0x00000000);
	ADVANCE_DMA();

	return 0;
}
static int mga_wait_fence(struct drm_device *dev, void *data, struct drm_file *file_priv)
{
	drm_mga_private_t *dev_priv = dev->dev_private;
	u32 *fence = data;

	if (!dev_priv) {
		DRM_ERROR("called with no initialization\n");
		return -EINVAL;
	}

	DRM_DEBUG("pid=%d\n", DRM_CURRENTPID);

	mga_driver_fence_wait(dev, fence);

	return 0;
}
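
/* Ioctl dispatch table; the DRM core indexes it by ioctl number and checks
 * the DRM_AUTH/DRM_MASTER/DRM_ROOT_ONLY flags before calling the handler.
 */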
struct drm_ioctl_desc mga_ioctls[] = {
	DRM_IOCTL_DEF(DRM_MGA_INIT, mga_dma_init, DRM_AUTH|DRM_MASTER|DRM_ROOT_ONLY),
	DRM_IOCTL_DEF(DRM_MGA_FLUSH, mga_dma_flush, DRM_AUTH),
	DRM_IOCTL_DEF(DRM_MGA_RESET, mga_dma_reset, DRM_AUTH),
	DRM_IOCTL_DEF(DRM_MGA_SWAP, mga_dma_swap, DRM_AUTH),
	DRM_IOCTL_DEF(DRM_MGA_CLEAR, mga_dma_clear, DRM_AUTH),
	DRM_IOCTL_DEF(DRM_MGA_VERTEX, mga_dma_vertex, DRM_AUTH),
	DRM_IOCTL_DEF(DRM_MGA_INDICES, mga_dma_indices, DRM_AUTH),
	DRM_IOCTL_DEF(DRM_MGA_ILOAD, mga_dma_iload, DRM_AUTH),
	DRM_IOCTL_DEF(DRM_MGA_BLIT, mga_dma_blit, DRM_AUTH),
	DRM_IOCTL_DEF(DRM_MGA_GETPARAM, mga_getparam, DRM_AUTH),
	DRM_IOCTL_DEF(DRM_MGA_SET_FENCE, mga_set_fence, DRM_AUTH),
	DRM_IOCTL_DEF(DRM_MGA_WAIT_FENCE, mga_wait_fence, DRM_AUTH),
	DRM_IOCTL_DEF(DRM_MGA_DMA_BOOTSTRAP, mga_dma_bootstrap, DRM_AUTH|DRM_MASTER|DRM_ROOT_ONLY),
};

int mga_max_ioctl = DRM_ARRAY_SIZE(mga_ioctls);