/* mga_state.c -- State support for MGA G200/G400 -*- linux-c -*-
 * Created: Thu Jan 27 02:53:43 2000 by jhartmann@precisioninsight.com
 *
 * Copyright 1999 Precision Insight, Inc., Cedar Park, Texas.
 * Copyright 2000 VA Linux Systems, Inc., Sunnyvale, California.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * VA LINUX SYSTEMS AND/OR ITS SUPPLIERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors:
 *    Jeff Hartmann <jhartmann@valinux.com>
 *    Keith Whitwell <keith@tungstengraphics.com>
 *    Gareth Hughes <gareth@valinux.com>
 */
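
/* Headers assumed for this file (the include block is not shown in this
 * excerpt): the DRM core headers plus the MGA driver headers.
 */
#include "drmP.h"
#include "drm.h"
#include "mga_drm.h"
#include "mga_drv.h"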

/* ================================================================
 * DMA hardware state programming functions
 */
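
/* Emit the clipping rectangle for a single clip box.  CXBNDRY holds the
 * left/right limits; YTOP and YBOT are computed from the box's y range and
 * the front buffer pitch.
 */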
static void mga_emit_clip_rect(drm_mga_private_t * dev_priv,
                               struct drm_clip_rect * box)
        drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
        drm_mga_context_regs_t *ctx = &sarea_priv->context_state;
        unsigned int pitch = dev_priv->front_pitch;

        /* Force reset of DWGCTL on G400 (eliminates clip disable bit).
         */
        if (dev_priv->chipset >= MGA_CARD_TYPE_G400) {
                DMA_BLOCK(MGA_DWGCTL, ctx->dwgctl,
                          MGA_LEN + MGA_EXEC, 0x80000000,
                          MGA_DWGCTL, ctx->dwgctl,
                          MGA_LEN + MGA_EXEC, 0x80000000);

        DMA_BLOCK(MGA_DMAPAD, 0x00000000,
                  MGA_CXBNDRY, ((box->x2 - 1) << 16) | box->x1,
                  MGA_YTOP, box->y1 * pitch, MGA_YBOT, (box->y2 - 1) * pitch);
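
/* Upload the G200 drawing context from the SAREA: destination origin,
 * plane write mask, drawing control, alpha/fog state and the depth origin.
 */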
static __inline__ void mga_g200_emit_context(drm_mga_private_t * dev_priv)
        drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
        drm_mga_context_regs_t *ctx = &sarea_priv->context_state;

        DMA_BLOCK(MGA_DSTORG, ctx->dstorg,
                  MGA_MACCESS, ctx->maccess,
                  MGA_PLNWT, ctx->plnwt, MGA_DWGCTL, ctx->dwgctl);

        DMA_BLOCK(MGA_ALPHACTRL, ctx->alphactrl,
                  MGA_FOGCOL, ctx->fogcolor,
                  MGA_WFLAG, ctx->wflag, MGA_ZORG, dev_priv->depth_offset);

        DMA_BLOCK(MGA_FCOL, ctx->fcol,
                  MGA_DMAPAD, 0x00000000,
                  MGA_DMAPAD, 0x00000000, MGA_DMAPAD, 0x00000000);
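
/* G400 version of the context upload: same registers as the G200 path plus
 * the second WFLAG register, the dual texture stage controls and the
 * stencil state.
 */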
static __inline__ void mga_g400_emit_context(drm_mga_private_t * dev_priv)
        drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
        drm_mga_context_regs_t *ctx = &sarea_priv->context_state;

        DMA_BLOCK(MGA_DSTORG, ctx->dstorg,
                  MGA_MACCESS, ctx->maccess,
                  MGA_PLNWT, ctx->plnwt, MGA_DWGCTL, ctx->dwgctl);

        DMA_BLOCK(MGA_ALPHACTRL, ctx->alphactrl,
                  MGA_FOGCOL, ctx->fogcolor,
                  MGA_WFLAG, ctx->wflag, MGA_ZORG, dev_priv->depth_offset);

        DMA_BLOCK(MGA_WFLAG1, ctx->wflag,
                  MGA_TDUALSTAGE0, ctx->tdualstage0,
                  MGA_TDUALSTAGE1, ctx->tdualstage1, MGA_FCOL, ctx->fcol);

        DMA_BLOCK(MGA_STENCIL, ctx->stencil,
                  MGA_STENCILCTL, ctx->stencilctl,
                  MGA_DMAPAD, 0x00000000, MGA_DMAPAD, 0x00000000);
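
/* Upload texture unit 0 state for the G200: texture control, filtering,
 * border color, the TEXORG origin registers and the texture dimensions
 * (mirrored into WR24/WR34).
 */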
static __inline__ void mga_g200_emit_tex0(drm_mga_private_t * dev_priv)
        drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
        drm_mga_texture_regs_t *tex = &sarea_priv->tex_state[0];

        DMA_BLOCK(MGA_TEXCTL2, tex->texctl2,
                  MGA_TEXCTL, tex->texctl,
                  MGA_TEXFILTER, tex->texfilter,
                  MGA_TEXBORDERCOL, tex->texbordercol);

        DMA_BLOCK(MGA_TEXORG, tex->texorg,
                  MGA_TEXORG1, tex->texorg1,
                  MGA_TEXORG2, tex->texorg2, MGA_TEXORG3, tex->texorg3);

        DMA_BLOCK(MGA_TEXORG4, tex->texorg4,
                  MGA_TEXWIDTH, tex->texwidth,
                  MGA_TEXHEIGHT, tex->texheight, MGA_WR24, tex->texwidth);

        DMA_BLOCK(MGA_WR34, tex->texheight,
                  MGA_TEXTRANS, 0x0000ffff,
                  MGA_TEXTRANSHIGH, 0x0000ffff, MGA_DMAPAD, 0x00000000);
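
/* G400 texture unit 0 upload.  TEXCTL2 needs the MGA_G400_TC2_MAGIC bit set,
 * and the width/height are also written into a set of WR registers with the
 * MGA_G400_WR_MAGIC bit or'ed in.
 */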
static __inline__ void mga_g400_emit_tex0(drm_mga_private_t * dev_priv)
        drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
        drm_mga_texture_regs_t *tex = &sarea_priv->tex_state[0];

        /* printk("mga_g400_emit_tex0 %x %x %x\n", tex->texorg, */
        /*        tex->texctl, tex->texctl2); */

        DMA_BLOCK(MGA_TEXCTL2, tex->texctl2 | MGA_G400_TC2_MAGIC,
                  MGA_TEXCTL, tex->texctl,
                  MGA_TEXFILTER, tex->texfilter,
                  MGA_TEXBORDERCOL, tex->texbordercol);

        DMA_BLOCK(MGA_TEXORG, tex->texorg,
                  MGA_TEXORG1, tex->texorg1,
                  MGA_TEXORG2, tex->texorg2, MGA_TEXORG3, tex->texorg3);

        DMA_BLOCK(MGA_TEXORG4, tex->texorg4,
                  MGA_TEXWIDTH, tex->texwidth,
                  MGA_TEXHEIGHT, tex->texheight, MGA_WR49, 0x00000000);

        DMA_BLOCK(MGA_WR57, 0x00000000,
                  MGA_WR53, 0x00000000,
                  MGA_WR61, 0x00000000, MGA_WR52, MGA_G400_WR_MAGIC);

        DMA_BLOCK(MGA_WR60, MGA_G400_WR_MAGIC,
                  MGA_WR54, tex->texwidth | MGA_G400_WR_MAGIC,
                  MGA_WR62, tex->texheight | MGA_G400_WR_MAGIC,
                  MGA_DMAPAD, 0x00000000);

        DMA_BLOCK(MGA_DMAPAD, 0x00000000,
                  MGA_DMAPAD, 0x00000000,
                  MGA_TEXTRANS, 0x0000ffff, MGA_TEXTRANSHIGH, 0x0000ffff);
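
/* G400 texture unit 1 upload, used only when a dual-texture (MGA_T2) WARP
 * pipe is active.  Mirrors the tex0 path but targets the second unit's WR
 * registers.
 */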
static __inline__ void mga_g400_emit_tex1(drm_mga_private_t * dev_priv)
        drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
        drm_mga_texture_regs_t *tex = &sarea_priv->tex_state[1];

        /* printk("mga_g400_emit_tex1 %x %x %x\n", tex->texorg, */
        /*        tex->texctl, tex->texctl2); */

        DMA_BLOCK(MGA_TEXCTL2, (tex->texctl2 |
                  MGA_TEXCTL, tex->texctl,
                  MGA_TEXFILTER, tex->texfilter,
                  MGA_TEXBORDERCOL, tex->texbordercol);

        DMA_BLOCK(MGA_TEXORG, tex->texorg,
                  MGA_TEXORG1, tex->texorg1,
                  MGA_TEXORG2, tex->texorg2, MGA_TEXORG3, tex->texorg3);

        DMA_BLOCK(MGA_TEXORG4, tex->texorg4,
                  MGA_TEXWIDTH, tex->texwidth,
                  MGA_TEXHEIGHT, tex->texheight, MGA_WR49, 0x00000000);

        DMA_BLOCK(MGA_WR57, 0x00000000,
                  MGA_WR53, 0x00000000,
                  MGA_WR61, 0x00000000,
                  MGA_WR52, tex->texwidth | MGA_G400_WR_MAGIC);

        DMA_BLOCK(MGA_WR60, tex->texheight | MGA_G400_WR_MAGIC,
                  MGA_TEXTRANS, 0x0000ffff,
                  MGA_TEXTRANSHIGH, 0x0000ffff,
                  MGA_TEXCTL2, tex->texctl2 | MGA_G400_TC2_MAGIC);
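
/* Select and restart the G200 WARP pipe: suspend it via WIADDR, program the
 * vertex size and WR register defaults, then point WIADDR at
 * warp_pipe_phys[pipe] and restart it with MGA_WMODE_START.
 */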
static __inline__ void mga_g200_emit_pipe(drm_mga_private_t * dev_priv)
        drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
        unsigned int pipe = sarea_priv->warp_pipe;

        DMA_BLOCK(MGA_WIADDR, MGA_WMODE_SUSPEND,
                  MGA_WVRTXSZ, 0x00000007,
                  MGA_WFLAG, 0x00000000, MGA_WR24, 0x00000000);

        DMA_BLOCK(MGA_WR25, 0x00000100,
                  MGA_WR34, 0x00000000,
                  MGA_WR42, 0x0000ffff, MGA_WR60, 0x0000ffff);

        /* Padding required due to hardware bug.
         */
        DMA_BLOCK(MGA_DMAPAD, 0xffffffff,
                  MGA_DMAPAD, 0xffffffff,
                  MGA_DMAPAD, 0xffffffff,
                  MGA_WIADDR, (dev_priv->warp_pipe_phys[pipe] |
                               MGA_WMODE_START | dev_priv->wagp_enable));
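
/* G400 equivalent of the pipe setup.  The vertex size and WACCEPTSEQ values
 * differ between the single- and dual-texture (MGA_T2) pipes, and switching
 * away from a T2 pipe requires a dummy draw to flush the WARP pipe first.
 */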
static __inline__ void mga_g400_emit_pipe(drm_mga_private_t * dev_priv)
        drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
        unsigned int pipe = sarea_priv->warp_pipe;

        /* printk("mga_g400_emit_pipe %x\n", pipe); */

        DMA_BLOCK(MGA_WIADDR2, MGA_WMODE_SUSPEND,
                  MGA_DMAPAD, 0x00000000,
                  MGA_DMAPAD, 0x00000000, MGA_DMAPAD, 0x00000000);

        DMA_BLOCK(MGA_WVRTXSZ, 0x00001e09,
                  MGA_DMAPAD, 0x00000000,
                  MGA_DMAPAD, 0x00000000, MGA_DMAPAD, 0x00000000);

        DMA_BLOCK(MGA_WACCEPTSEQ, 0x00000000,
                  MGA_WACCEPTSEQ, 0x00000000,
                  MGA_WACCEPTSEQ, 0x00000000,
                  MGA_WACCEPTSEQ, 0x1e000000);

        if (dev_priv->warp_pipe & MGA_T2) {
                /* Flush the WARP pipe */
                DMA_BLOCK(MGA_YDST, 0x00000000,
                          MGA_FXLEFT, 0x00000000,
                          MGA_FXRIGHT, 0x00000001,
                          MGA_DWGCTL, MGA_DWGCTL_FLUSH);

                DMA_BLOCK(MGA_LEN + MGA_EXEC, 0x00000001,
                          MGA_DWGSYNC, 0x00007000,
                          MGA_TEXCTL2, MGA_G400_TC2_MAGIC,
                          MGA_LEN + MGA_EXEC, 0x00000000);

                DMA_BLOCK(MGA_TEXCTL2, (MGA_DUALTEX |
                          MGA_LEN + MGA_EXEC, 0x00000000,
                          MGA_TEXCTL2, MGA_G400_TC2_MAGIC,
                          MGA_DMAPAD, 0x00000000);

        DMA_BLOCK(MGA_WVRTXSZ, 0x00001807,
                  MGA_DMAPAD, 0x00000000,
                  MGA_DMAPAD, 0x00000000, MGA_DMAPAD, 0x00000000);

        DMA_BLOCK(MGA_WACCEPTSEQ, 0x00000000,
                  MGA_WACCEPTSEQ, 0x00000000,
                  MGA_WACCEPTSEQ, 0x00000000,
                  MGA_WACCEPTSEQ, 0x18000000);

        DMA_BLOCK(MGA_WFLAG, 0x00000000,
                  MGA_WFLAG1, 0x00000000,
                  MGA_WR56, MGA_G400_WR56_MAGIC, MGA_DMAPAD, 0x00000000);

        DMA_BLOCK(MGA_WR49, 0x00000000,         /* tex0 */
                  MGA_WR57, 0x00000000,         /* tex0 */
                  MGA_WR53, 0x00000000,         /* tex1 */
                  MGA_WR61, 0x00000000);        /* tex1 */

        DMA_BLOCK(MGA_WR54, MGA_G400_WR_MAGIC,  /* tex0 width */
                  MGA_WR62, MGA_G400_WR_MAGIC,  /* tex0 height */
                  MGA_WR52, MGA_G400_WR_MAGIC,  /* tex1 width */
                  MGA_WR60, MGA_G400_WR_MAGIC); /* tex1 height */

        /* Padding required due to hardware bug */
        DMA_BLOCK(MGA_DMAPAD, 0xffffffff,
                  MGA_DMAPAD, 0xffffffff,
                  MGA_DMAPAD, 0xffffffff,
                  MGA_WIADDR2, (dev_priv->warp_pipe_phys[pipe] |
                                MGA_WMODE_START | dev_priv->wagp_enable));
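
/* Emit whichever pieces of G200 state the SAREA dirty flags say have
 * changed, switching WARP pipes first if the client selected a new one.
 */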
static void mga_g200_emit_state(drm_mga_private_t * dev_priv)
        drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
        unsigned int dirty = sarea_priv->dirty;

        if (sarea_priv->warp_pipe != dev_priv->warp_pipe) {
                mga_g200_emit_pipe(dev_priv);
                dev_priv->warp_pipe = sarea_priv->warp_pipe;

        if (dirty & MGA_UPLOAD_CONTEXT) {
                mga_g200_emit_context(dev_priv);
                sarea_priv->dirty &= ~MGA_UPLOAD_CONTEXT;

        if (dirty & MGA_UPLOAD_TEX0) {
                mga_g200_emit_tex0(dev_priv);
                sarea_priv->dirty &= ~MGA_UPLOAD_TEX0;
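
/* G400 version of the dirty-state upload.  Texture unit 1 is only emitted
 * when the selected WARP pipe is a dual-texture (MGA_T2) pipe.
 */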
static void mga_g400_emit_state(drm_mga_private_t * dev_priv)
        drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
        unsigned int dirty = sarea_priv->dirty;
        int multitex = sarea_priv->warp_pipe & MGA_T2;

        if (sarea_priv->warp_pipe != dev_priv->warp_pipe) {
                mga_g400_emit_pipe(dev_priv);
                dev_priv->warp_pipe = sarea_priv->warp_pipe;

        if (dirty & MGA_UPLOAD_CONTEXT) {
                mga_g400_emit_context(dev_priv);
                sarea_priv->dirty &= ~MGA_UPLOAD_CONTEXT;

        if (dirty & MGA_UPLOAD_TEX0) {
                mga_g400_emit_tex0(dev_priv);
                sarea_priv->dirty &= ~MGA_UPLOAD_TEX0;

        if ((dirty & MGA_UPLOAD_TEX1) && multitex) {
                mga_g400_emit_tex1(dev_priv);
                sarea_priv->dirty &= ~MGA_UPLOAD_TEX1;

/* ================================================================
 * SAREA state verification
 */

/* Disallow all write destinations except the front and backbuffer.
 */
static int mga_verify_context(drm_mga_private_t * dev_priv)
        drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
        drm_mga_context_regs_t *ctx = &sarea_priv->context_state;

        if (ctx->dstorg != dev_priv->front_offset &&
            ctx->dstorg != dev_priv->back_offset) {
                DRM_ERROR("*** bad DSTORG: %x (front %x, back %x)\n\n",
                          ctx->dstorg, dev_priv->front_offset,
                          dev_priv->back_offset);

                return DRM_ERR(EINVAL);

/* Disallow texture reads from PCI space.
 */
static int mga_verify_tex(drm_mga_private_t * dev_priv, int unit)
        drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
        drm_mga_texture_regs_t *tex = &sarea_priv->tex_state[unit];
        unsigned int org;

        org = tex->texorg & (MGA_TEXORGMAP_MASK | MGA_TEXORGACC_MASK);

        if (org == (MGA_TEXORGMAP_SYSMEM | MGA_TEXORGACC_PCI)) {
                DRM_ERROR("*** bad TEXORG: 0x%x, unit %d\n", tex->texorg, unit);

                return DRM_ERR(EINVAL);
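
/* Validate all state the client marked dirty before it is emitted: clamp the
 * cliprect count, check the drawing context and texture state, and make sure
 * the requested WARP pipe number is within range for the chip.
 */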
static int mga_verify_state(drm_mga_private_t * dev_priv)
        drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
        unsigned int dirty = sarea_priv->dirty;
        int ret = 0;

        if (sarea_priv->nbox > MGA_NR_SAREA_CLIPRECTS)
                sarea_priv->nbox = MGA_NR_SAREA_CLIPRECTS;

        if (dirty & MGA_UPLOAD_CONTEXT)
                ret |= mga_verify_context(dev_priv);

        if (dirty & MGA_UPLOAD_TEX0)
                ret |= mga_verify_tex(dev_priv, 0);

        if (dev_priv->chipset >= MGA_CARD_TYPE_G400) {
                if (dirty & MGA_UPLOAD_TEX1)
                        ret |= mga_verify_tex(dev_priv, 1);

                if (dirty & MGA_UPLOAD_PIPE)
                        ret |= (sarea_priv->warp_pipe > MGA_MAX_G400_PIPES);
        } else {
                if (dirty & MGA_UPLOAD_PIPE)
                        ret |= (sarea_priv->warp_pipe > MGA_MAX_G200_PIPES);
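
/* Reject iload destinations that fall outside the texture heap and lengths
 * that have bits set in MGA_ILOAD_MASK.
 */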
static int mga_verify_iload(drm_mga_private_t * dev_priv,
                            unsigned int dstorg, unsigned int length)
        if (dstorg < dev_priv->texture_offset ||
            dstorg + length > (dev_priv->texture_offset +
                               dev_priv->texture_size)) {
                DRM_ERROR("*** bad iload DSTORG: 0x%x\n", dstorg);
                return DRM_ERR(EINVAL);

        if (length & MGA_ILOAD_MASK) {
                DRM_ERROR("*** bad iload length: 0x%x\n",
                          length & MGA_ILOAD_MASK);
                return DRM_ERR(EINVAL);
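
/* Refuse blits whose source or destination lives in PCI-accessed system
 * memory.
 */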
static int mga_verify_blit(drm_mga_private_t * dev_priv,
                           unsigned int srcorg, unsigned int dstorg)
        if ((srcorg & 0x3) == (MGA_SRCACC_PCI | MGA_SRCMAP_SYSMEM) ||
            (dstorg & 0x3) == (MGA_SRCACC_PCI | MGA_SRCMAP_SYSMEM)) {
                DRM_ERROR("*** bad blit: src=0x%x dst=0x%x\n", srcorg, dstorg);
                return DRM_ERR(EINVAL);

/* ================================================================
 * DMA command dispatch
 */
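
/* Build the DMA stream that clears the front, back and/or depth buffers for
 * every cliprect, using the clear values and write masks supplied by the
 * client.
 */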
static void mga_dma_dispatch_clear(struct drm_device * dev, drm_mga_clear_t * clear)
        drm_mga_private_t *dev_priv = dev->dev_private;
        drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
        drm_mga_context_regs_t *ctx = &sarea_priv->context_state;
        struct drm_clip_rect *pbox = sarea_priv->boxes;
        int nbox = sarea_priv->nbox;

        DMA_BLOCK(MGA_DMAPAD, 0x00000000,
                  MGA_DMAPAD, 0x00000000,
                  MGA_DWGSYNC, 0x00007100, MGA_DWGSYNC, 0x00007000);

        for (i = 0; i < nbox; i++) {
                struct drm_clip_rect *box = &pbox[i];
                u32 height = box->y2 - box->y1;

                DRM_DEBUG(" from=%d,%d to=%d,%d\n",
                          box->x1, box->y1, box->x2, box->y2);

                if (clear->flags & MGA_FRONT) {
                        DMA_BLOCK(MGA_DMAPAD, 0x00000000,
                                  MGA_PLNWT, clear->color_mask,
                                  MGA_YDSTLEN, (box->y1 << 16) | height,
                                  MGA_FXBNDRY, (box->x2 << 16) | box->x1);

                        DMA_BLOCK(MGA_DMAPAD, 0x00000000,
                                  MGA_FCOL, clear->clear_color,
                                  MGA_DSTORG, dev_priv->front_offset,
                                  MGA_DWGCTL + MGA_EXEC, dev_priv->clear_cmd);

                if (clear->flags & MGA_BACK) {
                        DMA_BLOCK(MGA_DMAPAD, 0x00000000,
                                  MGA_PLNWT, clear->color_mask,
                                  MGA_YDSTLEN, (box->y1 << 16) | height,
                                  MGA_FXBNDRY, (box->x2 << 16) | box->x1);

                        DMA_BLOCK(MGA_DMAPAD, 0x00000000,
                                  MGA_FCOL, clear->clear_color,
                                  MGA_DSTORG, dev_priv->back_offset,
                                  MGA_DWGCTL + MGA_EXEC, dev_priv->clear_cmd);

                if (clear->flags & MGA_DEPTH) {
                        DMA_BLOCK(MGA_DMAPAD, 0x00000000,
                                  MGA_PLNWT, clear->depth_mask,
                                  MGA_YDSTLEN, (box->y1 << 16) | height,
                                  MGA_FXBNDRY, (box->x2 << 16) | box->x1);

                        DMA_BLOCK(MGA_DMAPAD, 0x00000000,
                                  MGA_FCOL, clear->clear_depth,
                                  MGA_DSTORG, dev_priv->depth_offset,
                                  MGA_DWGCTL + MGA_EXEC, dev_priv->clear_cmd);

        /* Force reset of DWGCTL */
        DMA_BLOCK(MGA_DMAPAD, 0x00000000,
                  MGA_DMAPAD, 0x00000000,
                  MGA_PLNWT, ctx->plnwt, MGA_DWGCTL, ctx->dwgctl);
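
/* Blit the back buffer to the front buffer for each cliprect.  The current
 * primary DMA tail and wrap count are recorded in the SAREA's last_frame
 * field first.
 */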
static void mga_dma_dispatch_swap(struct drm_device * dev)
        drm_mga_private_t *dev_priv = dev->dev_private;
        drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
        drm_mga_context_regs_t *ctx = &sarea_priv->context_state;
        struct drm_clip_rect *pbox = sarea_priv->boxes;
        int nbox = sarea_priv->nbox;

        sarea_priv->last_frame.head = dev_priv->prim.tail;
        sarea_priv->last_frame.wrap = dev_priv->prim.last_wrap;

        DMA_BLOCK(MGA_DMAPAD, 0x00000000,
                  MGA_DMAPAD, 0x00000000,
                  MGA_DWGSYNC, 0x00007100, MGA_DWGSYNC, 0x00007000);

        DMA_BLOCK(MGA_DSTORG, dev_priv->front_offset,
                  MGA_MACCESS, dev_priv->maccess,
                  MGA_SRCORG, dev_priv->back_offset,
                  MGA_AR5, dev_priv->front_pitch);

        DMA_BLOCK(MGA_DMAPAD, 0x00000000,
                  MGA_DMAPAD, 0x00000000,
                  MGA_PLNWT, 0xffffffff, MGA_DWGCTL, MGA_DWGCTL_COPY);

        for (i = 0; i < nbox; i++) {
                struct drm_clip_rect *box = &pbox[i];
                u32 height = box->y2 - box->y1;
                u32 start = box->y1 * dev_priv->front_pitch;

                DRM_DEBUG(" from=%d,%d to=%d,%d\n",
                          box->x1, box->y1, box->x2, box->y2);

                DMA_BLOCK(MGA_AR0, start + box->x2 - 1,
                          MGA_AR3, start + box->x1,
                          MGA_FXBNDRY, ((box->x2 - 1) << 16) | box->x1,
                          MGA_YDSTLEN + MGA_EXEC, (box->y1 << 16) | height);

        DMA_BLOCK(MGA_DMAPAD, 0x00000000,
                  MGA_PLNWT, ctx->plnwt,
                  MGA_SRCORG, dev_priv->front_offset, MGA_DWGCTL, ctx->dwgctl);

        DRM_DEBUG("%s... done.\n", __FUNCTION__);
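
/* Kick off a vertex buffer: emit any dirty state, then execute the buffer
 * once per cliprect via SECADDRESS/SECEND.  Buffers flagged for discard are
 * aged and returned to the freelist afterwards.
 */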
static void mga_dma_dispatch_vertex(struct drm_device * dev, struct drm_buf * buf)
        drm_mga_private_t *dev_priv = dev->dev_private;
        drm_mga_buf_priv_t *buf_priv = buf->dev_private;
        drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
        u32 address = (u32) buf->bus_address;
        u32 length = (u32) buf->used;

        DRM_DEBUG("vertex: buf=%d used=%d\n", buf->idx, buf->used);

        buf_priv->dispatched = 1;

        MGA_EMIT_STATE(dev_priv, sarea_priv->dirty);

        if (i < sarea_priv->nbox) {
                mga_emit_clip_rect(dev_priv,
                                   &sarea_priv->boxes[i]);

        DMA_BLOCK(MGA_DMAPAD, 0x00000000,
                  MGA_DMAPAD, 0x00000000,
                  MGA_SECADDRESS, (address |
                  MGA_SECEND, ((address + length) |
                               dev_priv->dma_access));

        } while (++i < sarea_priv->nbox);

        if (buf_priv->discard) {
                AGE_BUFFER(buf_priv);
                buf_priv->dispatched = 0;
                mga_freelist_put(dev, buf);
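
/* Like the vertex path, but executes an indexed sub-range of the buffer
 * through SETUPADDRESS/SETUPEND for each cliprect.
 */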
static void mga_dma_dispatch_indices(struct drm_device * dev, struct drm_buf * buf,
                                     unsigned int start, unsigned int end)
        drm_mga_private_t *dev_priv = dev->dev_private;
        drm_mga_buf_priv_t *buf_priv = buf->dev_private;
        drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
        u32 address = (u32) buf->bus_address;

        DRM_DEBUG("indices: buf=%d start=%d end=%d\n", buf->idx, start, end);

        buf_priv->dispatched = 1;

        MGA_EMIT_STATE(dev_priv, sarea_priv->dirty);

        if (i < sarea_priv->nbox) {
                mga_emit_clip_rect(dev_priv,
                                   &sarea_priv->boxes[i]);

        DMA_BLOCK(MGA_DMAPAD, 0x00000000,
                  MGA_DMAPAD, 0x00000000,
                  MGA_SETUPADDRESS, address + start,
                  MGA_SETUPEND, ((address + end) |
                                 dev_priv->dma_access));

        } while (++i < sarea_priv->nbox);

        if (buf_priv->discard) {
                AGE_BUFFER(buf_priv);
                buf_priv->dispatched = 0;
                mga_freelist_put(dev, buf);

/* This copies a 64-byte aligned AGP region to the framebuffer with a
 * standard blit; the ioctl needs to do the checking.
 */
static void mga_dma_dispatch_iload(struct drm_device * dev, struct drm_buf * buf,
                                   unsigned int dstorg, unsigned int length)
        drm_mga_private_t *dev_priv = dev->dev_private;
        drm_mga_buf_priv_t *buf_priv = buf->dev_private;
        drm_mga_context_regs_t *ctx = &dev_priv->sarea_priv->context_state;
        u32 srcorg =
            buf->bus_address | dev_priv->dma_access | MGA_SRCMAP_SYSMEM;

        DRM_DEBUG("buf=%d used=%d\n", buf->idx, buf->used);

        DMA_BLOCK(MGA_DMAPAD, 0x00000000,
                  MGA_DMAPAD, 0x00000000,
                  MGA_DWGSYNC, 0x00007100, MGA_DWGSYNC, 0x00007000);

        DMA_BLOCK(MGA_DSTORG, dstorg,
                  MGA_MACCESS, 0x00000000, MGA_SRCORG, srcorg, MGA_AR5, 64);

        DMA_BLOCK(MGA_PITCH, 64,
                  MGA_PLNWT, 0xffffffff,
                  MGA_DMAPAD, 0x00000000, MGA_DWGCTL, MGA_DWGCTL_COPY);

        DMA_BLOCK(MGA_AR0, 63,
                  MGA_FXBNDRY, (63 << 16) | 0, MGA_YDSTLEN + MGA_EXEC, y2);

        DMA_BLOCK(MGA_PLNWT, ctx->plnwt,
                  MGA_SRCORG, dev_priv->front_offset,
                  MGA_PITCH, dev_priv->front_pitch, MGA_DWGSYNC, 0x00007000);

        AGE_BUFFER(buf_priv);

        buf_priv->dispatched = 0;

        mga_freelist_put(dev, buf);
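
/* General blit back end: program the source and destination origins, pitches
 * and scan direction supplied by the client, then emit one blit rectangle
 * per cliprect.
 */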
static void mga_dma_dispatch_blit(struct drm_device * dev, drm_mga_blit_t * blit)
        drm_mga_private_t *dev_priv = dev->dev_private;
        drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
        drm_mga_context_regs_t *ctx = &sarea_priv->context_state;
        struct drm_clip_rect *pbox = sarea_priv->boxes;
        int nbox = sarea_priv->nbox;

        DMA_BLOCK(MGA_DMAPAD, 0x00000000,
                  MGA_DMAPAD, 0x00000000,
                  MGA_DWGSYNC, 0x00007100, MGA_DWGSYNC, 0x00007000);

        DMA_BLOCK(MGA_DWGCTL, MGA_DWGCTL_COPY,
                  MGA_PLNWT, blit->planemask,
                  MGA_SRCORG, blit->srcorg, MGA_DSTORG, blit->dstorg);

        DMA_BLOCK(MGA_SGN, scandir,
                  MGA_MACCESS, dev_priv->maccess,
                  MGA_AR5, blit->ydir * blit->src_pitch,
                  MGA_PITCH, blit->dst_pitch);

        for (i = 0; i < nbox; i++) {
                int srcx = pbox[i].x1 + blit->delta_sx;
                int srcy = pbox[i].y1 + blit->delta_sy;
                int dstx = pbox[i].x1 + blit->delta_dx;
                int dsty = pbox[i].y1 + blit->delta_dy;
                int h = pbox[i].y2 - pbox[i].y1;
                int w = pbox[i].x2 - pbox[i].x1 - 1;

                if (blit->ydir == -1) {
                        srcy = blit->height - srcy - 1;

                start = srcy * blit->src_pitch + srcx;

                DMA_BLOCK(MGA_AR0, start + w,
                          MGA_FXBNDRY, ((dstx + w) << 16) | (dstx & 0xffff),
                          MGA_YDSTLEN + MGA_EXEC, (dsty << 16) | h);

        /* Do something to flush AGP?
         */

        /* Force reset of DWGCTL */
        DMA_BLOCK(MGA_DMAPAD, 0x00000000,
                  MGA_PLNWT, ctx->plnwt,
                  MGA_PITCH, dev_priv->front_pitch, MGA_DWGCTL, ctx->dwgctl);

/* ================================================================
 * IOCTL functions
 */
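
/* Clear ioctl: copy the request from user space, clamp the cliprect count
 * and hand off to mga_dma_dispatch_clear().
 */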
static int mga_dma_clear(DRM_IOCTL_ARGS)
        drm_mga_private_t *dev_priv = dev->dev_private;
        drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
        drm_mga_clear_t clear;

        LOCK_TEST_WITH_RETURN(dev, filp);

        DRM_COPY_FROM_USER_IOCTL(clear, (drm_mga_clear_t __user *) data,
                                 sizeof(clear));

        if (sarea_priv->nbox > MGA_NR_SAREA_CLIPRECTS)
                sarea_priv->nbox = MGA_NR_SAREA_CLIPRECTS;

        WRAP_TEST_WITH_RETURN(dev_priv);

        mga_dma_dispatch_clear(dev, &clear);

        /* Make sure we restore the 3D state next time.
         */
        dev_priv->sarea_priv->dirty |= MGA_UPLOAD_CONTEXT;

static int mga_dma_swap(DRM_IOCTL_ARGS)
        drm_mga_private_t *dev_priv = dev->dev_private;
        drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;

        LOCK_TEST_WITH_RETURN(dev, filp);

        if (sarea_priv->nbox > MGA_NR_SAREA_CLIPRECTS)
                sarea_priv->nbox = MGA_NR_SAREA_CLIPRECTS;

        WRAP_TEST_WITH_RETURN(dev_priv);

        mga_dma_dispatch_swap(dev);

        /* Make sure we restore the 3D state next time.
         */
        dev_priv->sarea_priv->dirty |= MGA_UPLOAD_CONTEXT;
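
/* Vertex and indices ioctls: validate the buffer index and the SAREA state
 * before dispatching.  When the state is rejected, a buffer flagged for
 * discard is still aged and returned to the freelist so it is not leaked.
 */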
static int mga_dma_vertex(DRM_IOCTL_ARGS)
        drm_mga_private_t *dev_priv = dev->dev_private;
        struct drm_device_dma *dma = dev->dma;
        struct drm_buf *buf;
        drm_mga_buf_priv_t *buf_priv;
        drm_mga_vertex_t vertex;

        LOCK_TEST_WITH_RETURN(dev, filp);

        DRM_COPY_FROM_USER_IOCTL(vertex,
                                 (drm_mga_vertex_t __user *) data,
                                 sizeof(vertex));

        if (vertex.idx < 0 || vertex.idx >= dma->buf_count)
                return DRM_ERR(EINVAL);
        buf = dma->buflist[vertex.idx];
        buf_priv = buf->dev_private;

        buf->used = vertex.used;
        buf_priv->discard = vertex.discard;

        if (!mga_verify_state(dev_priv)) {
                if (vertex.discard) {
                        if (buf_priv->dispatched == 1)
                                AGE_BUFFER(buf_priv);
                        buf_priv->dispatched = 0;
                        mga_freelist_put(dev, buf);

                return DRM_ERR(EINVAL);

        WRAP_TEST_WITH_RETURN(dev_priv);

        mga_dma_dispatch_vertex(dev, buf);

static int mga_dma_indices(DRM_IOCTL_ARGS)
        drm_mga_private_t *dev_priv = dev->dev_private;
        struct drm_device_dma *dma = dev->dma;
        struct drm_buf *buf;
        drm_mga_buf_priv_t *buf_priv;
        drm_mga_indices_t indices;

        LOCK_TEST_WITH_RETURN(dev, filp);

        DRM_COPY_FROM_USER_IOCTL(indices,
                                 (drm_mga_indices_t __user *) data,
                                 sizeof(indices));

        if (indices.idx < 0 || indices.idx >= dma->buf_count)
                return DRM_ERR(EINVAL);

        buf = dma->buflist[indices.idx];
        buf_priv = buf->dev_private;

        buf_priv->discard = indices.discard;

        if (!mga_verify_state(dev_priv)) {
                if (indices.discard) {
                        if (buf_priv->dispatched == 1)
                                AGE_BUFFER(buf_priv);
                        buf_priv->dispatched = 0;
                        mga_freelist_put(dev, buf);

                return DRM_ERR(EINVAL);

        WRAP_TEST_WITH_RETURN(dev_priv);

        mga_dma_dispatch_indices(dev, buf, indices.start, indices.end);
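
/* Iload ioctl: wait for the engine to idle, validate the destination region,
 * then stream the buffer contents into the texture heap.
 */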
static int mga_dma_iload(DRM_IOCTL_ARGS)
        struct drm_device_dma *dma = dev->dma;
        drm_mga_private_t *dev_priv = dev->dev_private;
        struct drm_buf *buf;
        drm_mga_buf_priv_t *buf_priv;
        drm_mga_iload_t iload;

        LOCK_TEST_WITH_RETURN(dev, filp);

        DRM_COPY_FROM_USER_IOCTL(iload, (drm_mga_iload_t __user *) data,
                                 sizeof(iload));

        if (mga_do_wait_for_idle(dev_priv) < 0) {
                DRM_INFO("%s: -EBUSY\n", __FUNCTION__);
                return DRM_ERR(EBUSY);

        if (iload.idx < 0 || iload.idx >= dma->buf_count)
                return DRM_ERR(EINVAL);

        buf = dma->buflist[iload.idx];
        buf_priv = buf->dev_private;

        if (mga_verify_iload(dev_priv, iload.dstorg, iload.length)) {
                mga_freelist_put(dev, buf);
                return DRM_ERR(EINVAL);

        WRAP_TEST_WITH_RETURN(dev_priv);

        mga_dma_dispatch_iload(dev, buf, iload.dstorg, iload.length);

        /* Make sure we restore the 3D state next time.
         */
        dev_priv->sarea_priv->dirty |= MGA_UPLOAD_CONTEXT;
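
/* Blit ioctl: verify the source and destination origins before dispatching,
 * and mark the context dirty afterwards so the 3D state is restored before
 * the next draw.
 */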
static int mga_dma_blit(DRM_IOCTL_ARGS)
        drm_mga_private_t *dev_priv = dev->dev_private;
        drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
        drm_mga_blit_t blit;

        LOCK_TEST_WITH_RETURN(dev, filp);

        DRM_COPY_FROM_USER_IOCTL(blit, (drm_mga_blit_t __user *) data,
                                 sizeof(blit));

        if (sarea_priv->nbox > MGA_NR_SAREA_CLIPRECTS)
                sarea_priv->nbox = MGA_NR_SAREA_CLIPRECTS;

        if (mga_verify_blit(dev_priv, blit.srcorg, blit.dstorg))
                return DRM_ERR(EINVAL);

        WRAP_TEST_WITH_RETURN(dev_priv);

        mga_dma_dispatch_blit(dev, &blit);

        /* Make sure we restore the 3D state next time.
         */
        dev_priv->sarea_priv->dirty |= MGA_UPLOAD_CONTEXT;
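
/* Getparam ioctl: copy the requested driver parameter (e.g.
 * MGA_PARAM_CARD_TYPE, the chipset) back to user space.
 */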
static int mga_getparam(DRM_IOCTL_ARGS)
        drm_mga_private_t *dev_priv = dev->dev_private;
        drm_mga_getparam_t param;
        int value;

        if (!dev_priv) {
                DRM_ERROR("%s called with no initialization\n", __FUNCTION__);
                return DRM_ERR(EINVAL);

        DRM_COPY_FROM_USER_IOCTL(param, (drm_mga_getparam_t __user *) data,
                                 sizeof(param));

        DRM_DEBUG("pid=%d\n", DRM_CURRENTPID);

        switch (param.param) {
        case MGA_PARAM_IRQ_NR:

        case MGA_PARAM_CARD_TYPE:
                value = dev_priv->chipset;

        default:
                return DRM_ERR(EINVAL);

        if (DRM_COPY_TO_USER(param.value, &value, sizeof(int))) {
                DRM_ERROR("copy_to_user\n");
                return DRM_ERR(EFAULT);
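
/* Emit a software trap (SOFTRAP) into the primary DMA stream and return the
 * fence number that will be reached when it executes.
 */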
static int mga_set_fence(DRM_IOCTL_ARGS)
        drm_mga_private_t *dev_priv = dev->dev_private;
        u32 temp;

        if (!dev_priv) {
                DRM_ERROR("%s called with no initialization\n", __FUNCTION__);
                return DRM_ERR(EINVAL);

        DRM_DEBUG("pid=%d\n", DRM_CURRENTPID);

        /* I would normally do this assignment in the declaration of temp,
         * but dev_priv may be NULL.
         */
        temp = dev_priv->next_fence_to_post;
        dev_priv->next_fence_to_post++;

        DMA_BLOCK(MGA_DMAPAD, 0x00000000,
                  MGA_DMAPAD, 0x00000000,
                  MGA_DMAPAD, 0x00000000, MGA_SOFTRAP, 0x00000000);

        if (DRM_COPY_TO_USER((u32 __user *) data, &temp, sizeof(u32))) {
                DRM_ERROR("copy_to_user\n");
                return DRM_ERR(EFAULT);
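
/* Block until the given fence (SOFTRAP sequence number) has been reached,
 * then hand the completed value back to user space.
 */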
static int mga_wait_fence(DRM_IOCTL_ARGS)
        drm_mga_private_t *dev_priv = dev->dev_private;
        u32 fence;

        if (!dev_priv) {
                DRM_ERROR("%s called with no initialization\n", __FUNCTION__);
                return DRM_ERR(EINVAL);

        DRM_COPY_FROM_USER_IOCTL(fence, (u32 __user *) data, sizeof(u32));

        DRM_DEBUG("pid=%d\n", DRM_CURRENTPID);

        mga_driver_fence_wait(dev, &fence);

        if (DRM_COPY_TO_USER((u32 __user *) data, &fence, sizeof(u32))) {
                DRM_ERROR("copy_to_user\n");
                return DRM_ERR(EFAULT);
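
/* Ioctl dispatch table.  Most entries only require DRM authentication;
 * initialization and DMA bootstrap are further restricted to the master
 * and root.
 */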
drm_ioctl_desc_t mga_ioctls[] = {
        [DRM_IOCTL_NR(DRM_MGA_INIT)] = {mga_dma_init, DRM_AUTH|DRM_MASTER|DRM_ROOT_ONLY},
        [DRM_IOCTL_NR(DRM_MGA_FLUSH)] = {mga_dma_flush, DRM_AUTH},
        [DRM_IOCTL_NR(DRM_MGA_RESET)] = {mga_dma_reset, DRM_AUTH},
        [DRM_IOCTL_NR(DRM_MGA_SWAP)] = {mga_dma_swap, DRM_AUTH},
        [DRM_IOCTL_NR(DRM_MGA_CLEAR)] = {mga_dma_clear, DRM_AUTH},
        [DRM_IOCTL_NR(DRM_MGA_VERTEX)] = {mga_dma_vertex, DRM_AUTH},
        [DRM_IOCTL_NR(DRM_MGA_INDICES)] = {mga_dma_indices, DRM_AUTH},
        [DRM_IOCTL_NR(DRM_MGA_ILOAD)] = {mga_dma_iload, DRM_AUTH},
        [DRM_IOCTL_NR(DRM_MGA_BLIT)] = {mga_dma_blit, DRM_AUTH},
        [DRM_IOCTL_NR(DRM_MGA_GETPARAM)] = {mga_getparam, DRM_AUTH},
        [DRM_IOCTL_NR(DRM_MGA_SET_FENCE)] = {mga_set_fence, DRM_AUTH},
        [DRM_IOCTL_NR(DRM_MGA_WAIT_FENCE)] = {mga_wait_fence, DRM_AUTH},
        [DRM_IOCTL_NR(DRM_MGA_DMA_BOOTSTRAP)] = {mga_dma_bootstrap, DRM_AUTH|DRM_MASTER|DRM_ROOT_ONLY},
};

int mga_max_ioctl = DRM_ARRAY_SIZE(mga_ioctls);