/* mga_state.c -- State support for MGA G200/G400 -*- linux-c -*-
 * Created: Thu Jan 27 02:53:43 2000 by jhartmann@precisioninsight.com
 *
 * Copyright 1999 Precision Insight, Inc., Cedar Park, Texas.
 * Copyright 2000 VA Linux Systems, Inc., Sunnyvale, California.
 * All Rights Reserved.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * VA LINUX SYSTEMS AND/OR ITS SUPPLIERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors:
 *    Jeff Hartmann <jhartmann@valinux.com>
 *    Keith Whitwell <keith@tungstengraphics.com>
 *
 * Rewritten by:
 *    Gareth Hughes <gareth@valinux.com>
 */

#include "drmP.h"
#include "drm.h"
#include "mga_drm.h"
#include "mga_drv.h"

/* ================================================================
 * DMA hardware state programming functions
 */

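/* Emit the clip rectangle for the current drawing command by loading
 * CXBNDRY, YTOP and YBOT.  On G400-class chips DWGCTL is reloaded first
 * to clear the clip-disable bit.
 */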
static void mga_emit_clip_rect(drm_mga_private_t * dev_priv,
                               struct drm_clip_rect * box)
{
        drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
        drm_mga_context_regs_t *ctx = &sarea_priv->context_state;
        unsigned int pitch = dev_priv->front_pitch;
        DMA_LOCALS;

        BEGIN_DMA(2);

        /* Force reset of DWGCTL on G400 (eliminates clip disable bit).
         */
        if (dev_priv->chipset >= MGA_CARD_TYPE_G400) {
                DMA_BLOCK(MGA_DWGCTL, ctx->dwgctl,
                          MGA_LEN + MGA_EXEC, 0x80000000,
                          MGA_DWGCTL, ctx->dwgctl,
                          MGA_LEN + MGA_EXEC, 0x80000000);
        }
        DMA_BLOCK(MGA_DMAPAD, 0x00000000,
                  MGA_CXBNDRY, ((box->x2 - 1) << 16) | box->x1,
                  MGA_YTOP, box->y1 * pitch, MGA_YBOT, (box->y2 - 1) * pitch);

        ADVANCE_DMA();
}

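/* Upload the G200 drawing context (destination origin, plane write mask,
 * DWGCTL, alpha, fog and depth state) from the SAREA to the hardware.
 */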
static __inline__ void mga_g200_emit_context(drm_mga_private_t * dev_priv)
{
        drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
        drm_mga_context_regs_t *ctx = &sarea_priv->context_state;
        DMA_LOCALS;

        BEGIN_DMA(3);

        DMA_BLOCK(MGA_DSTORG, ctx->dstorg,
                  MGA_MACCESS, ctx->maccess,
                  MGA_PLNWT, ctx->plnwt, MGA_DWGCTL, ctx->dwgctl);

        DMA_BLOCK(MGA_ALPHACTRL, ctx->alphactrl,
                  MGA_FOGCOL, ctx->fogcolor,
                  MGA_WFLAG, ctx->wflag, MGA_ZORG, dev_priv->depth_offset);

        DMA_BLOCK(MGA_FCOL, ctx->fcol,
                  MGA_DMAPAD, 0x00000000,
                  MGA_DMAPAD, 0x00000000, MGA_DMAPAD, 0x00000000);

        ADVANCE_DMA();
}

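/* G400 version of the context upload; also programs WFLAG1, the two
 * texture dual-stage registers and the stencil registers.
 */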
static __inline__ void mga_g400_emit_context(drm_mga_private_t * dev_priv)
{
        drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
        drm_mga_context_regs_t *ctx = &sarea_priv->context_state;
        DMA_LOCALS;

        BEGIN_DMA(4);

        DMA_BLOCK(MGA_DSTORG, ctx->dstorg,
                  MGA_MACCESS, ctx->maccess,
                  MGA_PLNWT, ctx->plnwt, MGA_DWGCTL, ctx->dwgctl);

        DMA_BLOCK(MGA_ALPHACTRL, ctx->alphactrl,
                  MGA_FOGCOL, ctx->fogcolor,
                  MGA_WFLAG, ctx->wflag, MGA_ZORG, dev_priv->depth_offset);

        DMA_BLOCK(MGA_WFLAG1, ctx->wflag,
                  MGA_TDUALSTAGE0, ctx->tdualstage0,
                  MGA_TDUALSTAGE1, ctx->tdualstage1, MGA_FCOL, ctx->fcol);

        DMA_BLOCK(MGA_STENCIL, ctx->stencil,
                  MGA_STENCILCTL, ctx->stencilctl,
                  MGA_DMAPAD, 0x00000000, MGA_DMAPAD, 0x00000000);

        ADVANCE_DMA();
}

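/* Upload texture unit 0 state for the G200: control, filter, border
 * color, origin and size registers.
 */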
static __inline__ void mga_g200_emit_tex0(drm_mga_private_t * dev_priv)
{
        drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
        drm_mga_texture_regs_t *tex = &sarea_priv->tex_state[0];
        DMA_LOCALS;

        BEGIN_DMA(4);

        DMA_BLOCK(MGA_TEXCTL2, tex->texctl2,
                  MGA_TEXCTL, tex->texctl,
                  MGA_TEXFILTER, tex->texfilter,
                  MGA_TEXBORDERCOL, tex->texbordercol);

        DMA_BLOCK(MGA_TEXORG, tex->texorg,
                  MGA_TEXORG1, tex->texorg1,
                  MGA_TEXORG2, tex->texorg2, MGA_TEXORG3, tex->texorg3);

        DMA_BLOCK(MGA_TEXORG4, tex->texorg4,
                  MGA_TEXWIDTH, tex->texwidth,
                  MGA_TEXHEIGHT, tex->texheight, MGA_WR24, tex->texwidth);

        DMA_BLOCK(MGA_WR34, tex->texheight,
                  MGA_TEXTRANS, 0x0000ffff,
                  MGA_TEXTRANSHIGH, 0x0000ffff, MGA_DMAPAD, 0x00000000);

        ADVANCE_DMA();
}

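/* Upload texture unit 0 state for the G400, including the WR (WARP)
 * registers that are loaded with magic values for this unit.
 */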
static __inline__ void mga_g400_emit_tex0(drm_mga_private_t * dev_priv)
{
        drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
        drm_mga_texture_regs_t *tex = &sarea_priv->tex_state[0];
        DMA_LOCALS;

/*      printk("mga_g400_emit_tex0 %x %x %x\n", tex->texorg, */
/*             tex->texctl, tex->texctl2); */

        BEGIN_DMA(6);

        DMA_BLOCK(MGA_TEXCTL2, tex->texctl2 | MGA_G400_TC2_MAGIC,
                  MGA_TEXCTL, tex->texctl,
                  MGA_TEXFILTER, tex->texfilter,
                  MGA_TEXBORDERCOL, tex->texbordercol);

        DMA_BLOCK(MGA_TEXORG, tex->texorg,
                  MGA_TEXORG1, tex->texorg1,
                  MGA_TEXORG2, tex->texorg2, MGA_TEXORG3, tex->texorg3);

        DMA_BLOCK(MGA_TEXORG4, tex->texorg4,
                  MGA_TEXWIDTH, tex->texwidth,
                  MGA_TEXHEIGHT, tex->texheight, MGA_WR49, 0x00000000);

        DMA_BLOCK(MGA_WR57, 0x00000000,
                  MGA_WR53, 0x00000000,
                  MGA_WR61, 0x00000000, MGA_WR52, MGA_G400_WR_MAGIC);

        DMA_BLOCK(MGA_WR60, MGA_G400_WR_MAGIC,
                  MGA_WR54, tex->texwidth | MGA_G400_WR_MAGIC,
                  MGA_WR62, tex->texheight | MGA_G400_WR_MAGIC,
                  MGA_DMAPAD, 0x00000000);

        DMA_BLOCK(MGA_DMAPAD, 0x00000000,
                  MGA_DMAPAD, 0x00000000,
                  MGA_TEXTRANS, 0x0000ffff, MGA_TEXTRANSHIGH, 0x0000ffff);

        ADVANCE_DMA();
}

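/* Upload texture unit 1 state for the G400.  Only used when a
 * multitexture (MGA_T2) WARP pipe is selected.
 */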
static __inline__ void mga_g400_emit_tex1(drm_mga_private_t * dev_priv)
{
        drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
        drm_mga_texture_regs_t *tex = &sarea_priv->tex_state[1];
        DMA_LOCALS;

/*      printk("mga_g400_emit_tex1 %x %x %x\n", tex->texorg,  */
/*             tex->texctl, tex->texctl2); */

        BEGIN_DMA(5);

        DMA_BLOCK(MGA_TEXCTL2, (tex->texctl2 |
                                MGA_MAP1_ENABLE |
                                MGA_G400_TC2_MAGIC),
                  MGA_TEXCTL, tex->texctl,
                  MGA_TEXFILTER, tex->texfilter,
                  MGA_TEXBORDERCOL, tex->texbordercol);

        DMA_BLOCK(MGA_TEXORG, tex->texorg,
                  MGA_TEXORG1, tex->texorg1,
                  MGA_TEXORG2, tex->texorg2, MGA_TEXORG3, tex->texorg3);

        DMA_BLOCK(MGA_TEXORG4, tex->texorg4,
                  MGA_TEXWIDTH, tex->texwidth,
                  MGA_TEXHEIGHT, tex->texheight, MGA_WR49, 0x00000000);

        DMA_BLOCK(MGA_WR57, 0x00000000,
                  MGA_WR53, 0x00000000,
                  MGA_WR61, 0x00000000,
                  MGA_WR52, tex->texwidth | MGA_G400_WR_MAGIC);

        DMA_BLOCK(MGA_WR60, tex->texheight | MGA_G400_WR_MAGIC,
                  MGA_TEXTRANS, 0x0000ffff,
                  MGA_TEXTRANSHIGH, 0x0000ffff,
                  MGA_TEXCTL2, tex->texctl2 | MGA_G400_TC2_MAGIC);

        ADVANCE_DMA();
}

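/* Suspend the WARP engine, program the vertex size and WARP registers
 * for the selected pipe, then restart the microcode at its physical
 * address.
 */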
static __inline__ void mga_g200_emit_pipe(drm_mga_private_t * dev_priv)
{
        drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
        unsigned int pipe = sarea_priv->warp_pipe;
        DMA_LOCALS;

        BEGIN_DMA(3);

        DMA_BLOCK(MGA_WIADDR, MGA_WMODE_SUSPEND,
                  MGA_WVRTXSZ, 0x00000007,
                  MGA_WFLAG, 0x00000000, MGA_WR24, 0x00000000);

        DMA_BLOCK(MGA_WR25, 0x00000100,
                  MGA_WR34, 0x00000000,
                  MGA_WR42, 0x0000ffff, MGA_WR60, 0x0000ffff);

        /* Padding required due to hardware bug.
         */
        DMA_BLOCK(MGA_DMAPAD, 0xffffffff,
                  MGA_DMAPAD, 0xffffffff,
                  MGA_DMAPAD, 0xffffffff,
                  MGA_WIADDR, (dev_priv->warp_pipe_phys[pipe] |
                               MGA_WMODE_START | dev_priv->wagp_enable));

        ADVANCE_DMA();
}

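/* G400 version of the WARP pipe setup.  The vertex size and WACCEPTSEQ
 * values differ between the single- and dual-texture pipes, and leaving
 * a dual-texture pipe requires flushing the WARP engine first.
 */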
static __inline__ void mga_g400_emit_pipe(drm_mga_private_t * dev_priv)
{
        drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
        unsigned int pipe = sarea_priv->warp_pipe;
        DMA_LOCALS;

/*      printk("mga_g400_emit_pipe %x\n", pipe); */

        BEGIN_DMA(10);

        DMA_BLOCK(MGA_WIADDR2, MGA_WMODE_SUSPEND,
                  MGA_DMAPAD, 0x00000000,
                  MGA_DMAPAD, 0x00000000, MGA_DMAPAD, 0x00000000);

        if (pipe & MGA_T2) {
                DMA_BLOCK(MGA_WVRTXSZ, 0x00001e09,
                          MGA_DMAPAD, 0x00000000,
                          MGA_DMAPAD, 0x00000000, MGA_DMAPAD, 0x00000000);

                DMA_BLOCK(MGA_WACCEPTSEQ, 0x00000000,
                          MGA_WACCEPTSEQ, 0x00000000,
                          MGA_WACCEPTSEQ, 0x00000000,
                          MGA_WACCEPTSEQ, 0x1e000000);
        } else {
                if (dev_priv->warp_pipe & MGA_T2) {
                        /* Flush the WARP pipe */
                        DMA_BLOCK(MGA_YDST, 0x00000000,
                                  MGA_FXLEFT, 0x00000000,
                                  MGA_FXRIGHT, 0x00000001,
                                  MGA_DWGCTL, MGA_DWGCTL_FLUSH);

                        DMA_BLOCK(MGA_LEN + MGA_EXEC, 0x00000001,
                                  MGA_DWGSYNC, 0x00007000,
                                  MGA_TEXCTL2, MGA_G400_TC2_MAGIC,
                                  MGA_LEN + MGA_EXEC, 0x00000000);

                        DMA_BLOCK(MGA_TEXCTL2, (MGA_DUALTEX |
                                                MGA_G400_TC2_MAGIC),
                                  MGA_LEN + MGA_EXEC, 0x00000000,
                                  MGA_TEXCTL2, MGA_G400_TC2_MAGIC,
                                  MGA_DMAPAD, 0x00000000);
                }

                DMA_BLOCK(MGA_WVRTXSZ, 0x00001807,
                          MGA_DMAPAD, 0x00000000,
                          MGA_DMAPAD, 0x00000000, MGA_DMAPAD, 0x00000000);

                DMA_BLOCK(MGA_WACCEPTSEQ, 0x00000000,
                          MGA_WACCEPTSEQ, 0x00000000,
                          MGA_WACCEPTSEQ, 0x00000000,
                          MGA_WACCEPTSEQ, 0x18000000);
        }

        DMA_BLOCK(MGA_WFLAG, 0x00000000,
                  MGA_WFLAG1, 0x00000000,
                  MGA_WR56, MGA_G400_WR56_MAGIC, MGA_DMAPAD, 0x00000000);

        DMA_BLOCK(MGA_WR49, 0x00000000, /* tex0              */
                  MGA_WR57, 0x00000000, /* tex0              */
                  MGA_WR53, 0x00000000, /* tex1              */
                  MGA_WR61, 0x00000000);        /* tex1              */

        DMA_BLOCK(MGA_WR54, MGA_G400_WR_MAGIC,  /* tex0 width        */
                  MGA_WR62, MGA_G400_WR_MAGIC,  /* tex0 height       */
                  MGA_WR52, MGA_G400_WR_MAGIC,  /* tex1 width        */
                  MGA_WR60, MGA_G400_WR_MAGIC); /* tex1 height       */

        /* Padding required due to hardware bug */
        DMA_BLOCK(MGA_DMAPAD, 0xffffffff,
                  MGA_DMAPAD, 0xffffffff,
                  MGA_DMAPAD, 0xffffffff,
                  MGA_WIADDR2, (dev_priv->warp_pipe_phys[pipe] |
                                MGA_WMODE_START | dev_priv->wagp_enable));

        ADVANCE_DMA();
}

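/* Emit whatever G200 state the SAREA marks as dirty: WARP pipe,
 * drawing context and texture unit 0.
 */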
static void mga_g200_emit_state(drm_mga_private_t * dev_priv)
{
        drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
        unsigned int dirty = sarea_priv->dirty;

        if (sarea_priv->warp_pipe != dev_priv->warp_pipe) {
                mga_g200_emit_pipe(dev_priv);
                dev_priv->warp_pipe = sarea_priv->warp_pipe;
        }

        if (dirty & MGA_UPLOAD_CONTEXT) {
                mga_g200_emit_context(dev_priv);
                sarea_priv->dirty &= ~MGA_UPLOAD_CONTEXT;
        }

        if (dirty & MGA_UPLOAD_TEX0) {
                mga_g200_emit_tex0(dev_priv);
                sarea_priv->dirty &= ~MGA_UPLOAD_TEX0;
        }
}

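/* G400 version of the dirty-state upload; additionally handles texture
 * unit 1 when a multitexture pipe is active.
 */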
static void mga_g400_emit_state(drm_mga_private_t * dev_priv)
{
        drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
        unsigned int dirty = sarea_priv->dirty;
        int multitex = sarea_priv->warp_pipe & MGA_T2;

        if (sarea_priv->warp_pipe != dev_priv->warp_pipe) {
                mga_g400_emit_pipe(dev_priv);
                dev_priv->warp_pipe = sarea_priv->warp_pipe;
        }

        if (dirty & MGA_UPLOAD_CONTEXT) {
                mga_g400_emit_context(dev_priv);
                sarea_priv->dirty &= ~MGA_UPLOAD_CONTEXT;
        }

        if (dirty & MGA_UPLOAD_TEX0) {
                mga_g400_emit_tex0(dev_priv);
                sarea_priv->dirty &= ~MGA_UPLOAD_TEX0;
        }

        if ((dirty & MGA_UPLOAD_TEX1) && multitex) {
                mga_g400_emit_tex1(dev_priv);
                sarea_priv->dirty &= ~MGA_UPLOAD_TEX1;
        }
}

/* ================================================================
 * SAREA state verification
 */

/* Disallow all write destinations except the front and back buffers.
 */
static int mga_verify_context(drm_mga_private_t * dev_priv)
{
        drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
        drm_mga_context_regs_t *ctx = &sarea_priv->context_state;

        if (ctx->dstorg != dev_priv->front_offset &&
            ctx->dstorg != dev_priv->back_offset) {
                DRM_ERROR("*** bad DSTORG: %x (front %x, back %x)\n\n",
                          ctx->dstorg, dev_priv->front_offset,
                          dev_priv->back_offset);
                ctx->dstorg = 0;
                return DRM_ERR(EINVAL);
        }

        return 0;
}

/* Disallow texture reads from PCI space.
 */
static int mga_verify_tex(drm_mga_private_t * dev_priv, int unit)
{
        drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
        drm_mga_texture_regs_t *tex = &sarea_priv->tex_state[unit];
        unsigned int org;

        org = tex->texorg & (MGA_TEXORGMAP_MASK | MGA_TEXORGACC_MASK);

        if (org == (MGA_TEXORGMAP_SYSMEM | MGA_TEXORGACC_PCI)) {
                DRM_ERROR("*** bad TEXORG: 0x%x, unit %d\n", tex->texorg, unit);
                tex->texorg = 0;
                return DRM_ERR(EINVAL);
        }

        return 0;
}

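/* Verify all dirty state before it is emitted.  Returns nonzero if the
 * state is acceptable, zero if any check failed.
 */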
static int mga_verify_state(drm_mga_private_t * dev_priv)
{
        drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
        unsigned int dirty = sarea_priv->dirty;
        int ret = 0;

        if (sarea_priv->nbox > MGA_NR_SAREA_CLIPRECTS)
                sarea_priv->nbox = MGA_NR_SAREA_CLIPRECTS;

        if (dirty & MGA_UPLOAD_CONTEXT)
                ret |= mga_verify_context(dev_priv);

        if (dirty & MGA_UPLOAD_TEX0)
                ret |= mga_verify_tex(dev_priv, 0);

        if (dev_priv->chipset >= MGA_CARD_TYPE_G400) {
                if (dirty & MGA_UPLOAD_TEX1)
                        ret |= mga_verify_tex(dev_priv, 1);

                if (dirty & MGA_UPLOAD_PIPE)
                        ret |= (sarea_priv->warp_pipe > MGA_MAX_G400_PIPES);
        } else {
                if (dirty & MGA_UPLOAD_PIPE)
                        ret |= (sarea_priv->warp_pipe > MGA_MAX_G200_PIPES);
        }

        return (ret == 0);
}

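/* Make sure an iload stays within the on-card texture region and that
 * its length is properly aligned.
 */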
static int mga_verify_iload(drm_mga_private_t * dev_priv,
                            unsigned int dstorg, unsigned int length)
{
        if (dstorg < dev_priv->texture_offset ||
            dstorg + length > (dev_priv->texture_offset +
                               dev_priv->texture_size)) {
                DRM_ERROR("*** bad iload DSTORG: 0x%x\n", dstorg);
                return DRM_ERR(EINVAL);
        }

        if (length & MGA_ILOAD_MASK) {
                DRM_ERROR("*** bad iload length: 0x%x\n",
                          length & MGA_ILOAD_MASK);
                return DRM_ERR(EINVAL);
        }

        return 0;
}

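/* Disallow blits whose source or destination would access PCI system
 * memory.
 */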
static int mga_verify_blit(drm_mga_private_t * dev_priv,
                           unsigned int srcorg, unsigned int dstorg)
{
        if ((srcorg & 0x3) == (MGA_SRCACC_PCI | MGA_SRCMAP_SYSMEM) ||
            (dstorg & 0x3) == (MGA_SRCACC_PCI | MGA_SRCMAP_SYSMEM)) {
                DRM_ERROR("*** bad blit: src=0x%x dst=0x%x\n", srcorg, dstorg);
                return DRM_ERR(EINVAL);
        }
        return 0;
}

/* ================================================================
 * Command dispatch
 */

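/* Clear the front, back and/or depth buffers for each cliprect,
 * according to clear->flags, then restore PLNWT and DWGCTL.
 */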
static void mga_dma_dispatch_clear(struct drm_device * dev, drm_mga_clear_t * clear)
{
        drm_mga_private_t *dev_priv = dev->dev_private;
        drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
        drm_mga_context_regs_t *ctx = &sarea_priv->context_state;
        struct drm_clip_rect *pbox = sarea_priv->boxes;
        int nbox = sarea_priv->nbox;
        int i;
        DMA_LOCALS;
        DRM_DEBUG("\n");

        BEGIN_DMA(1);

        DMA_BLOCK(MGA_DMAPAD, 0x00000000,
                  MGA_DMAPAD, 0x00000000,
                  MGA_DWGSYNC, 0x00007100, MGA_DWGSYNC, 0x00007000);

        ADVANCE_DMA();

        for (i = 0; i < nbox; i++) {
                struct drm_clip_rect *box = &pbox[i];
                u32 height = box->y2 - box->y1;

                DRM_DEBUG("   from=%d,%d to=%d,%d\n",
                          box->x1, box->y1, box->x2, box->y2);

                if (clear->flags & MGA_FRONT) {
                        BEGIN_DMA(2);

                        DMA_BLOCK(MGA_DMAPAD, 0x00000000,
                                  MGA_PLNWT, clear->color_mask,
                                  MGA_YDSTLEN, (box->y1 << 16) | height,
                                  MGA_FXBNDRY, (box->x2 << 16) | box->x1);

                        DMA_BLOCK(MGA_DMAPAD, 0x00000000,
                                  MGA_FCOL, clear->clear_color,
                                  MGA_DSTORG, dev_priv->front_offset,
                                  MGA_DWGCTL + MGA_EXEC, dev_priv->clear_cmd);

                        ADVANCE_DMA();
                }

                if (clear->flags & MGA_BACK) {
                        BEGIN_DMA(2);

                        DMA_BLOCK(MGA_DMAPAD, 0x00000000,
                                  MGA_PLNWT, clear->color_mask,
                                  MGA_YDSTLEN, (box->y1 << 16) | height,
                                  MGA_FXBNDRY, (box->x2 << 16) | box->x1);

                        DMA_BLOCK(MGA_DMAPAD, 0x00000000,
                                  MGA_FCOL, clear->clear_color,
                                  MGA_DSTORG, dev_priv->back_offset,
                                  MGA_DWGCTL + MGA_EXEC, dev_priv->clear_cmd);

                        ADVANCE_DMA();
                }

                if (clear->flags & MGA_DEPTH) {
                        BEGIN_DMA(2);

                        DMA_BLOCK(MGA_DMAPAD, 0x00000000,
                                  MGA_PLNWT, clear->depth_mask,
                                  MGA_YDSTLEN, (box->y1 << 16) | height,
                                  MGA_FXBNDRY, (box->x2 << 16) | box->x1);

                        DMA_BLOCK(MGA_DMAPAD, 0x00000000,
                                  MGA_FCOL, clear->clear_depth,
                                  MGA_DSTORG, dev_priv->depth_offset,
                                  MGA_DWGCTL + MGA_EXEC, dev_priv->clear_cmd);

                        ADVANCE_DMA();
                }

        }

        BEGIN_DMA(1);

        /* Force reset of DWGCTL */
        DMA_BLOCK(MGA_DMAPAD, 0x00000000,
                  MGA_DMAPAD, 0x00000000,
                  MGA_PLNWT, ctx->plnwt, MGA_DWGCTL, ctx->dwgctl);

        ADVANCE_DMA();

        FLUSH_DMA();
}

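/* Record the primary DMA position of this frame in the SAREA, then blit
 * the back buffer to the front buffer for each cliprect.
 */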
static void mga_dma_dispatch_swap(struct drm_device * dev)
{
        drm_mga_private_t *dev_priv = dev->dev_private;
        drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
        drm_mga_context_regs_t *ctx = &sarea_priv->context_state;
        struct drm_clip_rect *pbox = sarea_priv->boxes;
        int nbox = sarea_priv->nbox;
        int i;
        DMA_LOCALS;
        DRM_DEBUG("\n");

        sarea_priv->last_frame.head = dev_priv->prim.tail;
        sarea_priv->last_frame.wrap = dev_priv->prim.last_wrap;

        BEGIN_DMA(4 + nbox);

        DMA_BLOCK(MGA_DMAPAD, 0x00000000,
                  MGA_DMAPAD, 0x00000000,
                  MGA_DWGSYNC, 0x00007100, MGA_DWGSYNC, 0x00007000);

        DMA_BLOCK(MGA_DSTORG, dev_priv->front_offset,
                  MGA_MACCESS, dev_priv->maccess,
                  MGA_SRCORG, dev_priv->back_offset,
                  MGA_AR5, dev_priv->front_pitch);

        DMA_BLOCK(MGA_DMAPAD, 0x00000000,
                  MGA_DMAPAD, 0x00000000,
                  MGA_PLNWT, 0xffffffff, MGA_DWGCTL, MGA_DWGCTL_COPY);

        for (i = 0; i < nbox; i++) {
                struct drm_clip_rect *box = &pbox[i];
                u32 height = box->y2 - box->y1;
                u32 start = box->y1 * dev_priv->front_pitch;

                DRM_DEBUG("   from=%d,%d to=%d,%d\n",
                          box->x1, box->y1, box->x2, box->y2);

                DMA_BLOCK(MGA_AR0, start + box->x2 - 1,
                          MGA_AR3, start + box->x1,
                          MGA_FXBNDRY, ((box->x2 - 1) << 16) | box->x1,
                          MGA_YDSTLEN + MGA_EXEC, (box->y1 << 16) | height);
        }

        DMA_BLOCK(MGA_DMAPAD, 0x00000000,
                  MGA_PLNWT, ctx->plnwt,
                  MGA_SRCORG, dev_priv->front_offset, MGA_DWGCTL, ctx->dwgctl);

        ADVANCE_DMA();

        FLUSH_DMA();

        DRM_DEBUG("%s... done.\n", __FUNCTION__);
}

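/* Emit any dirty state, then kick off the vertex buffer once per
 * cliprect as a secondary DMA stream.
 */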
static void mga_dma_dispatch_vertex(struct drm_device * dev, struct drm_buf * buf)
{
        drm_mga_private_t *dev_priv = dev->dev_private;
        drm_mga_buf_priv_t *buf_priv = buf->dev_private;
        drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
        u32 address = (u32) buf->bus_address;
        u32 length = (u32) buf->used;
        int i = 0;
        DMA_LOCALS;
        DRM_DEBUG("vertex: buf=%d used=%d\n", buf->idx, buf->used);

        if (buf->used) {
                buf_priv->dispatched = 1;

                MGA_EMIT_STATE(dev_priv, sarea_priv->dirty);

                do {
                        if (i < sarea_priv->nbox) {
                                mga_emit_clip_rect(dev_priv,
                                                   &sarea_priv->boxes[i]);
                        }

                        BEGIN_DMA(1);

                        DMA_BLOCK(MGA_DMAPAD, 0x00000000,
                                  MGA_DMAPAD, 0x00000000,
                                  MGA_SECADDRESS, (address |
                                                   MGA_DMA_VERTEX),
                                  MGA_SECEND, ((address + length) |
                                               dev_priv->dma_access));

                        ADVANCE_DMA();
                } while (++i < sarea_priv->nbox);
        }

        if (buf_priv->discard) {
                AGE_BUFFER(buf_priv);
                buf->pending = 0;
                buf->used = 0;
                buf_priv->dispatched = 0;

                mga_freelist_put(dev, buf);
        }

        FLUSH_DMA();
}

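/* Like the vertex path, but feeds the [start, end) range of the buffer
 * through the setup engine via SETUPADDRESS/SETUPEND.
 */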
static void mga_dma_dispatch_indices(struct drm_device * dev, struct drm_buf * buf,
                                     unsigned int start, unsigned int end)
{
        drm_mga_private_t *dev_priv = dev->dev_private;
        drm_mga_buf_priv_t *buf_priv = buf->dev_private;
        drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
        u32 address = (u32) buf->bus_address;
        int i = 0;
        DMA_LOCALS;
        DRM_DEBUG("indices: buf=%d start=%d end=%d\n", buf->idx, start, end);

        if (start != end) {
                buf_priv->dispatched = 1;

                MGA_EMIT_STATE(dev_priv, sarea_priv->dirty);

                do {
                        if (i < sarea_priv->nbox) {
                                mga_emit_clip_rect(dev_priv,
                                                   &sarea_priv->boxes[i]);
                        }

                        BEGIN_DMA(1);

                        DMA_BLOCK(MGA_DMAPAD, 0x00000000,
                                  MGA_DMAPAD, 0x00000000,
                                  MGA_SETUPADDRESS, address + start,
                                  MGA_SETUPEND, ((address + end) |
                                                 dev_priv->dma_access));

                        ADVANCE_DMA();
                } while (++i < sarea_priv->nbox);
        }

        if (buf_priv->discard) {
                AGE_BUFFER(buf_priv);
                buf->pending = 0;
                buf->used = 0;
                buf_priv->dispatched = 0;

                mga_freelist_put(dev, buf);
        }

        FLUSH_DMA();
}

/* This copies a 64-byte-aligned AGP region to the framebuffer with a
 * standard blit; the ioctl needs to do the checking.
 */
static void mga_dma_dispatch_iload(struct drm_device * dev, struct drm_buf * buf,
                                   unsigned int dstorg, unsigned int length)
{
        drm_mga_private_t *dev_priv = dev->dev_private;
        drm_mga_buf_priv_t *buf_priv = buf->dev_private;
        drm_mga_context_regs_t *ctx = &dev_priv->sarea_priv->context_state;
        u32 srcorg =
            buf->bus_address | dev_priv->dma_access | MGA_SRCMAP_SYSMEM;
        u32 y2;
        DMA_LOCALS;
        DRM_DEBUG("buf=%d used=%d\n", buf->idx, buf->used);

        y2 = length / 64;

        BEGIN_DMA(5);

        DMA_BLOCK(MGA_DMAPAD, 0x00000000,
                  MGA_DMAPAD, 0x00000000,
                  MGA_DWGSYNC, 0x00007100, MGA_DWGSYNC, 0x00007000);

        DMA_BLOCK(MGA_DSTORG, dstorg,
                  MGA_MACCESS, 0x00000000, MGA_SRCORG, srcorg, MGA_AR5, 64);

        DMA_BLOCK(MGA_PITCH, 64,
                  MGA_PLNWT, 0xffffffff,
                  MGA_DMAPAD, 0x00000000, MGA_DWGCTL, MGA_DWGCTL_COPY);

        DMA_BLOCK(MGA_AR0, 63,
                  MGA_AR3, 0,
                  MGA_FXBNDRY, (63 << 16) | 0, MGA_YDSTLEN + MGA_EXEC, y2);

        DMA_BLOCK(MGA_PLNWT, ctx->plnwt,
                  MGA_SRCORG, dev_priv->front_offset,
                  MGA_PITCH, dev_priv->front_pitch, MGA_DWGSYNC, 0x00007000);

        ADVANCE_DMA();

        AGE_BUFFER(buf_priv);

        buf->pending = 0;
        buf->used = 0;
        buf_priv->dispatched = 0;

        mga_freelist_put(dev, buf);

        FLUSH_DMA();
}

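/* Blit the requested source region to the destination for each
 * cliprect, then restore PLNWT, PITCH and DWGCTL.
 */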
static void mga_dma_dispatch_blit(struct drm_device * dev, drm_mga_blit_t * blit)
{
        drm_mga_private_t *dev_priv = dev->dev_private;
        drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
        drm_mga_context_regs_t *ctx = &sarea_priv->context_state;
        struct drm_clip_rect *pbox = sarea_priv->boxes;
        int nbox = sarea_priv->nbox;
        u32 scandir = 0, i;
        DMA_LOCALS;
        DRM_DEBUG("\n");

        BEGIN_DMA(4 + nbox);

        DMA_BLOCK(MGA_DMAPAD, 0x00000000,
                  MGA_DMAPAD, 0x00000000,
                  MGA_DWGSYNC, 0x00007100, MGA_DWGSYNC, 0x00007000);

        DMA_BLOCK(MGA_DWGCTL, MGA_DWGCTL_COPY,
                  MGA_PLNWT, blit->planemask,
                  MGA_SRCORG, blit->srcorg, MGA_DSTORG, blit->dstorg);

        DMA_BLOCK(MGA_SGN, scandir,
                  MGA_MACCESS, dev_priv->maccess,
                  MGA_AR5, blit->ydir * blit->src_pitch,
                  MGA_PITCH, blit->dst_pitch);

        for (i = 0; i < nbox; i++) {
                int srcx = pbox[i].x1 + blit->delta_sx;
                int srcy = pbox[i].y1 + blit->delta_sy;
                int dstx = pbox[i].x1 + blit->delta_dx;
                int dsty = pbox[i].y1 + blit->delta_dy;
                int h = pbox[i].y2 - pbox[i].y1;
                int w = pbox[i].x2 - pbox[i].x1 - 1;
                int start;

                if (blit->ydir == -1) {
                        srcy = blit->height - srcy - 1;
                }

                start = srcy * blit->src_pitch + srcx;

                DMA_BLOCK(MGA_AR0, start + w,
                          MGA_AR3, start,
                          MGA_FXBNDRY, ((dstx + w) << 16) | (dstx & 0xffff),
                          MGA_YDSTLEN + MGA_EXEC, (dsty << 16) | h);
        }

        /* Do something to flush AGP?
         */

        /* Force reset of DWGCTL */
        DMA_BLOCK(MGA_DMAPAD, 0x00000000,
                  MGA_PLNWT, ctx->plnwt,
                  MGA_PITCH, dev_priv->front_pitch, MGA_DWGCTL, ctx->dwgctl);

        ADVANCE_DMA();
}

/* ================================================================
 * IOCTL handlers
 */

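/* DRM_MGA_CLEAR ioctl: copy in the clear parameters, clamp the cliprect
 * count and dispatch the clear.
 */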
static int mga_dma_clear(DRM_IOCTL_ARGS)
{
        DRM_DEVICE;
        drm_mga_private_t *dev_priv = dev->dev_private;
        drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
        drm_mga_clear_t clear;

        LOCK_TEST_WITH_RETURN(dev, filp);

        DRM_COPY_FROM_USER_IOCTL(clear, (drm_mga_clear_t __user *) data,
                                 sizeof(clear));

        if (sarea_priv->nbox > MGA_NR_SAREA_CLIPRECTS)
                sarea_priv->nbox = MGA_NR_SAREA_CLIPRECTS;

        WRAP_TEST_WITH_RETURN(dev_priv);

        mga_dma_dispatch_clear(dev, &clear);

        /* Make sure we restore the 3D state next time.
         */
        dev_priv->sarea_priv->dirty |= MGA_UPLOAD_CONTEXT;

        return 0;
}

static int mga_dma_swap(DRM_IOCTL_ARGS)
{
        DRM_DEVICE;
        drm_mga_private_t *dev_priv = dev->dev_private;
        drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;

        LOCK_TEST_WITH_RETURN(dev, filp);

        if (sarea_priv->nbox > MGA_NR_SAREA_CLIPRECTS)
                sarea_priv->nbox = MGA_NR_SAREA_CLIPRECTS;

        WRAP_TEST_WITH_RETURN(dev_priv);

        mga_dma_dispatch_swap(dev);

        /* Make sure we restore the 3D state next time.
         */
        dev_priv->sarea_priv->dirty |= MGA_UPLOAD_CONTEXT;

        return 0;
}

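/* DRM_MGA_VERTEX ioctl: validate the buffer index and SAREA state, then
 * dispatch the vertex buffer.  A buffer with bad state is returned to
 * the freelist if it was marked for discard.
 */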
static int mga_dma_vertex(DRM_IOCTL_ARGS)
{
        DRM_DEVICE;
        drm_mga_private_t *dev_priv = dev->dev_private;
        struct drm_device_dma *dma = dev->dma;
        struct drm_buf *buf;
        drm_mga_buf_priv_t *buf_priv;
        drm_mga_vertex_t vertex;

        LOCK_TEST_WITH_RETURN(dev, filp);

        DRM_COPY_FROM_USER_IOCTL(vertex,
                                 (drm_mga_vertex_t __user *) data,
                                 sizeof(vertex));

        if (vertex.idx < 0 || vertex.idx >= dma->buf_count)
                return DRM_ERR(EINVAL);
        buf = dma->buflist[vertex.idx];
        buf_priv = buf->dev_private;

        buf->used = vertex.used;
        buf_priv->discard = vertex.discard;

        if (!mga_verify_state(dev_priv)) {
                if (vertex.discard) {
                        if (buf_priv->dispatched == 1)
                                AGE_BUFFER(buf_priv);
                        buf_priv->dispatched = 0;
                        mga_freelist_put(dev, buf);
                }
                return DRM_ERR(EINVAL);
        }

        WRAP_TEST_WITH_RETURN(dev_priv);

        mga_dma_dispatch_vertex(dev, buf);

        return 0;
}

static int mga_dma_indices(DRM_IOCTL_ARGS)
{
        DRM_DEVICE;
        drm_mga_private_t *dev_priv = dev->dev_private;
        struct drm_device_dma *dma = dev->dma;
        struct drm_buf *buf;
        drm_mga_buf_priv_t *buf_priv;
        drm_mga_indices_t indices;

        LOCK_TEST_WITH_RETURN(dev, filp);

        DRM_COPY_FROM_USER_IOCTL(indices,
                                 (drm_mga_indices_t __user *) data,
                                 sizeof(indices));

        if (indices.idx < 0 || indices.idx >= dma->buf_count)
                return DRM_ERR(EINVAL);

        buf = dma->buflist[indices.idx];
        buf_priv = buf->dev_private;

        buf_priv->discard = indices.discard;

        if (!mga_verify_state(dev_priv)) {
                if (indices.discard) {
                        if (buf_priv->dispatched == 1)
                                AGE_BUFFER(buf_priv);
                        buf_priv->dispatched = 0;
                        mga_freelist_put(dev, buf);
                }
                return DRM_ERR(EINVAL);
        }

        WRAP_TEST_WITH_RETURN(dev_priv);

        mga_dma_dispatch_indices(dev, buf, indices.start, indices.end);

        return 0;
}

static int mga_dma_iload(DRM_IOCTL_ARGS)
{
        DRM_DEVICE;
        struct drm_device_dma *dma = dev->dma;
        drm_mga_private_t *dev_priv = dev->dev_private;
        struct drm_buf *buf;
        drm_mga_buf_priv_t *buf_priv;
        drm_mga_iload_t iload;
        DRM_DEBUG("\n");

        LOCK_TEST_WITH_RETURN(dev, filp);

        DRM_COPY_FROM_USER_IOCTL(iload, (drm_mga_iload_t __user *) data,
                                 sizeof(iload));

#if 0
        if (mga_do_wait_for_idle(dev_priv) < 0) {
                if (MGA_DMA_DEBUG)
                        DRM_INFO("%s: -EBUSY\n", __FUNCTION__);
                return DRM_ERR(EBUSY);
        }
#endif
        if (iload.idx < 0 || iload.idx >= dma->buf_count)
                return DRM_ERR(EINVAL);

        buf = dma->buflist[iload.idx];
        buf_priv = buf->dev_private;

        if (mga_verify_iload(dev_priv, iload.dstorg, iload.length)) {
                mga_freelist_put(dev, buf);
                return DRM_ERR(EINVAL);
        }

        WRAP_TEST_WITH_RETURN(dev_priv);

        mga_dma_dispatch_iload(dev, buf, iload.dstorg, iload.length);

        /* Make sure we restore the 3D state next time.
         */
        dev_priv->sarea_priv->dirty |= MGA_UPLOAD_CONTEXT;

        return 0;
}

static int mga_dma_blit(DRM_IOCTL_ARGS)
{
        DRM_DEVICE;
        drm_mga_private_t *dev_priv = dev->dev_private;
        drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
        drm_mga_blit_t blit;
        DRM_DEBUG("\n");

        LOCK_TEST_WITH_RETURN(dev, filp);

        DRM_COPY_FROM_USER_IOCTL(blit, (drm_mga_blit_t __user *) data,
                                 sizeof(blit));

        if (sarea_priv->nbox > MGA_NR_SAREA_CLIPRECTS)
                sarea_priv->nbox = MGA_NR_SAREA_CLIPRECTS;

        if (mga_verify_blit(dev_priv, blit.srcorg, blit.dstorg))
                return DRM_ERR(EINVAL);

        WRAP_TEST_WITH_RETURN(dev_priv);

        mga_dma_dispatch_blit(dev, &blit);

        /* Make sure we restore the 3D state next time.
         */
        dev_priv->sarea_priv->dirty |= MGA_UPLOAD_CONTEXT;

        return 0;
}

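/* DRM_MGA_GETPARAM ioctl: return driver parameters (IRQ number or card
 * type) to userspace.
 */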
static int mga_getparam(DRM_IOCTL_ARGS)
{
        DRM_DEVICE;
        drm_mga_private_t *dev_priv = dev->dev_private;
        drm_mga_getparam_t param;
        int value;

        if (!dev_priv) {
                DRM_ERROR("%s called with no initialization\n", __FUNCTION__);
                return DRM_ERR(EINVAL);
        }

        DRM_COPY_FROM_USER_IOCTL(param, (drm_mga_getparam_t __user *) data,
                                 sizeof(param));

        DRM_DEBUG("pid=%d\n", DRM_CURRENTPID);

        switch (param.param) {
        case MGA_PARAM_IRQ_NR:
                value = dev->irq;
                break;
        case MGA_PARAM_CARD_TYPE:
                value = dev_priv->chipset;
                break;
        default:
                return DRM_ERR(EINVAL);
        }

        if (DRM_COPY_TO_USER(param.value, &value, sizeof(int))) {
                DRM_ERROR("copy_to_user\n");
                return DRM_ERR(EFAULT);
        }

        return 0;
}

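/* DRM_MGA_SET_FENCE ioctl: hand the next fence value back to userspace
 * and emit a SOFTRAP into the primary DMA stream to signal its
 * completion.
 */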
static int mga_set_fence(DRM_IOCTL_ARGS)
{
        DRM_DEVICE;
        drm_mga_private_t *dev_priv = dev->dev_private;
        u32 temp;
        DMA_LOCALS;

        if (!dev_priv) {
                DRM_ERROR("%s called with no initialization\n", __FUNCTION__);
                return DRM_ERR(EINVAL);
        }

        DRM_DEBUG("pid=%d\n", DRM_CURRENTPID);

        /* I would normally do this assignment in the declaration of temp,
         * but dev_priv may be NULL.
         */

        temp = dev_priv->next_fence_to_post;
        dev_priv->next_fence_to_post++;

        BEGIN_DMA(1);
        DMA_BLOCK(MGA_DMAPAD, 0x00000000,
                  MGA_DMAPAD, 0x00000000,
                  MGA_DMAPAD, 0x00000000, MGA_SOFTRAP, 0x00000000);
        ADVANCE_DMA();

        if (DRM_COPY_TO_USER((u32 __user *) data, &temp, sizeof(u32))) {
                DRM_ERROR("copy_to_user\n");
                return DRM_ERR(EFAULT);
        }

        return 0;
}

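/* DRM_MGA_WAIT_FENCE ioctl: block until the given fence has been
 * retired, then return the current fence value to userspace.
 */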
static int mga_wait_fence(DRM_IOCTL_ARGS)
{
        DRM_DEVICE;
        drm_mga_private_t *dev_priv = dev->dev_private;
        u32 fence;

        if (!dev_priv) {
                DRM_ERROR("%s called with no initialization\n", __FUNCTION__);
                return DRM_ERR(EINVAL);
        }

        DRM_COPY_FROM_USER_IOCTL(fence, (u32 __user *) data, sizeof(u32));

        DRM_DEBUG("pid=%d\n", DRM_CURRENTPID);

        mga_driver_fence_wait(dev, &fence);

        if (DRM_COPY_TO_USER((u32 __user *) data, &fence, sizeof(u32))) {
                DRM_ERROR("copy_to_user\n");
                return DRM_ERR(EFAULT);
        }

        return 0;
}

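/* Ioctl dispatch table.  Everything requires DRM authentication; init
 * and bootstrap are additionally restricted to the master/root.
 */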
drm_ioctl_desc_t mga_ioctls[] = {
        [DRM_IOCTL_NR(DRM_MGA_INIT)] = {mga_dma_init, DRM_AUTH|DRM_MASTER|DRM_ROOT_ONLY},
        [DRM_IOCTL_NR(DRM_MGA_FLUSH)] = {mga_dma_flush, DRM_AUTH},
        [DRM_IOCTL_NR(DRM_MGA_RESET)] = {mga_dma_reset, DRM_AUTH},
        [DRM_IOCTL_NR(DRM_MGA_SWAP)] = {mga_dma_swap, DRM_AUTH},
        [DRM_IOCTL_NR(DRM_MGA_CLEAR)] = {mga_dma_clear, DRM_AUTH},
        [DRM_IOCTL_NR(DRM_MGA_VERTEX)] = {mga_dma_vertex, DRM_AUTH},
        [DRM_IOCTL_NR(DRM_MGA_INDICES)] = {mga_dma_indices, DRM_AUTH},
        [DRM_IOCTL_NR(DRM_MGA_ILOAD)] = {mga_dma_iload, DRM_AUTH},
        [DRM_IOCTL_NR(DRM_MGA_BLIT)] = {mga_dma_blit, DRM_AUTH},
        [DRM_IOCTL_NR(DRM_MGA_GETPARAM)] = {mga_getparam, DRM_AUTH},
        [DRM_IOCTL_NR(DRM_MGA_SET_FENCE)] = {mga_set_fence, DRM_AUTH},
        [DRM_IOCTL_NR(DRM_MGA_WAIT_FENCE)] = {mga_wait_fence, DRM_AUTH},
        [DRM_IOCTL_NR(DRM_MGA_DMA_BOOTSTRAP)] = {mga_dma_bootstrap, DRM_AUTH|DRM_MASTER|DRM_ROOT_ONLY},
};

int mga_max_ioctl = DRM_ARRAY_SIZE(mga_ioctls);