/* mga_state.c -- State support for MGA G200/G400 -*- linux-c -*-
 * Created: Thu Jan 27 02:53:43 2000 by jhartmann@precisioninsight.com
 *
 * Copyright 1999 Precision Insight, Inc., Cedar Park, Texas.
 * Copyright 2000 VA Linux Systems, Inc., Sunnyvale, California.
 * All Rights Reserved.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * VA LINUX SYSTEMS AND/OR ITS SUPPLIERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors:
 *    Jeff Hartmann <jhartmann@valinux.com>
 *    Keith Whitwell <keith@tungstengraphics.com>
 *
 * Rewritten by:
 *    Gareth Hughes <gareth@valinux.com>
 */

#include <drm/drmP.h>
#include <drm/mga_drm.h>
#include "mga_drv.h"

/* ================================================================
 * DMA hardware state programming functions
 */
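
/* Note on the DMA helper macros used throughout this file.  This is a
 * descriptive summary inferred from how they are used here (the macros
 * themselves are defined in mga_drv.h): DMA_LOCALS declares the local
 * write pointer, BEGIN_DMA(n) reserves room for n command blocks in the
 * primary DMA stream, each DMA_BLOCK() emits exactly four register/value
 * pairs, ADVANCE_DMA() commits the reserved blocks, and FLUSH_DMA()
 * kicks the primary DMA stream at the hardware.  A minimal emit, as used
 * by mga_set_fence() below, therefore looks like:
 *
 *	DMA_LOCALS;
 *
 *	BEGIN_DMA(1);
 *
 *	DMA_BLOCK(MGA_DMAPAD, 0x00000000,
 *		  MGA_DMAPAD, 0x00000000,
 *		  MGA_DMAPAD, 0x00000000, MGA_SOFTRAP, 0x00000000);
 *
 *	ADVANCE_DMA();
 */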

static void mga_emit_clip_rect(drm_mga_private_t *dev_priv,
			       struct drm_clip_rect *box)
{
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_mga_context_regs_t *ctx = &sarea_priv->context_state;
	unsigned int pitch = dev_priv->front_pitch;
	DMA_LOCALS;

	BEGIN_DMA(2);

	/* Force reset of DWGCTL on G400 (eliminates clip disable bit).
	 */
	if (dev_priv->chipset >= MGA_CARD_TYPE_G400) {
		DMA_BLOCK(MGA_DWGCTL, ctx->dwgctl,
			  MGA_LEN + MGA_EXEC, 0x80000000,
			  MGA_DWGCTL, ctx->dwgctl,
			  MGA_LEN + MGA_EXEC, 0x80000000);
	}
	DMA_BLOCK(MGA_DMAPAD, 0x00000000,
		  MGA_CXBNDRY, ((box->x2 - 1) << 16) | box->x1,
		  MGA_YTOP, box->y1 * pitch, MGA_YBOT, (box->y2 - 1) * pitch);

	ADVANCE_DMA();
}
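
/* A rough reading of the block above (not taken from hardware docs):
 * CXBNDRY packs the inclusive horizontal clip bounds, with x2 - 1 in the
 * high halfword and x1 in the low halfword, while YTOP/YBOT give the
 * vertical clip bounds scaled by the front buffer pitch.  The extra
 * DWGCTL/LEN+EXEC writes on G400 re-load the drawing control register so
 * that a stale clip-disable setting cannot leak through.
 */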

static __inline__ void mga_g200_emit_context(drm_mga_private_t *dev_priv)
{
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_mga_context_regs_t *ctx = &sarea_priv->context_state;
	DMA_LOCALS;

	BEGIN_DMA(3);

	DMA_BLOCK(MGA_DSTORG, ctx->dstorg,
		  MGA_MACCESS, ctx->maccess,
		  MGA_PLNWT, ctx->plnwt, MGA_DWGCTL, ctx->dwgctl);

	DMA_BLOCK(MGA_ALPHACTRL, ctx->alphactrl,
		  MGA_FOGCOL, ctx->fogcolor,
		  MGA_WFLAG, ctx->wflag, MGA_ZORG, dev_priv->depth_offset);

	DMA_BLOCK(MGA_FCOL, ctx->fcol,
		  MGA_DMAPAD, 0x00000000,
		  MGA_DMAPAD, 0x00000000, MGA_DMAPAD, 0x00000000);

	ADVANCE_DMA();
}

static __inline__ void mga_g400_emit_context(drm_mga_private_t *dev_priv)
{
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_mga_context_regs_t *ctx = &sarea_priv->context_state;
	DMA_LOCALS;

	BEGIN_DMA(4);

	DMA_BLOCK(MGA_DSTORG, ctx->dstorg,
		  MGA_MACCESS, ctx->maccess,
		  MGA_PLNWT, ctx->plnwt, MGA_DWGCTL, ctx->dwgctl);

	DMA_BLOCK(MGA_ALPHACTRL, ctx->alphactrl,
		  MGA_FOGCOL, ctx->fogcolor,
		  MGA_WFLAG, ctx->wflag, MGA_ZORG, dev_priv->depth_offset);

	DMA_BLOCK(MGA_WFLAG1, ctx->wflag,
		  MGA_TDUALSTAGE0, ctx->tdualstage0,
		  MGA_TDUALSTAGE1, ctx->tdualstage1, MGA_FCOL, ctx->fcol);

	DMA_BLOCK(MGA_STENCIL, ctx->stencil,
		  MGA_STENCILCTL, ctx->stencilctl,
		  MGA_DMAPAD, 0x00000000, MGA_DMAPAD, 0x00000000);

	ADVANCE_DMA();
}

static __inline__ void mga_g200_emit_tex0(drm_mga_private_t *dev_priv)
{
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_mga_texture_regs_t *tex = &sarea_priv->tex_state[0];
	DMA_LOCALS;

	BEGIN_DMA(4);

	DMA_BLOCK(MGA_TEXCTL2, tex->texctl2,
		  MGA_TEXCTL, tex->texctl,
		  MGA_TEXFILTER, tex->texfilter,
		  MGA_TEXBORDERCOL, tex->texbordercol);

	DMA_BLOCK(MGA_TEXORG, tex->texorg,
		  MGA_TEXORG1, tex->texorg1,
		  MGA_TEXORG2, tex->texorg2, MGA_TEXORG3, tex->texorg3);

	DMA_BLOCK(MGA_TEXORG4, tex->texorg4,
		  MGA_TEXWIDTH, tex->texwidth,
		  MGA_TEXHEIGHT, tex->texheight, MGA_WR24, tex->texwidth);

	DMA_BLOCK(MGA_WR34, tex->texheight,
		  MGA_TEXTRANS, 0x0000ffff,
		  MGA_TEXTRANSHIGH, 0x0000ffff, MGA_DMAPAD, 0x00000000);

	ADVANCE_DMA();
}

static __inline__ void mga_g400_emit_tex0(drm_mga_private_t *dev_priv)
{
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_mga_texture_regs_t *tex = &sarea_priv->tex_state[0];
	DMA_LOCALS;

/*	printk("mga_g400_emit_tex0 %x %x %x\n", tex->texorg, */
/*	       tex->texctl, tex->texctl2); */

	BEGIN_DMA(6);

	DMA_BLOCK(MGA_TEXCTL2, tex->texctl2 | MGA_G400_TC2_MAGIC,
		  MGA_TEXCTL, tex->texctl,
		  MGA_TEXFILTER, tex->texfilter,
		  MGA_TEXBORDERCOL, tex->texbordercol);

	DMA_BLOCK(MGA_TEXORG, tex->texorg,
		  MGA_TEXORG1, tex->texorg1,
		  MGA_TEXORG2, tex->texorg2, MGA_TEXORG3, tex->texorg3);

	DMA_BLOCK(MGA_TEXORG4, tex->texorg4,
		  MGA_TEXWIDTH, tex->texwidth,
		  MGA_TEXHEIGHT, tex->texheight, MGA_WR49, 0x00000000);

	DMA_BLOCK(MGA_WR57, 0x00000000,
		  MGA_WR53, 0x00000000,
		  MGA_WR61, 0x00000000, MGA_WR52, MGA_G400_WR_MAGIC);

	DMA_BLOCK(MGA_WR60, MGA_G400_WR_MAGIC,
		  MGA_WR54, tex->texwidth | MGA_G400_WR_MAGIC,
		  MGA_WR62, tex->texheight | MGA_G400_WR_MAGIC,
		  MGA_DMAPAD, 0x00000000);

	DMA_BLOCK(MGA_DMAPAD, 0x00000000,
		  MGA_DMAPAD, 0x00000000,
		  MGA_TEXTRANS, 0x0000ffff, MGA_TEXTRANSHIGH, 0x0000ffff);

	ADVANCE_DMA();
}

static __inline__ void mga_g400_emit_tex1(drm_mga_private_t *dev_priv)
{
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_mga_texture_regs_t *tex = &sarea_priv->tex_state[1];
	DMA_LOCALS;

/*	printk("mga_g400_emit_tex1 %x %x %x\n", tex->texorg, */
/*	       tex->texctl, tex->texctl2); */

	BEGIN_DMA(5);

	DMA_BLOCK(MGA_TEXCTL2, (tex->texctl2 |
				MGA_MAP1_ENABLE |
				MGA_G400_TC2_MAGIC),
		  MGA_TEXCTL, tex->texctl,
		  MGA_TEXFILTER, tex->texfilter,
		  MGA_TEXBORDERCOL, tex->texbordercol);

	DMA_BLOCK(MGA_TEXORG, tex->texorg,
		  MGA_TEXORG1, tex->texorg1,
		  MGA_TEXORG2, tex->texorg2, MGA_TEXORG3, tex->texorg3);

	DMA_BLOCK(MGA_TEXORG4, tex->texorg4,
		  MGA_TEXWIDTH, tex->texwidth,
		  MGA_TEXHEIGHT, tex->texheight, MGA_WR49, 0x00000000);

	DMA_BLOCK(MGA_WR57, 0x00000000,
		  MGA_WR53, 0x00000000,
		  MGA_WR61, 0x00000000,
		  MGA_WR52, tex->texwidth | MGA_G400_WR_MAGIC);

	DMA_BLOCK(MGA_WR60, tex->texheight | MGA_G400_WR_MAGIC,
		  MGA_TEXTRANS, 0x0000ffff,
		  MGA_TEXTRANSHIGH, 0x0000ffff,
		  MGA_TEXCTL2, tex->texctl2 | MGA_G400_TC2_MAGIC);

	ADVANCE_DMA();
}

static __inline__ void mga_g200_emit_pipe(drm_mga_private_t *dev_priv)
{
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	unsigned int pipe = sarea_priv->warp_pipe;
	DMA_LOCALS;

	BEGIN_DMA(3);

	DMA_BLOCK(MGA_WIADDR, MGA_WMODE_SUSPEND,
		  MGA_WVRTXSZ, 0x00000007,
		  MGA_WFLAG, 0x00000000, MGA_WR24, 0x00000000);

	DMA_BLOCK(MGA_WR25, 0x00000100,
		  MGA_WR34, 0x00000000,
		  MGA_WR42, 0x0000ffff, MGA_WR60, 0x0000ffff);

	/* Padding required due to hardware bug.
	 */
	DMA_BLOCK(MGA_DMAPAD, 0xffffffff,
		  MGA_DMAPAD, 0xffffffff,
		  MGA_DMAPAD, 0xffffffff,
		  MGA_WIADDR, (dev_priv->warp_pipe_phys[pipe] |
			       MGA_WMODE_START | dev_priv->wagp_enable));

	ADVANCE_DMA();
}

static __inline__ void mga_g400_emit_pipe(drm_mga_private_t *dev_priv)
{
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	unsigned int pipe = sarea_priv->warp_pipe;
	DMA_LOCALS;

/*	printk("mga_g400_emit_pipe %x\n", pipe); */

	BEGIN_DMA(10);

	DMA_BLOCK(MGA_WIADDR2, MGA_WMODE_SUSPEND,
		  MGA_DMAPAD, 0x00000000,
		  MGA_DMAPAD, 0x00000000, MGA_DMAPAD, 0x00000000);

	if (pipe & MGA_T2) {
		DMA_BLOCK(MGA_WVRTXSZ, 0x00001e09,
			  MGA_DMAPAD, 0x00000000,
			  MGA_DMAPAD, 0x00000000, MGA_DMAPAD, 0x00000000);

		DMA_BLOCK(MGA_WACCEPTSEQ, 0x00000000,
			  MGA_WACCEPTSEQ, 0x00000000,
			  MGA_WACCEPTSEQ, 0x00000000,
			  MGA_WACCEPTSEQ, 0x1e000000);
	} else {
		if (dev_priv->warp_pipe & MGA_T2) {
			/* Flush the WARP pipe */
			DMA_BLOCK(MGA_YDST, 0x00000000,
				  MGA_FXLEFT, 0x00000000,
				  MGA_FXRIGHT, 0x00000001,
				  MGA_DWGCTL, MGA_DWGCTL_FLUSH);

			DMA_BLOCK(MGA_LEN + MGA_EXEC, 0x00000001,
				  MGA_DWGSYNC, 0x00007000,
				  MGA_TEXCTL2, MGA_G400_TC2_MAGIC,
				  MGA_LEN + MGA_EXEC, 0x00000000);

			DMA_BLOCK(MGA_TEXCTL2, (MGA_DUALTEX |
						MGA_G400_TC2_MAGIC),
				  MGA_LEN + MGA_EXEC, 0x00000000,
				  MGA_TEXCTL2, MGA_G400_TC2_MAGIC,
				  MGA_DMAPAD, 0x00000000);
		}

		DMA_BLOCK(MGA_WVRTXSZ, 0x00001807,
			  MGA_DMAPAD, 0x00000000,
			  MGA_DMAPAD, 0x00000000, MGA_DMAPAD, 0x00000000);

		DMA_BLOCK(MGA_WACCEPTSEQ, 0x00000000,
			  MGA_WACCEPTSEQ, 0x00000000,
			  MGA_WACCEPTSEQ, 0x00000000,
			  MGA_WACCEPTSEQ, 0x18000000);
	}

	DMA_BLOCK(MGA_WFLAG, 0x00000000,
		  MGA_WFLAG1, 0x00000000,
		  MGA_WR56, MGA_G400_WR56_MAGIC, MGA_DMAPAD, 0x00000000);

	DMA_BLOCK(MGA_WR49, 0x00000000,		/* tex0 */
		  MGA_WR57, 0x00000000,		/* tex0 */
		  MGA_WR53, 0x00000000,		/* tex1 */
		  MGA_WR61, 0x00000000);	/* tex1 */

	DMA_BLOCK(MGA_WR54, MGA_G400_WR_MAGIC,	/* tex0 width */
		  MGA_WR62, MGA_G400_WR_MAGIC,	/* tex0 height */
		  MGA_WR52, MGA_G400_WR_MAGIC,	/* tex1 width */
		  MGA_WR60, MGA_G400_WR_MAGIC);	/* tex1 height */

	/* Padding required due to hardware bug */
	DMA_BLOCK(MGA_DMAPAD, 0xffffffff,
		  MGA_DMAPAD, 0xffffffff,
		  MGA_DMAPAD, 0xffffffff,
		  MGA_WIADDR2, (dev_priv->warp_pipe_phys[pipe] |
				MGA_WMODE_START | dev_priv->wagp_enable));

	ADVANCE_DMA();
}
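
/* Switching WARP microcode pipes, as read from the two routines above:
 * the pipe is first suspended through WIADDR/WIADDR2, the new vertex
 * size (WVRTXSZ) and accept sequence (WACCEPTSEQ) are loaded, and the
 * pipe is restarted by writing the physical address of the selected
 * microcode together with MGA_WMODE_START and the AGP enable bits.  The
 * block of MGA_DMAPAD writes just before the restart works around a
 * hardware bug (per the original comments).  When leaving a dual-texture
 * (MGA_T2) pipe on the G400, the pipeline is first flushed with a tiny
 * dummy drawing operation.
 */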

static void mga_g200_emit_state(drm_mga_private_t *dev_priv)
{
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	unsigned int dirty = sarea_priv->dirty;

	if (sarea_priv->warp_pipe != dev_priv->warp_pipe) {
		mga_g200_emit_pipe(dev_priv);
		dev_priv->warp_pipe = sarea_priv->warp_pipe;
	}

	if (dirty & MGA_UPLOAD_CONTEXT) {
		mga_g200_emit_context(dev_priv);
		sarea_priv->dirty &= ~MGA_UPLOAD_CONTEXT;
	}

	if (dirty & MGA_UPLOAD_TEX0) {
		mga_g200_emit_tex0(dev_priv);
		sarea_priv->dirty &= ~MGA_UPLOAD_TEX0;
	}
}

static void mga_g400_emit_state(drm_mga_private_t *dev_priv)
{
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	unsigned int dirty = sarea_priv->dirty;
	int multitex = sarea_priv->warp_pipe & MGA_T2;

	if (sarea_priv->warp_pipe != dev_priv->warp_pipe) {
		mga_g400_emit_pipe(dev_priv);
		dev_priv->warp_pipe = sarea_priv->warp_pipe;
	}

	if (dirty & MGA_UPLOAD_CONTEXT) {
		mga_g400_emit_context(dev_priv);
		sarea_priv->dirty &= ~MGA_UPLOAD_CONTEXT;
	}

	if (dirty & MGA_UPLOAD_TEX0) {
		mga_g400_emit_tex0(dev_priv);
		sarea_priv->dirty &= ~MGA_UPLOAD_TEX0;
	}

	if ((dirty & MGA_UPLOAD_TEX1) && multitex) {
		mga_g400_emit_tex1(dev_priv);
		sarea_priv->dirty &= ~MGA_UPLOAD_TEX1;
	}
}
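
/* State is uploaded lazily: userspace marks state blocks dirty in
 * sarea_priv->dirty (MGA_UPLOAD_CONTEXT, MGA_UPLOAD_TEX0, MGA_UPLOAD_TEX1)
 * and the per-chip emit_state routine above clears each bit once the
 * corresponding registers have been written to the DMA stream.  A WARP
 * pipe switch is detected by comparing sarea_priv->warp_pipe with the
 * pipe last programmed (dev_priv->warp_pipe), and TEX1 state is only
 * emitted when the selected pipe is a dual-texture (MGA_T2) pipe.  The
 * MGA_EMIT_STATE() macro used by the dispatch functions below is taken
 * here to be the chipset dispatcher that picks the G200 or G400 variant;
 * its definition lives in mga_drv.h.
 */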

/* ================================================================
 * SAREA state verification
 */

/* Disallow all write destinations except the front and backbuffer.
 */
static int mga_verify_context(drm_mga_private_t *dev_priv)
{
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_mga_context_regs_t *ctx = &sarea_priv->context_state;

	if (ctx->dstorg != dev_priv->front_offset &&
	    ctx->dstorg != dev_priv->back_offset) {
		DRM_ERROR("*** bad DSTORG: %x (front %x, back %x)\n\n",
			  ctx->dstorg, dev_priv->front_offset,
			  dev_priv->back_offset);
		ctx->dstorg = 0;
		return -EINVAL;
	}

	return 0;
}

/* Disallow texture reads from PCI space.
 */
static int mga_verify_tex(drm_mga_private_t *dev_priv, int unit)
{
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_mga_texture_regs_t *tex = &sarea_priv->tex_state[unit];
	unsigned int org;

	org = tex->texorg & (MGA_TEXORGMAP_MASK | MGA_TEXORGACC_MASK);

	if (org == (MGA_TEXORGMAP_SYSMEM | MGA_TEXORGACC_PCI)) {
		DRM_ERROR("*** bad TEXORG: 0x%x, unit %d\n", tex->texorg, unit);
		tex->texorg = 0;
		return -EINVAL;
	}

	return 0;
}

static int mga_verify_state(drm_mga_private_t *dev_priv)
{
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	unsigned int dirty = sarea_priv->dirty;
	int ret = 0;

	if (sarea_priv->nbox > MGA_NR_SAREA_CLIPRECTS)
		sarea_priv->nbox = MGA_NR_SAREA_CLIPRECTS;

	if (dirty & MGA_UPLOAD_CONTEXT)
		ret |= mga_verify_context(dev_priv);

	if (dirty & MGA_UPLOAD_TEX0)
		ret |= mga_verify_tex(dev_priv, 0);

	if (dev_priv->chipset >= MGA_CARD_TYPE_G400) {
		if (dirty & MGA_UPLOAD_TEX1)
			ret |= mga_verify_tex(dev_priv, 1);

		if (dirty & MGA_UPLOAD_PIPE)
			ret |= (sarea_priv->warp_pipe > MGA_MAX_G400_PIPES);
	} else {
		if (dirty & MGA_UPLOAD_PIPE)
			ret |= (sarea_priv->warp_pipe > MGA_MAX_G200_PIPES);
	}

	return (ret == 0);
}
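
/* Note the inverted return convention: the individual verify helpers
 * return 0 on success or a negative errno, but mga_verify_state()
 * collapses them and returns nonzero only when the SAREA state is
 * acceptable.  The ioctl handlers below therefore reject bad state with
 * "if (!mga_verify_state(dev_priv)) return -EINVAL;".
 */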

static int mga_verify_iload(drm_mga_private_t *dev_priv,
			    unsigned int dstorg, unsigned int length)
{
	if (dstorg < dev_priv->texture_offset ||
	    dstorg + length > (dev_priv->texture_offset +
			       dev_priv->texture_size)) {
		DRM_ERROR("*** bad iload DSTORG: 0x%x\n", dstorg);
		return -EINVAL;
	}

	if (length & MGA_ILOAD_MASK) {
		DRM_ERROR("*** bad iload length: 0x%x\n",
			  length & MGA_ILOAD_MASK);
		return -EINVAL;
	}

	return 0;
}

static int mga_verify_blit(drm_mga_private_t *dev_priv,
			   unsigned int srcorg, unsigned int dstorg)
{
	if ((srcorg & 0x3) == (MGA_SRCACC_PCI | MGA_SRCMAP_SYSMEM) ||
	    (dstorg & 0x3) == (MGA_SRCACC_PCI | MGA_SRCMAP_SYSMEM)) {
		DRM_ERROR("*** bad blit: src=0x%x dst=0x%x\n", srcorg, dstorg);
		return -EINVAL;
	}
	return 0;
}
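
/* The low two bits of the SRCORG/DSTORG values appear to select where
 * the surface lives and how it is accessed; the helpers above reject the
 * combination that would let a blit touch system memory over PCI
 * (MGA_SRCMAP_SYSMEM | MGA_SRCACC_PCI), the equivalent TEXORG encoding
 * for texture fetches, and iload destinations outside the texture region
 * of the framebuffer.
 */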

/* ================================================================
 *
 */

static void mga_dma_dispatch_clear(struct drm_device *dev, drm_mga_clear_t *clear)
{
	drm_mga_private_t *dev_priv = dev->dev_private;
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_mga_context_regs_t *ctx = &sarea_priv->context_state;
	struct drm_clip_rect *pbox = sarea_priv->boxes;
	int nbox = sarea_priv->nbox;
	int i;
	DMA_LOCALS;
	DRM_DEBUG("\n");

	BEGIN_DMA(1);

	DMA_BLOCK(MGA_DMAPAD, 0x00000000,
		  MGA_DMAPAD, 0x00000000,
		  MGA_DWGSYNC, 0x00007100, MGA_DWGSYNC, 0x00007000);

	ADVANCE_DMA();

	for (i = 0; i < nbox; i++) {
		struct drm_clip_rect *box = &pbox[i];
		u32 height = box->y2 - box->y1;

		DRM_DEBUG("   from=%d,%d to=%d,%d\n",
			  box->x1, box->y1, box->x2, box->y2);

		if (clear->flags & MGA_FRONT) {
			BEGIN_DMA(2);

			DMA_BLOCK(MGA_DMAPAD, 0x00000000,
				  MGA_PLNWT, clear->color_mask,
				  MGA_YDSTLEN, (box->y1 << 16) | height,
				  MGA_FXBNDRY, (box->x2 << 16) | box->x1);

			DMA_BLOCK(MGA_DMAPAD, 0x00000000,
				  MGA_FCOL, clear->clear_color,
				  MGA_DSTORG, dev_priv->front_offset,
				  MGA_DWGCTL + MGA_EXEC, dev_priv->clear_cmd);

			ADVANCE_DMA();
		}

		if (clear->flags & MGA_BACK) {
			BEGIN_DMA(2);

			DMA_BLOCK(MGA_DMAPAD, 0x00000000,
				  MGA_PLNWT, clear->color_mask,
				  MGA_YDSTLEN, (box->y1 << 16) | height,
				  MGA_FXBNDRY, (box->x2 << 16) | box->x1);

			DMA_BLOCK(MGA_DMAPAD, 0x00000000,
				  MGA_FCOL, clear->clear_color,
				  MGA_DSTORG, dev_priv->back_offset,
				  MGA_DWGCTL + MGA_EXEC, dev_priv->clear_cmd);

			ADVANCE_DMA();
		}

		if (clear->flags & MGA_DEPTH) {
			BEGIN_DMA(2);

			DMA_BLOCK(MGA_DMAPAD, 0x00000000,
				  MGA_PLNWT, clear->depth_mask,
				  MGA_YDSTLEN, (box->y1 << 16) | height,
				  MGA_FXBNDRY, (box->x2 << 16) | box->x1);

			DMA_BLOCK(MGA_DMAPAD, 0x00000000,
				  MGA_FCOL, clear->clear_depth,
				  MGA_DSTORG, dev_priv->depth_offset,
				  MGA_DWGCTL + MGA_EXEC, dev_priv->clear_cmd);

			ADVANCE_DMA();
		}
	}

	BEGIN_DMA(1);

	/* Force reset of DWGCTL */
	DMA_BLOCK(MGA_DMAPAD, 0x00000000,
		  MGA_DMAPAD, 0x00000000,
		  MGA_PLNWT, ctx->plnwt, MGA_DWGCTL, ctx->dwgctl);

	ADVANCE_DMA();

	FLUSH_DMA();
}
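
/* Each cliprect is cleared with up to three rectangle fills (front, back
 * and depth buffers), selected by clear->flags.  YDSTLEN/FXBNDRY position
 * the rectangle, PLNWT/FCOL set the write mask and fill value, and the
 * fill is launched by writing dev_priv->clear_cmd to DWGCTL with the
 * MGA_EXEC bit.  DWGSYNC writes bracket the operation, and PLNWT/DWGCTL
 * are restored from the SAREA context state at the end.
 */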

static void mga_dma_dispatch_swap(struct drm_device *dev)
{
	drm_mga_private_t *dev_priv = dev->dev_private;
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_mga_context_regs_t *ctx = &sarea_priv->context_state;
	struct drm_clip_rect *pbox = sarea_priv->boxes;
	int nbox = sarea_priv->nbox;
	int i;
	DMA_LOCALS;
	DRM_DEBUG("\n");

	sarea_priv->last_frame.head = dev_priv->prim.tail;
	sarea_priv->last_frame.wrap = dev_priv->prim.last_wrap;

	BEGIN_DMA(4 + nbox);

	DMA_BLOCK(MGA_DMAPAD, 0x00000000,
		  MGA_DMAPAD, 0x00000000,
		  MGA_DWGSYNC, 0x00007100, MGA_DWGSYNC, 0x00007000);

	DMA_BLOCK(MGA_DSTORG, dev_priv->front_offset,
		  MGA_MACCESS, dev_priv->maccess,
		  MGA_SRCORG, dev_priv->back_offset,
		  MGA_AR5, dev_priv->front_pitch);

	DMA_BLOCK(MGA_DMAPAD, 0x00000000,
		  MGA_DMAPAD, 0x00000000,
		  MGA_PLNWT, 0xffffffff, MGA_DWGCTL, MGA_DWGCTL_COPY);

	for (i = 0; i < nbox; i++) {
		struct drm_clip_rect *box = &pbox[i];
		u32 height = box->y2 - box->y1;
		u32 start = box->y1 * dev_priv->front_pitch;

		DRM_DEBUG("   from=%d,%d to=%d,%d\n",
			  box->x1, box->y1, box->x2, box->y2);

		DMA_BLOCK(MGA_AR0, start + box->x2 - 1,
			  MGA_AR3, start + box->x1,
			  MGA_FXBNDRY, ((box->x2 - 1) << 16) | box->x1,
			  MGA_YDSTLEN + MGA_EXEC, (box->y1 << 16) | height);
	}

	DMA_BLOCK(MGA_DMAPAD, 0x00000000,
		  MGA_PLNWT, ctx->plnwt,
		  MGA_SRCORG, dev_priv->front_offset, MGA_DWGCTL, ctx->dwgctl);

	ADVANCE_DMA();

	FLUSH_DMA();

	DRM_DEBUG("... done.\n");
}
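
/* Buffer swaps are implemented as a screen-to-screen copy from the back
 * buffer to the front buffer (MGA_DWGCTL_COPY with SRCORG = back,
 * DSTORG = front), clipped to each SAREA box.  last_frame in the SAREA
 * records the current primary DMA tail and wrap count when the swap is
 * queued, presumably so that clients can later tell whether the swap has
 * actually been executed.
 */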

static void mga_dma_dispatch_vertex(struct drm_device *dev, struct drm_buf *buf)
{
	drm_mga_private_t *dev_priv = dev->dev_private;
	drm_mga_buf_priv_t *buf_priv = buf->dev_private;
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	u32 address = (u32) buf->bus_address;
	u32 length = (u32) buf->used;
	int i = 0;
	DMA_LOCALS;
	DRM_DEBUG("buf=%d used=%d\n", buf->idx, buf->used);

	if (buf->used) {
		buf_priv->dispatched = 1;

		MGA_EMIT_STATE(dev_priv, sarea_priv->dirty);

		do {
			if (i < sarea_priv->nbox) {
				mga_emit_clip_rect(dev_priv,
						   &sarea_priv->boxes[i]);
			}

			BEGIN_DMA(1);

			DMA_BLOCK(MGA_DMAPAD, 0x00000000,
				  MGA_DMAPAD, 0x00000000,
				  MGA_SECADDRESS, (address |
						   MGA_DMA_VERTEX),
				  MGA_SECEND, ((address + length) |
					       dev_priv->dma_access));

			ADVANCE_DMA();
		} while (++i < sarea_priv->nbox);
	}

	if (buf_priv->discard) {
		AGE_BUFFER(buf_priv);
		buf->pending = 0;
		buf->used = 0;
		buf_priv->dispatched = 0;

		mga_freelist_put(dev, buf);
	}

	FLUSH_DMA();
}
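
/* Vertex buffers are replayed once per cliprect: each pass emits the
 * current clip rectangle and then points the hardware at the client DMA
 * buffer through the secondary DMA registers (SECADDRESS tagged with
 * MGA_DMA_VERTEX, SECEND tagged with dev_priv->dma_access).  Note the
 * do/while: even with nbox == 0 the buffer is dispatched once, just
 * without a clip rectangle.  Buffers marked for discard are aged and
 * returned to the freelist after dispatch.
 */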

static void mga_dma_dispatch_indices(struct drm_device *dev, struct drm_buf *buf,
				     unsigned int start, unsigned int end)
{
	drm_mga_private_t *dev_priv = dev->dev_private;
	drm_mga_buf_priv_t *buf_priv = buf->dev_private;
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	u32 address = (u32) buf->bus_address;
	int i = 0;
	DMA_LOCALS;
	DRM_DEBUG("buf=%d start=%d end=%d\n", buf->idx, start, end);

	if (start != end) {
		buf_priv->dispatched = 1;

		MGA_EMIT_STATE(dev_priv, sarea_priv->dirty);

		do {
			if (i < sarea_priv->nbox) {
				mga_emit_clip_rect(dev_priv,
						   &sarea_priv->boxes[i]);
			}

			BEGIN_DMA(1);

			DMA_BLOCK(MGA_DMAPAD, 0x00000000,
				  MGA_DMAPAD, 0x00000000,
				  MGA_SETUPADDRESS, address + start,
				  MGA_SETUPEND, ((address + end) |
						 dev_priv->dma_access));

			ADVANCE_DMA();
		} while (++i < sarea_priv->nbox);
	}

	if (buf_priv->discard) {
		AGE_BUFFER(buf_priv);
		buf->pending = 0;
		buf->used = 0;
		buf_priv->dispatched = 0;

		mga_freelist_put(dev, buf);
	}

	FLUSH_DMA();
}

/* This copies a 64 byte aligned AGP region to the framebuffer with a
 * standard blit; the ioctl needs to do the checking.
 */
static void mga_dma_dispatch_iload(struct drm_device *dev, struct drm_buf *buf,
				   unsigned int dstorg, unsigned int length)
{
	drm_mga_private_t *dev_priv = dev->dev_private;
	drm_mga_buf_priv_t *buf_priv = buf->dev_private;
	drm_mga_context_regs_t *ctx = &dev_priv->sarea_priv->context_state;
	u32 srcorg =
	    buf->bus_address | dev_priv->dma_access | MGA_SRCMAP_SYSMEM;
	u32 y2;
	DMA_LOCALS;
	DRM_DEBUG("buf=%d used=%d\n", buf->idx, buf->used);

	y2 = length / 64;

	BEGIN_DMA(5);

	DMA_BLOCK(MGA_DMAPAD, 0x00000000,
		  MGA_DMAPAD, 0x00000000,
		  MGA_DWGSYNC, 0x00007100, MGA_DWGSYNC, 0x00007000);

	DMA_BLOCK(MGA_DSTORG, dstorg,
		  MGA_MACCESS, 0x00000000, MGA_SRCORG, srcorg, MGA_AR5, 64);

	DMA_BLOCK(MGA_PITCH, 64,
		  MGA_PLNWT, 0xffffffff,
		  MGA_DMAPAD, 0x00000000, MGA_DWGCTL, MGA_DWGCTL_COPY);

	DMA_BLOCK(MGA_AR0, 63,
		  MGA_AR3, 0,
		  MGA_FXBNDRY, (63 << 16) | 0, MGA_YDSTLEN + MGA_EXEC, y2);

	DMA_BLOCK(MGA_PLNWT, ctx->plnwt,
		  MGA_SRCORG, dev_priv->front_offset,
		  MGA_PITCH, dev_priv->front_pitch, MGA_DWGSYNC, 0x00007000);

	ADVANCE_DMA();

	AGE_BUFFER(buf_priv);

	buf->pending = 0;
	buf->used = 0;
	buf_priv->dispatched = 0;

	mga_freelist_put(dev, buf);

	FLUSH_DMA();
}
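
/* The image load is drawn as a copy blit 64 units wide: SRCORG points at
 * the client buffer in system/AGP memory, PITCH and AR5 are set to 64,
 * the x extent is 0..63, and the number of scanlines is length / 64.
 * With MACCESS programmed to 0 this presumably corresponds to 64 bytes
 * per row, matching the "64 byte aligned" comment above, so the whole
 * buffer streams into the destination region.  The buffer is aged and
 * put back on the freelist as soon as the blit has been queued.
 */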

static void mga_dma_dispatch_blit(struct drm_device *dev, drm_mga_blit_t *blit)
{
	drm_mga_private_t *dev_priv = dev->dev_private;
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_mga_context_regs_t *ctx = &sarea_priv->context_state;
	struct drm_clip_rect *pbox = sarea_priv->boxes;
	int nbox = sarea_priv->nbox;
	u32 scandir = 0, i;
	DMA_LOCALS;
	DRM_DEBUG("\n");

	BEGIN_DMA(4 + nbox);

	DMA_BLOCK(MGA_DMAPAD, 0x00000000,
		  MGA_DMAPAD, 0x00000000,
		  MGA_DWGSYNC, 0x00007100, MGA_DWGSYNC, 0x00007000);

	DMA_BLOCK(MGA_DWGCTL, MGA_DWGCTL_COPY,
		  MGA_PLNWT, blit->planemask,
		  MGA_SRCORG, blit->srcorg, MGA_DSTORG, blit->dstorg);

	DMA_BLOCK(MGA_SGN, scandir,
		  MGA_MACCESS, dev_priv->maccess,
		  MGA_AR5, blit->ydir * blit->src_pitch,
		  MGA_PITCH, blit->dst_pitch);

	for (i = 0; i < nbox; i++) {
		int srcx = pbox[i].x1 + blit->delta_sx;
		int srcy = pbox[i].y1 + blit->delta_sy;
		int dstx = pbox[i].x1 + blit->delta_dx;
		int dsty = pbox[i].y1 + blit->delta_dy;
		int h = pbox[i].y2 - pbox[i].y1;
		int w = pbox[i].x2 - pbox[i].x1 - 1;
		int start;

		if (blit->ydir == -1)
			srcy = blit->height - srcy - 1;

		start = srcy * blit->src_pitch + srcx;

		DMA_BLOCK(MGA_AR0, start + w,
			  MGA_AR3, start,
			  MGA_FXBNDRY, ((dstx + w) << 16) | (dstx & 0xffff),
			  MGA_YDSTLEN + MGA_EXEC, (dsty << 16) | h);
	}

	/* Do something to flush AGP?
	 */

	/* Force reset of DWGCTL */
	DMA_BLOCK(MGA_DMAPAD, 0x00000000,
		  MGA_PLNWT, ctx->plnwt,
		  MGA_PITCH, dev_priv->front_pitch, MGA_DWGCTL, ctx->dwgctl);

	ADVANCE_DMA();
}

/* ================================================================
 *
 */

static int mga_dma_clear(struct drm_device *dev, void *data, struct drm_file *file_priv)
{
	drm_mga_private_t *dev_priv = dev->dev_private;
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_mga_clear_t *clear = data;

	LOCK_TEST_WITH_RETURN(dev, file_priv);

	if (sarea_priv->nbox > MGA_NR_SAREA_CLIPRECTS)
		sarea_priv->nbox = MGA_NR_SAREA_CLIPRECTS;

	WRAP_TEST_WITH_RETURN(dev_priv);

	mga_dma_dispatch_clear(dev, clear);

	/* Make sure we restore the 3D state next time.
	 */
	dev_priv->sarea_priv->dirty |= MGA_UPLOAD_CONTEXT;

	return 0;
}

static int mga_dma_swap(struct drm_device *dev, void *data, struct drm_file *file_priv)
{
	drm_mga_private_t *dev_priv = dev->dev_private;
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;

	LOCK_TEST_WITH_RETURN(dev, file_priv);

	if (sarea_priv->nbox > MGA_NR_SAREA_CLIPRECTS)
		sarea_priv->nbox = MGA_NR_SAREA_CLIPRECTS;

	WRAP_TEST_WITH_RETURN(dev_priv);

	mga_dma_dispatch_swap(dev);

	/* Make sure we restore the 3D state next time.
	 */
	dev_priv->sarea_priv->dirty |= MGA_UPLOAD_CONTEXT;

	return 0;
}

static int mga_dma_vertex(struct drm_device *dev, void *data, struct drm_file *file_priv)
{
	drm_mga_private_t *dev_priv = dev->dev_private;
	struct drm_device_dma *dma = dev->dma;
	struct drm_buf *buf;
	drm_mga_buf_priv_t *buf_priv;
	drm_mga_vertex_t *vertex = data;

	LOCK_TEST_WITH_RETURN(dev, file_priv);

	if (vertex->idx < 0 || vertex->idx >= dma->buf_count)
		return -EINVAL;
	buf = dma->buflist[vertex->idx];
	buf_priv = buf->dev_private;

	buf->used = vertex->used;
	buf_priv->discard = vertex->discard;

	if (!mga_verify_state(dev_priv)) {
		if (vertex->discard) {
			if (buf_priv->dispatched == 1)
				AGE_BUFFER(buf_priv);
			buf_priv->dispatched = 0;
			mga_freelist_put(dev, buf);
		}
		return -EINVAL;
	}

	WRAP_TEST_WITH_RETURN(dev_priv);

	mga_dma_dispatch_vertex(dev, buf);

	return 0;
}
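
/* In mga_dma_vertex() above and mga_dma_indices() below, a buffer whose
 * SAREA state fails verification is not dispatched; if the client asked
 * for it to be discarded it is still aged (when it had previously been
 * dispatched) and returned to the freelist so it is not leaked, and the
 * ioctl fails with -EINVAL.
 */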

static int mga_dma_indices(struct drm_device *dev, void *data, struct drm_file *file_priv)
{
	drm_mga_private_t *dev_priv = dev->dev_private;
	struct drm_device_dma *dma = dev->dma;
	struct drm_buf *buf;
	drm_mga_buf_priv_t *buf_priv;
	drm_mga_indices_t *indices = data;

	LOCK_TEST_WITH_RETURN(dev, file_priv);

	if (indices->idx < 0 || indices->idx >= dma->buf_count)
		return -EINVAL;
	buf = dma->buflist[indices->idx];
	buf_priv = buf->dev_private;

	buf_priv->discard = indices->discard;

	if (!mga_verify_state(dev_priv)) {
		if (indices->discard) {
			if (buf_priv->dispatched == 1)
				AGE_BUFFER(buf_priv);
			buf_priv->dispatched = 0;
			mga_freelist_put(dev, buf);
		}
		return -EINVAL;
	}

	WRAP_TEST_WITH_RETURN(dev_priv);

	mga_dma_dispatch_indices(dev, buf, indices->start, indices->end);

	return 0;
}

static int mga_dma_iload(struct drm_device *dev, void *data, struct drm_file *file_priv)
{
	struct drm_device_dma *dma = dev->dma;
	drm_mga_private_t *dev_priv = dev->dev_private;
	struct drm_buf *buf;
	drm_mga_buf_priv_t *buf_priv;
	drm_mga_iload_t *iload = data;
	DRM_DEBUG("\n");

	LOCK_TEST_WITH_RETURN(dev, file_priv);

#if 0
	if (mga_do_wait_for_idle(dev_priv) < 0) {
		if (MGA_DMA_DEBUG)
			DRM_INFO("-EBUSY\n");
		return -EBUSY;
	}
#endif
	if (iload->idx < 0 || iload->idx >= dma->buf_count)
		return -EINVAL;

	buf = dma->buflist[iload->idx];
	buf_priv = buf->dev_private;

	if (mga_verify_iload(dev_priv, iload->dstorg, iload->length)) {
		mga_freelist_put(dev, buf);
		return -EINVAL;
	}

	WRAP_TEST_WITH_RETURN(dev_priv);

	mga_dma_dispatch_iload(dev, buf, iload->dstorg, iload->length);

	/* Make sure we restore the 3D state next time.
	 */
	dev_priv->sarea_priv->dirty |= MGA_UPLOAD_CONTEXT;

	return 0;
}

static int mga_dma_blit(struct drm_device *dev, void *data, struct drm_file *file_priv)
{
	drm_mga_private_t *dev_priv = dev->dev_private;
	drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_mga_blit_t *blit = data;
	DRM_DEBUG("\n");

	LOCK_TEST_WITH_RETURN(dev, file_priv);

	if (sarea_priv->nbox > MGA_NR_SAREA_CLIPRECTS)
		sarea_priv->nbox = MGA_NR_SAREA_CLIPRECTS;

	if (mga_verify_blit(dev_priv, blit->srcorg, blit->dstorg))
		return -EINVAL;

	WRAP_TEST_WITH_RETURN(dev_priv);

	mga_dma_dispatch_blit(dev, blit);

	/* Make sure we restore the 3D state next time.
	 */
	dev_priv->sarea_priv->dirty |= MGA_UPLOAD_CONTEXT;

	return 0;
}

static int mga_getparam(struct drm_device *dev, void *data, struct drm_file *file_priv)
{
	drm_mga_private_t *dev_priv = dev->dev_private;
	drm_mga_getparam_t *param = data;
	int value;

	if (!dev_priv) {
		DRM_ERROR("called with no initialization\n");
		return -EINVAL;
	}

	DRM_DEBUG("pid=%d\n", DRM_CURRENTPID);

	switch (param->param) {
	case MGA_PARAM_IRQ_NR:
		value = dev->pdev->irq;
		break;
	case MGA_PARAM_CARD_TYPE:
		value = dev_priv->chipset;
		break;
	default:
		return -EINVAL;
	}

	if (copy_to_user(param->value, &value, sizeof(int))) {
		DRM_ERROR("copy_to_user\n");
		return -EFAULT;
	}

	return 0;
}
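
/* Hypothetical userspace usage sketch (not part of this driver): the
 * parameter query maps onto libdrm's command ioctls, so a client would
 * do something along the lines of:
 *
 *	drm_mga_getparam_t gp;
 *	int irq;
 *
 *	gp.param = MGA_PARAM_IRQ_NR;
 *	gp.value = &irq;
 *	if (drmCommandWriteRead(fd, DRM_MGA_GETPARAM, &gp, sizeof(gp)) == 0)
 *		;	/* irq now holds the device interrupt number */
 */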

static int mga_set_fence(struct drm_device *dev, void *data, struct drm_file *file_priv)
{
	drm_mga_private_t *dev_priv = dev->dev_private;
	u32 *fence = data;
	DMA_LOCALS;

	if (!dev_priv) {
		DRM_ERROR("called with no initialization\n");
		return -EINVAL;
	}

	DRM_DEBUG("pid=%d\n", DRM_CURRENTPID);

	/* I would normally do this assignment in the declaration of fence,
	 * but dev_priv may be NULL.
	 */

	*fence = dev_priv->next_fence_to_post;
	dev_priv->next_fence_to_post++;

	BEGIN_DMA(1);

	DMA_BLOCK(MGA_DMAPAD, 0x00000000,
		  MGA_DMAPAD, 0x00000000,
		  MGA_DMAPAD, 0x00000000, MGA_SOFTRAP, 0x00000000);

	ADVANCE_DMA();

	return 0;
}
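
/* The fence is just a monotonically increasing counter: SET_FENCE above
 * hands the next value back to the caller and queues an MGA_SOFTRAP
 * write, which the interrupt code uses to signal that the primary DMA
 * stream has reached that point; WAIT_FENCE below then blocks in
 * mga_driver_fence_wait() until the requested value has been retired.
 * (The interrupt-side behaviour is a description of how the rest of the
 * driver is expected to handle SOFTRAP, not something visible in this
 * file.)
 */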

static int mga_wait_fence(struct drm_device *dev, void *data, struct drm_file *file_priv)
{
	drm_mga_private_t *dev_priv = dev->dev_private;
	u32 *fence = data;

	if (!dev_priv) {
		DRM_ERROR("called with no initialization\n");
		return -EINVAL;
	}

	DRM_DEBUG("pid=%d\n", DRM_CURRENTPID);

	mga_driver_fence_wait(dev, fence);

	return 0;
}

const struct drm_ioctl_desc mga_ioctls[] = {
	DRM_IOCTL_DEF_DRV(MGA_INIT, mga_dma_init, DRM_AUTH|DRM_MASTER|DRM_ROOT_ONLY),
	DRM_IOCTL_DEF_DRV(MGA_FLUSH, mga_dma_flush, DRM_AUTH),
	DRM_IOCTL_DEF_DRV(MGA_RESET, mga_dma_reset, DRM_AUTH),
	DRM_IOCTL_DEF_DRV(MGA_SWAP, mga_dma_swap, DRM_AUTH),
	DRM_IOCTL_DEF_DRV(MGA_CLEAR, mga_dma_clear, DRM_AUTH),
	DRM_IOCTL_DEF_DRV(MGA_VERTEX, mga_dma_vertex, DRM_AUTH),
	DRM_IOCTL_DEF_DRV(MGA_INDICES, mga_dma_indices, DRM_AUTH),
	DRM_IOCTL_DEF_DRV(MGA_ILOAD, mga_dma_iload, DRM_AUTH),
	DRM_IOCTL_DEF_DRV(MGA_BLIT, mga_dma_blit, DRM_AUTH),
	DRM_IOCTL_DEF_DRV(MGA_GETPARAM, mga_getparam, DRM_AUTH),
	DRM_IOCTL_DEF_DRV(MGA_SET_FENCE, mga_set_fence, DRM_AUTH),
	DRM_IOCTL_DEF_DRV(MGA_WAIT_FENCE, mga_wait_fence, DRM_AUTH),
	DRM_IOCTL_DEF_DRV(MGA_DMA_BOOTSTRAP, mga_dma_bootstrap, DRM_AUTH|DRM_MASTER|DRM_ROOT_ONLY),
};

int mga_max_ioctl = ARRAY_SIZE(mga_ioctls);