  1. /* r128_state.c -- State support for r128 -*- linux-c -*-
  2. * Created: Thu Jan 27 02:53:43 2000 by gareth@valinux.com
  3. */
  4. /*
  5. * Copyright 2000 VA Linux Systems, Inc., Sunnyvale, California.
  6. * All Rights Reserved.
  7. *
  8. * Permission is hereby granted, free of charge, to any person obtaining a
  9. * copy of this software and associated documentation files (the "Software"),
  10. * to deal in the Software without restriction, including without limitation
  11. * the rights to use, copy, modify, merge, publish, distribute, sublicense,
  12. * and/or sell copies of the Software, and to permit persons to whom the
  13. * Software is furnished to do so, subject to the following conditions:
  14. *
  15. * The above copyright notice and this permission notice (including the next
  16. * paragraph) shall be included in all copies or substantial portions of the
  17. * Software.
  18. *
  19. * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
  20. * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
  21. * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
  22. * PRECISION INSIGHT AND/OR ITS SUPPLIERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
  23. * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
  24. * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
  25. * DEALINGS IN THE SOFTWARE.
  26. *
  27. * Authors:
  28. * Gareth Hughes <gareth@valinux.com>
  29. */
  30. #include <drm/drmP.h>
  31. #include <drm/r128_drm.h>
  32. #include "r128_drv.h"
  33. /* ================================================================
  34. * CCE hardware state programming functions
  35. */
/* Emit up to three hardware clip rectangles (AUX scissors).
 *
 * Consumes at most the first three entries of @boxes.  Each rectangle
 * is loaded into one AUX scissor register block and its enable bit is
 * accumulated; a final write to R128_AUX_SC_CNTL then enables exactly
 * the scissors that were loaded (unused ones stay disabled because
 * aux_sc_cntl starts at zero).
 *
 * NOTE(review): x2/y2 are written minus one -- drm_clip_rect edges are
 * exclusive while the hardware limits are presumably inclusive.
 */
static void r128_emit_clip_rects(drm_r128_private_t *dev_priv,
				 struct drm_clip_rect *boxes, int count)
{
	u32 aux_sc_cntl = 0x00000000;
	RING_LOCALS;
	DRM_DEBUG("\n");

	/* 5 dwords per rect (packet header + 4 edges), plus the trailing
	 * 2-dword AUX_SC_CNTL write; count is clamped to 3 rects.
	 */
	BEGIN_RING((count < 3 ? count : 3) * 5 + 2);

	if (count >= 1) {
		OUT_RING(CCE_PACKET0(R128_AUX1_SC_LEFT, 3));
		OUT_RING(boxes[0].x1);
		OUT_RING(boxes[0].x2 - 1);
		OUT_RING(boxes[0].y1);
		OUT_RING(boxes[0].y2 - 1);
		aux_sc_cntl |= (R128_AUX1_SC_EN | R128_AUX1_SC_MODE_OR);
	}
	if (count >= 2) {
		OUT_RING(CCE_PACKET0(R128_AUX2_SC_LEFT, 3));
		OUT_RING(boxes[1].x1);
		OUT_RING(boxes[1].x2 - 1);
		OUT_RING(boxes[1].y1);
		OUT_RING(boxes[1].y2 - 1);
		aux_sc_cntl |= (R128_AUX2_SC_EN | R128_AUX2_SC_MODE_OR);
	}
	if (count >= 3) {
		OUT_RING(CCE_PACKET0(R128_AUX3_SC_LEFT, 3));
		OUT_RING(boxes[2].x1);
		OUT_RING(boxes[2].x2 - 1);
		OUT_RING(boxes[2].y1);
		OUT_RING(boxes[2].y2 - 1);
		aux_sc_cntl |= (R128_AUX3_SC_EN | R128_AUX3_SC_MODE_OR);
	}

	/* Commit the accumulated enable/mode bits in a single write. */
	OUT_RING(CCE_PACKET0(R128_AUX_SC_CNTL, 0));
	OUT_RING(aux_sc_cntl);

	ADVANCE_RING();
}
/* Emit the "core" state group: a single write of the shared-area
 * SCALE_3D_CNTL value to the hardware.
 */
static __inline__ void r128_emit_core(drm_r128_private_t *dev_priv)
{
	drm_r128_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_r128_context_regs_t *ctx = &sarea_priv->context_state;
	RING_LOCALS;
	DRM_DEBUG("\n");

	BEGIN_RING(2);

	OUT_RING(CCE_PACKET0(R128_SCALE_3D_CNTL, 0));
	OUT_RING(ctx->scale_3d_cntl);

	ADVANCE_RING();
}
/* Emit the main context state group: one PACKET0 burst of 12 registers
 * starting at R128_DST_PITCH_OFFSET_C, taken from the client-visible
 * SAREA context state.  The OUT_RING order must match the hardware
 * register layout starting at that base register.
 */
static __inline__ void r128_emit_context(drm_r128_private_t *dev_priv)
{
	drm_r128_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_r128_context_regs_t *ctx = &sarea_priv->context_state;
	RING_LOCALS;
	DRM_DEBUG("\n");

	BEGIN_RING(13);

	OUT_RING(CCE_PACKET0(R128_DST_PITCH_OFFSET_C, 11));
	OUT_RING(ctx->dst_pitch_offset_c);
	OUT_RING(ctx->dp_gui_master_cntl_c);
	OUT_RING(ctx->sc_top_left_c);
	OUT_RING(ctx->sc_bottom_right_c);
	OUT_RING(ctx->z_offset_c);
	OUT_RING(ctx->z_pitch_c);
	OUT_RING(ctx->z_sten_cntl_c);
	OUT_RING(ctx->tex_cntl_c);
	OUT_RING(ctx->misc_3d_state_cntl_reg);
	OUT_RING(ctx->texture_clr_cmp_clr_c);
	OUT_RING(ctx->texture_clr_cmp_msk_c);
	OUT_RING(ctx->fog_color_c);

	ADVANCE_RING();
}
/* Emit the triangle-setup state group: a PACKET1 write of two
 * (non-contiguous) registers, SETUP_CNTL and PM4_VC_FPU_SETUP.
 */
static __inline__ void r128_emit_setup(drm_r128_private_t *dev_priv)
{
	drm_r128_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_r128_context_regs_t *ctx = &sarea_priv->context_state;
	RING_LOCALS;
	DRM_DEBUG("\n");

	BEGIN_RING(3);

	OUT_RING(CCE_PACKET1(R128_SETUP_CNTL, R128_PM4_VC_FPU_SETUP));
	OUT_RING(ctx->setup_cntl);
	OUT_RING(ctx->pm4_vc_fpu_setup);

	ADVANCE_RING();
}
/* Emit the write-mask state group: the 2D write mask, then the stencil
 * reference/mask and 3D plane mask as a two-register burst.
 */
static __inline__ void r128_emit_masks(drm_r128_private_t *dev_priv)
{
	drm_r128_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_r128_context_regs_t *ctx = &sarea_priv->context_state;
	RING_LOCALS;
	DRM_DEBUG("\n");

	BEGIN_RING(5);

	OUT_RING(CCE_PACKET0(R128_DP_WRITE_MASK, 0));
	OUT_RING(ctx->dp_write_mask);

	OUT_RING(CCE_PACKET0(R128_STEN_REF_MASK_C, 1));
	OUT_RING(ctx->sten_ref_mask_c);
	OUT_RING(ctx->plane_3d_mask_c);

	ADVANCE_RING();
}
/* Emit the window state group: a single write of the window X/Y offset
 * register from the SAREA context state.
 */
static __inline__ void r128_emit_window(drm_r128_private_t *dev_priv)
{
	drm_r128_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_r128_context_regs_t *ctx = &sarea_priv->context_state;
	RING_LOCALS;
	DRM_DEBUG("\n");

	BEGIN_RING(2);

	OUT_RING(CCE_PACKET0(R128_WINDOW_XY_OFFSET, 0));
	OUT_RING(ctx->window_xy_offset);

	ADVANCE_RING();
}
/* Emit state for the primary texture unit (unit 0): control registers,
 * size/pitch, one offset per mipmap level, and finally the constant
 * color plus this unit's border color.
 */
static __inline__ void r128_emit_tex0(drm_r128_private_t *dev_priv)
{
	drm_r128_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_r128_context_regs_t *ctx = &sarea_priv->context_state;
	drm_r128_texture_regs_t *tex = &sarea_priv->tex_state[0];
	int i;
	RING_LOCALS;
	DRM_DEBUG("\n");

	BEGIN_RING(7 + R128_MAX_TEXTURE_LEVELS);

	/* Burst: tex_cntl, tex_combine_cntl, size/pitch, then the
	 * per-mipmap-level offsets.
	 */
	OUT_RING(CCE_PACKET0(R128_PRIM_TEX_CNTL_C,
			     2 + R128_MAX_TEXTURE_LEVELS));
	OUT_RING(tex->tex_cntl);
	OUT_RING(tex->tex_combine_cntl);
	OUT_RING(ctx->tex_size_pitch_c);
	for (i = 0; i < R128_MAX_TEXTURE_LEVELS; i++)
		OUT_RING(tex->tex_offset[i]);

	OUT_RING(CCE_PACKET0(R128_CONSTANT_COLOR_C, 1));
	OUT_RING(ctx->constant_color_c);
	OUT_RING(tex->tex_border_color);

	ADVANCE_RING();
}
/* Emit state for the secondary texture unit (unit 1).  Same layout as
 * unit 0 except there is no size/pitch or constant-color register in
 * this range, so the burst is one dword shorter.
 */
static __inline__ void r128_emit_tex1(drm_r128_private_t *dev_priv)
{
	drm_r128_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_r128_texture_regs_t *tex = &sarea_priv->tex_state[1];
	int i;
	RING_LOCALS;
	DRM_DEBUG("\n");

	BEGIN_RING(5 + R128_MAX_TEXTURE_LEVELS);

	OUT_RING(CCE_PACKET0(R128_SEC_TEX_CNTL_C, 1 + R128_MAX_TEXTURE_LEVELS));
	OUT_RING(tex->tex_cntl);
	OUT_RING(tex->tex_combine_cntl);
	for (i = 0; i < R128_MAX_TEXTURE_LEVELS; i++)
		OUT_RING(tex->tex_offset[i]);

	OUT_RING(CCE_PACKET0(R128_SEC_TEXTURE_BORDER_COLOR_C, 0));
	OUT_RING(tex->tex_border_color);

	ADVANCE_RING();
}
/* Flush all dirty state groups from the SAREA to the hardware.
 *
 * Each R128_UPLOAD_* bit in sarea_priv->dirty selects one emit helper;
 * the bit is cleared as soon as that group has been queued.  The local
 * 'dirty' snapshot is only used for the tests, so clearing bits in the
 * SAREA does not affect which groups are emitted this call.
 */
static void r128_emit_state(drm_r128_private_t *dev_priv)
{
	drm_r128_sarea_t *sarea_priv = dev_priv->sarea_priv;
	unsigned int dirty = sarea_priv->dirty;

	DRM_DEBUG("dirty=0x%08x\n", dirty);

	if (dirty & R128_UPLOAD_CORE) {
		r128_emit_core(dev_priv);
		sarea_priv->dirty &= ~R128_UPLOAD_CORE;
	}

	if (dirty & R128_UPLOAD_CONTEXT) {
		r128_emit_context(dev_priv);
		sarea_priv->dirty &= ~R128_UPLOAD_CONTEXT;
	}

	if (dirty & R128_UPLOAD_SETUP) {
		r128_emit_setup(dev_priv);
		sarea_priv->dirty &= ~R128_UPLOAD_SETUP;
	}

	if (dirty & R128_UPLOAD_MASKS) {
		r128_emit_masks(dev_priv);
		sarea_priv->dirty &= ~R128_UPLOAD_MASKS;
	}

	if (dirty & R128_UPLOAD_WINDOW) {
		r128_emit_window(dev_priv);
		sarea_priv->dirty &= ~R128_UPLOAD_WINDOW;
	}

	if (dirty & R128_UPLOAD_TEX0) {
		r128_emit_tex0(dev_priv);
		sarea_priv->dirty &= ~R128_UPLOAD_TEX0;
	}

	if (dirty & R128_UPLOAD_TEX1) {
		r128_emit_tex1(dev_priv);
		sarea_priv->dirty &= ~R128_UPLOAD_TEX1;
	}

	/* Turn off the texture cache flushing */
	sarea_priv->context_state.tex_cntl_c &= ~R128_TEX_CACHE_FLUSH;

	sarea_priv->dirty &= ~R128_REQUIRE_QUIESCENCE;
}
  216. #if R128_PERFORMANCE_BOXES
  217. /* ================================================================
  218. * Performance monitoring functions
  219. */
/* Fill a small w x h rectangle of the back buffer with a solid RGB
 * color -- used only for the on-screen performance boxes.
 *
 * The 8-bit r/g/b components are packed into the framebuffer's native
 * format (565 for 16bpp, 888 for 24bpp, 8888 with full alpha for
 * 32bpp); any other depth silently does nothing.
 */
static void r128_clear_box(drm_r128_private_t *dev_priv,
			   int x, int y, int w, int h, int r, int g, int b)
{
	u32 pitch, offset;
	u32 fb_bpp, color;
	RING_LOCALS;

	switch (dev_priv->fb_bpp) {
	case 16:
		fb_bpp = R128_GMC_DST_16BPP;
		color = (((r & 0xf8) << 8) |
			 ((g & 0xfc) << 3) | ((b & 0xf8) >> 3));
		break;
	case 24:
		fb_bpp = R128_GMC_DST_24BPP;
		color = ((r << 16) | (g << 8) | b);
		break;
	case 32:
		fb_bpp = R128_GMC_DST_32BPP;
		color = (((0xff) << 24) | (r << 16) | (g << 8) | b);
		break;
	default:
		/* Unsupported framebuffer depth: skip the box. */
		return;
	}

	/* Always draw into the back buffer.  NOTE(review): pitch >> 3
	 * presumably converts bytes to the hardware's pitch units.
	 */
	offset = dev_priv->back_offset;
	pitch = dev_priv->back_pitch >> 3;

	BEGIN_RING(6);

	OUT_RING(CCE_PACKET3(R128_CNTL_PAINT_MULTI, 4));
	OUT_RING(R128_GMC_DST_PITCH_OFFSET_CNTL |
		 R128_GMC_BRUSH_SOLID_COLOR |
		 fb_bpp |
		 R128_GMC_SRC_DATATYPE_COLOR |
		 R128_ROP3_P |
		 R128_GMC_CLR_CMP_CNTL_DIS | R128_GMC_AUX_CLIP_DIS);

	OUT_RING((pitch << 21) | (offset >> 5));
	OUT_RING(color);

	OUT_RING((x << 16) | y);
	OUT_RING((w << 16) | h);

	ADVANCE_RING();
}
/* Draw the performance indicator: a green box when the engine never
 * went idle since the last swap (idle_count still zero); otherwise
 * just reset the idle counter for the next frame.
 */
static void r128_cce_performance_boxes(drm_r128_private_t *dev_priv)
{
	if (atomic_read(&dev_priv->idle_count) == 0)
		r128_clear_box(dev_priv, 64, 4, 8, 8, 0, 255, 0);
	else
		atomic_set(&dev_priv->idle_count, 0);
}
  266. #endif
  267. /* ================================================================
  268. * CCE command dispatch functions
  269. */
/* Log a human-readable decode of the dirty-state flag word (debugging
 * aid; callers gate it behind an "if (0)").
 */
static void r128_print_dirty(const char *msg, unsigned int flags)
{
	DRM_INFO("%s: (0x%x) %s%s%s%s%s%s%s%s%s\n",
		 msg,
		 flags,
		 (flags & R128_UPLOAD_CORE) ? "core, " : "",
		 (flags & R128_UPLOAD_CONTEXT) ? "context, " : "",
		 (flags & R128_UPLOAD_SETUP) ? "setup, " : "",
		 (flags & R128_UPLOAD_TEX0) ? "tex0, " : "",
		 (flags & R128_UPLOAD_TEX1) ? "tex1, " : "",
		 (flags & R128_UPLOAD_MASKS) ? "masks, " : "",
		 (flags & R128_UPLOAD_WINDOW) ? "window, " : "",
		 (flags & R128_UPLOAD_CLIPRECTS) ? "cliprects, " : "",
		 (flags & R128_REQUIRE_QUIESCENCE) ? "quiescence, " : "");
}
/* Clear the front/back/depth buffers over every client clip rect.
 *
 * For each rectangle in the SAREA box list, emits a solid-color
 * PAINT_MULTI fill per requested buffer.  When page flipping is active
 * and the back buffer is currently displayed (current_page == 1), the
 * FRONT and BACK flags are swapped so the clear hits the buffers the
 * client actually means.
 */
static void r128_cce_dispatch_clear(struct drm_device *dev,
				    drm_r128_clear_t *clear)
{
	drm_r128_private_t *dev_priv = dev->dev_private;
	drm_r128_sarea_t *sarea_priv = dev_priv->sarea_priv;
	int nbox = sarea_priv->nbox;
	struct drm_clip_rect *pbox = sarea_priv->boxes;
	unsigned int flags = clear->flags;
	int i;
	RING_LOCALS;
	DRM_DEBUG("\n");

	if (dev_priv->page_flipping && dev_priv->current_page == 1) {
		/* Buffers are flipped: exchange FRONT and BACK. */
		unsigned int tmp = flags;

		flags &= ~(R128_FRONT | R128_BACK);
		if (tmp & R128_FRONT)
			flags |= R128_BACK;
		if (tmp & R128_BACK)
			flags |= R128_FRONT;
	}

	for (i = 0; i < nbox; i++) {
		int x = pbox[i].x1;
		int y = pbox[i].y1;
		int w = pbox[i].x2 - x;
		int h = pbox[i].y2 - y;

		DRM_DEBUG("dispatch clear %d,%d-%d,%d flags 0x%x\n",
			  pbox[i].x1, pbox[i].y1, pbox[i].x2,
			  pbox[i].y2, flags);

		/* Color clears honour the client's color write mask;
		 * program it once per rect before the fills.
		 */
		if (flags & (R128_FRONT | R128_BACK)) {
			BEGIN_RING(2);

			OUT_RING(CCE_PACKET0(R128_DP_WRITE_MASK, 0));
			OUT_RING(clear->color_mask);

			ADVANCE_RING();
		}

		if (flags & R128_FRONT) {
			BEGIN_RING(6);

			OUT_RING(CCE_PACKET3(R128_CNTL_PAINT_MULTI, 4));
			OUT_RING(R128_GMC_DST_PITCH_OFFSET_CNTL |
				 R128_GMC_BRUSH_SOLID_COLOR |
				 (dev_priv->color_fmt << 8) |
				 R128_GMC_SRC_DATATYPE_COLOR |
				 R128_ROP3_P |
				 R128_GMC_CLR_CMP_CNTL_DIS |
				 R128_GMC_AUX_CLIP_DIS);

			OUT_RING(dev_priv->front_pitch_offset_c);
			OUT_RING(clear->clear_color);

			OUT_RING((x << 16) | y);
			OUT_RING((w << 16) | h);

			ADVANCE_RING();
		}

		if (flags & R128_BACK) {
			BEGIN_RING(6);

			OUT_RING(CCE_PACKET3(R128_CNTL_PAINT_MULTI, 4));
			OUT_RING(R128_GMC_DST_PITCH_OFFSET_CNTL |
				 R128_GMC_BRUSH_SOLID_COLOR |
				 (dev_priv->color_fmt << 8) |
				 R128_GMC_SRC_DATATYPE_COLOR |
				 R128_ROP3_P |
				 R128_GMC_CLR_CMP_CNTL_DIS |
				 R128_GMC_AUX_CLIP_DIS);

			OUT_RING(dev_priv->back_pitch_offset_c);
			OUT_RING(clear->clear_color);

			OUT_RING((x << 16) | y);
			OUT_RING((w << 16) | h);

			ADVANCE_RING();
		}

		if (flags & R128_DEPTH) {
			/* Depth clear ignores the write mask
			 * (R128_GMC_WR_MSK_DIS) and uses the depth format
			 * and depth clear value.
			 */
			BEGIN_RING(6);

			OUT_RING(CCE_PACKET3(R128_CNTL_PAINT_MULTI, 4));
			OUT_RING(R128_GMC_DST_PITCH_OFFSET_CNTL |
				 R128_GMC_BRUSH_SOLID_COLOR |
				 (dev_priv->depth_fmt << 8) |
				 R128_GMC_SRC_DATATYPE_COLOR |
				 R128_ROP3_P |
				 R128_GMC_CLR_CMP_CNTL_DIS |
				 R128_GMC_AUX_CLIP_DIS | R128_GMC_WR_MSK_DIS);

			OUT_RING(dev_priv->depth_pitch_offset_c);
			OUT_RING(clear->clear_depth);

			OUT_RING((x << 16) | y);
			OUT_RING((w << 16) | h);

			ADVANCE_RING();
		}
	}
}
/* Swap buffers by blitting the back buffer to the front buffer, one
 * BITBLT_MULTI per client clip rect, then bump the frame counter the
 * client throttles against.
 */
static void r128_cce_dispatch_swap(struct drm_device *dev)
{
	drm_r128_private_t *dev_priv = dev->dev_private;
	drm_r128_sarea_t *sarea_priv = dev_priv->sarea_priv;
	int nbox = sarea_priv->nbox;
	struct drm_clip_rect *pbox = sarea_priv->boxes;
	int i;
	RING_LOCALS;
	DRM_DEBUG("\n");

#if R128_PERFORMANCE_BOXES
	/* Do some trivial performance monitoring...
	 */
	r128_cce_performance_boxes(dev_priv);
#endif

	for (i = 0; i < nbox; i++) {
		int x = pbox[i].x1;
		int y = pbox[i].y1;
		int w = pbox[i].x2 - x;
		int h = pbox[i].y2 - y;

		BEGIN_RING(7);

		OUT_RING(CCE_PACKET3(R128_CNTL_BITBLT_MULTI, 5));
		OUT_RING(R128_GMC_SRC_PITCH_OFFSET_CNTL |
			 R128_GMC_DST_PITCH_OFFSET_CNTL |
			 R128_GMC_BRUSH_NONE |
			 (dev_priv->color_fmt << 8) |
			 R128_GMC_SRC_DATATYPE_COLOR |
			 R128_ROP3_S |
			 R128_DP_SRC_SOURCE_MEMORY |
			 R128_GMC_CLR_CMP_CNTL_DIS |
			 R128_GMC_AUX_CLIP_DIS | R128_GMC_WR_MSK_DIS);

		/* Make this work even if front & back are flipped:
		 */
		if (dev_priv->current_page == 0) {
			OUT_RING(dev_priv->back_pitch_offset_c);
			OUT_RING(dev_priv->front_pitch_offset_c);
		} else {
			OUT_RING(dev_priv->front_pitch_offset_c);
			OUT_RING(dev_priv->back_pitch_offset_c);
		}

		/* Source and destination coordinates are identical:
		 * copy the rect in place from back to front.
		 */
		OUT_RING((x << 16) | y);
		OUT_RING((x << 16) | y);
		OUT_RING((w << 16) | h);

		ADVANCE_RING();
	}

	/* Increment the frame counter.  The client-side 3D driver must
	 * throttle the framerate by waiting for this value before
	 * performing the swapbuffer ioctl.
	 */
	dev_priv->sarea_priv->last_frame++;

	BEGIN_RING(2);

	OUT_RING(CCE_PACKET0(R128_LAST_FRAME_REG, 0));
	OUT_RING(dev_priv->sarea_priv->last_frame);

	ADVANCE_RING();
}
/* Swap buffers by page flipping: wait for the previous flip to
 * complete, point the CRTC at the other buffer, then toggle the
 * current-page bookkeeping and bump the throttling frame counter.
 */
static void r128_cce_dispatch_flip(struct drm_device *dev)
{
	drm_r128_private_t *dev_priv = dev->dev_private;
	RING_LOCALS;
	DRM_DEBUG("page=%d pfCurrentPage=%d\n",
		  dev_priv->current_page, dev_priv->sarea_priv->pfCurrentPage);

#if R128_PERFORMANCE_BOXES
	/* Do some trivial performance monitoring...
	 */
	r128_cce_performance_boxes(dev_priv);
#endif

	BEGIN_RING(4);

	R128_WAIT_UNTIL_PAGE_FLIPPED();
	OUT_RING(CCE_PACKET0(R128_CRTC_OFFSET, 0));

	/* Display whichever buffer is NOT currently being scanned out. */
	if (dev_priv->current_page == 0)
		OUT_RING(dev_priv->back_offset);
	else
		OUT_RING(dev_priv->front_offset);

	ADVANCE_RING();

	/* Increment the frame counter.  The client-side 3D driver must
	 * throttle the framerate by waiting for this value before
	 * performing the swapbuffer ioctl.
	 */
	dev_priv->sarea_priv->last_frame++;
	dev_priv->sarea_priv->pfCurrentPage = dev_priv->current_page =
	    1 - dev_priv->current_page;

	BEGIN_RING(2);

	OUT_RING(CCE_PACKET0(R128_LAST_FRAME_REG, 0));
	OUT_RING(dev_priv->sarea_priv->last_frame);

	ADVANCE_RING();
}
/* Dispatch a client vertex buffer.
 *
 * Flushes any dirty state first, then re-issues the same vertex-render
 * packet once per group of up to three cliprects (r128_emit_clip_rects
 * consumes at most three boxes per call, hence the i += 3 stride).  The
 * do/while runs at least once, so a buffer with no cliprects is still
 * rendered unclipped.  A discarded buffer gets an age stamp emitted so
 * it can be reclaimed once the hardware passes it.
 */
static void r128_cce_dispatch_vertex(struct drm_device *dev, struct drm_buf *buf)
{
	drm_r128_private_t *dev_priv = dev->dev_private;
	drm_r128_buf_priv_t *buf_priv = buf->dev_private;
	drm_r128_sarea_t *sarea_priv = dev_priv->sarea_priv;
	int format = sarea_priv->vc_format;
	int offset = buf->bus_address;
	int size = buf->used;
	int prim = buf_priv->prim;
	int i = 0;
	RING_LOCALS;
	DRM_DEBUG("buf=%d nbox=%d\n", buf->idx, sarea_priv->nbox);

	if (0)
		r128_print_dirty("dispatch_vertex", sarea_priv->dirty);

	if (buf->used) {
		buf_priv->dispatched = 1;

		if (sarea_priv->dirty & ~R128_UPLOAD_CLIPRECTS)
			r128_emit_state(dev_priv);

		do {
			/* Emit the next set of up to three cliprects */
			if (i < sarea_priv->nbox) {
				r128_emit_clip_rects(dev_priv,
						     &sarea_priv->boxes[i],
						     sarea_priv->nbox - i);
			}

			/* Emit the vertex buffer rendering commands */
			BEGIN_RING(5);

			OUT_RING(CCE_PACKET3(R128_3D_RNDR_GEN_INDX_PRIM, 3));
			OUT_RING(offset);
			OUT_RING(size);
			OUT_RING(format);
			OUT_RING(prim | R128_CCE_VC_CNTL_PRIM_WALK_LIST |
				 (size << R128_CCE_VC_CNTL_NUM_SHIFT));

			ADVANCE_RING();

			i += 3;
		} while (i < sarea_priv->nbox);
	}

	if (buf_priv->discard) {
		buf_priv->age = dev_priv->sarea_priv->last_dispatch;

		/* Emit the vertex buffer age */
		BEGIN_RING(2);

		OUT_RING(CCE_PACKET0(R128_LAST_DISPATCH_REG, 0));
		OUT_RING(buf_priv->age);

		ADVANCE_RING();

		buf->pending = 1;
		buf->used = 0;
		/* FIXME: Check dispatched field */
		buf_priv->dispatched = 0;
	}

	dev_priv->sarea_priv->last_dispatch++;

	sarea_priv->dirty &= ~R128_UPLOAD_CLIPRECTS;
	sarea_priv->nbox = 0;
}
/* Fire off the [start, end) byte range of an indirect buffer.
 *
 * The dword count is the byte length rounded up to whole dwords; the
 * hardware additionally requires an even number of dwords, so an odd
 * count is padded in place with a Type-2 (no-op) CCE packet written
 * directly into the AGP buffer.  As with vertex dispatch, a discarded
 * buffer has its age stamped for later reclamation.
 */
static void r128_cce_dispatch_indirect(struct drm_device *dev,
				       struct drm_buf *buf, int start, int end)
{
	drm_r128_private_t *dev_priv = dev->dev_private;
	drm_r128_buf_priv_t *buf_priv = buf->dev_private;
	RING_LOCALS;
	DRM_DEBUG("indirect: buf=%d s=0x%x e=0x%x\n", buf->idx, start, end);

	if (start != end) {
		int offset = buf->bus_address + start;
		int dwords = (end - start + 3) / sizeof(u32);

		/* Indirect buffer data must be an even number of
		 * dwords, so if we've been given an odd number we must
		 * pad the data with a Type-2 CCE packet.
		 */
		if (dwords & 1) {
			u32 *data = (u32 *)
			    ((char *)dev->agp_buffer_map->handle
			     + buf->offset + start);
			data[dwords++] = cpu_to_le32(R128_CCE_PACKET2);
		}

		buf_priv->dispatched = 1;

		/* Fire off the indirect buffer */
		BEGIN_RING(3);

		OUT_RING(CCE_PACKET0(R128_PM4_IW_INDOFF, 1));
		OUT_RING(offset);
		OUT_RING(dwords);

		ADVANCE_RING();
	}

	if (buf_priv->discard) {
		buf_priv->age = dev_priv->sarea_priv->last_dispatch;

		/* Emit the indirect buffer age */
		BEGIN_RING(2);

		OUT_RING(CCE_PACKET0(R128_LAST_DISPATCH_REG, 0));
		OUT_RING(buf_priv->age);

		ADVANCE_RING();

		buf->pending = 1;
		buf->used = 0;
		/* FIXME: Check dispatched field */
		buf_priv->dispatched = 0;
	}

	dev_priv->sarea_priv->last_dispatch++;
}
/* Dispatch indexed primitives from the [start, end) range of a client
 * buffer.
 *
 * The render packet header and parameters (offset, max verts, vertex
 * format, primitive/walk-indexed control word with the index count in
 * the high half) are patched directly into the AGP buffer ahead of the
 * packed indices.  An odd index count leaves half of the last dword
 * unused; it is masked to zero (which half depends on endianness).
 * The buffer is then re-dispatched indirectly once per group of up to
 * three cliprects, mirroring r128_cce_dispatch_vertex.
 */
static void r128_cce_dispatch_indices(struct drm_device *dev,
				      struct drm_buf *buf,
				      int start, int end, int count)
{
	drm_r128_private_t *dev_priv = dev->dev_private;
	drm_r128_buf_priv_t *buf_priv = buf->dev_private;
	drm_r128_sarea_t *sarea_priv = dev_priv->sarea_priv;
	int format = sarea_priv->vc_format;
	int offset = dev->agp_buffer_map->offset - dev_priv->cce_buffers_offset;
	int prim = buf_priv->prim;
	u32 *data;
	int dwords;
	int i = 0;
	RING_LOCALS;
	DRM_DEBUG("indices: s=%d e=%d c=%d\n", start, end, count);

	if (0)
		r128_print_dirty("dispatch_indices", sarea_priv->dirty);

	if (start != end) {
		buf_priv->dispatched = 1;

		if (sarea_priv->dirty & ~R128_UPLOAD_CLIPRECTS)
			r128_emit_state(dev_priv);

		dwords = (end - start + 3) / sizeof(u32);

		data = (u32 *) ((char *)dev->agp_buffer_map->handle
				+ buf->offset + start);

		/* Build the packet in place; dwords - 2 is the packet's
		 * payload length after the two header dwords.
		 */
		data[0] = cpu_to_le32(CCE_PACKET3(R128_3D_RNDR_GEN_INDX_PRIM,
						  dwords - 2));

		data[1] = cpu_to_le32(offset);
		data[2] = cpu_to_le32(R128_MAX_VB_VERTS);
		data[3] = cpu_to_le32(format);
		data[4] = cpu_to_le32((prim | R128_CCE_VC_CNTL_PRIM_WALK_IND |
				       (count << 16)));

		if (count & 0x1) {
			/* Zero the unused half of the final dword. */
#ifdef __LITTLE_ENDIAN
			data[dwords - 1] &= 0x0000ffff;
#else
			data[dwords - 1] &= 0xffff0000;
#endif
		}

		do {
			/* Emit the next set of up to three cliprects */
			if (i < sarea_priv->nbox) {
				r128_emit_clip_rects(dev_priv,
						     &sarea_priv->boxes[i],
						     sarea_priv->nbox - i);
			}

			r128_cce_dispatch_indirect(dev, buf, start, end);

			i += 3;
		} while (i < sarea_priv->nbox);
	}

	if (buf_priv->discard) {
		buf_priv->age = dev_priv->sarea_priv->last_dispatch;

		/* Emit the vertex buffer age */
		BEGIN_RING(2);

		OUT_RING(CCE_PACKET0(R128_LAST_DISPATCH_REG, 0));
		OUT_RING(buf_priv->age);

		ADVANCE_RING();

		buf->pending = 1;
		/* FIXME: Check dispatched field */
		buf_priv->dispatched = 0;
	}

	dev_priv->sarea_priv->last_dispatch++;

	sarea_priv->dirty &= ~R128_UPLOAD_CLIPRECTS;
	sarea_priv->nbox = 0;
}
  612. static int r128_cce_dispatch_blit(struct drm_device *dev,
  613. struct drm_file *file_priv,
  614. drm_r128_blit_t *blit)
  615. {
  616. drm_r128_private_t *dev_priv = dev->dev_private;
  617. struct drm_device_dma *dma = dev->dma;
  618. struct drm_buf *buf;
  619. drm_r128_buf_priv_t *buf_priv;
  620. u32 *data;
  621. int dword_shift, dwords;
  622. RING_LOCALS;
  623. DRM_DEBUG("\n");
  624. /* The compiler won't optimize away a division by a variable,
  625. * even if the only legal values are powers of two. Thus, we'll
  626. * use a shift instead.
  627. */
  628. switch (blit->format) {
  629. case R128_DATATYPE_ARGB8888:
  630. dword_shift = 0;
  631. break;
  632. case R128_DATATYPE_ARGB1555:
  633. case R128_DATATYPE_RGB565:
  634. case R128_DATATYPE_ARGB4444:
  635. case R128_DATATYPE_YVYU422:
  636. case R128_DATATYPE_VYUY422:
  637. dword_shift = 1;
  638. break;
  639. case R128_DATATYPE_CI8:
  640. case R128_DATATYPE_RGB8:
  641. dword_shift = 2;
  642. break;
  643. default:
  644. DRM_ERROR("invalid blit format %d\n", blit->format);
  645. return -EINVAL;
  646. }
  647. /* Flush the pixel cache, and mark the contents as Read Invalid.
  648. * This ensures no pixel data gets mixed up with the texture
  649. * data from the host data blit, otherwise part of the texture
  650. * image may be corrupted.
  651. */
  652. BEGIN_RING(2);
  653. OUT_RING(CCE_PACKET0(R128_PC_GUI_CTLSTAT, 0));
  654. OUT_RING(R128_PC_RI_GUI | R128_PC_FLUSH_GUI);
  655. ADVANCE_RING();
  656. /* Dispatch the indirect buffer.
  657. */
  658. buf = dma->buflist[blit->idx];
  659. buf_priv = buf->dev_private;
  660. if (buf->file_priv != file_priv) {
  661. DRM_ERROR("process %d using buffer owned by %p\n",
  662. DRM_CURRENTPID, buf->file_priv);
  663. return -EINVAL;
  664. }
  665. if (buf->pending) {
  666. DRM_ERROR("sending pending buffer %d\n", blit->idx);
  667. return -EINVAL;
  668. }
  669. buf_priv->discard = 1;
  670. dwords = (blit->width * blit->height) >> dword_shift;
  671. data = (u32 *) ((char *)dev->agp_buffer_map->handle + buf->offset);
  672. data[0] = cpu_to_le32(CCE_PACKET3(R128_CNTL_HOSTDATA_BLT, dwords + 6));
  673. data[1] = cpu_to_le32((R128_GMC_DST_PITCH_OFFSET_CNTL |
  674. R128_GMC_BRUSH_NONE |
  675. (blit->format << 8) |
  676. R128_GMC_SRC_DATATYPE_COLOR |
  677. R128_ROP3_S |
  678. R128_DP_SRC_SOURCE_HOST_DATA |
  679. R128_GMC_CLR_CMP_CNTL_DIS |
  680. R128_GMC_AUX_CLIP_DIS | R128_GMC_WR_MSK_DIS));
  681. data[2] = cpu_to_le32((blit->pitch << 21) | (blit->offset >> 5));
  682. data[3] = cpu_to_le32(0xffffffff);
  683. data[4] = cpu_to_le32(0xffffffff);
  684. data[5] = cpu_to_le32((blit->y << 16) | blit->x);
  685. data[6] = cpu_to_le32((blit->height << 16) | blit->width);
  686. data[7] = cpu_to_le32(dwords);
  687. buf->used = (dwords + 8) * sizeof(u32);
  688. r128_cce_dispatch_indirect(dev, buf, 0, buf->used);
  689. /* Flush the pixel cache after the blit completes. This ensures
  690. * the texture data is written out to memory before rendering
  691. * continues.
  692. */
  693. BEGIN_RING(2);
  694. OUT_RING(CCE_PACKET0(R128_PC_GUI_CTLSTAT, 0));
  695. OUT_RING(R128_PC_FLUSH_GUI);
  696. ADVANCE_RING();
  697. return 0;
  698. }
  699. /* ================================================================
  700. * Tiled depth buffer management
  701. *
  702. * FIXME: These should all set the destination write mask for when we
  703. * have hardware stencil support.
  704. */
  705. static int r128_cce_dispatch_write_span(struct drm_device *dev,
  706. drm_r128_depth_t *depth)
  707. {
  708. drm_r128_private_t *dev_priv = dev->dev_private;
  709. int count, x, y;
  710. u32 *buffer;
  711. u8 *mask;
  712. int i, buffer_size, mask_size;
  713. RING_LOCALS;
  714. DRM_DEBUG("\n");
  715. count = depth->n;
  716. if (count > 4096 || count <= 0)
  717. return -EMSGSIZE;
  718. if (copy_from_user(&x, depth->x, sizeof(x)))
  719. return -EFAULT;
  720. if (copy_from_user(&y, depth->y, sizeof(y)))
  721. return -EFAULT;
  722. buffer_size = depth->n * sizeof(u32);
  723. buffer = memdup_user(depth->buffer, buffer_size);
  724. if (IS_ERR(buffer))
  725. return PTR_ERR(buffer);
  726. mask_size = depth->n;
  727. if (depth->mask) {
  728. mask = memdup_user(depth->mask, mask_size);
  729. if (IS_ERR(mask)) {
  730. kfree(buffer);
  731. return PTR_ERR(mask);
  732. }
  733. for (i = 0; i < count; i++, x++) {
  734. if (mask[i]) {
  735. BEGIN_RING(6);
  736. OUT_RING(CCE_PACKET3(R128_CNTL_PAINT_MULTI, 4));
  737. OUT_RING(R128_GMC_DST_PITCH_OFFSET_CNTL |
  738. R128_GMC_BRUSH_SOLID_COLOR |
  739. (dev_priv->depth_fmt << 8) |
  740. R128_GMC_SRC_DATATYPE_COLOR |
  741. R128_ROP3_P |
  742. R128_GMC_CLR_CMP_CNTL_DIS |
  743. R128_GMC_WR_MSK_DIS);
  744. OUT_RING(dev_priv->depth_pitch_offset_c);
  745. OUT_RING(buffer[i]);
  746. OUT_RING((x << 16) | y);
  747. OUT_RING((1 << 16) | 1);
  748. ADVANCE_RING();
  749. }
  750. }
  751. kfree(mask);
  752. } else {
  753. for (i = 0; i < count; i++, x++) {
  754. BEGIN_RING(6);
  755. OUT_RING(CCE_PACKET3(R128_CNTL_PAINT_MULTI, 4));
  756. OUT_RING(R128_GMC_DST_PITCH_OFFSET_CNTL |
  757. R128_GMC_BRUSH_SOLID_COLOR |
  758. (dev_priv->depth_fmt << 8) |
  759. R128_GMC_SRC_DATATYPE_COLOR |
  760. R128_ROP3_P |
  761. R128_GMC_CLR_CMP_CNTL_DIS |
  762. R128_GMC_WR_MSK_DIS);
  763. OUT_RING(dev_priv->depth_pitch_offset_c);
  764. OUT_RING(buffer[i]);
  765. OUT_RING((x << 16) | y);
  766. OUT_RING((1 << 16) | 1);
  767. ADVANCE_RING();
  768. }
  769. }
  770. kfree(buffer);
  771. return 0;
  772. }
  773. static int r128_cce_dispatch_write_pixels(struct drm_device *dev,
  774. drm_r128_depth_t *depth)
  775. {
  776. drm_r128_private_t *dev_priv = dev->dev_private;
  777. int count, *x, *y;
  778. u32 *buffer;
  779. u8 *mask;
  780. int i, xbuf_size, ybuf_size, buffer_size, mask_size;
  781. RING_LOCALS;
  782. DRM_DEBUG("\n");
  783. count = depth->n;
  784. if (count > 4096 || count <= 0)
  785. return -EMSGSIZE;
  786. xbuf_size = count * sizeof(*x);
  787. ybuf_size = count * sizeof(*y);
  788. x = kmalloc(xbuf_size, GFP_KERNEL);
  789. if (x == NULL)
  790. return -ENOMEM;
  791. y = kmalloc(ybuf_size, GFP_KERNEL);
  792. if (y == NULL) {
  793. kfree(x);
  794. return -ENOMEM;
  795. }
  796. if (copy_from_user(x, depth->x, xbuf_size)) {
  797. kfree(x);
  798. kfree(y);
  799. return -EFAULT;
  800. }
  801. if (copy_from_user(y, depth->y, xbuf_size)) {
  802. kfree(x);
  803. kfree(y);
  804. return -EFAULT;
  805. }
  806. buffer_size = depth->n * sizeof(u32);
  807. buffer = memdup_user(depth->buffer, buffer_size);
  808. if (IS_ERR(buffer)) {
  809. kfree(x);
  810. kfree(y);
  811. return PTR_ERR(buffer);
  812. }
  813. if (depth->mask) {
  814. mask_size = depth->n;
  815. mask = memdup_user(depth->mask, mask_size);
  816. if (IS_ERR(mask)) {
  817. kfree(x);
  818. kfree(y);
  819. kfree(buffer);
  820. return PTR_ERR(mask);
  821. }
  822. for (i = 0; i < count; i++) {
  823. if (mask[i]) {
  824. BEGIN_RING(6);
  825. OUT_RING(CCE_PACKET3(R128_CNTL_PAINT_MULTI, 4));
  826. OUT_RING(R128_GMC_DST_PITCH_OFFSET_CNTL |
  827. R128_GMC_BRUSH_SOLID_COLOR |
  828. (dev_priv->depth_fmt << 8) |
  829. R128_GMC_SRC_DATATYPE_COLOR |
  830. R128_ROP3_P |
  831. R128_GMC_CLR_CMP_CNTL_DIS |
  832. R128_GMC_WR_MSK_DIS);
  833. OUT_RING(dev_priv->depth_pitch_offset_c);
  834. OUT_RING(buffer[i]);
  835. OUT_RING((x[i] << 16) | y[i]);
  836. OUT_RING((1 << 16) | 1);
  837. ADVANCE_RING();
  838. }
  839. }
  840. kfree(mask);
  841. } else {
  842. for (i = 0; i < count; i++) {
  843. BEGIN_RING(6);
  844. OUT_RING(CCE_PACKET3(R128_CNTL_PAINT_MULTI, 4));
  845. OUT_RING(R128_GMC_DST_PITCH_OFFSET_CNTL |
  846. R128_GMC_BRUSH_SOLID_COLOR |
  847. (dev_priv->depth_fmt << 8) |
  848. R128_GMC_SRC_DATATYPE_COLOR |
  849. R128_ROP3_P |
  850. R128_GMC_CLR_CMP_CNTL_DIS |
  851. R128_GMC_WR_MSK_DIS);
  852. OUT_RING(dev_priv->depth_pitch_offset_c);
  853. OUT_RING(buffer[i]);
  854. OUT_RING((x[i] << 16) | y[i]);
  855. OUT_RING((1 << 16) | 1);
  856. ADVANCE_RING();
  857. }
  858. }
  859. kfree(x);
  860. kfree(y);
  861. kfree(buffer);
  862. return 0;
  863. }
/* Read a horizontal span of depth values back via a single count x 1
 * screen-to-screen blit from the depth buffer into the driver's span
 * scratch area (span_pitch_offset_c).
 */
static int r128_cce_dispatch_read_span(struct drm_device *dev,
				       drm_r128_depth_t *depth)
{
	drm_r128_private_t *dev_priv = dev->dev_private;
	int count, x, y;
	RING_LOCALS;
	DRM_DEBUG("\n");

	count = depth->n;
	if (count > 4096 || count <= 0)
		return -EMSGSIZE;

	/* The span origin is passed as single x/y values in user memory. */
	if (copy_from_user(&x, depth->x, sizeof(x)))
		return -EFAULT;
	if (copy_from_user(&y, depth->y, sizeof(y)))
		return -EFAULT;

	/* One BITBLT_MULTI packet: src = (x, y) in the depth buffer,
	 * dst = (0, 0) in the span area, size = count x 1 pixels.
	 */
	BEGIN_RING(7);

	OUT_RING(CCE_PACKET3(R128_CNTL_BITBLT_MULTI, 5));
	OUT_RING(R128_GMC_SRC_PITCH_OFFSET_CNTL |
		 R128_GMC_DST_PITCH_OFFSET_CNTL |
		 R128_GMC_BRUSH_NONE |
		 (dev_priv->depth_fmt << 8) |
		 R128_GMC_SRC_DATATYPE_COLOR |
		 R128_ROP3_S |
		 R128_DP_SRC_SOURCE_MEMORY |
		 R128_GMC_CLR_CMP_CNTL_DIS | R128_GMC_WR_MSK_DIS);

	OUT_RING(dev_priv->depth_pitch_offset_c);
	OUT_RING(dev_priv->span_pitch_offset_c);

	OUT_RING((x << 16) | y);
	OUT_RING((0 << 16) | 0);
	OUT_RING((count << 16) | 1);

	ADVANCE_RING();

	return 0;
}
  896. static int r128_cce_dispatch_read_pixels(struct drm_device *dev,
  897. drm_r128_depth_t *depth)
  898. {
  899. drm_r128_private_t *dev_priv = dev->dev_private;
  900. int count, *x, *y;
  901. int i, xbuf_size, ybuf_size;
  902. RING_LOCALS;
  903. DRM_DEBUG("\n");
  904. count = depth->n;
  905. if (count > 4096 || count <= 0)
  906. return -EMSGSIZE;
  907. if (count > dev_priv->depth_pitch)
  908. count = dev_priv->depth_pitch;
  909. xbuf_size = count * sizeof(*x);
  910. ybuf_size = count * sizeof(*y);
  911. x = kmalloc(xbuf_size, GFP_KERNEL);
  912. if (x == NULL)
  913. return -ENOMEM;
  914. y = kmalloc(ybuf_size, GFP_KERNEL);
  915. if (y == NULL) {
  916. kfree(x);
  917. return -ENOMEM;
  918. }
  919. if (copy_from_user(x, depth->x, xbuf_size)) {
  920. kfree(x);
  921. kfree(y);
  922. return -EFAULT;
  923. }
  924. if (copy_from_user(y, depth->y, ybuf_size)) {
  925. kfree(x);
  926. kfree(y);
  927. return -EFAULT;
  928. }
  929. for (i = 0; i < count; i++) {
  930. BEGIN_RING(7);
  931. OUT_RING(CCE_PACKET3(R128_CNTL_BITBLT_MULTI, 5));
  932. OUT_RING(R128_GMC_SRC_PITCH_OFFSET_CNTL |
  933. R128_GMC_DST_PITCH_OFFSET_CNTL |
  934. R128_GMC_BRUSH_NONE |
  935. (dev_priv->depth_fmt << 8) |
  936. R128_GMC_SRC_DATATYPE_COLOR |
  937. R128_ROP3_S |
  938. R128_DP_SRC_SOURCE_MEMORY |
  939. R128_GMC_CLR_CMP_CNTL_DIS | R128_GMC_WR_MSK_DIS);
  940. OUT_RING(dev_priv->depth_pitch_offset_c);
  941. OUT_RING(dev_priv->span_pitch_offset_c);
  942. OUT_RING((x[i] << 16) | y[i]);
  943. OUT_RING((i << 16) | 0);
  944. OUT_RING((1 << 16) | 1);
  945. ADVANCE_RING();
  946. }
  947. kfree(x);
  948. kfree(y);
  949. return 0;
  950. }
  951. /* ================================================================
  952. * Polygon stipple
  953. */
  954. static void r128_cce_dispatch_stipple(struct drm_device *dev, u32 *stipple)
  955. {
  956. drm_r128_private_t *dev_priv = dev->dev_private;
  957. int i;
  958. RING_LOCALS;
  959. DRM_DEBUG("\n");
  960. BEGIN_RING(33);
  961. OUT_RING(CCE_PACKET0(R128_BRUSH_DATA0, 31));
  962. for (i = 0; i < 32; i++)
  963. OUT_RING(stipple[i]);
  964. ADVANCE_RING();
  965. }
  966. /* ================================================================
  967. * IOCTL functions
  968. */
  969. static int r128_cce_clear(struct drm_device *dev, void *data, struct drm_file *file_priv)
  970. {
  971. drm_r128_private_t *dev_priv = dev->dev_private;
  972. drm_r128_sarea_t *sarea_priv;
  973. drm_r128_clear_t *clear = data;
  974. DRM_DEBUG("\n");
  975. LOCK_TEST_WITH_RETURN(dev, file_priv);
  976. DEV_INIT_TEST_WITH_RETURN(dev_priv);
  977. RING_SPACE_TEST_WITH_RETURN(dev_priv);
  978. sarea_priv = dev_priv->sarea_priv;
  979. if (sarea_priv->nbox > R128_NR_SAREA_CLIPRECTS)
  980. sarea_priv->nbox = R128_NR_SAREA_CLIPRECTS;
  981. r128_cce_dispatch_clear(dev, clear);
  982. COMMIT_RING();
  983. /* Make sure we restore the 3D state next time.
  984. */
  985. dev_priv->sarea_priv->dirty |= R128_UPLOAD_CONTEXT | R128_UPLOAD_MASKS;
  986. return 0;
  987. }
/* Enable page flipping: save the current CRTC offset registers (so
 * they can be restored at cleanup) and point the display at the front
 * buffer with flip control enabled.
 */
static int r128_do_init_pageflip(struct drm_device *dev)
{
	drm_r128_private_t *dev_priv = dev->dev_private;
	DRM_DEBUG("\n");

	/* Remember the pre-flip CRTC state for r128_do_cleanup_pageflip(). */
	dev_priv->crtc_offset = R128_READ(R128_CRTC_OFFSET);
	dev_priv->crtc_offset_cntl = R128_READ(R128_CRTC_OFFSET_CNTL);

	R128_WRITE(R128_CRTC_OFFSET, dev_priv->front_offset);
	R128_WRITE(R128_CRTC_OFFSET_CNTL,
		   dev_priv->crtc_offset_cntl | R128_CRTC_OFFSET_FLIP_CNTL);

	/* Start on page 0 (the front buffer) and publish it in the SAREA. */
	dev_priv->page_flipping = 1;
	dev_priv->current_page = 0;
	dev_priv->sarea_priv->pfCurrentPage = dev_priv->current_page;

	return 0;
}
/* Disable page flipping: restore the saved CRTC offset registers and,
 * if we ended up on the back page, flip once more so the front buffer
 * is displayed again.
 */
static int r128_do_cleanup_pageflip(struct drm_device *dev)
{
	drm_r128_private_t *dev_priv = dev->dev_private;
	DRM_DEBUG("\n");

	R128_WRITE(R128_CRTC_OFFSET, dev_priv->crtc_offset);
	R128_WRITE(R128_CRTC_OFFSET_CNTL, dev_priv->crtc_offset_cntl);

	if (dev_priv->current_page != 0) {
		r128_cce_dispatch_flip(dev);
		COMMIT_RING();
	}

	dev_priv->page_flipping = 0;
	return 0;
}
  1015. /* Swapping and flipping are different operations, need different ioctls.
  1016. * They can & should be intermixed to support multiple 3d windows.
  1017. */
/* DRM_IOCTL_R128_FLIP: flip the display between the front and back
 * buffers.
 */
static int r128_cce_flip(struct drm_device *dev, void *data, struct drm_file *file_priv)
{
	drm_r128_private_t *dev_priv = dev->dev_private;
	DRM_DEBUG("\n");

	LOCK_TEST_WITH_RETURN(dev, file_priv);
	DEV_INIT_TEST_WITH_RETURN(dev_priv);

	RING_SPACE_TEST_WITH_RETURN(dev_priv);

	/* Lazily set up page-flipping state on first use. */
	if (!dev_priv->page_flipping)
		r128_do_init_pageflip(dev);

	r128_cce_dispatch_flip(dev);

	COMMIT_RING();
	return 0;
}
  1031. static int r128_cce_swap(struct drm_device *dev, void *data, struct drm_file *file_priv)
  1032. {
  1033. drm_r128_private_t *dev_priv = dev->dev_private;
  1034. drm_r128_sarea_t *sarea_priv = dev_priv->sarea_priv;
  1035. DRM_DEBUG("\n");
  1036. LOCK_TEST_WITH_RETURN(dev, file_priv);
  1037. DEV_INIT_TEST_WITH_RETURN(dev_priv);
  1038. RING_SPACE_TEST_WITH_RETURN(dev_priv);
  1039. if (sarea_priv->nbox > R128_NR_SAREA_CLIPRECTS)
  1040. sarea_priv->nbox = R128_NR_SAREA_CLIPRECTS;
  1041. r128_cce_dispatch_swap(dev);
  1042. dev_priv->sarea_priv->dirty |= (R128_UPLOAD_CONTEXT |
  1043. R128_UPLOAD_MASKS);
  1044. COMMIT_RING();
  1045. return 0;
  1046. }
  1047. static int r128_cce_vertex(struct drm_device *dev, void *data, struct drm_file *file_priv)
  1048. {
  1049. drm_r128_private_t *dev_priv = dev->dev_private;
  1050. struct drm_device_dma *dma = dev->dma;
  1051. struct drm_buf *buf;
  1052. drm_r128_buf_priv_t *buf_priv;
  1053. drm_r128_vertex_t *vertex = data;
  1054. LOCK_TEST_WITH_RETURN(dev, file_priv);
  1055. DEV_INIT_TEST_WITH_RETURN(dev_priv);
  1056. DRM_DEBUG("pid=%d index=%d count=%d discard=%d\n",
  1057. DRM_CURRENTPID, vertex->idx, vertex->count, vertex->discard);
  1058. if (vertex->idx < 0 || vertex->idx >= dma->buf_count) {
  1059. DRM_ERROR("buffer index %d (of %d max)\n",
  1060. vertex->idx, dma->buf_count - 1);
  1061. return -EINVAL;
  1062. }
  1063. if (vertex->prim < 0 ||
  1064. vertex->prim > R128_CCE_VC_CNTL_PRIM_TYPE_TRI_TYPE2) {
  1065. DRM_ERROR("buffer prim %d\n", vertex->prim);
  1066. return -EINVAL;
  1067. }
  1068. RING_SPACE_TEST_WITH_RETURN(dev_priv);
  1069. VB_AGE_TEST_WITH_RETURN(dev_priv);
  1070. buf = dma->buflist[vertex->idx];
  1071. buf_priv = buf->dev_private;
  1072. if (buf->file_priv != file_priv) {
  1073. DRM_ERROR("process %d using buffer owned by %p\n",
  1074. DRM_CURRENTPID, buf->file_priv);
  1075. return -EINVAL;
  1076. }
  1077. if (buf->pending) {
  1078. DRM_ERROR("sending pending buffer %d\n", vertex->idx);
  1079. return -EINVAL;
  1080. }
  1081. buf->used = vertex->count;
  1082. buf_priv->prim = vertex->prim;
  1083. buf_priv->discard = vertex->discard;
  1084. r128_cce_dispatch_vertex(dev, buf);
  1085. COMMIT_RING();
  1086. return 0;
  1087. }
/* DRM_IOCTL_R128_INDICES: submit an indexed-primitive range from a
 * previously filled DMA buffer.
 */
static int r128_cce_indices(struct drm_device *dev, void *data, struct drm_file *file_priv)
{
	drm_r128_private_t *dev_priv = dev->dev_private;
	struct drm_device_dma *dma = dev->dma;
	struct drm_buf *buf;
	drm_r128_buf_priv_t *buf_priv;
	drm_r128_indices_t *elts = data;
	int count;

	LOCK_TEST_WITH_RETURN(dev, file_priv);
	DEV_INIT_TEST_WITH_RETURN(dev_priv);

	DRM_DEBUG("pid=%d buf=%d s=%d e=%d d=%d\n", DRM_CURRENTPID,
		  elts->idx, elts->start, elts->end, elts->discard);

	/* Validate the buffer index and primitive type up front. */
	if (elts->idx < 0 || elts->idx >= dma->buf_count) {
		DRM_ERROR("buffer index %d (of %d max)\n",
			  elts->idx, dma->buf_count - 1);
		return -EINVAL;
	}
	if (elts->prim < 0 ||
	    elts->prim > R128_CCE_VC_CNTL_PRIM_TYPE_TRI_TYPE2) {
		DRM_ERROR("buffer prim %d\n", elts->prim);
		return -EINVAL;
	}

	RING_SPACE_TEST_WITH_RETURN(dev_priv);
	VB_AGE_TEST_WITH_RETURN(dev_priv);

	buf = dma->buflist[elts->idx];
	buf_priv = buf->dev_private;

	/* Only the owner may submit, and never a buffer still in flight. */
	if (buf->file_priv != file_priv) {
		DRM_ERROR("process %d using buffer owned by %p\n",
			  DRM_CURRENTPID, buf->file_priv);
		return -EINVAL;
	}
	if (buf->pending) {
		DRM_ERROR("sending pending buffer %d\n", elts->idx);
		return -EINVAL;
	}

	/* Number of 16-bit indices in the [start, end) range. */
	count = (elts->end - elts->start) / sizeof(u16);
	/* Rebase start by the index-primitive header size; the rebased
	 * start must stay 8-byte aligned and leave room for the header
	 * ahead of buf->used.
	 */
	elts->start -= R128_INDEX_PRIM_OFFSET;

	if (elts->start & 0x7) {
		DRM_ERROR("misaligned buffer 0x%x\n", elts->start);
		return -EINVAL;
	}
	if (elts->start < buf->used) {
		DRM_ERROR("no header 0x%x - 0x%x\n", elts->start, buf->used);
		return -EINVAL;
	}

	buf->used = elts->end;
	buf_priv->prim = elts->prim;
	buf_priv->discard = elts->discard;

	r128_cce_dispatch_indices(dev, buf, elts->start, elts->end, count);

	COMMIT_RING();
	return 0;
}
  1140. static int r128_cce_blit(struct drm_device *dev, void *data, struct drm_file *file_priv)
  1141. {
  1142. struct drm_device_dma *dma = dev->dma;
  1143. drm_r128_private_t *dev_priv = dev->dev_private;
  1144. drm_r128_blit_t *blit = data;
  1145. int ret;
  1146. LOCK_TEST_WITH_RETURN(dev, file_priv);
  1147. DEV_INIT_TEST_WITH_RETURN(dev_priv);
  1148. DRM_DEBUG("pid=%d index=%d\n", DRM_CURRENTPID, blit->idx);
  1149. if (blit->idx < 0 || blit->idx >= dma->buf_count) {
  1150. DRM_ERROR("buffer index %d (of %d max)\n",
  1151. blit->idx, dma->buf_count - 1);
  1152. return -EINVAL;
  1153. }
  1154. RING_SPACE_TEST_WITH_RETURN(dev_priv);
  1155. VB_AGE_TEST_WITH_RETURN(dev_priv);
  1156. ret = r128_cce_dispatch_blit(dev, file_priv, blit);
  1157. COMMIT_RING();
  1158. return ret;
  1159. }
  1160. static int r128_cce_depth(struct drm_device *dev, void *data, struct drm_file *file_priv)
  1161. {
  1162. drm_r128_private_t *dev_priv = dev->dev_private;
  1163. drm_r128_depth_t *depth = data;
  1164. int ret;
  1165. LOCK_TEST_WITH_RETURN(dev, file_priv);
  1166. DEV_INIT_TEST_WITH_RETURN(dev_priv);
  1167. RING_SPACE_TEST_WITH_RETURN(dev_priv);
  1168. ret = -EINVAL;
  1169. switch (depth->func) {
  1170. case R128_WRITE_SPAN:
  1171. ret = r128_cce_dispatch_write_span(dev, depth);
  1172. break;
  1173. case R128_WRITE_PIXELS:
  1174. ret = r128_cce_dispatch_write_pixels(dev, depth);
  1175. break;
  1176. case R128_READ_SPAN:
  1177. ret = r128_cce_dispatch_read_span(dev, depth);
  1178. break;
  1179. case R128_READ_PIXELS:
  1180. ret = r128_cce_dispatch_read_pixels(dev, depth);
  1181. break;
  1182. }
  1183. COMMIT_RING();
  1184. return ret;
  1185. }
  1186. static int r128_cce_stipple(struct drm_device *dev, void *data, struct drm_file *file_priv)
  1187. {
  1188. drm_r128_private_t *dev_priv = dev->dev_private;
  1189. drm_r128_stipple_t *stipple = data;
  1190. u32 mask[32];
  1191. LOCK_TEST_WITH_RETURN(dev, file_priv);
  1192. DEV_INIT_TEST_WITH_RETURN(dev_priv);
  1193. if (copy_from_user(&mask, stipple->mask, 32 * sizeof(u32)))
  1194. return -EFAULT;
  1195. RING_SPACE_TEST_WITH_RETURN(dev_priv);
  1196. r128_cce_dispatch_stipple(dev, mask);
  1197. COMMIT_RING();
  1198. return 0;
  1199. }
/* DRM_IOCTL_R128_INDIRECT: submit a raw buffer of CCE commands.  The
 * contents are not verified, so the ioctl table restricts this to the
 * root-privileged master.
 */
static int r128_cce_indirect(struct drm_device *dev, void *data, struct drm_file *file_priv)
{
	drm_r128_private_t *dev_priv = dev->dev_private;
	struct drm_device_dma *dma = dev->dma;
	struct drm_buf *buf;
	drm_r128_buf_priv_t *buf_priv;
	drm_r128_indirect_t *indirect = data;
#if 0
	RING_LOCALS;
#endif

	LOCK_TEST_WITH_RETURN(dev, file_priv);
	DEV_INIT_TEST_WITH_RETURN(dev_priv);

	DRM_DEBUG("idx=%d s=%d e=%d d=%d\n",
		  indirect->idx, indirect->start, indirect->end,
		  indirect->discard);

	/* Validate the buffer index. */
	if (indirect->idx < 0 || indirect->idx >= dma->buf_count) {
		DRM_ERROR("buffer index %d (of %d max)\n",
			  indirect->idx, dma->buf_count - 1);
		return -EINVAL;
	}

	buf = dma->buflist[indirect->idx];
	buf_priv = buf->dev_private;

	/* Only the owner may submit, and never a buffer still in flight. */
	if (buf->file_priv != file_priv) {
		DRM_ERROR("process %d using buffer owned by %p\n",
			  DRM_CURRENTPID, buf->file_priv);
		return -EINVAL;
	}
	if (buf->pending) {
		DRM_ERROR("sending pending buffer %d\n", indirect->idx);
		return -EINVAL;
	}

	/* New commands must start at or beyond the already-used range. */
	if (indirect->start < buf->used) {
		DRM_ERROR("reusing indirect: start=0x%x actual=0x%x\n",
			  indirect->start, buf->used);
		return -EINVAL;
	}

	RING_SPACE_TEST_WITH_RETURN(dev_priv);
	VB_AGE_TEST_WITH_RETURN(dev_priv);

	buf->used = indirect->end;
	buf_priv->discard = indirect->discard;

#if 0
	/* Wait for the 3D stream to idle before the indirect buffer
	 * containing 2D acceleration commands is processed.
	 */
	BEGIN_RING(2);
	RADEON_WAIT_UNTIL_3D_IDLE();
	ADVANCE_RING();
#endif

	/* Dispatch the indirect buffer full of commands from the
	 * X server.  This is insecure and is thus only available to
	 * privileged clients.
	 */
	r128_cce_dispatch_indirect(dev, buf, indirect->start, indirect->end);

	COMMIT_RING();
	return 0;
}
  1256. static int r128_getparam(struct drm_device *dev, void *data, struct drm_file *file_priv)
  1257. {
  1258. drm_r128_private_t *dev_priv = dev->dev_private;
  1259. drm_r128_getparam_t *param = data;
  1260. int value;
  1261. DEV_INIT_TEST_WITH_RETURN(dev_priv);
  1262. DRM_DEBUG("pid=%d\n", DRM_CURRENTPID);
  1263. switch (param->param) {
  1264. case R128_PARAM_IRQ_NR:
  1265. value = dev->pdev->irq;
  1266. break;
  1267. default:
  1268. return -EINVAL;
  1269. }
  1270. if (copy_to_user(param->value, &value, sizeof(int))) {
  1271. DRM_ERROR("copy_to_user\n");
  1272. return -EFAULT;
  1273. }
  1274. return 0;
  1275. }
  1276. void r128_driver_preclose(struct drm_device *dev, struct drm_file *file_priv)
  1277. {
  1278. if (dev->dev_private) {
  1279. drm_r128_private_t *dev_priv = dev->dev_private;
  1280. if (dev_priv->page_flipping)
  1281. r128_do_cleanup_pageflip(dev);
  1282. }
  1283. }
/* Device teardown when the last file handle closes: release all CCE
 * engine state.
 */
void r128_driver_lastclose(struct drm_device *dev)
{
	r128_do_cleanup_cce(dev);
}
/* Ioctl dispatch table.  Engine setup/teardown and raw indirect-buffer
 * submission require the root-privileged master; all other entries
 * require only DRM authentication.
 */
const struct drm_ioctl_desc r128_ioctls[] = {
	DRM_IOCTL_DEF_DRV(R128_INIT, r128_cce_init, DRM_AUTH|DRM_MASTER|DRM_ROOT_ONLY),
	DRM_IOCTL_DEF_DRV(R128_CCE_START, r128_cce_start, DRM_AUTH|DRM_MASTER|DRM_ROOT_ONLY),
	DRM_IOCTL_DEF_DRV(R128_CCE_STOP, r128_cce_stop, DRM_AUTH|DRM_MASTER|DRM_ROOT_ONLY),
	DRM_IOCTL_DEF_DRV(R128_CCE_RESET, r128_cce_reset, DRM_AUTH|DRM_MASTER|DRM_ROOT_ONLY),
	DRM_IOCTL_DEF_DRV(R128_CCE_IDLE, r128_cce_idle, DRM_AUTH),
	DRM_IOCTL_DEF_DRV(R128_RESET, r128_engine_reset, DRM_AUTH),
	DRM_IOCTL_DEF_DRV(R128_FULLSCREEN, r128_fullscreen, DRM_AUTH),
	DRM_IOCTL_DEF_DRV(R128_SWAP, r128_cce_swap, DRM_AUTH),
	DRM_IOCTL_DEF_DRV(R128_FLIP, r128_cce_flip, DRM_AUTH),
	DRM_IOCTL_DEF_DRV(R128_CLEAR, r128_cce_clear, DRM_AUTH),
	DRM_IOCTL_DEF_DRV(R128_VERTEX, r128_cce_vertex, DRM_AUTH),
	DRM_IOCTL_DEF_DRV(R128_INDICES, r128_cce_indices, DRM_AUTH),
	DRM_IOCTL_DEF_DRV(R128_BLIT, r128_cce_blit, DRM_AUTH),
	DRM_IOCTL_DEF_DRV(R128_DEPTH, r128_cce_depth, DRM_AUTH),
	DRM_IOCTL_DEF_DRV(R128_STIPPLE, r128_cce_stipple, DRM_AUTH),
	DRM_IOCTL_DEF_DRV(R128_INDIRECT, r128_cce_indirect, DRM_AUTH|DRM_MASTER|DRM_ROOT_ONLY),
	DRM_IOCTL_DEF_DRV(R128_GETPARAM, r128_getparam, DRM_AUTH),
};

int r128_max_ioctl = ARRAY_SIZE(r128_ioctls);