/*
 * drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c
 *
 * Samsung MFC (Multi Function Codec - FIMV) driver
 * This file contains hw related functions.
 *
 * Copyright (c) 2012 Samsung Electronics Co., Ltd.
 *		http://www.samsung.com/
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */

#undef DEBUG

#include <linux/delay.h>
#include <linux/mm.h>
#include <linux/io.h>
#include <linux/jiffies.h>
#include <linux/firmware.h>
#include <linux/err.h>
#include <linux/sched.h>
#include <linux/dma-mapping.h>

#include <asm/cacheflush.h>

#include "s5p_mfc_common.h"
#include "s5p_mfc_cmd.h"
#include "s5p_mfc_intr.h"
#include "s5p_mfc_pm.h"
#include "s5p_mfc_debug.h"
#include "s5p_mfc_opr.h"
#include "s5p_mfc_opr_v6.h"

/* #define S5P_MFC_DEBUG_REGWRITE */
#ifdef S5P_MFC_DEBUG_REGWRITE
#undef writel
#define writel(v, r) \
	do { \
		pr_err("MFCWRITE(%p): %08x\n", r, (unsigned int)v); \
		__raw_writel(v, r); \
	} while (0)
#endif /* S5P_MFC_DEBUG_REGWRITE */

#define IS_MFCV6_V2(dev) (!IS_MFCV7_PLUS(dev) && dev->fw_ver == MFC_FW_V2)
/* Allocate temporary buffers for decoding */
static int s5p_mfc_alloc_dec_temp_buffers_v6(struct s5p_mfc_ctx *ctx)
{
	/* NOP */
	return 0;
}

/* Release temporary buffers for decoding */
static void s5p_mfc_release_dec_desc_buffer_v6(struct s5p_mfc_ctx *ctx)
{
	/* NOP */
}
/* Allocate codec buffers */
static int s5p_mfc_alloc_codec_buffers_v6(struct s5p_mfc_ctx *ctx)
{
	struct s5p_mfc_dev *dev = ctx->dev;
	unsigned int mb_width, mb_height;
	int ret;

	mb_width = MB_WIDTH(ctx->img_width);
	mb_height = MB_HEIGHT(ctx->img_height);

	if (ctx->type == MFCINST_DECODER) {
		mfc_debug(2, "Luma size:%d Chroma size:%d MV size:%d\n",
			  ctx->luma_size, ctx->chroma_size, ctx->mv_size);
		mfc_debug(2, "Totals bufs: %d\n", ctx->total_dpb_count);
	} else if (ctx->type == MFCINST_ENCODER) {
		if (IS_MFCV8(dev))
			ctx->tmv_buffer_size = S5P_FIMV_NUM_TMV_BUFFERS_V6 *
			ALIGN(S5P_FIMV_TMV_BUFFER_SIZE_V8(mb_width, mb_height),
			S5P_FIMV_TMV_BUFFER_ALIGN_V6);
		else
			ctx->tmv_buffer_size = S5P_FIMV_NUM_TMV_BUFFERS_V6 *
			ALIGN(S5P_FIMV_TMV_BUFFER_SIZE_V6(mb_width, mb_height),
			S5P_FIMV_TMV_BUFFER_ALIGN_V6);

		ctx->luma_dpb_size = ALIGN((mb_width * mb_height) *
				S5P_FIMV_LUMA_MB_TO_PIXEL_V6,
				S5P_FIMV_LUMA_DPB_BUFFER_ALIGN_V6);
		ctx->chroma_dpb_size = ALIGN((mb_width * mb_height) *
				S5P_FIMV_CHROMA_MB_TO_PIXEL_V6,
				S5P_FIMV_CHROMA_DPB_BUFFER_ALIGN_V6);
		if (IS_MFCV8(dev))
			ctx->me_buffer_size = ALIGN(S5P_FIMV_ME_BUFFER_SIZE_V8(
					ctx->img_width, ctx->img_height,
					mb_width, mb_height),
					S5P_FIMV_ME_BUFFER_ALIGN_V6);
		else
			ctx->me_buffer_size = ALIGN(S5P_FIMV_ME_BUFFER_SIZE_V6(
					ctx->img_width, ctx->img_height,
					mb_width, mb_height),
					S5P_FIMV_ME_BUFFER_ALIGN_V6);

		mfc_debug(2, "recon luma size: %zu chroma size: %zu\n",
			  ctx->luma_dpb_size, ctx->chroma_dpb_size);
	} else {
		return -EINVAL;
	}

	/* Codecs have different memory requirements */
	switch (ctx->codec_mode) {
	case S5P_MFC_CODEC_H264_DEC:
	case S5P_MFC_CODEC_H264_MVC_DEC:
		if (IS_MFCV8(dev))
			ctx->scratch_buf_size =
				S5P_FIMV_SCRATCH_BUF_SIZE_H264_DEC_V8(
					mb_width,
					mb_height);
		else
			ctx->scratch_buf_size =
				S5P_FIMV_SCRATCH_BUF_SIZE_H264_DEC_V6(
					mb_width,
					mb_height);
		ctx->scratch_buf_size = ALIGN(ctx->scratch_buf_size,
				S5P_FIMV_SCRATCH_BUFFER_ALIGN_V6);
		ctx->bank1.size =
			ctx->scratch_buf_size +
			(ctx->mv_count * ctx->mv_size);
		break;
	case S5P_MFC_CODEC_MPEG4_DEC:
		if (IS_MFCV7_PLUS(dev)) {
			ctx->scratch_buf_size =
				S5P_FIMV_SCRATCH_BUF_SIZE_MPEG4_DEC_V7(
						mb_width,
						mb_height);
		} else {
			ctx->scratch_buf_size =
				S5P_FIMV_SCRATCH_BUF_SIZE_MPEG4_DEC_V6(
						mb_width,
						mb_height);
		}
		ctx->scratch_buf_size = ALIGN(ctx->scratch_buf_size,
				S5P_FIMV_SCRATCH_BUFFER_ALIGN_V6);
		ctx->bank1.size = ctx->scratch_buf_size;
		break;
	case S5P_MFC_CODEC_VC1RCV_DEC:
	case S5P_MFC_CODEC_VC1_DEC:
		ctx->scratch_buf_size =
			S5P_FIMV_SCRATCH_BUF_SIZE_VC1_DEC_V6(
					mb_width,
					mb_height);
		ctx->scratch_buf_size = ALIGN(ctx->scratch_buf_size,
				S5P_FIMV_SCRATCH_BUFFER_ALIGN_V6);
		ctx->bank1.size = ctx->scratch_buf_size;
		break;
	case S5P_MFC_CODEC_MPEG2_DEC:
		ctx->bank1.size = 0;
		ctx->bank2.size = 0;
		break;
	case S5P_MFC_CODEC_H263_DEC:
		ctx->scratch_buf_size =
			S5P_FIMV_SCRATCH_BUF_SIZE_H263_DEC_V6(
					mb_width,
					mb_height);
		ctx->scratch_buf_size = ALIGN(ctx->scratch_buf_size,
				S5P_FIMV_SCRATCH_BUFFER_ALIGN_V6);
		ctx->bank1.size = ctx->scratch_buf_size;
		break;
	case S5P_MFC_CODEC_VP8_DEC:
		if (IS_MFCV8(dev))
			ctx->scratch_buf_size =
				S5P_FIMV_SCRATCH_BUF_SIZE_VP8_DEC_V8(
						mb_width,
						mb_height);
		else
			ctx->scratch_buf_size =
				S5P_FIMV_SCRATCH_BUF_SIZE_VP8_DEC_V6(
						mb_width,
						mb_height);
		ctx->scratch_buf_size = ALIGN(ctx->scratch_buf_size,
				S5P_FIMV_SCRATCH_BUFFER_ALIGN_V6);
		ctx->bank1.size = ctx->scratch_buf_size;
		break;
	case S5P_MFC_CODEC_H264_ENC:
		if (IS_MFCV8(dev))
			ctx->scratch_buf_size =
				S5P_FIMV_SCRATCH_BUF_SIZE_H264_ENC_V8(
					mb_width,
					mb_height);
		else
			ctx->scratch_buf_size =
				S5P_FIMV_SCRATCH_BUF_SIZE_H264_ENC_V6(
					mb_width,
					mb_height);
		ctx->scratch_buf_size = ALIGN(ctx->scratch_buf_size,
				S5P_FIMV_SCRATCH_BUFFER_ALIGN_V6);
		ctx->bank1.size =
			ctx->scratch_buf_size + ctx->tmv_buffer_size +
			(ctx->pb_count * (ctx->luma_dpb_size +
			ctx->chroma_dpb_size + ctx->me_buffer_size));
		ctx->bank2.size = 0;
		break;
	case S5P_MFC_CODEC_MPEG4_ENC:
	case S5P_MFC_CODEC_H263_ENC:
		ctx->scratch_buf_size =
			S5P_FIMV_SCRATCH_BUF_SIZE_MPEG4_ENC_V6(
					mb_width,
					mb_height);
		ctx->scratch_buf_size = ALIGN(ctx->scratch_buf_size,
				S5P_FIMV_SCRATCH_BUFFER_ALIGN_V6);
		ctx->bank1.size =
			ctx->scratch_buf_size + ctx->tmv_buffer_size +
			(ctx->pb_count * (ctx->luma_dpb_size +
			ctx->chroma_dpb_size + ctx->me_buffer_size));
		ctx->bank2.size = 0;
		break;
	case S5P_MFC_CODEC_VP8_ENC:
		if (IS_MFCV8(dev))
			ctx->scratch_buf_size =
				S5P_FIMV_SCRATCH_BUF_SIZE_VP8_ENC_V8(
					mb_width,
					mb_height);
		else
			ctx->scratch_buf_size =
				S5P_FIMV_SCRATCH_BUF_SIZE_VP8_ENC_V7(
					mb_width,
					mb_height);
		ctx->scratch_buf_size = ALIGN(ctx->scratch_buf_size,
				S5P_FIMV_SCRATCH_BUFFER_ALIGN_V6);
		ctx->bank1.size =
			ctx->scratch_buf_size + ctx->tmv_buffer_size +
			(ctx->pb_count * (ctx->luma_dpb_size +
			ctx->chroma_dpb_size + ctx->me_buffer_size));
		ctx->bank2.size = 0;
		break;
	default:
		break;
	}

	/* Allocate only if memory from bank 1 is necessary */
	if (ctx->bank1.size > 0) {
		ret = s5p_mfc_alloc_priv_buf(dev->mem_dev_l, dev->bank1,
					     &ctx->bank1);
		if (ret) {
			mfc_err("Failed to allocate Bank1 memory\n");
			return ret;
		}
		BUG_ON(ctx->bank1.dma & ((1 << MFC_BANK1_ALIGN_ORDER) - 1));
	}
	return 0;
}
/* Release buffers allocated for codec */
static void s5p_mfc_release_codec_buffers_v6(struct s5p_mfc_ctx *ctx)
{
	s5p_mfc_release_priv_buf(ctx->dev->mem_dev_l, &ctx->bank1);
}

/* Allocate memory for instance data buffer */
static int s5p_mfc_alloc_instance_buffer_v6(struct s5p_mfc_ctx *ctx)
{
	struct s5p_mfc_dev *dev = ctx->dev;
	struct s5p_mfc_buf_size_v6 *buf_size = dev->variant->buf_size->priv;
	int ret;

	mfc_debug_enter();

	switch (ctx->codec_mode) {
	case S5P_MFC_CODEC_H264_DEC:
	case S5P_MFC_CODEC_H264_MVC_DEC:
		ctx->ctx.size = buf_size->h264_dec_ctx;
		break;
	case S5P_MFC_CODEC_MPEG4_DEC:
	case S5P_MFC_CODEC_H263_DEC:
	case S5P_MFC_CODEC_VC1RCV_DEC:
	case S5P_MFC_CODEC_VC1_DEC:
	case S5P_MFC_CODEC_MPEG2_DEC:
	case S5P_MFC_CODEC_VP8_DEC:
		ctx->ctx.size = buf_size->other_dec_ctx;
		break;
	case S5P_MFC_CODEC_H264_ENC:
		ctx->ctx.size = buf_size->h264_enc_ctx;
		break;
	case S5P_MFC_CODEC_MPEG4_ENC:
	case S5P_MFC_CODEC_H263_ENC:
	case S5P_MFC_CODEC_VP8_ENC:
		ctx->ctx.size = buf_size->other_enc_ctx;
		break;
	default:
		ctx->ctx.size = 0;
		mfc_err("Codec type(%d) should be checked!\n", ctx->codec_mode);
		break;
	}

	ret = s5p_mfc_alloc_priv_buf(dev->mem_dev_l, dev->bank1, &ctx->ctx);
	if (ret) {
		mfc_err("Failed to allocate instance buffer\n");
		return ret;
	}

	memset(ctx->ctx.virt, 0, ctx->ctx.size);
	wmb();

	mfc_debug_leave();

	return 0;
}

/* Release instance buffer */
static void s5p_mfc_release_instance_buffer_v6(struct s5p_mfc_ctx *ctx)
{
	s5p_mfc_release_priv_buf(ctx->dev->mem_dev_l, &ctx->ctx);
}
/* Allocate context buffers for SYS_INIT */
static int s5p_mfc_alloc_dev_context_buffer_v6(struct s5p_mfc_dev *dev)
{
	struct s5p_mfc_buf_size_v6 *buf_size = dev->variant->buf_size->priv;
	int ret;

	mfc_debug_enter();

	dev->ctx_buf.size = buf_size->dev_ctx;
	ret = s5p_mfc_alloc_priv_buf(dev->mem_dev_l, dev->bank1,
				     &dev->ctx_buf);
	if (ret) {
		mfc_err("Failed to allocate device context buffer\n");
		return ret;
	}

	memset(dev->ctx_buf.virt, 0, buf_size->dev_ctx);
	wmb();

	mfc_debug_leave();

	return 0;
}

/* Release context buffers for SYS_INIT */
static void s5p_mfc_release_dev_context_buffer_v6(struct s5p_mfc_dev *dev)
{
	s5p_mfc_release_priv_buf(dev->mem_dev_l, &dev->ctx_buf);
}
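
/* Compute a single plane size, rounded up to whole macroblocks */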
static int calc_plane(int width, int height)
{
	int mbX, mbY;

	mbX = DIV_ROUND_UP(width, S5P_FIMV_NUM_PIXELS_IN_MB_ROW_V6);
	mbY = DIV_ROUND_UP(height, S5P_FIMV_NUM_PIXELS_IN_MB_COL_V6);

	if (width * height < S5P_FIMV_MAX_FRAME_SIZE_V6)
		mbY = (mbY + 1) / 2 * 2;

	return (mbX * S5P_FIMV_NUM_PIXELS_IN_MB_COL_V6) *
	       (mbY * S5P_FIMV_NUM_PIXELS_IN_MB_ROW_V6);
}
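
/* Calculate decoded picture buffer (DPB) plane sizes and MV buffer size */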
static void s5p_mfc_dec_calc_dpb_size_v6(struct s5p_mfc_ctx *ctx)
{
	ctx->buf_width = ALIGN(ctx->img_width, S5P_FIMV_NV12MT_HALIGN_V6);
	ctx->buf_height = ALIGN(ctx->img_height, S5P_FIMV_NV12MT_VALIGN_V6);
	mfc_debug(2, "SEQ Done: Movie dimensions %dx%d,\n"
			"buffer dimensions: %dx%d\n", ctx->img_width,
			ctx->img_height, ctx->buf_width, ctx->buf_height);

	ctx->luma_size = calc_plane(ctx->img_width, ctx->img_height);
	ctx->chroma_size = calc_plane(ctx->img_width, (ctx->img_height >> 1));
	if (IS_MFCV8(ctx->dev)) {
		/* MFCv8 needs additional 64 bytes for luma,chroma dpb*/
		ctx->luma_size += S5P_FIMV_D_ALIGN_PLANE_SIZE_V8;
		ctx->chroma_size += S5P_FIMV_D_ALIGN_PLANE_SIZE_V8;
	}

	if (ctx->codec_mode == S5P_MFC_CODEC_H264_DEC ||
			ctx->codec_mode == S5P_MFC_CODEC_H264_MVC_DEC) {
		ctx->mv_size = S5P_MFC_DEC_MV_SIZE_V6(ctx->img_width,
				ctx->img_height);
		ctx->mv_size = ALIGN(ctx->mv_size, 16);
	} else {
		ctx->mv_size = 0;
	}
}
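
/* Calculate the source (raw) frame plane sizes for encoding */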
static void s5p_mfc_enc_calc_src_size_v6(struct s5p_mfc_ctx *ctx)
{
	unsigned int mb_width, mb_height;

	mb_width = MB_WIDTH(ctx->img_width);
	mb_height = MB_HEIGHT(ctx->img_height);

	ctx->buf_width = ALIGN(ctx->img_width, S5P_FIMV_NV12M_HALIGN_V6);
	ctx->luma_size = ALIGN((mb_width * mb_height) * 256, 256);
	ctx->chroma_size = ALIGN((mb_width * mb_height) * 128, 256);

	/* MFCv7 needs pad bytes for Luma and Chroma */
	if (IS_MFCV7_PLUS(ctx->dev)) {
		ctx->luma_size += MFC_LUMA_PAD_BYTES_V7;
		ctx->chroma_size += MFC_CHROMA_PAD_BYTES_V7;
	}
}
/* Set registers for decoding stream buffer */
static int s5p_mfc_set_dec_stream_buffer_v6(struct s5p_mfc_ctx *ctx,
		int buf_addr, unsigned int start_num_byte,
		unsigned int strm_size)
{
	struct s5p_mfc_dev *dev = ctx->dev;
	const struct s5p_mfc_regs *mfc_regs = dev->mfc_regs;
	struct s5p_mfc_buf_size *buf_size = dev->variant->buf_size;

	mfc_debug_enter();
	mfc_debug(2, "inst_no: %d, buf_addr: 0x%08x,\n"
		"buf_size: 0x%08x (%d)\n",
		ctx->inst_no, buf_addr, strm_size, strm_size);
	writel(strm_size, mfc_regs->d_stream_data_size);
	writel(buf_addr, mfc_regs->d_cpb_buffer_addr);
	writel(buf_size->cpb, mfc_regs->d_cpb_buffer_size);
	writel(start_num_byte, mfc_regs->d_cpb_buffer_offset);

	mfc_debug_leave();
	return 0;
}
/* Set decoding frame buffer */
static int s5p_mfc_set_dec_frame_buffer_v6(struct s5p_mfc_ctx *ctx)
{
	unsigned int frame_size, i;
	unsigned int frame_size_ch, frame_size_mv;
	struct s5p_mfc_dev *dev = ctx->dev;
	const struct s5p_mfc_regs *mfc_regs = dev->mfc_regs;
	size_t buf_addr1;
	int buf_size1;
	int align_gap;

	buf_addr1 = ctx->bank1.dma;
	buf_size1 = ctx->bank1.size;

	mfc_debug(2, "Buf1: %p (%d)\n", (void *)buf_addr1, buf_size1);
	mfc_debug(2, "Total DPB COUNT: %d\n", ctx->total_dpb_count);
	mfc_debug(2, "Setting display delay to %d\n", ctx->display_delay);

	writel(ctx->total_dpb_count, mfc_regs->d_num_dpb);
	writel(ctx->luma_size, mfc_regs->d_first_plane_dpb_size);
	writel(ctx->chroma_size, mfc_regs->d_second_plane_dpb_size);

	writel(buf_addr1, mfc_regs->d_scratch_buffer_addr);
	writel(ctx->scratch_buf_size, mfc_regs->d_scratch_buffer_size);

	if (IS_MFCV8(dev)) {
		writel(ctx->img_width,
		       mfc_regs->d_first_plane_dpb_stride_size);
		writel(ctx->img_width,
		       mfc_regs->d_second_plane_dpb_stride_size);
	}

	buf_addr1 += ctx->scratch_buf_size;
	buf_size1 -= ctx->scratch_buf_size;

	if (ctx->codec_mode == S5P_FIMV_CODEC_H264_DEC ||
			ctx->codec_mode == S5P_FIMV_CODEC_H264_MVC_DEC){
		writel(ctx->mv_size, mfc_regs->d_mv_buffer_size);
		writel(ctx->mv_count, mfc_regs->d_num_mv);
	}

	frame_size = ctx->luma_size;
	frame_size_ch = ctx->chroma_size;
	frame_size_mv = ctx->mv_size;
	mfc_debug(2, "Frame size: %d ch: %d mv: %d\n",
		  frame_size, frame_size_ch, frame_size_mv);

	for (i = 0; i < ctx->total_dpb_count; i++) {
		/* Bank2 */
		mfc_debug(2, "Luma %d: %zx\n", i,
			  ctx->dst_bufs[i].cookie.raw.luma);
		writel(ctx->dst_bufs[i].cookie.raw.luma,
		       mfc_regs->d_first_plane_dpb + i * 4);
		mfc_debug(2, "\tChroma %d: %zx\n", i,
			  ctx->dst_bufs[i].cookie.raw.chroma);
		writel(ctx->dst_bufs[i].cookie.raw.chroma,
		       mfc_regs->d_second_plane_dpb + i * 4);
	}
	if (ctx->codec_mode == S5P_MFC_CODEC_H264_DEC ||
			ctx->codec_mode == S5P_MFC_CODEC_H264_MVC_DEC) {
		for (i = 0; i < ctx->mv_count; i++) {
			/* To test alignment */
			align_gap = buf_addr1;
			buf_addr1 = ALIGN(buf_addr1, 16);
			align_gap = buf_addr1 - align_gap;
			buf_size1 -= align_gap;

			mfc_debug(2, "\tBuf1: %zx, size: %d\n",
				  buf_addr1, buf_size1);
			writel(buf_addr1, mfc_regs->d_mv_buffer + i * 4);
			buf_addr1 += frame_size_mv;
			buf_size1 -= frame_size_mv;
		}
	}

	mfc_debug(2, "Buf1: %zu, buf_size1: %d (frames %d)\n",
		  buf_addr1, buf_size1, ctx->total_dpb_count);
	if (buf_size1 < 0) {
		mfc_debug(2, "Not enough memory has been allocated.\n");
		return -ENOMEM;
	}

	writel(ctx->inst_no, mfc_regs->instance_id);
	s5p_mfc_hw_call_void(dev->mfc_cmds, cmd_host2risc, dev,
			S5P_FIMV_CH_INIT_BUFS_V6, NULL);

	mfc_debug(2, "After setting buffers.\n");
	return 0;
}
/* Set registers for encoding stream buffer */
static int s5p_mfc_set_enc_stream_buffer_v6(struct s5p_mfc_ctx *ctx,
		unsigned long addr, unsigned int size)
{
	struct s5p_mfc_dev *dev = ctx->dev;
	const struct s5p_mfc_regs *mfc_regs = dev->mfc_regs;

	writel(addr, mfc_regs->e_stream_buffer_addr); /* 16B align */
	writel(size, mfc_regs->e_stream_buffer_size);

	mfc_debug(2, "stream buf addr: 0x%08lx, size: 0x%x\n",
		  addr, size);

	return 0;
}

static void s5p_mfc_set_enc_frame_buffer_v6(struct s5p_mfc_ctx *ctx,
		unsigned long y_addr, unsigned long c_addr)
{
	struct s5p_mfc_dev *dev = ctx->dev;
	const struct s5p_mfc_regs *mfc_regs = dev->mfc_regs;

	writel(y_addr, mfc_regs->e_source_first_plane_addr);
	writel(c_addr, mfc_regs->e_source_second_plane_addr);

	mfc_debug(2, "enc src y buf addr: 0x%08lx\n", y_addr);
	mfc_debug(2, "enc src c buf addr: 0x%08lx\n", c_addr);
}

static void s5p_mfc_get_enc_frame_buffer_v6(struct s5p_mfc_ctx *ctx,
		unsigned long *y_addr, unsigned long *c_addr)
{
	struct s5p_mfc_dev *dev = ctx->dev;
	const struct s5p_mfc_regs *mfc_regs = dev->mfc_regs;
	unsigned long enc_recon_y_addr, enc_recon_c_addr;

	*y_addr = readl(mfc_regs->e_encoded_source_first_plane_addr);
	*c_addr = readl(mfc_regs->e_encoded_source_second_plane_addr);

	enc_recon_y_addr = readl(mfc_regs->e_recon_luma_dpb_addr);
	enc_recon_c_addr = readl(mfc_regs->e_recon_chroma_dpb_addr);

	mfc_debug(2, "recon y addr: 0x%08lx y_addr: 0x%08lx\n",
		  enc_recon_y_addr, *y_addr);
	mfc_debug(2, "recon c addr: 0x%08lx\n", enc_recon_c_addr);
}
/* Set encoding ref & codec buffer */
static int s5p_mfc_set_enc_ref_buffer_v6(struct s5p_mfc_ctx *ctx)
{
	struct s5p_mfc_dev *dev = ctx->dev;
	const struct s5p_mfc_regs *mfc_regs = dev->mfc_regs;
	size_t buf_addr1;
	int i, buf_size1;

	mfc_debug_enter();

	buf_addr1 = ctx->bank1.dma;
	buf_size1 = ctx->bank1.size;

	mfc_debug(2, "Buf1: %p (%d)\n", (void *)buf_addr1, buf_size1);

	for (i = 0; i < ctx->pb_count; i++) {
		writel(buf_addr1, mfc_regs->e_luma_dpb + (4 * i));
		buf_addr1 += ctx->luma_dpb_size;
		writel(buf_addr1, mfc_regs->e_chroma_dpb + (4 * i));
		buf_addr1 += ctx->chroma_dpb_size;
		writel(buf_addr1, mfc_regs->e_me_buffer + (4 * i));
		buf_addr1 += ctx->me_buffer_size;
		buf_size1 -= (ctx->luma_dpb_size + ctx->chroma_dpb_size +
			ctx->me_buffer_size);
	}

	writel(buf_addr1, mfc_regs->e_scratch_buffer_addr);
	writel(ctx->scratch_buf_size, mfc_regs->e_scratch_buffer_size);
	buf_addr1 += ctx->scratch_buf_size;
	buf_size1 -= ctx->scratch_buf_size;

	writel(buf_addr1, mfc_regs->e_tmv_buffer0);
	buf_addr1 += ctx->tmv_buffer_size >> 1;
	writel(buf_addr1, mfc_regs->e_tmv_buffer1);
	buf_addr1 += ctx->tmv_buffer_size >> 1;
	buf_size1 -= ctx->tmv_buffer_size;

	mfc_debug(2, "Buf1: %zu, buf_size1: %d (ref frames %d)\n",
			buf_addr1, buf_size1, ctx->pb_count);
	if (buf_size1 < 0) {
		mfc_debug(2, "Not enough memory has been allocated.\n");
		return -ENOMEM;
	}

	writel(ctx->inst_no, mfc_regs->instance_id);
	s5p_mfc_hw_call_void(dev->mfc_cmds, cmd_host2risc, dev,
			S5P_FIMV_CH_INIT_BUFS_V6, NULL);

	mfc_debug_leave();

	return 0;
}
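
/* Program the multi-slice mode and slice size (in macroblocks or bits) */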
static int s5p_mfc_set_slice_mode(struct s5p_mfc_ctx *ctx)
{
	struct s5p_mfc_dev *dev = ctx->dev;
	const struct s5p_mfc_regs *mfc_regs = dev->mfc_regs;

	/* multi-slice control */
	/* multi-slice MB number or bit size */
	writel(ctx->slice_mode, mfc_regs->e_mslice_mode);
	if (ctx->slice_mode == V4L2_MPEG_VIDEO_MULTI_SICE_MODE_MAX_MB) {
		writel(ctx->slice_size.mb, mfc_regs->e_mslice_size_mb);
	} else if (ctx->slice_mode ==
			V4L2_MPEG_VIDEO_MULTI_SICE_MODE_MAX_BYTES) {
		writel(ctx->slice_size.bits, mfc_regs->e_mslice_size_bits);
	} else {
		writel(0x0, mfc_regs->e_mslice_size_mb);
		writel(0x0, mfc_regs->e_mslice_size_bits);
	}

	return 0;
}
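
/* Set encoding parameters common to all codecs */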
static int s5p_mfc_set_enc_params(struct s5p_mfc_ctx *ctx)
{
	struct s5p_mfc_dev *dev = ctx->dev;
	const struct s5p_mfc_regs *mfc_regs = dev->mfc_regs;
	struct s5p_mfc_enc_params *p = &ctx->enc_params;
	unsigned int reg = 0;

	mfc_debug_enter();

	/* width */
	writel(ctx->img_width, mfc_regs->e_frame_width); /* 16 align */
	/* height */
	writel(ctx->img_height, mfc_regs->e_frame_height); /* 16 align */

	/* cropped width */
	writel(ctx->img_width, mfc_regs->e_cropped_frame_width);
	/* cropped height */
	writel(ctx->img_height, mfc_regs->e_cropped_frame_height);
	/* cropped offset */
	writel(0x0, mfc_regs->e_frame_crop_offset);

	/* pictype : IDR period */
	reg = 0;
	reg |= p->gop_size & 0xFFFF;
	writel(reg, mfc_regs->e_gop_config);

	/* multi-slice control */
	/* multi-slice MB number or bit size */
	ctx->slice_mode = p->slice_mode;
	reg = 0;
	if (p->slice_mode == V4L2_MPEG_VIDEO_MULTI_SICE_MODE_MAX_MB) {
		reg |= (0x1 << 3);
		writel(reg, mfc_regs->e_enc_options);
		ctx->slice_size.mb = p->slice_mb;
	} else if (p->slice_mode == V4L2_MPEG_VIDEO_MULTI_SICE_MODE_MAX_BYTES) {
		reg |= (0x1 << 3);
		writel(reg, mfc_regs->e_enc_options);
		ctx->slice_size.bits = p->slice_bit;
	} else {
		reg &= ~(0x1 << 3);
		writel(reg, mfc_regs->e_enc_options);
	}

	s5p_mfc_set_slice_mode(ctx);

	/* cyclic intra refresh */
	writel(p->intra_refresh_mb, mfc_regs->e_ir_size);
	reg = readl(mfc_regs->e_enc_options);
	if (p->intra_refresh_mb == 0)
		reg &= ~(0x1 << 4);
	else
		reg |= (0x1 << 4);
	writel(reg, mfc_regs->e_enc_options);

	/* 'NON_REFERENCE_STORE_ENABLE' for debugging */
	reg = readl(mfc_regs->e_enc_options);
	reg &= ~(0x1 << 9);
	writel(reg, mfc_regs->e_enc_options);

	/* memory structure cur. frame */
	if (ctx->src_fmt->fourcc == V4L2_PIX_FMT_NV12M) {
		/* 0: Linear, 1: 2D tiled*/
		reg = readl(mfc_regs->e_enc_options);
		reg &= ~(0x1 << 7);
		writel(reg, mfc_regs->e_enc_options);
		/* 0: NV12(CbCr), 1: NV21(CrCb) */
		writel(0x0, mfc_regs->pixel_format);
	} else if (ctx->src_fmt->fourcc == V4L2_PIX_FMT_NV21M) {
		/* 0: Linear, 1: 2D tiled*/
		reg = readl(mfc_regs->e_enc_options);
		reg &= ~(0x1 << 7);
		writel(reg, mfc_regs->e_enc_options);
		/* 0: NV12(CbCr), 1: NV21(CrCb) */
		writel(0x1, mfc_regs->pixel_format);
	} else if (ctx->src_fmt->fourcc == V4L2_PIX_FMT_NV12MT_16X16) {
		/* 0: Linear, 1: 2D tiled*/
		reg = readl(mfc_regs->e_enc_options);
		reg |= (0x1 << 7);
		writel(reg, mfc_regs->e_enc_options);
		/* 0: NV12(CbCr), 1: NV21(CrCb) */
		writel(0x0, mfc_regs->pixel_format);
	}

	/* memory structure recon. frame */
	/* 0: Linear, 1: 2D tiled */
	reg = readl(mfc_regs->e_enc_options);
	reg |= (0x1 << 8);
	writel(reg, mfc_regs->e_enc_options);

	/* padding control & value */
	writel(0x0, mfc_regs->e_padding_ctrl);
	if (p->pad) {
		reg = 0;
		/** enable */
		reg |= (1 << 31);
		/** cr value */
		reg |= ((p->pad_cr & 0xFF) << 16);
		/** cb value */
		reg |= ((p->pad_cb & 0xFF) << 8);
		/** y value */
		reg |= p->pad_luma & 0xFF;
		writel(reg, mfc_regs->e_padding_ctrl);
	}

	/* rate control config. */
	reg = 0;
	/* frame-level rate control */
	reg |= ((p->rc_frame & 0x1) << 9);
	writel(reg, mfc_regs->e_rc_config);

	/* bit rate */
	if (p->rc_frame)
		writel(p->rc_bitrate,
		       mfc_regs->e_rc_bit_rate);
	else
		writel(1, mfc_regs->e_rc_bit_rate);

	/* reaction coefficient */
	if (p->rc_frame) {
		if (p->rc_reaction_coeff < TIGHT_CBR_MAX) /* tight CBR */
			writel(1, mfc_regs->e_rc_mode);
		else					  /* loose CBR */
			writel(2, mfc_regs->e_rc_mode);
	}

	/* seq header ctrl */
	reg = readl(mfc_regs->e_enc_options);
	reg &= ~(0x1 << 2);
	reg |= ((p->seq_hdr_mode & 0x1) << 2);

	/* frame skip mode */
	reg &= ~(0x3);
	reg |= (p->frame_skip_mode & 0x3);
	writel(reg, mfc_regs->e_enc_options);

	/* 'DROP_CONTROL_ENABLE', disable */
	reg = readl(mfc_regs->e_rc_config);
	reg &= ~(0x1 << 10);
	writel(reg, mfc_regs->e_rc_config);

	/* setting for MV range [16, 256] */
	reg = (p->mv_h_range & S5P_FIMV_E_MV_RANGE_V6_MASK);
	writel(reg, mfc_regs->e_mv_hor_range);

	reg = (p->mv_v_range & S5P_FIMV_E_MV_RANGE_V6_MASK);
	writel(reg, mfc_regs->e_mv_ver_range);

	writel(0x0, mfc_regs->e_frame_insertion);
	writel(0x0, mfc_regs->e_roi_buffer_addr);
	writel(0x0, mfc_regs->e_param_change);
	writel(0x0, mfc_regs->e_rc_roi_ctrl);
	writel(0x0, mfc_regs->e_picture_tag);

	writel(0x0, mfc_regs->e_bit_count_enable);
	writel(0x0, mfc_regs->e_max_bit_count);
	writel(0x0, mfc_regs->e_min_bit_count);

	writel(0x0, mfc_regs->e_metadata_buffer_addr);
	writel(0x0, mfc_regs->e_metadata_buffer_size);

	mfc_debug_leave();

	return 0;
}
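
/* Set H.264 specific encoding parameters */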
static int s5p_mfc_set_enc_params_h264(struct s5p_mfc_ctx *ctx)
{
	struct s5p_mfc_dev *dev = ctx->dev;
	const struct s5p_mfc_regs *mfc_regs = dev->mfc_regs;
	struct s5p_mfc_enc_params *p = &ctx->enc_params;
	struct s5p_mfc_h264_enc_params *p_h264 = &p->codec.h264;
	unsigned int reg = 0;
	int i;

	mfc_debug_enter();

	s5p_mfc_set_enc_params(ctx);

	/* pictype : number of B */
	reg = readl(mfc_regs->e_gop_config);
	reg &= ~(0x3 << 16);
	reg |= ((p->num_b_frame & 0x3) << 16);
	writel(reg, mfc_regs->e_gop_config);

	/* profile & level */
	reg = 0;
	/** level */
	reg |= ((p_h264->level & 0xFF) << 8);
	/** profile - 0 ~ 3 */
	reg |= p_h264->profile & 0x3F;
	writel(reg, mfc_regs->e_picture_profile);

	/* rate control config. */
	reg = readl(mfc_regs->e_rc_config);
	/** macroblock level rate control */
	reg &= ~(0x1 << 8);
	reg |= ((p->rc_mb & 0x1) << 8);
	writel(reg, mfc_regs->e_rc_config);

	/** frame QP */
	reg &= ~(0x3F);
	reg |= p_h264->rc_frame_qp & 0x3F;
	writel(reg, mfc_regs->e_rc_config);

	/* max & min value of QP */
	reg = 0;
	/** max QP */
	reg |= ((p_h264->rc_max_qp & 0x3F) << 8);
	/** min QP */
	reg |= p_h264->rc_min_qp & 0x3F;
	writel(reg, mfc_regs->e_rc_qp_bound);

	/* other QPs */
	writel(0x0, mfc_regs->e_fixed_picture_qp);
	if (!p->rc_frame && !p->rc_mb) {
		reg = 0;
		reg |= ((p_h264->rc_b_frame_qp & 0x3F) << 16);
		reg |= ((p_h264->rc_p_frame_qp & 0x3F) << 8);
		reg |= p_h264->rc_frame_qp & 0x3F;
		writel(reg, mfc_regs->e_fixed_picture_qp);
	}

	/* frame rate */
	if (p->rc_frame && p->rc_framerate_num && p->rc_framerate_denom) {
		reg = 0;
		reg |= ((p->rc_framerate_num & 0xFFFF) << 16);
		reg |= p->rc_framerate_denom & 0xFFFF;
		writel(reg, mfc_regs->e_rc_frame_rate);
	}

	/* vbv buffer size */
	if (p->frame_skip_mode ==
			V4L2_MPEG_MFC51_VIDEO_FRAME_SKIP_MODE_BUF_LIMIT) {
		writel(p_h264->cpb_size & 0xFFFF,
		       mfc_regs->e_vbv_buffer_size);

		if (p->rc_frame)
			writel(p->vbv_delay, mfc_regs->e_vbv_init_delay);
	}

	/* interlace */
	reg = 0;
	reg |= ((p_h264->interlace & 0x1) << 3);
	writel(reg, mfc_regs->e_h264_options);

	/* height */
	if (p_h264->interlace) {
		writel(ctx->img_height >> 1,
		       mfc_regs->e_frame_height); /* 32 align */
		/* cropped height */
		writel(ctx->img_height >> 1,
		       mfc_regs->e_cropped_frame_height);
	}

	/* loop filter ctrl */
	reg = readl(mfc_regs->e_h264_options);
	reg &= ~(0x3 << 1);
	reg |= ((p_h264->loop_filter_mode & 0x3) << 1);
	writel(reg, mfc_regs->e_h264_options);

	/* loopfilter alpha offset */
	if (p_h264->loop_filter_alpha < 0) {
		reg = 0x10;
		reg |= (0xFF - p_h264->loop_filter_alpha) + 1;
	} else {
		reg = 0x00;
		reg |= (p_h264->loop_filter_alpha & 0xF);
	}
	writel(reg, mfc_regs->e_h264_lf_alpha_offset);

	/* loopfilter beta offset */
	if (p_h264->loop_filter_beta < 0) {
		reg = 0x10;
		reg |= (0xFF - p_h264->loop_filter_beta) + 1;
	} else {
		reg = 0x00;
		reg |= (p_h264->loop_filter_beta & 0xF);
	}
	writel(reg, mfc_regs->e_h264_lf_beta_offset);

	/* entropy coding mode */
	reg = readl(mfc_regs->e_h264_options);
	reg &= ~(0x1);
	reg |= p_h264->entropy_mode & 0x1;
	writel(reg, mfc_regs->e_h264_options);

	/* number of ref. picture */
	reg = readl(mfc_regs->e_h264_options);
	reg &= ~(0x1 << 7);
	reg |= (((p_h264->num_ref_pic_4p - 1) & 0x1) << 7);
	writel(reg, mfc_regs->e_h264_options);

	/* 8x8 transform enable */
	reg = readl(mfc_regs->e_h264_options);
	reg &= ~(0x3 << 12);
	reg |= ((p_h264->_8x8_transform & 0x3) << 12);
	writel(reg, mfc_regs->e_h264_options);

	/* macroblock adaptive scaling features */
	writel(0x0, mfc_regs->e_mb_rc_config);
	if (p->rc_mb) {
		reg = 0;
		/** dark region */
		reg |= ((p_h264->rc_mb_dark & 0x1) << 3);
		/** smooth region */
		reg |= ((p_h264->rc_mb_smooth & 0x1) << 2);
		/** static region */
		reg |= ((p_h264->rc_mb_static & 0x1) << 1);
		/** high activity region */
		reg |= p_h264->rc_mb_activity & 0x1;
		writel(reg, mfc_regs->e_mb_rc_config);
	}
	/* aspect ratio VUI */
	reg = readl(mfc_regs->e_h264_options);
	reg &= ~(0x1 << 5);
	reg |= ((p_h264->vui_sar & 0x1) << 5);
	writel(reg, mfc_regs->e_h264_options);

	writel(0x0, mfc_regs->e_aspect_ratio);
	writel(0x0, mfc_regs->e_extended_sar);
	if (p_h264->vui_sar) {
		/* aspect ratio IDC */
		reg = 0;
		reg |= p_h264->vui_sar_idc & 0xFF;
		writel(reg, mfc_regs->e_aspect_ratio);
		if (p_h264->vui_sar_idc == 0xFF) {
			/* extended SAR */
			reg = 0;
			reg |= (p_h264->vui_ext_sar_width & 0xFFFF) << 16;
			reg |= p_h264->vui_ext_sar_height & 0xFFFF;
			writel(reg, mfc_regs->e_extended_sar);
		}
	}

	/* intra picture period for H.264 open GOP */
	/* control */
	reg = readl(mfc_regs->e_h264_options);
	reg &= ~(0x1 << 4);
	reg |= ((p_h264->open_gop & 0x1) << 4);
	writel(reg, mfc_regs->e_h264_options);

	/* value */
	writel(0x0, mfc_regs->e_h264_i_period);
	if (p_h264->open_gop) {
		reg = 0;
		reg |= p_h264->open_gop_size & 0xFFFF;
		writel(reg, mfc_regs->e_h264_i_period);
	}

	/* 'WEIGHTED_BI_PREDICTION' for B is disabled */
	reg = readl(mfc_regs->e_h264_options);
	reg &= ~(0x3 << 9);
	writel(reg, mfc_regs->e_h264_options);

	/* 'CONSTRAINED_INTRA_PRED_ENABLE' is disabled */
	reg = readl(mfc_regs->e_h264_options);
	reg &= ~(0x1 << 14);
	writel(reg, mfc_regs->e_h264_options);

	/* ASO */
	reg = readl(mfc_regs->e_h264_options);
	reg &= ~(0x1 << 6);
	reg |= ((p_h264->aso & 0x1) << 6);
	writel(reg, mfc_regs->e_h264_options);

	/* hier qp enable */
	reg = readl(mfc_regs->e_h264_options);
	reg &= ~(0x1 << 8);
	reg |= ((p_h264->open_gop & 0x1) << 8);
	writel(reg, mfc_regs->e_h264_options);

	reg = 0;
	if (p_h264->hier_qp && p_h264->hier_qp_layer) {
		reg |= (p_h264->hier_qp_type & 0x1) << 0x3;
		reg |= p_h264->hier_qp_layer & 0x7;
		writel(reg, mfc_regs->e_h264_num_t_layer);
		/* QP value for each layer */
		for (i = 0; i < p_h264->hier_qp_layer &&
				i < ARRAY_SIZE(p_h264->hier_qp_layer_qp); i++) {
			writel(p_h264->hier_qp_layer_qp[i],
			       mfc_regs->e_h264_hierarchical_qp_layer0
			       + i * 4);
		}
	}
	/* number of coding layers should be zero when hierarchical coding is disabled */
	writel(reg, mfc_regs->e_h264_num_t_layer);

	/* frame packing SEI generation */
	reg = readl(mfc_regs->e_h264_options);
	reg &= ~(0x1 << 25);
	reg |= ((p_h264->sei_frame_packing & 0x1) << 25);
	writel(reg, mfc_regs->e_h264_options);
	if (p_h264->sei_frame_packing) {
		reg = 0;
		/** current frame0 flag */
		reg |= ((p_h264->sei_fp_curr_frame_0 & 0x1) << 2);
		/** arrangement type */
		reg |= p_h264->sei_fp_arrangement_type & 0x3;
		writel(reg, mfc_regs->e_h264_frame_packing_sei_info);
	}

	if (p_h264->fmo) {
		switch (p_h264->fmo_map_type) {
		case V4L2_MPEG_VIDEO_H264_FMO_MAP_TYPE_INTERLEAVED_SLICES:
			if (p_h264->fmo_slice_grp > 4)
				p_h264->fmo_slice_grp = 4;
			for (i = 0; i < (p_h264->fmo_slice_grp & 0xF); i++)
				writel(p_h264->fmo_run_len[i] - 1,
				       mfc_regs->e_h264_fmo_run_length_minus1_0
				       + i * 4);
			break;
		case V4L2_MPEG_VIDEO_H264_FMO_MAP_TYPE_SCATTERED_SLICES:
			if (p_h264->fmo_slice_grp > 4)
				p_h264->fmo_slice_grp = 4;
			break;
		case V4L2_MPEG_VIDEO_H264_FMO_MAP_TYPE_RASTER_SCAN:
		case V4L2_MPEG_VIDEO_H264_FMO_MAP_TYPE_WIPE_SCAN:
			if (p_h264->fmo_slice_grp > 2)
				p_h264->fmo_slice_grp = 2;
			writel(p_h264->fmo_chg_dir & 0x1,
			       mfc_regs->e_h264_fmo_slice_grp_change_dir);
			/* the valid range is 0 ~ number of macroblocks -1 */
			writel(p_h264->fmo_chg_rate,
			       mfc_regs->e_h264_fmo_slice_grp_change_rate_minus1);
			break;
		default:
			mfc_err("Unsupported map type for FMO: %d\n",
				p_h264->fmo_map_type);
			p_h264->fmo_map_type = 0;
			p_h264->fmo_slice_grp = 1;
			break;
		}

		writel(p_h264->fmo_map_type,
		       mfc_regs->e_h264_fmo_slice_grp_map_type);
		writel(p_h264->fmo_slice_grp - 1,
		       mfc_regs->e_h264_fmo_num_slice_grp_minus1);
	} else {
		writel(0, mfc_regs->e_h264_fmo_num_slice_grp_minus1);
	}

	mfc_debug_leave();

	return 0;
}
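
/* Set MPEG-4 specific encoding parameters */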
static int s5p_mfc_set_enc_params_mpeg4(struct s5p_mfc_ctx *ctx)
{
	struct s5p_mfc_dev *dev = ctx->dev;
	const struct s5p_mfc_regs *mfc_regs = dev->mfc_regs;
	struct s5p_mfc_enc_params *p = &ctx->enc_params;
	struct s5p_mfc_mpeg4_enc_params *p_mpeg4 = &p->codec.mpeg4;
	unsigned int reg = 0;

	mfc_debug_enter();

	s5p_mfc_set_enc_params(ctx);

	/* pictype : number of B */
	reg = readl(mfc_regs->e_gop_config);
	reg &= ~(0x3 << 16);
	reg |= ((p->num_b_frame & 0x3) << 16);
	writel(reg, mfc_regs->e_gop_config);

	/* profile & level */
	reg = 0;
	/** level */
	reg |= ((p_mpeg4->level & 0xFF) << 8);
	/** profile - 0 ~ 1 */
	reg |= p_mpeg4->profile & 0x3F;
	writel(reg, mfc_regs->e_picture_profile);

	/* rate control config. */
	reg = readl(mfc_regs->e_rc_config);
	/** macroblock level rate control */
	reg &= ~(0x1 << 8);
	reg |= ((p->rc_mb & 0x1) << 8);
	writel(reg, mfc_regs->e_rc_config);

	/** frame QP */
	reg &= ~(0x3F);
	reg |= p_mpeg4->rc_frame_qp & 0x3F;
	writel(reg, mfc_regs->e_rc_config);

	/* max & min value of QP */
	reg = 0;
	/** max QP */
	reg |= ((p_mpeg4->rc_max_qp & 0x3F) << 8);
	/** min QP */
	reg |= p_mpeg4->rc_min_qp & 0x3F;
	writel(reg, mfc_regs->e_rc_qp_bound);

	/* other QPs */
	writel(0x0, mfc_regs->e_fixed_picture_qp);
	if (!p->rc_frame && !p->rc_mb) {
		reg = 0;
		reg |= ((p_mpeg4->rc_b_frame_qp & 0x3F) << 16);
		reg |= ((p_mpeg4->rc_p_frame_qp & 0x3F) << 8);
		reg |= p_mpeg4->rc_frame_qp & 0x3F;
		writel(reg, mfc_regs->e_fixed_picture_qp);
	}

	/* frame rate */
	if (p->rc_frame && p->rc_framerate_num && p->rc_framerate_denom) {
		reg = 0;
		reg |= ((p->rc_framerate_num & 0xFFFF) << 16);
		reg |= p->rc_framerate_denom & 0xFFFF;
		writel(reg, mfc_regs->e_rc_frame_rate);
	}

	/* vbv buffer size */
	if (p->frame_skip_mode ==
			V4L2_MPEG_MFC51_VIDEO_FRAME_SKIP_MODE_BUF_LIMIT) {
		writel(p->vbv_size & 0xFFFF, mfc_regs->e_vbv_buffer_size);

		if (p->rc_frame)
			writel(p->vbv_delay, mfc_regs->e_vbv_init_delay);
	}

	/* Disable HEC */
	writel(0x0, mfc_regs->e_mpeg4_options);
	writel(0x0, mfc_regs->e_mpeg4_hec_period);

	mfc_debug_leave();

	return 0;
}
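
/* Set H.263 specific encoding parameters */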
static int s5p_mfc_set_enc_params_h263(struct s5p_mfc_ctx *ctx)
{
	struct s5p_mfc_dev *dev = ctx->dev;
	const struct s5p_mfc_regs *mfc_regs = dev->mfc_regs;
	struct s5p_mfc_enc_params *p = &ctx->enc_params;
	struct s5p_mfc_mpeg4_enc_params *p_h263 = &p->codec.mpeg4;
	unsigned int reg = 0;

	mfc_debug_enter();

	s5p_mfc_set_enc_params(ctx);

	/* profile & level */
	reg = 0;
	/** profile */
	reg |= (0x1 << 4);
	writel(reg, mfc_regs->e_picture_profile);

	/* rate control config. */
	reg = readl(mfc_regs->e_rc_config);
	/** macroblock level rate control */
	reg &= ~(0x1 << 8);
	reg |= ((p->rc_mb & 0x1) << 8);
	writel(reg, mfc_regs->e_rc_config);

	/** frame QP */
	reg &= ~(0x3F);
	reg |= p_h263->rc_frame_qp & 0x3F;
	writel(reg, mfc_regs->e_rc_config);

	/* max & min value of QP */
	reg = 0;
	/** max QP */
	reg |= ((p_h263->rc_max_qp & 0x3F) << 8);
	/** min QP */
	reg |= p_h263->rc_min_qp & 0x3F;
	writel(reg, mfc_regs->e_rc_qp_bound);

	/* other QPs */
	writel(0x0, mfc_regs->e_fixed_picture_qp);
	if (!p->rc_frame && !p->rc_mb) {
		reg = 0;
		reg |= ((p_h263->rc_b_frame_qp & 0x3F) << 16);
		reg |= ((p_h263->rc_p_frame_qp & 0x3F) << 8);
		reg |= p_h263->rc_frame_qp & 0x3F;
		writel(reg, mfc_regs->e_fixed_picture_qp);
	}

	/* frame rate */
	if (p->rc_frame && p->rc_framerate_num && p->rc_framerate_denom) {
		reg = 0;
		reg |= ((p->rc_framerate_num & 0xFFFF) << 16);
		reg |= p->rc_framerate_denom & 0xFFFF;
		writel(reg, mfc_regs->e_rc_frame_rate);
	}

	/* vbv buffer size */
	if (p->frame_skip_mode ==
			V4L2_MPEG_MFC51_VIDEO_FRAME_SKIP_MODE_BUF_LIMIT) {
		writel(p->vbv_size & 0xFFFF, mfc_regs->e_vbv_buffer_size);

		if (p->rc_frame)
			writel(p->vbv_delay, mfc_regs->e_vbv_init_delay);
	}

	mfc_debug_leave();

	return 0;
}
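
/* Set VP8 specific encoding parameters */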
static int s5p_mfc_set_enc_params_vp8(struct s5p_mfc_ctx *ctx)
{
	struct s5p_mfc_dev *dev = ctx->dev;
	const struct s5p_mfc_regs *mfc_regs = dev->mfc_regs;
	struct s5p_mfc_enc_params *p = &ctx->enc_params;
	struct s5p_mfc_vp8_enc_params *p_vp8 = &p->codec.vp8;
	unsigned int reg = 0;
	unsigned int val = 0;

	mfc_debug_enter();

	s5p_mfc_set_enc_params(ctx);

	/* pictype : number of B */
	reg = readl(mfc_regs->e_gop_config);
	reg &= ~(0x3 << 16);
	reg |= ((p->num_b_frame & 0x3) << 16);
	writel(reg, mfc_regs->e_gop_config);

	/* profile - 0 ~ 3 */
	reg = p_vp8->profile & 0x3;
	writel(reg, mfc_regs->e_picture_profile);

	/* rate control config. */
	reg = readl(mfc_regs->e_rc_config);
	/** macroblock level rate control */
	reg &= ~(0x1 << 8);
	reg |= ((p->rc_mb & 0x1) << 8);
	writel(reg, mfc_regs->e_rc_config);

	/* frame rate */
	if (p->rc_frame && p->rc_framerate_num && p->rc_framerate_denom) {
		reg = 0;
		reg |= ((p->rc_framerate_num & 0xFFFF) << 16);
		reg |= p->rc_framerate_denom & 0xFFFF;
		writel(reg, mfc_regs->e_rc_frame_rate);
	}

	/* frame QP */
	reg &= ~(0x7F);
	reg |= p_vp8->rc_frame_qp & 0x7F;
	writel(reg, mfc_regs->e_rc_config);

	/* other QPs */
	writel(0x0, mfc_regs->e_fixed_picture_qp);
	if (!p->rc_frame && !p->rc_mb) {
		reg = 0;
		reg |= ((p_vp8->rc_p_frame_qp & 0x7F) << 8);
		reg |= p_vp8->rc_frame_qp & 0x7F;
		writel(reg, mfc_regs->e_fixed_picture_qp);
	}

	/* max QP */
	reg = ((p_vp8->rc_max_qp & 0x7F) << 8);
	/* min QP */
	reg |= p_vp8->rc_min_qp & 0x7F;
	writel(reg, mfc_regs->e_rc_qp_bound);

	/* vbv buffer size */
	if (p->frame_skip_mode ==
			V4L2_MPEG_MFC51_VIDEO_FRAME_SKIP_MODE_BUF_LIMIT) {
		writel(p->vbv_size & 0xFFFF, mfc_regs->e_vbv_buffer_size);

		if (p->rc_frame)
			writel(p->vbv_delay, mfc_regs->e_vbv_init_delay);
	}

	/* VP8 specific params */
	reg = 0;
	reg |= (p_vp8->imd_4x4 & 0x1) << 10;
	switch (p_vp8->num_partitions) {
	case V4L2_CID_MPEG_VIDEO_VPX_1_PARTITION:
		val = 0;
		break;
	case V4L2_CID_MPEG_VIDEO_VPX_2_PARTITIONS:
		val = 2;
		break;
	case V4L2_CID_MPEG_VIDEO_VPX_4_PARTITIONS:
		val = 4;
		break;
	case V4L2_CID_MPEG_VIDEO_VPX_8_PARTITIONS:
		val = 8;
		break;
	}
	reg |= (val & 0xF) << 3;
	reg |= (p_vp8->num_ref & 0x2);
	writel(reg, mfc_regs->e_vp8_options);

	mfc_debug_leave();

	return 0;
}
/* Initialize decoding */
static int s5p_mfc_init_decode_v6(struct s5p_mfc_ctx *ctx)
{
	struct s5p_mfc_dev *dev = ctx->dev;
	const struct s5p_mfc_regs *mfc_regs = dev->mfc_regs;
	unsigned int reg = 0;
	int fmo_aso_ctrl = 0;

	mfc_debug_enter();
	mfc_debug(2, "InstNo: %d/%d\n", ctx->inst_no,
			S5P_FIMV_CH_SEQ_HEADER_V6);
	mfc_debug(2, "BUFs: %08x %08x %08x\n",
		  readl(mfc_regs->d_cpb_buffer_addr),
		  readl(mfc_regs->d_cpb_buffer_addr),
		  readl(mfc_regs->d_cpb_buffer_addr));

	/* FMO_ASO_CTRL - 0: Enable, 1: Disable */
	reg |= (fmo_aso_ctrl << S5P_FIMV_D_OPT_FMO_ASO_CTRL_MASK_V6);

	if (ctx->display_delay_enable) {
		reg |= (0x1 << S5P_FIMV_D_OPT_DDELAY_EN_SHIFT_V6);
		writel(ctx->display_delay, mfc_regs->d_display_delay);
	}

	if (IS_MFCV7_PLUS(dev) || IS_MFCV6_V2(dev)) {
		writel(reg, mfc_regs->d_dec_options);
		reg = 0;
	}

	/* Setup loop filter, for decoding this is only valid for MPEG4 */
	if (ctx->codec_mode == S5P_MFC_CODEC_MPEG4_DEC) {
		mfc_debug(2, "Set loop filter to: %d\n",
				ctx->loop_filter_mpeg4);
		reg |= (ctx->loop_filter_mpeg4 <<
				S5P_FIMV_D_OPT_LF_CTRL_SHIFT_V6);
	}
	if (ctx->dst_fmt->fourcc == V4L2_PIX_FMT_NV12MT_16X16)
		reg |= (0x1 << S5P_FIMV_D_OPT_TILE_MODE_SHIFT_V6);

	if (IS_MFCV7_PLUS(dev) || IS_MFCV6_V2(dev))
		writel(reg, mfc_regs->d_init_buffer_options);
	else
		writel(reg, mfc_regs->d_dec_options);

	/* 0: NV12(CbCr), 1: NV21(CrCb) */
	if (ctx->dst_fmt->fourcc == V4L2_PIX_FMT_NV21M)
		writel(0x1, mfc_regs->pixel_format);
	else
		writel(0x0, mfc_regs->pixel_format);

	/* sei parse */
	writel(ctx->sei_fp_parse & 0x1, mfc_regs->d_sei_enable);

	writel(ctx->inst_no, mfc_regs->instance_id);
	s5p_mfc_hw_call_void(dev->mfc_cmds, cmd_host2risc, dev,
			S5P_FIMV_CH_SEQ_HEADER_V6, NULL);

	mfc_debug_leave();
	return 0;
}
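
/* Issue a flush command for the context, if requested */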
static inline void s5p_mfc_set_flush(struct s5p_mfc_ctx *ctx, int flush)
{
	struct s5p_mfc_dev *dev = ctx->dev;
	const struct s5p_mfc_regs *mfc_regs = dev->mfc_regs;

	if (flush) {
		dev->curr_ctx = ctx->num;
		writel(ctx->inst_no, mfc_regs->instance_id);
		s5p_mfc_hw_call_void(dev->mfc_cmds, cmd_host2risc, dev,
				S5P_FIMV_H2R_CMD_FLUSH_V6, NULL);
	}
}
/* Decode a single frame */
static int s5p_mfc_decode_one_frame_v6(struct s5p_mfc_ctx *ctx,
			enum s5p_mfc_decode_arg last_frame)
{
	struct s5p_mfc_dev *dev = ctx->dev;
	const struct s5p_mfc_regs *mfc_regs = dev->mfc_regs;

	writel(ctx->dec_dst_flag, mfc_regs->d_available_dpb_flag_lower);
	writel(ctx->slice_interface & 0x1, mfc_regs->d_slice_if_enable);

	writel(ctx->inst_no, mfc_regs->instance_id);
	/* Issue different commands to the instance based on whether it
	 * is the last frame or not. */
	switch (last_frame) {
	case 0:
		s5p_mfc_hw_call_void(dev->mfc_cmds, cmd_host2risc, dev,
				S5P_FIMV_CH_FRAME_START_V6, NULL);
		break;
	case 1:
		s5p_mfc_hw_call_void(dev->mfc_cmds, cmd_host2risc, dev,
				S5P_FIMV_CH_LAST_FRAME_V6, NULL);
		break;
	default:
		mfc_err("Unsupported last frame arg.\n");
		return -EINVAL;
	}

	mfc_debug(2, "Decoding a usual frame.\n");
	return 0;
}
static int s5p_mfc_init_encode_v6(struct s5p_mfc_ctx *ctx)
{
	struct s5p_mfc_dev *dev = ctx->dev;
	const struct s5p_mfc_regs *mfc_regs = dev->mfc_regs;

	if (ctx->codec_mode == S5P_MFC_CODEC_H264_ENC)
		s5p_mfc_set_enc_params_h264(ctx);
	else if (ctx->codec_mode == S5P_MFC_CODEC_MPEG4_ENC)
		s5p_mfc_set_enc_params_mpeg4(ctx);
	else if (ctx->codec_mode == S5P_MFC_CODEC_H263_ENC)
		s5p_mfc_set_enc_params_h263(ctx);
	else if (ctx->codec_mode == S5P_MFC_CODEC_VP8_ENC)
		s5p_mfc_set_enc_params_vp8(ctx);
	else {
		mfc_err("Unknown codec for encoding (%x).\n",
			ctx->codec_mode);
		return -EINVAL;
	}

	/* Set stride lengths for v7 & above */
	if (IS_MFCV7_PLUS(dev)) {
		writel(ctx->img_width, mfc_regs->e_source_first_plane_stride);
		writel(ctx->img_width, mfc_regs->e_source_second_plane_stride);
	}

	writel(ctx->inst_no, mfc_regs->instance_id);
	s5p_mfc_hw_call_void(dev->mfc_cmds, cmd_host2risc, dev,
			S5P_FIMV_CH_SEQ_HEADER_V6, NULL);

	return 0;
}
static int s5p_mfc_h264_set_aso_slice_order_v6(struct s5p_mfc_ctx *ctx)
{
	struct s5p_mfc_dev *dev = ctx->dev;
	const struct s5p_mfc_regs *mfc_regs = dev->mfc_regs;
	struct s5p_mfc_enc_params *p = &ctx->enc_params;
	struct s5p_mfc_h264_enc_params *p_h264 = &p->codec.h264;
	int i;

	if (p_h264->aso) {
		for (i = 0; i < ARRAY_SIZE(p_h264->aso_slice_order); i++) {
			writel(p_h264->aso_slice_order[i],
			       mfc_regs->e_h264_aso_slice_order_0 + i * 4);
		}
	}
	return 0;
}
/* Encode a single frame */
static int s5p_mfc_encode_one_frame_v6(struct s5p_mfc_ctx *ctx)
{
	struct s5p_mfc_dev *dev = ctx->dev;
	const struct s5p_mfc_regs *mfc_regs = dev->mfc_regs;
	int cmd;

	mfc_debug(2, "++\n");

	/* memory structure cur. frame */

	if (ctx->codec_mode == S5P_MFC_CODEC_H264_ENC)
		s5p_mfc_h264_set_aso_slice_order_v6(ctx);

	s5p_mfc_set_slice_mode(ctx);

	if (ctx->state != MFCINST_FINISHING)
		cmd = S5P_FIMV_CH_FRAME_START_V6;
	else
		cmd = S5P_FIMV_CH_LAST_FRAME_V6;

	writel(ctx->inst_no, mfc_regs->instance_id);
	s5p_mfc_hw_call_void(dev->mfc_cmds, cmd_host2risc, dev, cmd, NULL);

	mfc_debug(2, "--\n");

	return 0;
}

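/* Pick the next context with work pending, round-robin starting from the
 * context after the current one. */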
static inline int s5p_mfc_get_new_ctx(struct s5p_mfc_dev *dev)
{
	unsigned long flags;
	int new_ctx;
	int cnt;

	spin_lock_irqsave(&dev->condlock, flags);
	mfc_debug(2, "Previous context: %d (bits %08lx)\n", dev->curr_ctx,
		dev->ctx_work_bits);
	new_ctx = (dev->curr_ctx + 1) % MFC_NUM_CONTEXTS;
	cnt = 0;
	while (!test_bit(new_ctx, &dev->ctx_work_bits)) {
		new_ctx = (new_ctx + 1) % MFC_NUM_CONTEXTS;
		cnt++;
		if (cnt > MFC_NUM_CONTEXTS) {
			/* No contexts to run */
			spin_unlock_irqrestore(&dev->condlock, flags);
			return -EAGAIN;
		}
	}
	spin_unlock_irqrestore(&dev->condlock, flags);
	return new_ctx;
}

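/* Ask the decoder to flush out its remaining frames by feeding it an
 * empty stream buffer together with the last-frame command. */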
static inline void s5p_mfc_run_dec_last_frames(struct s5p_mfc_ctx *ctx)
{
	struct s5p_mfc_dev *dev = ctx->dev;

	s5p_mfc_set_dec_stream_buffer_v6(ctx, 0, 0, 0);
	dev->curr_ctx = ctx->num;
	s5p_mfc_decode_one_frame_v6(ctx, MFC_DEC_LAST_FRAME);
}

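/* Queue the next bitstream buffer and decode a single frame */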
static inline int s5p_mfc_run_dec_frame(struct s5p_mfc_ctx *ctx)
{
	struct s5p_mfc_dev *dev = ctx->dev;
	struct s5p_mfc_buf *temp_vb;
	unsigned long flags;
	int last_frame = 0;

	if (ctx->state == MFCINST_FINISHING) {
		last_frame = MFC_DEC_LAST_FRAME;
		s5p_mfc_set_dec_stream_buffer_v6(ctx, 0, 0, 0);
		dev->curr_ctx = ctx->num;
		s5p_mfc_clean_ctx_int_flags(ctx);
		s5p_mfc_decode_one_frame_v6(ctx, last_frame);
		return 0;
	}

	spin_lock_irqsave(&dev->irqlock, flags);
	/* Frames are being decoded */
	if (list_empty(&ctx->src_queue)) {
		mfc_debug(2, "No src buffers.\n");
		spin_unlock_irqrestore(&dev->irqlock, flags);
		return -EAGAIN;
	}
	/* Get the next source buffer */
	temp_vb = list_entry(ctx->src_queue.next, struct s5p_mfc_buf, list);
	temp_vb->flags |= MFC_BUF_FLAG_USED;
	s5p_mfc_set_dec_stream_buffer_v6(ctx,
		vb2_dma_contig_plane_dma_addr(&temp_vb->b->vb2_buf, 0),
			ctx->consumed_stream,
			temp_vb->b->vb2_buf.planes[0].bytesused);
	spin_unlock_irqrestore(&dev->irqlock, flags);

	dev->curr_ctx = ctx->num;
	if (temp_vb->b->vb2_buf.planes[0].bytesused == 0) {
		last_frame = 1;
		mfc_debug(2, "Setting ctx->state to FINISHING\n");
		ctx->state = MFCINST_FINISHING;
	}
	s5p_mfc_decode_one_frame_v6(ctx, last_frame);

	return 0;
}

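/* Set up the source and destination buffers and encode a single frame */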
static inline int s5p_mfc_run_enc_frame(struct s5p_mfc_ctx *ctx)
{
	struct s5p_mfc_dev *dev = ctx->dev;
	unsigned long flags;
	struct s5p_mfc_buf *dst_mb;
	struct s5p_mfc_buf *src_mb;
	unsigned long src_y_addr, src_c_addr, dst_addr;
	/*
	unsigned int src_y_size, src_c_size;
	*/
	unsigned int dst_size;

	spin_lock_irqsave(&dev->irqlock, flags);

	if (list_empty(&ctx->src_queue) && ctx->state != MFCINST_FINISHING) {
		mfc_debug(2, "no src buffers.\n");
		spin_unlock_irqrestore(&dev->irqlock, flags);
		return -EAGAIN;
	}

	if (list_empty(&ctx->dst_queue)) {
		mfc_debug(2, "no dst buffers.\n");
		spin_unlock_irqrestore(&dev->irqlock, flags);
		return -EAGAIN;
	}

	if (list_empty(&ctx->src_queue)) {
		/* send null frame */
		s5p_mfc_set_enc_frame_buffer_v6(ctx, 0, 0);
		src_mb = NULL;
	} else {
		src_mb = list_entry(ctx->src_queue.next, struct s5p_mfc_buf, list);
		src_mb->flags |= MFC_BUF_FLAG_USED;
		if (src_mb->b->vb2_buf.planes[0].bytesused == 0) {
			s5p_mfc_set_enc_frame_buffer_v6(ctx, 0, 0);
			ctx->state = MFCINST_FINISHING;
		} else {
			src_y_addr = vb2_dma_contig_plane_dma_addr(&src_mb->b->vb2_buf, 0);
			src_c_addr = vb2_dma_contig_plane_dma_addr(&src_mb->b->vb2_buf, 1);

			mfc_debug(2, "enc src y addr: 0x%08lx\n", src_y_addr);
			mfc_debug(2, "enc src c addr: 0x%08lx\n", src_c_addr);

			s5p_mfc_set_enc_frame_buffer_v6(ctx, src_y_addr, src_c_addr);
			if (src_mb->flags & MFC_BUF_FLAG_EOS)
				ctx->state = MFCINST_FINISHING;
		}
	}

	dst_mb = list_entry(ctx->dst_queue.next, struct s5p_mfc_buf, list);
	dst_mb->flags |= MFC_BUF_FLAG_USED;
	dst_addr = vb2_dma_contig_plane_dma_addr(&dst_mb->b->vb2_buf, 0);
	dst_size = vb2_plane_size(&dst_mb->b->vb2_buf, 0);

	s5p_mfc_set_enc_stream_buffer_v6(ctx, dst_addr, dst_size);

	spin_unlock_irqrestore(&dev->irqlock, flags);

	dev->curr_ctx = ctx->num;
	s5p_mfc_encode_one_frame_v6(ctx);

	return 0;
}

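/* Feed the header buffer to the hardware and start stream header parsing */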
static inline void s5p_mfc_run_init_dec(struct s5p_mfc_ctx *ctx)
{
	struct s5p_mfc_dev *dev = ctx->dev;
	unsigned long flags;
	struct s5p_mfc_buf *temp_vb;

	/* Initializing decoding - parsing header */
	spin_lock_irqsave(&dev->irqlock, flags);
	mfc_debug(2, "Preparing to init decoding.\n");
	temp_vb = list_entry(ctx->src_queue.next, struct s5p_mfc_buf, list);
	mfc_debug(2, "Header size: %d\n", temp_vb->b->vb2_buf.planes[0].bytesused);
	s5p_mfc_set_dec_stream_buffer_v6(ctx,
		vb2_dma_contig_plane_dma_addr(&temp_vb->b->vb2_buf, 0), 0,
			temp_vb->b->vb2_buf.planes[0].bytesused);
	spin_unlock_irqrestore(&dev->irqlock, flags);
	dev->curr_ctx = ctx->num;
	s5p_mfc_init_decode_v6(ctx);
}

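/* Program the encoder stream buffer and start sequence header generation */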
static inline void s5p_mfc_run_init_enc(struct s5p_mfc_ctx *ctx)
{
	struct s5p_mfc_dev *dev = ctx->dev;
	unsigned long flags;
	struct s5p_mfc_buf *dst_mb;
	unsigned long dst_addr;
	unsigned int dst_size;

	spin_lock_irqsave(&dev->irqlock, flags);

	dst_mb = list_entry(ctx->dst_queue.next, struct s5p_mfc_buf, list);
	dst_addr = vb2_dma_contig_plane_dma_addr(&dst_mb->b->vb2_buf, 0);
	dst_size = vb2_plane_size(&dst_mb->b->vb2_buf, 0);
	s5p_mfc_set_enc_stream_buffer_v6(ctx, dst_addr, dst_size);
	spin_unlock_irqrestore(&dev->irqlock, flags);
	dev->curr_ctx = ctx->num;
	s5p_mfc_init_encode_v6(ctx);
}

static inline int s5p_mfc_run_init_dec_buffers(struct s5p_mfc_ctx *ctx)
{
	struct s5p_mfc_dev *dev = ctx->dev;
	int ret;
	/* Header was parsed, now start processing.
	 * First set the output frame buffers.
	 * s5p_mfc_alloc_dec_buffers(ctx); */

	if (ctx->capture_state != QUEUE_BUFS_MMAPED) {
		mfc_err("It seems that not all destination buffers were\n"
			"mmapped. MFC requires that all destination buffers are\n"
			"mmapped before starting processing.\n");
		return -EAGAIN;
	}

	dev->curr_ctx = ctx->num;
	ret = s5p_mfc_set_dec_frame_buffer_v6(ctx);
	if (ret) {
		mfc_err("Failed to alloc frame mem.\n");
		ctx->state = MFCINST_ERROR;
	}
	return ret;
}

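/* Set up the encoder reference buffers once the stream header has been
 * produced. */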
static inline int s5p_mfc_run_init_enc_buffers(struct s5p_mfc_ctx *ctx)
{
	struct s5p_mfc_dev *dev = ctx->dev;
	int ret;

	dev->curr_ctx = ctx->num;
	ret = s5p_mfc_set_enc_ref_buffer_v6(ctx);
	if (ret) {
		mfc_err("Failed to alloc frame mem.\n");
		ctx->state = MFCINST_ERROR;
	}
	return ret;
}

/* Try running an operation on hardware */
static void s5p_mfc_try_run_v6(struct s5p_mfc_dev *dev)
{
	struct s5p_mfc_ctx *ctx;
	int new_ctx;
	unsigned int ret = 0;

	mfc_debug(1, "Try run dev: %p\n", dev);

	/* Check whether hardware is not running */
	if (test_and_set_bit(0, &dev->hw_lock) != 0) {
		/* This is perfectly ok, the scheduled ctx should wait */
		mfc_debug(1, "Couldn't lock HW.\n");
		return;
	}

	/* Choose the context to run */
	new_ctx = s5p_mfc_get_new_ctx(dev);
	if (new_ctx < 0) {
		/* No contexts to run */
		if (test_and_clear_bit(0, &dev->hw_lock) == 0) {
			mfc_err("Failed to unlock hardware.\n");
			return;
		}

		mfc_debug(1, "No ctx is scheduled to be run.\n");
		return;
	}

	mfc_debug(1, "New context: %d\n", new_ctx);
	ctx = dev->ctx[new_ctx];
	mfc_debug(1, "Setting new context to %p\n", ctx);
	/* Got context to run in ctx */
	mfc_debug(1, "ctx->dst_queue_cnt=%d ctx->pb_count=%d ctx->src_queue_cnt=%d\n",
		ctx->dst_queue_cnt, ctx->pb_count, ctx->src_queue_cnt);
	mfc_debug(1, "ctx->state=%d\n", ctx->state);
	/* Last frame has already been sent to MFC
	 * Now obtaining frames from MFC buffer */

	s5p_mfc_clock_on();
	s5p_mfc_clean_ctx_int_flags(ctx);

	if (ctx->type == MFCINST_DECODER) {
		switch (ctx->state) {
		case MFCINST_FINISHING:
			s5p_mfc_run_dec_last_frames(ctx);
			break;
		case MFCINST_RUNNING:
			ret = s5p_mfc_run_dec_frame(ctx);
			break;
		case MFCINST_INIT:
			ret = s5p_mfc_hw_call(dev->mfc_cmds, open_inst_cmd,
					ctx);
			break;
		case MFCINST_RETURN_INST:
			ret = s5p_mfc_hw_call(dev->mfc_cmds, close_inst_cmd,
					ctx);
			break;
		case MFCINST_GOT_INST:
			s5p_mfc_run_init_dec(ctx);
			break;
		case MFCINST_HEAD_PARSED:
			ret = s5p_mfc_run_init_dec_buffers(ctx);
			break;
		case MFCINST_FLUSH:
			s5p_mfc_set_flush(ctx, ctx->dpb_flush_flag);
			break;
		case MFCINST_RES_CHANGE_INIT:
			s5p_mfc_run_dec_last_frames(ctx);
			break;
		case MFCINST_RES_CHANGE_FLUSH:
			s5p_mfc_run_dec_last_frames(ctx);
			break;
		case MFCINST_RES_CHANGE_END:
			mfc_debug(2, "Finished remaining frames after resolution change.\n");
			ctx->capture_state = QUEUE_FREE;
			mfc_debug(2, "Will re-init the codec.\n");
			s5p_mfc_run_init_dec(ctx);
			break;
		default:
			ret = -EAGAIN;
		}
	} else if (ctx->type == MFCINST_ENCODER) {
		switch (ctx->state) {
		case MFCINST_FINISHING:
		case MFCINST_RUNNING:
			ret = s5p_mfc_run_enc_frame(ctx);
			break;
		case MFCINST_INIT:
			ret = s5p_mfc_hw_call(dev->mfc_cmds, open_inst_cmd,
					ctx);
			break;
		case MFCINST_RETURN_INST:
			ret = s5p_mfc_hw_call(dev->mfc_cmds, close_inst_cmd,
					ctx);
			break;
		case MFCINST_GOT_INST:
			s5p_mfc_run_init_enc(ctx);
			break;
		case MFCINST_HEAD_PRODUCED:
			ret = s5p_mfc_run_init_enc_buffers(ctx);
			break;
		default:
			ret = -EAGAIN;
		}
	} else {
		mfc_err("invalid context type: %d\n", ctx->type);
		ret = -EAGAIN;
	}

	if (ret) {
		/* Free hardware lock */
		if (test_and_clear_bit(0, &dev->hw_lock) == 0)
			mfc_err("Failed to unlock hardware.\n");

		/* This is indeed important, as no operation has been
		 * scheduled, reduce the clock count as no one will
		 * ever do this, because no interrupt related to this try_run
		 * will ever come from hardware. */
		s5p_mfc_clock_off();
	}
}

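/* Return all buffers on the given list to vb2 in the error state */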
static void s5p_mfc_cleanup_queue_v6(struct list_head *lh, struct vb2_queue *vq)
{
	struct s5p_mfc_buf *b;
	int i;

	while (!list_empty(lh)) {
		b = list_entry(lh->next, struct s5p_mfc_buf, list);
		for (i = 0; i < b->b->vb2_buf.num_planes; i++)
			vb2_set_plane_payload(&b->b->vb2_buf, i, 0);
		vb2_buffer_done(&b->b->vb2_buf, VB2_BUF_STATE_ERROR);
		list_del(&b->list);
	}
}

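/* Clear the RISC-to-host command and interrupt registers */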
static void s5p_mfc_clear_int_flags_v6(struct s5p_mfc_dev *dev)
{
	const struct s5p_mfc_regs *mfc_regs = dev->mfc_regs;

	writel(0, mfc_regs->risc2host_command);
	writel(0, mfc_regs->risc2host_int);
}

static void s5p_mfc_write_info_v6(struct s5p_mfc_ctx *ctx, unsigned int data,
		unsigned int ofs)
{
	s5p_mfc_clock_on();
	writel(data, (void __iomem *)((unsigned long)ofs));
	s5p_mfc_clock_off();
}

static unsigned int
s5p_mfc_read_info_v6(struct s5p_mfc_ctx *ctx, unsigned long ofs)
{
	int ret;

	s5p_mfc_clock_on();
	ret = readl((void __iomem *)ofs);
	s5p_mfc_clock_off();

	return ret;
}

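/* Simple register accessors exposed through the s5p_mfc_hw_ops table below */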
static int s5p_mfc_get_dspl_y_adr_v6(struct s5p_mfc_dev *dev)
{
	return readl(dev->mfc_regs->d_display_first_plane_addr);
}

static int s5p_mfc_get_dec_y_adr_v6(struct s5p_mfc_dev *dev)
{
	return readl(dev->mfc_regs->d_decoded_first_plane_addr);
}

static int s5p_mfc_get_dspl_status_v6(struct s5p_mfc_dev *dev)
{
	return readl(dev->mfc_regs->d_display_status);
}

static int s5p_mfc_get_dec_status_v6(struct s5p_mfc_dev *dev)
{
	return readl(dev->mfc_regs->d_decoded_status);
}

static int s5p_mfc_get_dec_frame_type_v6(struct s5p_mfc_dev *dev)
{
	return readl(dev->mfc_regs->d_decoded_frame_type) &
		S5P_FIMV_DECODE_FRAME_MASK_V6;
}

static int s5p_mfc_get_disp_frame_type_v6(struct s5p_mfc_ctx *ctx)
{
	struct s5p_mfc_dev *dev = ctx->dev;

	return readl(dev->mfc_regs->d_display_frame_type) &
		S5P_FIMV_DECODE_FRAME_MASK_V6;
}

static int s5p_mfc_get_consumed_stream_v6(struct s5p_mfc_dev *dev)
{
	return readl(dev->mfc_regs->d_decoded_nal_size);
}

static int s5p_mfc_get_int_reason_v6(struct s5p_mfc_dev *dev)
{
	return readl(dev->mfc_regs->risc2host_command) &
		S5P_FIMV_RISC2HOST_CMD_MASK;
}

static int s5p_mfc_get_int_err_v6(struct s5p_mfc_dev *dev)
{
	return readl(dev->mfc_regs->error_code);
}

static int s5p_mfc_err_dec_v6(unsigned int err)
{
	return (err & S5P_FIMV_ERR_DEC_MASK_V6) >> S5P_FIMV_ERR_DEC_SHIFT_V6;
}

static int s5p_mfc_err_dspl_v6(unsigned int err)
{
	return (err & S5P_FIMV_ERR_DSPL_MASK_V6) >> S5P_FIMV_ERR_DSPL_SHIFT_V6;
}

static int s5p_mfc_get_img_width_v6(struct s5p_mfc_dev *dev)
{
	return readl(dev->mfc_regs->d_display_frame_width);
}

static int s5p_mfc_get_img_height_v6(struct s5p_mfc_dev *dev)
{
	return readl(dev->mfc_regs->d_display_frame_height);
}

static int s5p_mfc_get_dpb_count_v6(struct s5p_mfc_dev *dev)
{
	return readl(dev->mfc_regs->d_min_num_dpb);
}

static int s5p_mfc_get_mv_count_v6(struct s5p_mfc_dev *dev)
{
	return readl(dev->mfc_regs->d_min_num_mv);
}

static int s5p_mfc_get_inst_no_v6(struct s5p_mfc_dev *dev)
{
	return readl(dev->mfc_regs->ret_instance_id);
}

static int s5p_mfc_get_enc_dpb_count_v6(struct s5p_mfc_dev *dev)
{
	return readl(dev->mfc_regs->e_num_dpb);
}

static int s5p_mfc_get_enc_strm_size_v6(struct s5p_mfc_dev *dev)
{
	return readl(dev->mfc_regs->e_stream_size);
}

static int s5p_mfc_get_enc_slice_type_v6(struct s5p_mfc_dev *dev)
{
	return readl(dev->mfc_regs->e_slice_type);
}

static int s5p_mfc_get_enc_pic_count_v6(struct s5p_mfc_dev *dev)
{
	return readl(dev->mfc_regs->e_picture_count);
}

static int s5p_mfc_get_sei_avail_status_v6(struct s5p_mfc_ctx *ctx)
{
	struct s5p_mfc_dev *dev = ctx->dev;

	return readl(dev->mfc_regs->d_frame_pack_sei_avail);
}

static int s5p_mfc_get_mvc_num_views_v6(struct s5p_mfc_dev *dev)
{
	return readl(dev->mfc_regs->d_mvc_num_views);
}

static int s5p_mfc_get_mvc_view_id_v6(struct s5p_mfc_dev *dev)
{
	return readl(dev->mfc_regs->d_mvc_view_id);
}

static unsigned int s5p_mfc_get_pic_type_top_v6(struct s5p_mfc_ctx *ctx)
{
	return s5p_mfc_read_info_v6(ctx,
		(__force unsigned long) ctx->dev->mfc_regs->d_ret_picture_tag_top);
}

static unsigned int s5p_mfc_get_pic_type_bot_v6(struct s5p_mfc_ctx *ctx)
{
	return s5p_mfc_read_info_v6(ctx,
		(__force unsigned long) ctx->dev->mfc_regs->d_ret_picture_tag_bot);
}

static unsigned int s5p_mfc_get_crop_info_h_v6(struct s5p_mfc_ctx *ctx)
{
	return s5p_mfc_read_info_v6(ctx,
		(__force unsigned long) ctx->dev->mfc_regs->d_display_crop_info1);
}

static unsigned int s5p_mfc_get_crop_info_v_v6(struct s5p_mfc_ctx *ctx)
{
	return s5p_mfc_read_info_v6(ctx,
		(__force unsigned long) ctx->dev->mfc_regs->d_display_crop_info2);
}

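/* Register map for MFC v6 and newer, filled in by s5p_mfc_init_regs_v6_plus() */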
static struct s5p_mfc_regs mfc_regs;

/* Initialize registers for MFC v6 onwards */
const struct s5p_mfc_regs *s5p_mfc_init_regs_v6_plus(struct s5p_mfc_dev *dev)
{
	memset(&mfc_regs, 0, sizeof(mfc_regs));

#define S5P_MFC_REG_ADDR(dev, reg) ((dev)->regs_base + (reg))
#define R(m, r) mfc_regs.m = S5P_MFC_REG_ADDR(dev, r)

	/* codec common registers */
	R(risc_on, S5P_FIMV_RISC_ON_V6);
	R(risc2host_int, S5P_FIMV_RISC2HOST_INT_V6);
	R(host2risc_int, S5P_FIMV_HOST2RISC_INT_V6);
	R(risc_base_address, S5P_FIMV_RISC_BASE_ADDRESS_V6);
	R(mfc_reset, S5P_FIMV_MFC_RESET_V6);
	R(host2risc_command, S5P_FIMV_HOST2RISC_CMD_V6);
	R(risc2host_command, S5P_FIMV_RISC2HOST_CMD_V6);
	R(firmware_version, S5P_FIMV_FW_VERSION_V6);
	R(instance_id, S5P_FIMV_INSTANCE_ID_V6);
	R(codec_type, S5P_FIMV_CODEC_TYPE_V6);
	R(context_mem_addr, S5P_FIMV_CONTEXT_MEM_ADDR_V6);
	R(context_mem_size, S5P_FIMV_CONTEXT_MEM_SIZE_V6);
	R(pixel_format, S5P_FIMV_PIXEL_FORMAT_V6);
	R(ret_instance_id, S5P_FIMV_RET_INSTANCE_ID_V6);
	R(error_code, S5P_FIMV_ERROR_CODE_V6);

	/* decoder registers */
	R(d_crc_ctrl, S5P_FIMV_D_CRC_CTRL_V6);
	R(d_dec_options, S5P_FIMV_D_DEC_OPTIONS_V6);
	R(d_display_delay, S5P_FIMV_D_DISPLAY_DELAY_V6);
	R(d_sei_enable, S5P_FIMV_D_SEI_ENABLE_V6);
	R(d_min_num_dpb, S5P_FIMV_D_MIN_NUM_DPB_V6);
	R(d_min_num_mv, S5P_FIMV_D_MIN_NUM_MV_V6);
	R(d_mvc_num_views, S5P_FIMV_D_MVC_NUM_VIEWS_V6);
	R(d_num_dpb, S5P_FIMV_D_NUM_DPB_V6);
	R(d_num_mv, S5P_FIMV_D_NUM_MV_V6);
	R(d_init_buffer_options, S5P_FIMV_D_INIT_BUFFER_OPTIONS_V6);
	R(d_first_plane_dpb_size, S5P_FIMV_D_LUMA_DPB_SIZE_V6);
	R(d_second_plane_dpb_size, S5P_FIMV_D_CHROMA_DPB_SIZE_V6);
	R(d_mv_buffer_size, S5P_FIMV_D_MV_BUFFER_SIZE_V6);
	R(d_first_plane_dpb, S5P_FIMV_D_LUMA_DPB_V6);
	R(d_second_plane_dpb, S5P_FIMV_D_CHROMA_DPB_V6);
	R(d_mv_buffer, S5P_FIMV_D_MV_BUFFER_V6);
	R(d_scratch_buffer_addr, S5P_FIMV_D_SCRATCH_BUFFER_ADDR_V6);
	R(d_scratch_buffer_size, S5P_FIMV_D_SCRATCH_BUFFER_SIZE_V6);
	R(d_cpb_buffer_addr, S5P_FIMV_D_CPB_BUFFER_ADDR_V6);
	R(d_cpb_buffer_size, S5P_FIMV_D_CPB_BUFFER_SIZE_V6);
	R(d_available_dpb_flag_lower, S5P_FIMV_D_AVAILABLE_DPB_FLAG_LOWER_V6);
	R(d_cpb_buffer_offset, S5P_FIMV_D_CPB_BUFFER_OFFSET_V6);
	R(d_slice_if_enable, S5P_FIMV_D_SLICE_IF_ENABLE_V6);
	R(d_stream_data_size, S5P_FIMV_D_STREAM_DATA_SIZE_V6);
	R(d_display_frame_width, S5P_FIMV_D_DISPLAY_FRAME_WIDTH_V6);
	R(d_display_frame_height, S5P_FIMV_D_DISPLAY_FRAME_HEIGHT_V6);
	R(d_display_status, S5P_FIMV_D_DISPLAY_STATUS_V6);
	R(d_display_first_plane_addr, S5P_FIMV_D_DISPLAY_LUMA_ADDR_V6);
	R(d_display_second_plane_addr, S5P_FIMV_D_DISPLAY_CHROMA_ADDR_V6);
	R(d_display_frame_type, S5P_FIMV_D_DISPLAY_FRAME_TYPE_V6);
	R(d_display_crop_info1, S5P_FIMV_D_DISPLAY_CROP_INFO1_V6);
	R(d_display_crop_info2, S5P_FIMV_D_DISPLAY_CROP_INFO2_V6);
	R(d_display_aspect_ratio, S5P_FIMV_D_DISPLAY_ASPECT_RATIO_V6);
	R(d_display_extended_ar, S5P_FIMV_D_DISPLAY_EXTENDED_AR_V6);
	R(d_decoded_status, S5P_FIMV_D_DECODED_STATUS_V6);
	R(d_decoded_first_plane_addr, S5P_FIMV_D_DECODED_LUMA_ADDR_V6);
	R(d_decoded_second_plane_addr, S5P_FIMV_D_DECODED_CHROMA_ADDR_V6);
	R(d_decoded_frame_type, S5P_FIMV_D_DECODED_FRAME_TYPE_V6);
	R(d_decoded_nal_size, S5P_FIMV_D_DECODED_NAL_SIZE_V6);
	R(d_ret_picture_tag_top, S5P_FIMV_D_RET_PICTURE_TAG_TOP_V6);
	R(d_ret_picture_tag_bot, S5P_FIMV_D_RET_PICTURE_TAG_BOT_V6);
	R(d_h264_info, S5P_FIMV_D_H264_INFO_V6);
	R(d_mvc_view_id, S5P_FIMV_D_MVC_VIEW_ID_V6);
	R(d_frame_pack_sei_avail, S5P_FIMV_D_FRAME_PACK_SEI_AVAIL_V6);

	/* encoder registers */
	R(e_frame_width, S5P_FIMV_E_FRAME_WIDTH_V6);
	R(e_frame_height, S5P_FIMV_E_FRAME_HEIGHT_V6);
	R(e_cropped_frame_width, S5P_FIMV_E_CROPPED_FRAME_WIDTH_V6);
	R(e_cropped_frame_height, S5P_FIMV_E_CROPPED_FRAME_HEIGHT_V6);
	R(e_frame_crop_offset, S5P_FIMV_E_FRAME_CROP_OFFSET_V6);
	R(e_enc_options, S5P_FIMV_E_ENC_OPTIONS_V6);
	R(e_picture_profile, S5P_FIMV_E_PICTURE_PROFILE_V6);
	R(e_vbv_buffer_size, S5P_FIMV_E_VBV_BUFFER_SIZE_V6);
	R(e_vbv_init_delay, S5P_FIMV_E_VBV_INIT_DELAY_V6);
	R(e_fixed_picture_qp, S5P_FIMV_E_FIXED_PICTURE_QP_V6);
	R(e_rc_config, S5P_FIMV_E_RC_CONFIG_V6);
	R(e_rc_qp_bound, S5P_FIMV_E_RC_QP_BOUND_V6);
	R(e_rc_mode, S5P_FIMV_E_RC_RPARAM_V6);
	R(e_mb_rc_config, S5P_FIMV_E_MB_RC_CONFIG_V6);
	R(e_padding_ctrl, S5P_FIMV_E_PADDING_CTRL_V6);
	R(e_mv_hor_range, S5P_FIMV_E_MV_HOR_RANGE_V6);
	R(e_mv_ver_range, S5P_FIMV_E_MV_VER_RANGE_V6);
	R(e_num_dpb, S5P_FIMV_E_NUM_DPB_V6);
	R(e_luma_dpb, S5P_FIMV_E_LUMA_DPB_V6);
	R(e_chroma_dpb, S5P_FIMV_E_CHROMA_DPB_V6);
	R(e_me_buffer, S5P_FIMV_E_ME_BUFFER_V6);
	R(e_scratch_buffer_addr, S5P_FIMV_E_SCRATCH_BUFFER_ADDR_V6);
	R(e_scratch_buffer_size, S5P_FIMV_E_SCRATCH_BUFFER_SIZE_V6);
	R(e_tmv_buffer0, S5P_FIMV_E_TMV_BUFFER0_V6);
	R(e_tmv_buffer1, S5P_FIMV_E_TMV_BUFFER1_V6);
	R(e_source_first_plane_addr, S5P_FIMV_E_SOURCE_LUMA_ADDR_V6);
	R(e_source_second_plane_addr, S5P_FIMV_E_SOURCE_CHROMA_ADDR_V6);
	R(e_stream_buffer_addr, S5P_FIMV_E_STREAM_BUFFER_ADDR_V6);
	R(e_stream_buffer_size, S5P_FIMV_E_STREAM_BUFFER_SIZE_V6);
	R(e_roi_buffer_addr, S5P_FIMV_E_ROI_BUFFER_ADDR_V6);
	R(e_param_change, S5P_FIMV_E_PARAM_CHANGE_V6);
	R(e_ir_size, S5P_FIMV_E_IR_SIZE_V6);
	R(e_gop_config, S5P_FIMV_E_GOP_CONFIG_V6);
	R(e_mslice_mode, S5P_FIMV_E_MSLICE_MODE_V6);
	R(e_mslice_size_mb, S5P_FIMV_E_MSLICE_SIZE_MB_V6);
	R(e_mslice_size_bits, S5P_FIMV_E_MSLICE_SIZE_BITS_V6);
	R(e_frame_insertion, S5P_FIMV_E_FRAME_INSERTION_V6);
	R(e_rc_frame_rate, S5P_FIMV_E_RC_FRAME_RATE_V6);
	R(e_rc_bit_rate, S5P_FIMV_E_RC_BIT_RATE_V6);
	R(e_rc_roi_ctrl, S5P_FIMV_E_RC_ROI_CTRL_V6);
	R(e_picture_tag, S5P_FIMV_E_PICTURE_TAG_V6);
	R(e_bit_count_enable, S5P_FIMV_E_BIT_COUNT_ENABLE_V6);
	R(e_max_bit_count, S5P_FIMV_E_MAX_BIT_COUNT_V6);
	R(e_min_bit_count, S5P_FIMV_E_MIN_BIT_COUNT_V6);
	R(e_metadata_buffer_addr, S5P_FIMV_E_METADATA_BUFFER_ADDR_V6);
	R(e_metadata_buffer_size, S5P_FIMV_E_METADATA_BUFFER_SIZE_V6);
	R(e_encoded_source_first_plane_addr,
			S5P_FIMV_E_ENCODED_SOURCE_LUMA_ADDR_V6);
	R(e_encoded_source_second_plane_addr,
			S5P_FIMV_E_ENCODED_SOURCE_CHROMA_ADDR_V6);
	R(e_stream_size, S5P_FIMV_E_STREAM_SIZE_V6);
	R(e_slice_type, S5P_FIMV_E_SLICE_TYPE_V6);
	R(e_picture_count, S5P_FIMV_E_PICTURE_COUNT_V6);
	R(e_ret_picture_tag, S5P_FIMV_E_RET_PICTURE_TAG_V6);
	R(e_recon_luma_dpb_addr, S5P_FIMV_E_RECON_LUMA_DPB_ADDR_V6);
	R(e_recon_chroma_dpb_addr, S5P_FIMV_E_RECON_CHROMA_DPB_ADDR_V6);
	R(e_mpeg4_options, S5P_FIMV_E_MPEG4_OPTIONS_V6);
	R(e_mpeg4_hec_period, S5P_FIMV_E_MPEG4_HEC_PERIOD_V6);
	R(e_aspect_ratio, S5P_FIMV_E_ASPECT_RATIO_V6);
	R(e_extended_sar, S5P_FIMV_E_EXTENDED_SAR_V6);
	R(e_h264_options, S5P_FIMV_E_H264_OPTIONS_V6);
	R(e_h264_lf_alpha_offset, S5P_FIMV_E_H264_LF_ALPHA_OFFSET_V6);
	R(e_h264_lf_beta_offset, S5P_FIMV_E_H264_LF_BETA_OFFSET_V6);
	R(e_h264_i_period, S5P_FIMV_E_H264_I_PERIOD_V6);
	R(e_h264_fmo_slice_grp_map_type,
			S5P_FIMV_E_H264_FMO_SLICE_GRP_MAP_TYPE_V6);
	R(e_h264_fmo_num_slice_grp_minus1,
			S5P_FIMV_E_H264_FMO_NUM_SLICE_GRP_MINUS1_V6);
	R(e_h264_fmo_slice_grp_change_dir,
			S5P_FIMV_E_H264_FMO_SLICE_GRP_CHANGE_DIR_V6);
	R(e_h264_fmo_slice_grp_change_rate_minus1,
			S5P_FIMV_E_H264_FMO_SLICE_GRP_CHANGE_RATE_MINUS1_V6);
	R(e_h264_fmo_run_length_minus1_0,
			S5P_FIMV_E_H264_FMO_RUN_LENGTH_MINUS1_0_V6);
	R(e_h264_aso_slice_order_0, S5P_FIMV_E_H264_ASO_SLICE_ORDER_0_V6);
	R(e_h264_num_t_layer, S5P_FIMV_E_H264_NUM_T_LAYER_V6);
	R(e_h264_hierarchical_qp_layer0,
			S5P_FIMV_E_H264_HIERARCHICAL_QP_LAYER0_V6);
	R(e_h264_frame_packing_sei_info,
			S5P_FIMV_E_H264_FRAME_PACKING_SEI_INFO_V6);

	if (!IS_MFCV7_PLUS(dev))
		goto done;

	/* Initialize registers used in MFC v7+ */
	R(e_source_first_plane_addr, S5P_FIMV_E_SOURCE_FIRST_ADDR_V7);
	R(e_source_second_plane_addr, S5P_FIMV_E_SOURCE_SECOND_ADDR_V7);
	R(e_source_third_plane_addr, S5P_FIMV_E_SOURCE_THIRD_ADDR_V7);
	R(e_source_first_plane_stride, S5P_FIMV_E_SOURCE_FIRST_STRIDE_V7);
	R(e_source_second_plane_stride, S5P_FIMV_E_SOURCE_SECOND_STRIDE_V7);
	R(e_source_third_plane_stride, S5P_FIMV_E_SOURCE_THIRD_STRIDE_V7);
	R(e_encoded_source_first_plane_addr,
			S5P_FIMV_E_ENCODED_SOURCE_FIRST_ADDR_V7);
	R(e_encoded_source_second_plane_addr,
			S5P_FIMV_E_ENCODED_SOURCE_SECOND_ADDR_V7);
	R(e_vp8_options, S5P_FIMV_E_VP8_OPTIONS_V7);

	if (!IS_MFCV8(dev))
		goto done;

	/* Initialize registers used in MFC v8 only.
	 * Also, over-write the registers which have
	 * a different offset for MFC v8. */
	R(d_stream_data_size, S5P_FIMV_D_STREAM_DATA_SIZE_V8);
	R(d_cpb_buffer_addr, S5P_FIMV_D_CPB_BUFFER_ADDR_V8);
	R(d_cpb_buffer_size, S5P_FIMV_D_CPB_BUFFER_SIZE_V8);
	R(d_cpb_buffer_offset, S5P_FIMV_D_CPB_BUFFER_OFFSET_V8);
	R(d_first_plane_dpb_size, S5P_FIMV_D_FIRST_PLANE_DPB_SIZE_V8);
	R(d_second_plane_dpb_size, S5P_FIMV_D_SECOND_PLANE_DPB_SIZE_V8);
	R(d_scratch_buffer_addr, S5P_FIMV_D_SCRATCH_BUFFER_ADDR_V8);
	R(d_scratch_buffer_size, S5P_FIMV_D_SCRATCH_BUFFER_SIZE_V8);
	R(d_first_plane_dpb_stride_size,
			S5P_FIMV_D_FIRST_PLANE_DPB_STRIDE_SIZE_V8);
	R(d_second_plane_dpb_stride_size,
			S5P_FIMV_D_SECOND_PLANE_DPB_STRIDE_SIZE_V8);
	R(d_mv_buffer_size, S5P_FIMV_D_MV_BUFFER_SIZE_V8);
	R(d_num_mv, S5P_FIMV_D_NUM_MV_V8);
	R(d_first_plane_dpb, S5P_FIMV_D_FIRST_PLANE_DPB_V8);
	R(d_second_plane_dpb, S5P_FIMV_D_SECOND_PLANE_DPB_V8);
	R(d_mv_buffer, S5P_FIMV_D_MV_BUFFER_V8);
	R(d_init_buffer_options, S5P_FIMV_D_INIT_BUFFER_OPTIONS_V8);
	R(d_available_dpb_flag_lower, S5P_FIMV_D_AVAILABLE_DPB_FLAG_LOWER_V8);
	R(d_slice_if_enable, S5P_FIMV_D_SLICE_IF_ENABLE_V8);
	R(d_display_first_plane_addr, S5P_FIMV_D_DISPLAY_FIRST_PLANE_ADDR_V8);
	R(d_display_second_plane_addr, S5P_FIMV_D_DISPLAY_SECOND_PLANE_ADDR_V8);
	R(d_decoded_first_plane_addr, S5P_FIMV_D_DECODED_FIRST_PLANE_ADDR_V8);
	R(d_decoded_second_plane_addr, S5P_FIMV_D_DECODED_SECOND_PLANE_ADDR_V8);
	R(d_display_status, S5P_FIMV_D_DISPLAY_STATUS_V8);
	R(d_decoded_status, S5P_FIMV_D_DECODED_STATUS_V8);
	R(d_decoded_frame_type, S5P_FIMV_D_DECODED_FRAME_TYPE_V8);
	R(d_display_frame_type, S5P_FIMV_D_DISPLAY_FRAME_TYPE_V8);
	R(d_decoded_nal_size, S5P_FIMV_D_DECODED_NAL_SIZE_V8);
	R(d_display_frame_width, S5P_FIMV_D_DISPLAY_FRAME_WIDTH_V8);
	R(d_display_frame_height, S5P_FIMV_D_DISPLAY_FRAME_HEIGHT_V8);
	R(d_frame_pack_sei_avail, S5P_FIMV_D_FRAME_PACK_SEI_AVAIL_V8);
	R(d_mvc_num_views, S5P_FIMV_D_MVC_NUM_VIEWS_V8);
	R(d_mvc_view_id, S5P_FIMV_D_MVC_VIEW_ID_V8);
	R(d_ret_picture_tag_top, S5P_FIMV_D_RET_PICTURE_TAG_TOP_V8);
	R(d_ret_picture_tag_bot, S5P_FIMV_D_RET_PICTURE_TAG_BOT_V8);
	R(d_display_crop_info1, S5P_FIMV_D_DISPLAY_CROP_INFO1_V8);
	R(d_display_crop_info2, S5P_FIMV_D_DISPLAY_CROP_INFO2_V8);

	/* encoder registers */
	R(e_padding_ctrl, S5P_FIMV_E_PADDING_CTRL_V8);
	R(e_rc_config, S5P_FIMV_E_RC_CONFIG_V8);
	R(e_rc_mode, S5P_FIMV_E_RC_RPARAM_V8);
	R(e_mv_hor_range, S5P_FIMV_E_MV_HOR_RANGE_V8);
	R(e_mv_ver_range, S5P_FIMV_E_MV_VER_RANGE_V8);
	R(e_rc_qp_bound, S5P_FIMV_E_RC_QP_BOUND_V8);
	R(e_fixed_picture_qp, S5P_FIMV_E_FIXED_PICTURE_QP_V8);
	R(e_vbv_buffer_size, S5P_FIMV_E_VBV_BUFFER_SIZE_V8);
	R(e_vbv_init_delay, S5P_FIMV_E_VBV_INIT_DELAY_V8);
	R(e_mb_rc_config, S5P_FIMV_E_MB_RC_CONFIG_V8);
	R(e_aspect_ratio, S5P_FIMV_E_ASPECT_RATIO_V8);
	R(e_extended_sar, S5P_FIMV_E_EXTENDED_SAR_V8);
	R(e_h264_options, S5P_FIMV_E_H264_OPTIONS_V8);

done:
	return &mfc_regs;
#undef S5P_MFC_REG_ADDR
#undef R
}

/* Initialize opr function pointers for MFC v6 */
static struct s5p_mfc_hw_ops s5p_mfc_ops_v6 = {
	.alloc_dec_temp_buffers = s5p_mfc_alloc_dec_temp_buffers_v6,
	.release_dec_desc_buffer = s5p_mfc_release_dec_desc_buffer_v6,
	.alloc_codec_buffers = s5p_mfc_alloc_codec_buffers_v6,
	.release_codec_buffers = s5p_mfc_release_codec_buffers_v6,
	.alloc_instance_buffer = s5p_mfc_alloc_instance_buffer_v6,
	.release_instance_buffer = s5p_mfc_release_instance_buffer_v6,
	.alloc_dev_context_buffer = s5p_mfc_alloc_dev_context_buffer_v6,
	.release_dev_context_buffer = s5p_mfc_release_dev_context_buffer_v6,
	.dec_calc_dpb_size = s5p_mfc_dec_calc_dpb_size_v6,
	.enc_calc_src_size = s5p_mfc_enc_calc_src_size_v6,
	.set_dec_stream_buffer = s5p_mfc_set_dec_stream_buffer_v6,
	.set_dec_frame_buffer = s5p_mfc_set_dec_frame_buffer_v6,
	.set_enc_stream_buffer = s5p_mfc_set_enc_stream_buffer_v6,
	.set_enc_frame_buffer = s5p_mfc_set_enc_frame_buffer_v6,
	.get_enc_frame_buffer = s5p_mfc_get_enc_frame_buffer_v6,
	.set_enc_ref_buffer = s5p_mfc_set_enc_ref_buffer_v6,
	.init_decode = s5p_mfc_init_decode_v6,
	.init_encode = s5p_mfc_init_encode_v6,
	.encode_one_frame = s5p_mfc_encode_one_frame_v6,
	.try_run = s5p_mfc_try_run_v6,
	.cleanup_queue = s5p_mfc_cleanup_queue_v6,
	.clear_int_flags = s5p_mfc_clear_int_flags_v6,
	.write_info = s5p_mfc_write_info_v6,
	.read_info = s5p_mfc_read_info_v6,
	.get_dspl_y_adr = s5p_mfc_get_dspl_y_adr_v6,
	.get_dec_y_adr = s5p_mfc_get_dec_y_adr_v6,
	.get_dspl_status = s5p_mfc_get_dspl_status_v6,
	.get_dec_status = s5p_mfc_get_dec_status_v6,
	.get_dec_frame_type = s5p_mfc_get_dec_frame_type_v6,
	.get_disp_frame_type = s5p_mfc_get_disp_frame_type_v6,
	.get_consumed_stream = s5p_mfc_get_consumed_stream_v6,
	.get_int_reason = s5p_mfc_get_int_reason_v6,
	.get_int_err = s5p_mfc_get_int_err_v6,
	.err_dec = s5p_mfc_err_dec_v6,
	.err_dspl = s5p_mfc_err_dspl_v6,
	.get_img_width = s5p_mfc_get_img_width_v6,
	.get_img_height = s5p_mfc_get_img_height_v6,
	.get_dpb_count = s5p_mfc_get_dpb_count_v6,
	.get_mv_count = s5p_mfc_get_mv_count_v6,
	.get_inst_no = s5p_mfc_get_inst_no_v6,
	.get_enc_strm_size = s5p_mfc_get_enc_strm_size_v6,
	.get_enc_slice_type = s5p_mfc_get_enc_slice_type_v6,
	.get_enc_dpb_count = s5p_mfc_get_enc_dpb_count_v6,
	.get_enc_pic_count = s5p_mfc_get_enc_pic_count_v6,
	.get_sei_avail_status = s5p_mfc_get_sei_avail_status_v6,
	.get_mvc_num_views = s5p_mfc_get_mvc_num_views_v6,
	.get_mvc_view_id = s5p_mfc_get_mvc_view_id_v6,
	.get_pic_type_top = s5p_mfc_get_pic_type_top_v6,
	.get_pic_type_bot = s5p_mfc_get_pic_type_bot_v6,
	.get_crop_info_h = s5p_mfc_get_crop_info_h_v6,
	.get_crop_info_v = s5p_mfc_get_crop_info_v_v6,
};

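/* Return the hardware ops table for MFC v6 and newer */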
struct s5p_mfc_hw_ops *s5p_mfc_init_hw_ops_v6(void)
{
	return &s5p_mfc_ops_v6;
}