  1. /*
  2. * Copyright 2007-11 Advanced Micro Devices, Inc.
  3. * Copyright 2008 Red Hat Inc.
  4. *
  5. * Permission is hereby granted, free of charge, to any person obtaining a
  6. * copy of this software and associated documentation files (the "Software"),
  7. * to deal in the Software without restriction, including without limitation
  8. * the rights to use, copy, modify, merge, publish, distribute, sublicense,
  9. * and/or sell copies of the Software, and to permit persons to whom the
  10. * Software is furnished to do so, subject to the following conditions:
  11. *
  12. * The above copyright notice and this permission notice shall be included in
  13. * all copies or substantial portions of the Software.
  14. *
  15. * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
  16. * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
  17. * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
  18. * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
  19. * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
  20. * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
  21. * OTHER DEALINGS IN THE SOFTWARE.
  22. *
  23. * Authors: Dave Airlie
  24. * Alex Deucher
  25. */
  26. #include <drm/drmP.h>
  27. #include <drm/drm_crtc_helper.h>
  28. #include <drm/amdgpu_drm.h>
  29. #include "amdgpu.h"
  30. #include "amdgpu_connectors.h"
  31. #include "atom.h"
  32. #include "atombios_encoders.h"
  33. #include "atombios_dp.h"
  34. #include <linux/backlight.h>
  35. #include "bif/bif_4_1_d.h"
  36. static u8
  37. amdgpu_atombios_encoder_get_backlight_level_from_reg(struct amdgpu_device *adev)
  38. {
  39. u8 backlight_level;
  40. u32 bios_2_scratch;
  41. bios_2_scratch = RREG32(mmBIOS_SCRATCH_2);
  42. backlight_level = ((bios_2_scratch & ATOM_S2_CURRENT_BL_LEVEL_MASK) >>
  43. ATOM_S2_CURRENT_BL_LEVEL_SHIFT);
  44. return backlight_level;
  45. }
  46. static void
  47. amdgpu_atombios_encoder_set_backlight_level_to_reg(struct amdgpu_device *adev,
  48. u8 backlight_level)
  49. {
  50. u32 bios_2_scratch;
  51. bios_2_scratch = RREG32(mmBIOS_SCRATCH_2);
  52. bios_2_scratch &= ~ATOM_S2_CURRENT_BL_LEVEL_MASK;
  53. bios_2_scratch |= ((backlight_level << ATOM_S2_CURRENT_BL_LEVEL_SHIFT) &
  54. ATOM_S2_CURRENT_BL_LEVEL_MASK);
  55. WREG32(mmBIOS_SCRATCH_2, bios_2_scratch);
  56. }
  57. u8
  58. amdgpu_atombios_encoder_get_backlight_level(struct amdgpu_encoder *amdgpu_encoder)
  59. {
  60. struct drm_device *dev = amdgpu_encoder->base.dev;
  61. struct amdgpu_device *adev = dev->dev_private;
  62. if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU))
  63. return 0;
  64. return amdgpu_atombios_encoder_get_backlight_level_from_reg(adev);
  65. }
/* Program a new backlight level for the panel driven by @amdgpu_encoder.
 * Records the level in the encoder's dig state, mirrors it into the BIOS
 * scratch register, then issues the transmitter actions needed to apply it.
 */
void
amdgpu_atombios_encoder_set_backlight_level(struct amdgpu_encoder *amdgpu_encoder,
					    u8 level)
{
	struct drm_encoder *encoder = &amdgpu_encoder->base;
	struct drm_device *dev = amdgpu_encoder->base.dev;
	struct amdgpu_device *adev = dev->dev_private;
	struct amdgpu_encoder_atom_dig *dig;

	/* nothing to do unless the GPU (rather than the platform) owns the backlight */
	if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU))
		return;

	/* only LCD outputs with dig state have a controllable backlight */
	if ((amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) &&
	    amdgpu_encoder->enc_priv) {
		dig = amdgpu_encoder->enc_priv;
		dig->backlight_level = level;
		/* keep the scratch register in sync so the VBIOS sees the level too */
		amdgpu_atombios_encoder_set_backlight_level_to_reg(adev, dig->backlight_level);

		switch (amdgpu_encoder->encoder_id) {
		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
		case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
			if (dig->backlight_level == 0)
				/* level 0 switches the backlight off entirely */
				amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
									      ATOM_TRANSMITTER_ACTION_LCD_BLOFF, 0, 0);
			else {
				/* program the brightness first, then ensure the backlight is on */
				amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
									      ATOM_TRANSMITTER_ACTION_BL_BRIGHTNESS_CONTROL, 0, 0);
				amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
									      ATOM_TRANSMITTER_ACTION_LCD_BLON, 0, 0);
			}
			break;
		default:
			break;
		}
	}
}
  102. #if defined(CONFIG_BACKLIGHT_CLASS_DEVICE) || defined(CONFIG_BACKLIGHT_CLASS_DEVICE_MODULE)
  103. static u8 amdgpu_atombios_encoder_backlight_level(struct backlight_device *bd)
  104. {
  105. u8 level;
  106. /* Convert brightness to hardware level */
  107. if (bd->props.brightness < 0)
  108. level = 0;
  109. else if (bd->props.brightness > AMDGPU_MAX_BL_LEVEL)
  110. level = AMDGPU_MAX_BL_LEVEL;
  111. else
  112. level = bd->props.brightness;
  113. return level;
  114. }
  115. static int amdgpu_atombios_encoder_update_backlight_status(struct backlight_device *bd)
  116. {
  117. struct amdgpu_backlight_privdata *pdata = bl_get_data(bd);
  118. struct amdgpu_encoder *amdgpu_encoder = pdata->encoder;
  119. amdgpu_atombios_encoder_set_backlight_level(amdgpu_encoder,
  120. amdgpu_atombios_encoder_backlight_level(bd));
  121. return 0;
  122. }
  123. static int
  124. amdgpu_atombios_encoder_get_backlight_brightness(struct backlight_device *bd)
  125. {
  126. struct amdgpu_backlight_privdata *pdata = bl_get_data(bd);
  127. struct amdgpu_encoder *amdgpu_encoder = pdata->encoder;
  128. struct drm_device *dev = amdgpu_encoder->base.dev;
  129. struct amdgpu_device *adev = dev->dev_private;
  130. return amdgpu_atombios_encoder_get_backlight_level_from_reg(adev);
  131. }
/* Hooks wired into the backlight class device registered in
 * amdgpu_atombios_encoder_init_backlight().
 */
static const struct backlight_ops amdgpu_atombios_encoder_backlight_ops = {
	.get_brightness = amdgpu_atombios_encoder_get_backlight_brightness,
	.update_status = amdgpu_atombios_encoder_update_backlight_status,
};
  136. void amdgpu_atombios_encoder_init_backlight(struct amdgpu_encoder *amdgpu_encoder,
  137. struct drm_connector *drm_connector)
  138. {
  139. struct drm_device *dev = amdgpu_encoder->base.dev;
  140. struct amdgpu_device *adev = dev->dev_private;
  141. struct backlight_device *bd;
  142. struct backlight_properties props;
  143. struct amdgpu_backlight_privdata *pdata;
  144. struct amdgpu_encoder_atom_dig *dig;
  145. u8 backlight_level;
  146. char bl_name[16];
  147. /* Mac laptops with multiple GPUs use the gmux driver for backlight
  148. * so don't register a backlight device
  149. */
  150. if ((adev->pdev->subsystem_vendor == PCI_VENDOR_ID_APPLE) &&
  151. (adev->pdev->device == 0x6741))
  152. return;
  153. if (!amdgpu_encoder->enc_priv)
  154. return;
  155. if (!adev->is_atom_bios)
  156. return;
  157. if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU))
  158. return;
  159. pdata = kmalloc(sizeof(struct amdgpu_backlight_privdata), GFP_KERNEL);
  160. if (!pdata) {
  161. DRM_ERROR("Memory allocation failed\n");
  162. goto error;
  163. }
  164. memset(&props, 0, sizeof(props));
  165. props.max_brightness = AMDGPU_MAX_BL_LEVEL;
  166. props.type = BACKLIGHT_RAW;
  167. snprintf(bl_name, sizeof(bl_name),
  168. "amdgpu_bl%d", dev->primary->index);
  169. bd = backlight_device_register(bl_name, drm_connector->kdev,
  170. pdata, &amdgpu_atombios_encoder_backlight_ops, &props);
  171. if (IS_ERR(bd)) {
  172. DRM_ERROR("Backlight registration failed\n");
  173. goto error;
  174. }
  175. pdata->encoder = amdgpu_encoder;
  176. backlight_level = amdgpu_atombios_encoder_get_backlight_level_from_reg(adev);
  177. dig = amdgpu_encoder->enc_priv;
  178. dig->bl_dev = bd;
  179. bd->props.brightness = amdgpu_atombios_encoder_get_backlight_brightness(bd);
  180. bd->props.power = FB_BLANK_UNBLANK;
  181. backlight_update_status(bd);
  182. DRM_INFO("amdgpu atom DIG backlight initialized\n");
  183. return;
  184. error:
  185. kfree(pdata);
  186. return;
  187. }
  188. void
  189. amdgpu_atombios_encoder_fini_backlight(struct amdgpu_encoder *amdgpu_encoder)
  190. {
  191. struct drm_device *dev = amdgpu_encoder->base.dev;
  192. struct amdgpu_device *adev = dev->dev_private;
  193. struct backlight_device *bd = NULL;
  194. struct amdgpu_encoder_atom_dig *dig;
  195. if (!amdgpu_encoder->enc_priv)
  196. return;
  197. if (!adev->is_atom_bios)
  198. return;
  199. if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU))
  200. return;
  201. dig = amdgpu_encoder->enc_priv;
  202. bd = dig->bl_dev;
  203. dig->bl_dev = NULL;
  204. if (bd) {
  205. struct amdgpu_legacy_backlight_privdata *pdata;
  206. pdata = bl_get_data(bd);
  207. backlight_device_unregister(bd);
  208. kfree(pdata);
  209. DRM_INFO("amdgpu atom LVDS backlight unloaded\n");
  210. }
  211. }
  212. #else /* !CONFIG_BACKLIGHT_CLASS_DEVICE */
  213. void amdgpu_atombios_encoder_init_backlight(struct amdgpu_encoder *encoder)
  214. {
  215. }
  216. void amdgpu_atombios_encoder_fini_backlight(struct amdgpu_encoder *encoder)
  217. {
  218. }
  219. #endif
  220. bool amdgpu_atombios_encoder_is_digital(struct drm_encoder *encoder)
  221. {
  222. struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
  223. switch (amdgpu_encoder->encoder_id) {
  224. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
  225. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
  226. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
  227. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
  228. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
  229. return true;
  230. default:
  231. return false;
  232. }
  233. }
  234. bool amdgpu_atombios_encoder_mode_fixup(struct drm_encoder *encoder,
  235. const struct drm_display_mode *mode,
  236. struct drm_display_mode *adjusted_mode)
  237. {
  238. struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
  239. /* set the active encoder to connector routing */
  240. amdgpu_encoder_set_active_device(encoder);
  241. drm_mode_set_crtcinfo(adjusted_mode, 0);
  242. /* hw bug */
  243. if ((mode->flags & DRM_MODE_FLAG_INTERLACE)
  244. && (mode->crtc_vsync_start < (mode->crtc_vdisplay + 2)))
  245. adjusted_mode->crtc_vsync_start = adjusted_mode->crtc_vdisplay + 2;
  246. /* vertical FP must be at least 1 */
  247. if (mode->crtc_vsync_start == mode->crtc_vdisplay)
  248. adjusted_mode->crtc_vsync_start++;
  249. /* get the native mode for scaling */
  250. if (amdgpu_encoder->active_device & (ATOM_DEVICE_LCD_SUPPORT))
  251. amdgpu_panel_mode_fixup(encoder, adjusted_mode);
  252. else if (amdgpu_encoder->rmx_type != RMX_OFF)
  253. amdgpu_panel_mode_fixup(encoder, adjusted_mode);
  254. if ((amdgpu_encoder->active_device & (ATOM_DEVICE_DFP_SUPPORT | ATOM_DEVICE_LCD_SUPPORT)) ||
  255. (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE)) {
  256. struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
  257. amdgpu_atombios_dp_set_link_config(connector, adjusted_mode);
  258. }
  259. return true;
  260. }
  261. static void
  262. amdgpu_atombios_encoder_setup_dac(struct drm_encoder *encoder, int action)
  263. {
  264. struct drm_device *dev = encoder->dev;
  265. struct amdgpu_device *adev = dev->dev_private;
  266. struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
  267. DAC_ENCODER_CONTROL_PS_ALLOCATION args;
  268. int index = 0;
  269. memset(&args, 0, sizeof(args));
  270. switch (amdgpu_encoder->encoder_id) {
  271. case ENCODER_OBJECT_ID_INTERNAL_DAC1:
  272. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
  273. index = GetIndexIntoMasterTable(COMMAND, DAC1EncoderControl);
  274. break;
  275. case ENCODER_OBJECT_ID_INTERNAL_DAC2:
  276. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2:
  277. index = GetIndexIntoMasterTable(COMMAND, DAC2EncoderControl);
  278. break;
  279. }
  280. args.ucAction = action;
  281. args.ucDacStandard = ATOM_DAC1_PS2;
  282. args.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
  283. amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
  284. }
  285. static u8 amdgpu_atombios_encoder_get_bpc(struct drm_encoder *encoder)
  286. {
  287. int bpc = 8;
  288. if (encoder->crtc) {
  289. struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(encoder->crtc);
  290. bpc = amdgpu_crtc->bpc;
  291. }
  292. switch (bpc) {
  293. case 0:
  294. return PANEL_BPC_UNDEFINE;
  295. case 6:
  296. return PANEL_6BIT_PER_COLOR;
  297. case 8:
  298. default:
  299. return PANEL_8BIT_PER_COLOR;
  300. case 10:
  301. return PANEL_10BIT_PER_COLOR;
  302. case 12:
  303. return PANEL_12BIT_PER_COLOR;
  304. case 16:
  305. return PANEL_16BIT_PER_COLOR;
  306. }
  307. }
/* Overlay of every DVOEncoderControl parameter-block revision; the table
 * revision parsed at runtime selects which member is populated.
 */
union dvo_encoder_control {
	ENABLE_EXTERNAL_TMDS_ENCODER_PS_ALLOCATION ext_tmds;
	DVO_ENCODER_CONTROL_PS_ALLOCATION dvo;
	DVO_ENCODER_CONTROL_PS_ALLOCATION_V3 dvo_v3;
	DVO_ENCODER_CONTROL_PS_ALLOCATION_V1_4 dvo_v4;
};
/* Execute the DVOEncoderControl AtomBIOS command for @encoder with the
 * requested @action, filling in the parameter block according to the
 * table revision reported by the VBIOS.
 */
static void
amdgpu_atombios_encoder_setup_dvo(struct drm_encoder *encoder, int action)
{
	struct drm_device *dev = encoder->dev;
	struct amdgpu_device *adev = dev->dev_private;
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
	union dvo_encoder_control args;
	int index = GetIndexIntoMasterTable(COMMAND, DVOEncoderControl);
	uint8_t frev, crev;

	memset(&args, 0, sizeof(args));

	/* ask the VBIOS which parameter-block revision this table expects */
	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
		return;

	switch (frev) {
	case 1:
		switch (crev) {
		case 1:
			/* R4xx, R5xx */
			args.ext_tmds.sXTmdsEncoder.ucEnable = action;

			if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.ext_tmds.sXTmdsEncoder.ucMisc |= PANEL_ENCODER_MISC_DUAL;

			args.ext_tmds.sXTmdsEncoder.ucMisc |= ATOM_PANEL_MISC_888RGB;
			break;
		case 2:
			/* RS600/690/740 */
			args.dvo.sDVOEncoder.ucAction = action;
			/* pixel clock is passed in 10 kHz units */
			args.dvo.sDVOEncoder.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			/* DFP1, CRT1, TV1 depending on the type of port */
			args.dvo.sDVOEncoder.ucDeviceType = ATOM_DEVICE_DFP1_INDEX;

			if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.dvo.sDVOEncoder.usDevAttr.sDigAttrib.ucAttribute |= PANEL_ENCODER_MISC_DUAL;
			break;
		case 3:
			/* R6xx */
			args.dvo_v3.ucAction = action;
			args.dvo_v3.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			args.dvo_v3.ucDVOConfig = 0; /* XXX */
			break;
		case 4:
			/* DCE8 */
			args.dvo_v4.ucAction = action;
			args.dvo_v4.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			args.dvo_v4.ucDVOConfig = 0; /* XXX */
			args.dvo_v4.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
			break;
		default:
			DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
			break;
		}
		break;
	default:
		DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
		break;
	}

	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
}
  369. int amdgpu_atombios_encoder_get_encoder_mode(struct drm_encoder *encoder)
  370. {
  371. struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
  372. struct drm_connector *connector;
  373. struct amdgpu_connector *amdgpu_connector;
  374. struct amdgpu_connector_atom_dig *dig_connector;
  375. /* dp bridges are always DP */
  376. if (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE)
  377. return ATOM_ENCODER_MODE_DP;
  378. /* DVO is always DVO */
  379. if ((amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_DVO1) ||
  380. (amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1))
  381. return ATOM_ENCODER_MODE_DVO;
  382. connector = amdgpu_get_connector_for_encoder(encoder);
  383. /* if we don't have an active device yet, just use one of
  384. * the connectors tied to the encoder.
  385. */
  386. if (!connector)
  387. connector = amdgpu_get_connector_for_encoder_init(encoder);
  388. amdgpu_connector = to_amdgpu_connector(connector);
  389. switch (connector->connector_type) {
  390. case DRM_MODE_CONNECTOR_DVII:
  391. case DRM_MODE_CONNECTOR_HDMIB: /* HDMI-B is basically DL-DVI; analog works fine */
  392. if (amdgpu_audio != 0) {
  393. if (amdgpu_connector->use_digital &&
  394. (amdgpu_connector->audio == AMDGPU_AUDIO_ENABLE))
  395. return ATOM_ENCODER_MODE_HDMI;
  396. else if (drm_detect_hdmi_monitor(amdgpu_connector_edid(connector)) &&
  397. (amdgpu_connector->audio == AMDGPU_AUDIO_AUTO))
  398. return ATOM_ENCODER_MODE_HDMI;
  399. else if (amdgpu_connector->use_digital)
  400. return ATOM_ENCODER_MODE_DVI;
  401. else
  402. return ATOM_ENCODER_MODE_CRT;
  403. } else if (amdgpu_connector->use_digital) {
  404. return ATOM_ENCODER_MODE_DVI;
  405. } else {
  406. return ATOM_ENCODER_MODE_CRT;
  407. }
  408. break;
  409. case DRM_MODE_CONNECTOR_DVID:
  410. case DRM_MODE_CONNECTOR_HDMIA:
  411. default:
  412. if (amdgpu_audio != 0) {
  413. if (amdgpu_connector->audio == AMDGPU_AUDIO_ENABLE)
  414. return ATOM_ENCODER_MODE_HDMI;
  415. else if (drm_detect_hdmi_monitor(amdgpu_connector_edid(connector)) &&
  416. (amdgpu_connector->audio == AMDGPU_AUDIO_AUTO))
  417. return ATOM_ENCODER_MODE_HDMI;
  418. else
  419. return ATOM_ENCODER_MODE_DVI;
  420. } else {
  421. return ATOM_ENCODER_MODE_DVI;
  422. }
  423. break;
  424. case DRM_MODE_CONNECTOR_LVDS:
  425. return ATOM_ENCODER_MODE_LVDS;
  426. break;
  427. case DRM_MODE_CONNECTOR_DisplayPort:
  428. dig_connector = amdgpu_connector->con_priv;
  429. if ((dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_DISPLAYPORT) ||
  430. (dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_eDP)) {
  431. return ATOM_ENCODER_MODE_DP;
  432. } else if (amdgpu_audio != 0) {
  433. if (amdgpu_connector->audio == AMDGPU_AUDIO_ENABLE)
  434. return ATOM_ENCODER_MODE_HDMI;
  435. else if (drm_detect_hdmi_monitor(amdgpu_connector_edid(connector)) &&
  436. (amdgpu_connector->audio == AMDGPU_AUDIO_AUTO))
  437. return ATOM_ENCODER_MODE_HDMI;
  438. else
  439. return ATOM_ENCODER_MODE_DVI;
  440. } else {
  441. return ATOM_ENCODER_MODE_DVI;
  442. }
  443. break;
  444. case DRM_MODE_CONNECTOR_eDP:
  445. return ATOM_ENCODER_MODE_DP;
  446. case DRM_MODE_CONNECTOR_DVIA:
  447. case DRM_MODE_CONNECTOR_VGA:
  448. return ATOM_ENCODER_MODE_CRT;
  449. break;
  450. case DRM_MODE_CONNECTOR_Composite:
  451. case DRM_MODE_CONNECTOR_SVIDEO:
  452. case DRM_MODE_CONNECTOR_9PinDIN:
  453. /* fix me */
  454. return ATOM_ENCODER_MODE_TV;
  455. /*return ATOM_ENCODER_MODE_CV;*/
  456. break;
  457. }
  458. }
  459. /*
  460. * DIG Encoder/Transmitter Setup
  461. *
  462. * DCE 6.0
  463. * - 3 DIG transmitter blocks UNIPHY0/1/2 (links A and B).
  464. * Supports up to 6 digital outputs
  465. * - 6 DIG encoder blocks.
  466. * - DIG to PHY mapping is hardcoded
  467. * DIG1 drives UNIPHY0 link A, A+B
  468. * DIG2 drives UNIPHY0 link B
  469. * DIG3 drives UNIPHY1 link A, A+B
  470. * DIG4 drives UNIPHY1 link B
  471. * DIG5 drives UNIPHY2 link A, A+B
  472. * DIG6 drives UNIPHY2 link B
  473. *
  474. * Routing
  475. * crtc -> dig encoder -> UNIPHY/LVTMA (1 or 2 links)
  476. * Examples:
  477. * crtc0 -> dig2 -> LVTMA links A+B -> TMDS/HDMI
  478. * crtc1 -> dig1 -> UNIPHY0 link B -> DP
  479. * crtc0 -> dig1 -> UNIPHY2 link A -> LVDS
  480. * crtc1 -> dig2 -> UNIPHY1 link B+A -> TMDS/HDMI
  481. */
/* Overlay of every DIGxEncoderControl parameter-block revision; which
 * member is filled depends on the table revision parsed at runtime.
 */
union dig_encoder_control {
	DIG_ENCODER_CONTROL_PS_ALLOCATION v1;
	DIG_ENCODER_CONTROL_PARAMETERS_V2 v2;
	DIG_ENCODER_CONTROL_PARAMETERS_V3 v3;
	DIG_ENCODER_CONTROL_PARAMETERS_V4 v4;
};
/* Execute the DIGxEncoderControl AtomBIOS command for @encoder.
 * @action is the ATOM_ENCODER_CMD_* / ATOM_ENABLE-style request;
 * @panel_mode is only consumed when @action is
 * ATOM_ENCODER_CMD_SETUP_PANEL_MODE. Lane count, link rate and DIG/HPD
 * routing are filled in according to the table revision reported by the
 * VBIOS.
 */
void
amdgpu_atombios_encoder_setup_dig_encoder(struct drm_encoder *encoder,
					  int action, int panel_mode)
{
	struct drm_device *dev = encoder->dev;
	struct amdgpu_device *adev = dev->dev_private;
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
	struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv;
	struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
	union dig_encoder_control args;
	int index = GetIndexIntoMasterTable(COMMAND, DIGxEncoderControl);
	uint8_t frev, crev;
	int dp_clock = 0;
	int dp_lane_count = 0;
	int hpd_id = AMDGPU_HPD_NONE;

	/* pull DP link parameters and HPD pin from the attached connector */
	if (connector) {
		struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
		struct amdgpu_connector_atom_dig *dig_connector =
			amdgpu_connector->con_priv;

		dp_clock = dig_connector->dp_clock;
		dp_lane_count = dig_connector->dp_lane_count;
		hpd_id = amdgpu_connector->hpd.hpd;
	}

	/* no dig encoder assigned */
	if (dig->dig_encoder == -1)
		return;

	memset(&args, 0, sizeof(args));

	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
		return;

	switch (frev) {
	case 1:
		switch (crev) {
		case 1:
			args.v1.ucAction = action;
			/* pixel clock is passed in 10 kHz units */
			args.v1.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			if (action == ATOM_ENCODER_CMD_SETUP_PANEL_MODE)
				/* NOTE(review): writes v3's panel-mode field from the
				 * crev==1 path — presumably relies on the union members
				 * sharing layout; confirm against atombios.h. */
				args.v3.ucPanelMode = panel_mode;
			else
				args.v1.ucEncoderMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);

			if (ENCODER_MODE_IS_DP(args.v1.ucEncoderMode))
				args.v1.ucLaneNum = dp_lane_count;
			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.v1.ucLaneNum = 8;
			else
				args.v1.ucLaneNum = 4;

			/* NOTE(review): this bit is set before the switch below
			 * assigns (=) ucConfig, which overwrites it for DP sinks —
			 * looks like a latent bug inherited from radeon; verify
			 * before relying on the 2.70 GHz link-rate flag here. */
			if (ENCODER_MODE_IS_DP(args.v1.ucEncoderMode) && (dp_clock == 270000))
				args.v1.ucConfig |= ATOM_ENCODER_CONFIG_DPLINKRATE_2_70GHZ;

			/* select the transmitter this DIG drives */
			switch (amdgpu_encoder->encoder_id) {
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
				args.v1.ucConfig = ATOM_ENCODER_CONFIG_V2_TRANSMITTER1;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
				args.v1.ucConfig = ATOM_ENCODER_CONFIG_V2_TRANSMITTER2;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
				args.v1.ucConfig = ATOM_ENCODER_CONFIG_V2_TRANSMITTER3;
				break;
			}

			if (dig->linkb)
				args.v1.ucConfig |= ATOM_ENCODER_CONFIG_LINKB;
			else
				args.v1.ucConfig |= ATOM_ENCODER_CONFIG_LINKA;
			break;
		case 2:
		case 3:
			args.v3.ucAction = action;
			args.v3.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			if (action == ATOM_ENCODER_CMD_SETUP_PANEL_MODE)
				args.v3.ucPanelMode = panel_mode;
			else
				args.v3.ucEncoderMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);

			if (ENCODER_MODE_IS_DP(args.v3.ucEncoderMode))
				args.v3.ucLaneNum = dp_lane_count;
			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.v3.ucLaneNum = 8;
			else
				args.v3.ucLaneNum = 4;

			/* link-rate flag written via v1.ucConfig — presumably the
			 * config byte sits at the same offset in every revision;
			 * see atombios.h */
			if (ENCODER_MODE_IS_DP(args.v3.ucEncoderMode) && (dp_clock == 270000))
				args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V3_DPLINKRATE_2_70GHZ;
			args.v3.acConfig.ucDigSel = dig->dig_encoder;
			args.v3.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
			break;
		case 4:
			args.v4.ucAction = action;
			args.v4.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			if (action == ATOM_ENCODER_CMD_SETUP_PANEL_MODE)
				args.v4.ucPanelMode = panel_mode;
			else
				args.v4.ucEncoderMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);

			if (ENCODER_MODE_IS_DP(args.v4.ucEncoderMode))
				args.v4.ucLaneNum = dp_lane_count;
			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.v4.ucLaneNum = 8;
			else
				args.v4.ucLaneNum = 4;

			/* map the DP link clock to the table's link-rate flag
			 * (written via v1.ucConfig — same union-offset note as above) */
			if (ENCODER_MODE_IS_DP(args.v4.ucEncoderMode)) {
				if (dp_clock == 540000)
					args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_5_40GHZ;
				else if (dp_clock == 324000)
					args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_3_24GHZ;
				else if (dp_clock == 270000)
					args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_2_70GHZ;
				else
					args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_1_62GHZ;
			}
			args.v4.acConfig.ucDigSel = dig->dig_encoder;
			args.v4.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
			/* table HPD ids are 1-based; 0 means "no HPD pin" */
			if (hpd_id == AMDGPU_HPD_NONE)
				args.v4.ucHPD_ID = 0;
			else
				args.v4.ucHPD_ID = hpd_id + 1;
			break;
		default:
			DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
			break;
		}
		break;
	default:
		DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
		break;
	}

	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
}
/* Parameter buffer for the DIG transmitter control command table.
 * The BIOS-reported frev/crev selects which member layout is active;
 * all members overlay the same storage. */
union dig_transmitter_control {
	DIG_TRANSMITTER_CONTROL_PS_ALLOCATION v1;
	DIG_TRANSMITTER_CONTROL_PARAMETERS_V2 v2;
	DIG_TRANSMITTER_CONTROL_PARAMETERS_V3 v3;
	DIG_TRANSMITTER_CONTROL_PARAMETERS_V4 v4;
	DIG_TRANSMITTER_CONTROL_PARAMETERS_V1_5 v5;
};
/* Program a DIG transmitter (PHY) through the AtomBIOS
 * UNIPHY/LVTMA/DVO transmitter control command tables.
 *
 * @encoder:  DRM encoder whose transmitter is being configured
 * @action:   ATOM_TRANSMITTER_ACTION_* opcode (init, setup, enable,
 *            vs/emph setup, power on/off, ...)
 * @lane_num: lane selector, consumed only by ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH
 * @lane_set: voltage-swing/pre-emphasis set for SETUP_VSEMPH, and copied
 *            into ucDPLaneSet on table v5
 *
 * The parameter layout depends on the table revision the BIOS reports,
 * so each frev/crev combination fills the union separately below.
 */
void
amdgpu_atombios_encoder_setup_dig_transmitter(struct drm_encoder *encoder, int action,
					      uint8_t lane_num, uint8_t lane_set)
{
	struct drm_device *dev = encoder->dev;
	struct amdgpu_device *adev = dev->dev_private;
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
	struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv;
	struct drm_connector *connector;
	union dig_transmitter_control args;
	int index = 0;
	uint8_t frev, crev;
	bool is_dp = false;
	int pll_id = 0;
	int dp_clock = 0;
	int dp_lane_count = 0;
	int connector_object_id = 0;
	/* NOTE(review): igp_lane_info is never assigned in this function, so
	 * the APU lane-config branch in the crev 1 case below is a no-op. */
	int igp_lane_info = 0;
	int dig_encoder = dig->dig_encoder;
	int hpd_id = AMDGPU_HPD_NONE;

	if (action == ATOM_TRANSMITTER_ACTION_INIT) {
		connector = amdgpu_get_connector_for_encoder_init(encoder);
		/* just needed to avoid bailing in the encoder check. the encoder
		 * isn't used for init
		 */
		dig_encoder = 0;
	} else
		connector = amdgpu_get_connector_for_encoder(encoder);

	if (connector) {
		struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
		struct amdgpu_connector_atom_dig *dig_connector =
			amdgpu_connector->con_priv;

		hpd_id = amdgpu_connector->hpd.hpd;
		dp_clock = dig_connector->dp_clock;
		dp_lane_count = dig_connector->dp_lane_count;
		connector_object_id =
			(amdgpu_connector->connector_object_id & OBJECT_ID_MASK) >> OBJECT_ID_SHIFT;
	}

	if (encoder->crtc) {
		struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(encoder->crtc);
		pll_id = amdgpu_crtc->pll_id;
	}

	/* no dig encoder assigned */
	if (dig_encoder == -1)
		return;

	if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)))
		is_dp = true;

	memset(&args, 0, sizeof(args));

	/* pick the command table matching the transmitter hardware */
	switch (amdgpu_encoder->encoder_id) {
	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
		index = GetIndexIntoMasterTable(COMMAND, DVOOutputControl);
		break;
	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
		index = GetIndexIntoMasterTable(COMMAND, UNIPHYTransmitterControl);
		break;
	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
		index = GetIndexIntoMasterTable(COMMAND, LVTMATransmitterControl);
		break;
	}

	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
		return;

	switch (frev) {
	case 1:
		switch (crev) {
		case 1:
			args.v1.ucAction = action;
			if (action == ATOM_TRANSMITTER_ACTION_INIT) {
				args.v1.usInitInfo = cpu_to_le16(connector_object_id);
			} else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) {
				args.v1.asMode.ucLaneSel = lane_num;
				args.v1.asMode.ucLaneSet = lane_set;
			} else {
				/* for DP the symbol clock is programmed; for dual-link
				 * TMDS each link carries half the pixel clock */
				if (is_dp)
					args.v1.usPixelClock = cpu_to_le16(dp_clock / 10);
				else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
					args.v1.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10);
				else
					args.v1.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			}

			args.v1.ucConfig = ATOM_TRANSMITTER_CONFIG_CLKSRC_PPLL;

			if (dig_encoder)
				args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_DIG2_ENCODER;
			else
				args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_DIG1_ENCODER;

			/* dead code while igp_lane_info stays 0 (see above) */
			if ((adev->flags & AMD_IS_APU) &&
			    (amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_UNIPHY)) {
				if (is_dp ||
				    !amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock)) {
					if (igp_lane_info & 0x1)
						args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_0_3;
					else if (igp_lane_info & 0x2)
						args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_4_7;
					else if (igp_lane_info & 0x4)
						args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_8_11;
					else if (igp_lane_info & 0x8)
						args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_12_15;
				} else {
					if (igp_lane_info & 0x3)
						args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_0_7;
					else if (igp_lane_info & 0xc)
						args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_8_15;
				}
			}

			if (dig->linkb)
				args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LINKB;
			else
				args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LINKA;

			if (is_dp)
				args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_COHERENT;
			else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
				if (dig->coherent_mode)
					args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_COHERENT;
				if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
					args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_8LANE_LINK;
			}
			break;
		case 2:
			args.v2.ucAction = action;
			if (action == ATOM_TRANSMITTER_ACTION_INIT) {
				args.v2.usInitInfo = cpu_to_le16(connector_object_id);
			} else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) {
				args.v2.asMode.ucLaneSel = lane_num;
				args.v2.asMode.ucLaneSet = lane_set;
			} else {
				if (is_dp)
					args.v2.usPixelClock = cpu_to_le16(dp_clock / 10);
				else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
					args.v2.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10);
				else
					args.v2.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			}

			args.v2.acConfig.ucEncoderSel = dig_encoder;
			if (dig->linkb)
				args.v2.acConfig.ucLinkSel = 1;

			switch (amdgpu_encoder->encoder_id) {
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
				args.v2.acConfig.ucTransmitterSel = 0;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
				args.v2.acConfig.ucTransmitterSel = 1;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
				args.v2.acConfig.ucTransmitterSel = 2;
				break;
			}

			if (is_dp) {
				args.v2.acConfig.fCoherentMode = 1;
				args.v2.acConfig.fDPConnector = 1;
			} else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
				if (dig->coherent_mode)
					args.v2.acConfig.fCoherentMode = 1;
				if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
					args.v2.acConfig.fDualLinkConnector = 1;
			}
			break;
		case 3:
			args.v3.ucAction = action;
			if (action == ATOM_TRANSMITTER_ACTION_INIT) {
				args.v3.usInitInfo = cpu_to_le16(connector_object_id);
			} else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) {
				args.v3.asMode.ucLaneSel = lane_num;
				args.v3.asMode.ucLaneSet = lane_set;
			} else {
				if (is_dp)
					args.v3.usPixelClock = cpu_to_le16(dp_clock / 10);
				else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
					args.v3.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10);
				else
					args.v3.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			}

			if (is_dp)
				args.v3.ucLaneNum = dp_lane_count;
			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.v3.ucLaneNum = 8;
			else
				args.v3.ucLaneNum = 4;

			if (dig->linkb)
				args.v3.acConfig.ucLinkSel = 1;
			if (dig_encoder & 1)
				args.v3.acConfig.ucEncoderSel = 1;

			/* Select the PLL for the PHY
			 * DP PHY should be clocked from external src if there is
			 * one.
			 */
			/* On DCE4, if there is an external clock, it generates the DP ref clock */
			if (is_dp && adev->clock.dp_extclk)
				args.v3.acConfig.ucRefClkSource = 2; /* external src */
			else
				args.v3.acConfig.ucRefClkSource = pll_id;

			switch (amdgpu_encoder->encoder_id) {
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
				args.v3.acConfig.ucTransmitterSel = 0;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
				args.v3.acConfig.ucTransmitterSel = 1;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
				args.v3.acConfig.ucTransmitterSel = 2;
				break;
			}

			if (is_dp)
				args.v3.acConfig.fCoherentMode = 1; /* DP requires coherent */
			else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
				if (dig->coherent_mode)
					args.v3.acConfig.fCoherentMode = 1;
				if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
					args.v3.acConfig.fDualLinkConnector = 1;
			}
			break;
		case 4:
			args.v4.ucAction = action;
			if (action == ATOM_TRANSMITTER_ACTION_INIT) {
				args.v4.usInitInfo = cpu_to_le16(connector_object_id);
			} else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) {
				args.v4.asMode.ucLaneSel = lane_num;
				args.v4.asMode.ucLaneSet = lane_set;
			} else {
				if (is_dp)
					args.v4.usPixelClock = cpu_to_le16(dp_clock / 10);
				else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
					args.v4.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10);
				else
					args.v4.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			}

			if (is_dp)
				args.v4.ucLaneNum = dp_lane_count;
			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.v4.ucLaneNum = 8;
			else
				args.v4.ucLaneNum = 4;

			if (dig->linkb)
				args.v4.acConfig.ucLinkSel = 1;
			if (dig_encoder & 1)
				args.v4.acConfig.ucEncoderSel = 1;

			/* Select the PLL for the PHY
			 * DP PHY should be clocked from external src if there is
			 * one.
			 */
			/* On DCE5 DCPLL usually generates the DP ref clock */
			if (is_dp) {
				if (adev->clock.dp_extclk)
					args.v4.acConfig.ucRefClkSource = ENCODER_REFCLK_SRC_EXTCLK;
				else
					args.v4.acConfig.ucRefClkSource = ENCODER_REFCLK_SRC_DCPLL;
			} else
				args.v4.acConfig.ucRefClkSource = pll_id;

			switch (amdgpu_encoder->encoder_id) {
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
				args.v4.acConfig.ucTransmitterSel = 0;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
				args.v4.acConfig.ucTransmitterSel = 1;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
				args.v4.acConfig.ucTransmitterSel = 2;
				break;
			}

			if (is_dp)
				args.v4.acConfig.fCoherentMode = 1; /* DP requires coherent */
			else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
				if (dig->coherent_mode)
					args.v4.acConfig.fCoherentMode = 1;
				if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
					args.v4.acConfig.fDualLinkConnector = 1;
			}
			break;
		case 5:
			args.v5.ucAction = action;
			if (is_dp)
				args.v5.usSymClock = cpu_to_le16(dp_clock / 10);
			else
				args.v5.usSymClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);

			/* v5 addresses PHYs directly; link B of each UNIPHY pair
			 * is the next PHY id */
			switch (amdgpu_encoder->encoder_id) {
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
				if (dig->linkb)
					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYB;
				else
					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYA;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
				if (dig->linkb)
					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYD;
				else
					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYC;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
				if (dig->linkb)
					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYF;
				else
					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYE;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
				args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYG;
				break;
			}

			if (is_dp)
				args.v5.ucLaneNum = dp_lane_count;
			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.v5.ucLaneNum = 8;
			else
				args.v5.ucLaneNum = 4;

			args.v5.ucConnObjId = connector_object_id;
			args.v5.ucDigMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);

			if (is_dp && adev->clock.dp_extclk)
				args.v5.asConfig.ucPhyClkSrcId = ENCODER_REFCLK_SRC_EXTCLK;
			else
				args.v5.asConfig.ucPhyClkSrcId = pll_id;

			if (is_dp)
				args.v5.asConfig.ucCoherentMode = 1; /* DP requires coherent */
			else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
				if (dig->coherent_mode)
					args.v5.asConfig.ucCoherentMode = 1;
			}

			/* BIOS HPD ids are 1-based; 0 means "no HPD pin" */
			if (hpd_id == AMDGPU_HPD_NONE)
				args.v5.asConfig.ucHPDSel = 0;
			else
				args.v5.asConfig.ucHPDSel = hpd_id + 1;
			args.v5.ucDigEncoderSel = 1 << dig_encoder;
			args.v5.ucDPLaneSet = lane_set;
			break;
		default:
			DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
			break;
		}
		break;
	default:
		DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
		break;
	}

	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
}
  953. bool
  954. amdgpu_atombios_encoder_set_edp_panel_power(struct drm_connector *connector,
  955. int action)
  956. {
  957. struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
  958. struct drm_device *dev = amdgpu_connector->base.dev;
  959. struct amdgpu_device *adev = dev->dev_private;
  960. union dig_transmitter_control args;
  961. int index = GetIndexIntoMasterTable(COMMAND, UNIPHYTransmitterControl);
  962. uint8_t frev, crev;
  963. if (connector->connector_type != DRM_MODE_CONNECTOR_eDP)
  964. goto done;
  965. if ((action != ATOM_TRANSMITTER_ACTION_POWER_ON) &&
  966. (action != ATOM_TRANSMITTER_ACTION_POWER_OFF))
  967. goto done;
  968. if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
  969. goto done;
  970. memset(&args, 0, sizeof(args));
  971. args.v1.ucAction = action;
  972. amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
  973. /* wait for the panel to power up */
  974. if (action == ATOM_TRANSMITTER_ACTION_POWER_ON) {
  975. int i;
  976. for (i = 0; i < 300; i++) {
  977. if (amdgpu_display_hpd_sense(adev, amdgpu_connector->hpd.hpd))
  978. return true;
  979. mdelay(1);
  980. }
  981. return false;
  982. }
  983. done:
  984. return true;
  985. }
/* Parameter buffer for the ExternalEncoderControl command table;
 * the BIOS frev/crev selects the active layout. */
union external_encoder_control {
	EXTERNAL_ENCODER_CONTROL_PS_ALLOCATION v1;
	EXTERNAL_ENCODER_CONTROL_PS_ALLOCATION_V3 v3;
};
/* Program an external encoder (e.g. a DP bridge chip) through the
 * ExternalEncoderControl AtomBIOS command table.
 *
 * @encoder:     internal encoder that drives the external one
 * @ext_encoder: the external encoder object being configured
 * @action:      encoder-control action code (init/setup/enable/disable)
 */
static void
amdgpu_atombios_encoder_setup_external_encoder(struct drm_encoder *encoder,
					       struct drm_encoder *ext_encoder,
					       int action)
{
	struct drm_device *dev = encoder->dev;
	struct amdgpu_device *adev = dev->dev_private;
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
	struct amdgpu_encoder *ext_amdgpu_encoder = to_amdgpu_encoder(ext_encoder);
	union external_encoder_control args;
	struct drm_connector *connector;
	int index = GetIndexIntoMasterTable(COMMAND, ExternalEncoderControl);
	u8 frev, crev;
	int dp_clock = 0;
	int dp_lane_count = 0;
	int connector_object_id = 0;
	u32 ext_enum = (ext_amdgpu_encoder->encoder_enum & ENUM_ID_MASK) >> ENUM_ID_SHIFT;

	/* during init the normal encoder->connector mapping may not exist yet */
	if (action == EXTERNAL_ENCODER_ACTION_V3_ENCODER_INIT)
		connector = amdgpu_get_connector_for_encoder_init(encoder);
	else
		connector = amdgpu_get_connector_for_encoder(encoder);

	if (connector) {
		struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
		struct amdgpu_connector_atom_dig *dig_connector =
			amdgpu_connector->con_priv;

		dp_clock = dig_connector->dp_clock;
		dp_lane_count = dig_connector->dp_lane_count;
		connector_object_id =
			(amdgpu_connector->connector_object_id & OBJECT_ID_MASK) >> OBJECT_ID_SHIFT;
	}

	memset(&args, 0, sizeof(args));

	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
		return;

	switch (frev) {
	case 1:
		/* no params on frev 1 */
		break;
	case 2:
		switch (crev) {
		case 1:
		case 2:
			args.v1.sDigEncoder.ucAction = action;
			args.v1.sDigEncoder.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			args.v1.sDigEncoder.ucEncoderMode =
				amdgpu_atombios_encoder_get_encoder_mode(encoder);

			if (ENCODER_MODE_IS_DP(args.v1.sDigEncoder.ucEncoderMode)) {
				if (dp_clock == 270000)
					args.v1.sDigEncoder.ucConfig |= ATOM_ENCODER_CONFIG_DPLINKRATE_2_70GHZ;
				args.v1.sDigEncoder.ucLaneNum = dp_lane_count;
			} else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.v1.sDigEncoder.ucLaneNum = 8;
			else
				args.v1.sDigEncoder.ucLaneNum = 4;
			break;
		case 3:
			args.v3.sExtEncoder.ucAction = action;
			if (action == EXTERNAL_ENCODER_ACTION_V3_ENCODER_INIT)
				args.v3.sExtEncoder.usConnectorId = cpu_to_le16(connector_object_id);
			else
				args.v3.sExtEncoder.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			args.v3.sExtEncoder.ucEncoderMode =
				amdgpu_atombios_encoder_get_encoder_mode(encoder);

			if (ENCODER_MODE_IS_DP(args.v3.sExtEncoder.ucEncoderMode)) {
				if (dp_clock == 270000)
					args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_DPLINKRATE_2_70GHZ;
				else if (dp_clock == 540000)
					args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_DPLINKRATE_5_40GHZ;
				args.v3.sExtEncoder.ucLaneNum = dp_lane_count;
			} else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.v3.sExtEncoder.ucLaneNum = 8;
			else
				args.v3.sExtEncoder.ucLaneNum = 4;

			/* address the correct external encoder instance */
			switch (ext_enum) {
			case GRAPH_OBJECT_ENUM_ID1:
				args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_ENCODER1;
				break;
			case GRAPH_OBJECT_ENUM_ID2:
				args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_ENCODER2;
				break;
			case GRAPH_OBJECT_ENUM_ID3:
				args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_ENCODER3;
				break;
			}
			args.v3.sExtEncoder.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
			break;
		default:
			DRM_ERROR("Unknown table version: %d, %d\n", frev, crev);
			return;
		}
		break;
	default:
		DRM_ERROR("Unknown table version: %d, %d\n", frev, crev);
		return;
	}
	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
}
/* Bring the whole DIG display path up or down: DIG encoder, transmitter,
 * optional external encoder, eDP panel power and DP link training.
 *
 * @encoder: DIG encoder to enable or disable
 * @action:  ATOM_ENABLE or ATOM_DISABLE
 *
 * Sequencing matters here: on enable the encoder is configured first,
 * eDP panel power is applied, then the transmitter comes up and DP link
 * training runs; disable walks the same steps in reverse.
 */
static void
amdgpu_atombios_encoder_setup_dig(struct drm_encoder *encoder, int action)
{
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
	struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder);
	struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv;
	struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
	struct amdgpu_connector *amdgpu_connector = NULL;
	struct amdgpu_connector_atom_dig *amdgpu_dig_connector = NULL;

	if (connector) {
		amdgpu_connector = to_amdgpu_connector(connector);
		amdgpu_dig_connector = amdgpu_connector->con_priv;
	}

	if (action == ATOM_ENABLE) {
		if (!connector)
			dig->panel_mode = DP_PANEL_MODE_EXTERNAL_DP_MODE;
		else
			dig->panel_mode = amdgpu_atombios_dp_get_panel_mode(encoder, connector);

		/* setup and enable the encoder */
		amdgpu_atombios_encoder_setup_dig_encoder(encoder, ATOM_ENCODER_CMD_SETUP, 0);
		amdgpu_atombios_encoder_setup_dig_encoder(encoder,
							  ATOM_ENCODER_CMD_SETUP_PANEL_MODE,
							  dig->panel_mode);
		if (ext_encoder)
			amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder,
								       EXTERNAL_ENCODER_ACTION_V3_ENCODER_SETUP);
		/* eDP panels need power before the link can come up */
		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
		    connector) {
			if (connector->connector_type == DRM_MODE_CONNECTOR_eDP) {
				amdgpu_atombios_encoder_set_edp_panel_power(connector,
									    ATOM_TRANSMITTER_ACTION_POWER_ON);
				amdgpu_dig_connector->edp_on = true;
			}
		}
		/* enable the transmitter */
		amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
							      ATOM_TRANSMITTER_ACTION_ENABLE,
							      0, 0);
		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
		    connector) {
			/* DP_SET_POWER_D0 is set in amdgpu_atombios_dp_link_train */
			amdgpu_atombios_dp_link_train(encoder, connector);
			amdgpu_atombios_encoder_setup_dig_encoder(encoder, ATOM_ENCODER_CMD_DP_VIDEO_ON, 0);
		}
		if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT))
			amdgpu_atombios_encoder_set_backlight_level(amdgpu_encoder, dig->backlight_level);
		if (ext_encoder)
			amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder, ATOM_ENABLE);
	} else {
		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
		    connector)
			amdgpu_atombios_encoder_setup_dig_encoder(encoder,
								  ATOM_ENCODER_CMD_DP_VIDEO_OFF, 0);
		if (ext_encoder)
			amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder, ATOM_DISABLE);
		/* turn the LCD backlight off before dropping the link */
		if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT))
			amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
								      ATOM_TRANSMITTER_ACTION_LCD_BLOFF, 0, 0);
		/* put the DP sink into low power before cutting the link */
		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
		    connector)
			amdgpu_atombios_dp_set_rx_power_state(connector, DP_SET_POWER_D3);
		/* disable the transmitter */
		amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
							      ATOM_TRANSMITTER_ACTION_DISABLE, 0, 0);
		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
		    connector) {
			if (connector->connector_type == DRM_MODE_CONNECTOR_eDP) {
				amdgpu_atombios_encoder_set_edp_panel_power(connector,
									    ATOM_TRANSMITTER_ACTION_POWER_OFF);
				amdgpu_dig_connector->edp_on = false;
			}
		}
	}
}
  1160. void
  1161. amdgpu_atombios_encoder_dpms(struct drm_encoder *encoder, int mode)
  1162. {
  1163. struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
  1164. DRM_DEBUG_KMS("encoder dpms %d to mode %d, devices %08x, active_devices %08x\n",
  1165. amdgpu_encoder->encoder_id, mode, amdgpu_encoder->devices,
  1166. amdgpu_encoder->active_device);
  1167. switch (amdgpu_encoder->encoder_id) {
  1168. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
  1169. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
  1170. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
  1171. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
  1172. switch (mode) {
  1173. case DRM_MODE_DPMS_ON:
  1174. amdgpu_atombios_encoder_setup_dig(encoder, ATOM_ENABLE);
  1175. break;
  1176. case DRM_MODE_DPMS_STANDBY:
  1177. case DRM_MODE_DPMS_SUSPEND:
  1178. case DRM_MODE_DPMS_OFF:
  1179. amdgpu_atombios_encoder_setup_dig(encoder, ATOM_DISABLE);
  1180. break;
  1181. }
  1182. break;
  1183. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
  1184. switch (mode) {
  1185. case DRM_MODE_DPMS_ON:
  1186. amdgpu_atombios_encoder_setup_dvo(encoder, ATOM_ENABLE);
  1187. break;
  1188. case DRM_MODE_DPMS_STANDBY:
  1189. case DRM_MODE_DPMS_SUSPEND:
  1190. case DRM_MODE_DPMS_OFF:
  1191. amdgpu_atombios_encoder_setup_dvo(encoder, ATOM_DISABLE);
  1192. break;
  1193. }
  1194. break;
  1195. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
  1196. switch (mode) {
  1197. case DRM_MODE_DPMS_ON:
  1198. amdgpu_atombios_encoder_setup_dac(encoder, ATOM_ENABLE);
  1199. break;
  1200. case DRM_MODE_DPMS_STANDBY:
  1201. case DRM_MODE_DPMS_SUSPEND:
  1202. case DRM_MODE_DPMS_OFF:
  1203. amdgpu_atombios_encoder_setup_dac(encoder, ATOM_DISABLE);
  1204. break;
  1205. }
  1206. break;
  1207. default:
  1208. return;
  1209. }
  1210. }
/* Parameter buffer for the SelectCRTC_Source command table; the
 * BIOS-reported frev/crev selects which member layout is in effect. */
union crtc_source_param {
	SELECT_CRTC_SOURCE_PS_ALLOCATION v1;
	SELECT_CRTC_SOURCE_PARAMETERS_V2 v2;
	SELECT_CRTC_SOURCE_PARAMETERS_V3 v3;
};
  1216. void
  1217. amdgpu_atombios_encoder_set_crtc_source(struct drm_encoder *encoder)
  1218. {
  1219. struct drm_device *dev = encoder->dev;
  1220. struct amdgpu_device *adev = dev->dev_private;
  1221. struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
  1222. struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(encoder->crtc);
  1223. union crtc_source_param args;
  1224. int index = GetIndexIntoMasterTable(COMMAND, SelectCRTC_Source);
  1225. uint8_t frev, crev;
  1226. struct amdgpu_encoder_atom_dig *dig;
  1227. memset(&args, 0, sizeof(args));
  1228. if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
  1229. return;
  1230. switch (frev) {
  1231. case 1:
  1232. switch (crev) {
  1233. case 1:
  1234. default:
  1235. args.v1.ucCRTC = amdgpu_crtc->crtc_id;
  1236. switch (amdgpu_encoder->encoder_id) {
  1237. case ENCODER_OBJECT_ID_INTERNAL_TMDS1:
  1238. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_TMDS1:
  1239. args.v1.ucDevice = ATOM_DEVICE_DFP1_INDEX;
  1240. break;
  1241. case ENCODER_OBJECT_ID_INTERNAL_LVDS:
  1242. case ENCODER_OBJECT_ID_INTERNAL_LVTM1:
  1243. if (amdgpu_encoder->devices & ATOM_DEVICE_LCD1_SUPPORT)
  1244. args.v1.ucDevice = ATOM_DEVICE_LCD1_INDEX;
  1245. else
  1246. args.v1.ucDevice = ATOM_DEVICE_DFP3_INDEX;
  1247. break;
  1248. case ENCODER_OBJECT_ID_INTERNAL_DVO1:
  1249. case ENCODER_OBJECT_ID_INTERNAL_DDI:
  1250. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
  1251. args.v1.ucDevice = ATOM_DEVICE_DFP2_INDEX;
  1252. break;
  1253. case ENCODER_OBJECT_ID_INTERNAL_DAC1:
  1254. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
  1255. if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
  1256. args.v1.ucDevice = ATOM_DEVICE_TV1_INDEX;
  1257. else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
  1258. args.v1.ucDevice = ATOM_DEVICE_CV_INDEX;
  1259. else
  1260. args.v1.ucDevice = ATOM_DEVICE_CRT1_INDEX;
  1261. break;
  1262. case ENCODER_OBJECT_ID_INTERNAL_DAC2:
  1263. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2:
  1264. if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
  1265. args.v1.ucDevice = ATOM_DEVICE_TV1_INDEX;
  1266. else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
  1267. args.v1.ucDevice = ATOM_DEVICE_CV_INDEX;
  1268. else
  1269. args.v1.ucDevice = ATOM_DEVICE_CRT2_INDEX;
  1270. break;
  1271. }
  1272. break;
  1273. case 2:
  1274. args.v2.ucCRTC = amdgpu_crtc->crtc_id;
  1275. if (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE) {
  1276. struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
  1277. if (connector->connector_type == DRM_MODE_CONNECTOR_LVDS)
  1278. args.v2.ucEncodeMode = ATOM_ENCODER_MODE_LVDS;
  1279. else if (connector->connector_type == DRM_MODE_CONNECTOR_VGA)
  1280. args.v2.ucEncodeMode = ATOM_ENCODER_MODE_CRT;
  1281. else
  1282. args.v2.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
  1283. } else if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) {
  1284. args.v2.ucEncodeMode = ATOM_ENCODER_MODE_LVDS;
  1285. } else {
  1286. args.v2.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
  1287. }
  1288. switch (amdgpu_encoder->encoder_id) {
  1289. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
  1290. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
  1291. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
  1292. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
  1293. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
  1294. dig = amdgpu_encoder->enc_priv;
  1295. switch (dig->dig_encoder) {
  1296. case 0:
  1297. args.v2.ucEncoderID = ASIC_INT_DIG1_ENCODER_ID;
  1298. break;
  1299. case 1:
  1300. args.v2.ucEncoderID = ASIC_INT_DIG2_ENCODER_ID;
  1301. break;
  1302. case 2:
  1303. args.v2.ucEncoderID = ASIC_INT_DIG3_ENCODER_ID;
  1304. break;
  1305. case 3:
  1306. args.v2.ucEncoderID = ASIC_INT_DIG4_ENCODER_ID;
  1307. break;
  1308. case 4:
  1309. args.v2.ucEncoderID = ASIC_INT_DIG5_ENCODER_ID;
  1310. break;
  1311. case 5:
  1312. args.v2.ucEncoderID = ASIC_INT_DIG6_ENCODER_ID;
  1313. break;
  1314. case 6:
  1315. args.v2.ucEncoderID = ASIC_INT_DIG7_ENCODER_ID;
  1316. break;
  1317. }
  1318. break;
  1319. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
  1320. args.v2.ucEncoderID = ASIC_INT_DVO_ENCODER_ID;
  1321. break;
  1322. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
  1323. if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
  1324. args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
  1325. else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
  1326. args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
  1327. else
  1328. args.v2.ucEncoderID = ASIC_INT_DAC1_ENCODER_ID;
  1329. break;
  1330. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2:
  1331. if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
  1332. args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
  1333. else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
  1334. args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
  1335. else
  1336. args.v2.ucEncoderID = ASIC_INT_DAC2_ENCODER_ID;
  1337. break;
  1338. }
  1339. break;
  1340. case 3:
  1341. args.v3.ucCRTC = amdgpu_crtc->crtc_id;
  1342. if (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE) {
  1343. struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
  1344. if (connector->connector_type == DRM_MODE_CONNECTOR_LVDS)
  1345. args.v2.ucEncodeMode = ATOM_ENCODER_MODE_LVDS;
  1346. else if (connector->connector_type == DRM_MODE_CONNECTOR_VGA)
  1347. args.v2.ucEncodeMode = ATOM_ENCODER_MODE_CRT;
  1348. else
  1349. args.v2.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
  1350. } else if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) {
  1351. args.v2.ucEncodeMode = ATOM_ENCODER_MODE_LVDS;
  1352. } else {
  1353. args.v2.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
  1354. }
  1355. args.v3.ucDstBpc = amdgpu_atombios_encoder_get_bpc(encoder);
  1356. switch (amdgpu_encoder->encoder_id) {
  1357. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
  1358. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
  1359. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
  1360. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
  1361. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
  1362. dig = amdgpu_encoder->enc_priv;
  1363. switch (dig->dig_encoder) {
  1364. case 0:
  1365. args.v3.ucEncoderID = ASIC_INT_DIG1_ENCODER_ID;
  1366. break;
  1367. case 1:
  1368. args.v3.ucEncoderID = ASIC_INT_DIG2_ENCODER_ID;
  1369. break;
  1370. case 2:
  1371. args.v3.ucEncoderID = ASIC_INT_DIG3_ENCODER_ID;
  1372. break;
  1373. case 3:
  1374. args.v3.ucEncoderID = ASIC_INT_DIG4_ENCODER_ID;
  1375. break;
  1376. case 4:
  1377. args.v3.ucEncoderID = ASIC_INT_DIG5_ENCODER_ID;
  1378. break;
  1379. case 5:
  1380. args.v3.ucEncoderID = ASIC_INT_DIG6_ENCODER_ID;
  1381. break;
  1382. case 6:
  1383. args.v3.ucEncoderID = ASIC_INT_DIG7_ENCODER_ID;
  1384. break;
  1385. }
  1386. break;
  1387. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
  1388. args.v3.ucEncoderID = ASIC_INT_DVO_ENCODER_ID;
  1389. break;
  1390. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
  1391. if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
  1392. args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
  1393. else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
  1394. args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
  1395. else
  1396. args.v3.ucEncoderID = ASIC_INT_DAC1_ENCODER_ID;
  1397. break;
  1398. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2:
  1399. if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
  1400. args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
  1401. else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
  1402. args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
  1403. else
  1404. args.v3.ucEncoderID = ASIC_INT_DAC2_ENCODER_ID;
  1405. break;
  1406. }
  1407. break;
  1408. }
  1409. break;
  1410. default:
  1411. DRM_ERROR("Unknown table version: %d, %d\n", frev, crev);
  1412. return;
  1413. }
  1414. amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
  1415. }
  1416. /* This only needs to be called once at startup */
  1417. void
  1418. amdgpu_atombios_encoder_init_dig(struct amdgpu_device *adev)
  1419. {
  1420. struct drm_device *dev = adev->ddev;
  1421. struct drm_encoder *encoder;
  1422. list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
  1423. struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
  1424. struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder);
  1425. switch (amdgpu_encoder->encoder_id) {
  1426. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
  1427. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
  1428. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
  1429. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
  1430. amdgpu_atombios_encoder_setup_dig_transmitter(encoder, ATOM_TRANSMITTER_ACTION_INIT,
  1431. 0, 0);
  1432. break;
  1433. }
  1434. if (ext_encoder)
  1435. amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder,
  1436. EXTERNAL_ENCODER_ACTION_V3_ENCODER_INIT);
  1437. }
  1438. }
  1439. static bool
  1440. amdgpu_atombios_encoder_dac_load_detect(struct drm_encoder *encoder,
  1441. struct drm_connector *connector)
  1442. {
  1443. struct drm_device *dev = encoder->dev;
  1444. struct amdgpu_device *adev = dev->dev_private;
  1445. struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
  1446. struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
  1447. if (amdgpu_encoder->devices & (ATOM_DEVICE_TV_SUPPORT |
  1448. ATOM_DEVICE_CV_SUPPORT |
  1449. ATOM_DEVICE_CRT_SUPPORT)) {
  1450. DAC_LOAD_DETECTION_PS_ALLOCATION args;
  1451. int index = GetIndexIntoMasterTable(COMMAND, DAC_LoadDetection);
  1452. uint8_t frev, crev;
  1453. memset(&args, 0, sizeof(args));
  1454. if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
  1455. return false;
  1456. args.sDacload.ucMisc = 0;
  1457. if ((amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_DAC1) ||
  1458. (amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1))
  1459. args.sDacload.ucDacType = ATOM_DAC_A;
  1460. else
  1461. args.sDacload.ucDacType = ATOM_DAC_B;
  1462. if (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT)
  1463. args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_CRT1_SUPPORT);
  1464. else if (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT)
  1465. args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_CRT2_SUPPORT);
  1466. else if (amdgpu_connector->devices & ATOM_DEVICE_CV_SUPPORT) {
  1467. args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_CV_SUPPORT);
  1468. if (crev >= 3)
  1469. args.sDacload.ucMisc = DAC_LOAD_MISC_YPrPb;
  1470. } else if (amdgpu_connector->devices & ATOM_DEVICE_TV1_SUPPORT) {
  1471. args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_TV1_SUPPORT);
  1472. if (crev >= 3)
  1473. args.sDacload.ucMisc = DAC_LOAD_MISC_YPrPb;
  1474. }
  1475. amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
  1476. return true;
  1477. } else
  1478. return false;
  1479. }
  1480. enum drm_connector_status
  1481. amdgpu_atombios_encoder_dac_detect(struct drm_encoder *encoder,
  1482. struct drm_connector *connector)
  1483. {
  1484. struct drm_device *dev = encoder->dev;
  1485. struct amdgpu_device *adev = dev->dev_private;
  1486. struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
  1487. struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
  1488. uint32_t bios_0_scratch;
  1489. if (!amdgpu_atombios_encoder_dac_load_detect(encoder, connector)) {
  1490. DRM_DEBUG_KMS("detect returned false \n");
  1491. return connector_status_unknown;
  1492. }
  1493. bios_0_scratch = RREG32(mmBIOS_SCRATCH_0);
  1494. DRM_DEBUG_KMS("Bios 0 scratch %x %08x\n", bios_0_scratch, amdgpu_encoder->devices);
  1495. if (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT) {
  1496. if (bios_0_scratch & ATOM_S0_CRT1_MASK)
  1497. return connector_status_connected;
  1498. }
  1499. if (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT) {
  1500. if (bios_0_scratch & ATOM_S0_CRT2_MASK)
  1501. return connector_status_connected;
  1502. }
  1503. if (amdgpu_connector->devices & ATOM_DEVICE_CV_SUPPORT) {
  1504. if (bios_0_scratch & (ATOM_S0_CV_MASK|ATOM_S0_CV_MASK_A))
  1505. return connector_status_connected;
  1506. }
  1507. if (amdgpu_connector->devices & ATOM_DEVICE_TV1_SUPPORT) {
  1508. if (bios_0_scratch & (ATOM_S0_TV1_COMPOSITE | ATOM_S0_TV1_COMPOSITE_A))
  1509. return connector_status_connected; /* CTV */
  1510. else if (bios_0_scratch & (ATOM_S0_TV1_SVIDEO | ATOM_S0_TV1_SVIDEO_A))
  1511. return connector_status_connected; /* STV */
  1512. }
  1513. return connector_status_disconnected;
  1514. }
  1515. enum drm_connector_status
  1516. amdgpu_atombios_encoder_dig_detect(struct drm_encoder *encoder,
  1517. struct drm_connector *connector)
  1518. {
  1519. struct drm_device *dev = encoder->dev;
  1520. struct amdgpu_device *adev = dev->dev_private;
  1521. struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
  1522. struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
  1523. struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder);
  1524. u32 bios_0_scratch;
  1525. if (!ext_encoder)
  1526. return connector_status_unknown;
  1527. if ((amdgpu_connector->devices & ATOM_DEVICE_CRT_SUPPORT) == 0)
  1528. return connector_status_unknown;
  1529. /* load detect on the dp bridge */
  1530. amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder,
  1531. EXTERNAL_ENCODER_ACTION_V3_DACLOAD_DETECTION);
  1532. bios_0_scratch = RREG32(mmBIOS_SCRATCH_0);
  1533. DRM_DEBUG_KMS("Bios 0 scratch %x %08x\n", bios_0_scratch, amdgpu_encoder->devices);
  1534. if (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT) {
  1535. if (bios_0_scratch & ATOM_S0_CRT1_MASK)
  1536. return connector_status_connected;
  1537. }
  1538. if (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT) {
  1539. if (bios_0_scratch & ATOM_S0_CRT2_MASK)
  1540. return connector_status_connected;
  1541. }
  1542. if (amdgpu_connector->devices & ATOM_DEVICE_CV_SUPPORT) {
  1543. if (bios_0_scratch & (ATOM_S0_CV_MASK|ATOM_S0_CV_MASK_A))
  1544. return connector_status_connected;
  1545. }
  1546. if (amdgpu_connector->devices & ATOM_DEVICE_TV1_SUPPORT) {
  1547. if (bios_0_scratch & (ATOM_S0_TV1_COMPOSITE | ATOM_S0_TV1_COMPOSITE_A))
  1548. return connector_status_connected; /* CTV */
  1549. else if (bios_0_scratch & (ATOM_S0_TV1_SVIDEO | ATOM_S0_TV1_SVIDEO_A))
  1550. return connector_status_connected; /* STV */
  1551. }
  1552. return connector_status_disconnected;
  1553. }
  1554. void
  1555. amdgpu_atombios_encoder_setup_ext_encoder_ddc(struct drm_encoder *encoder)
  1556. {
  1557. struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder);
  1558. if (ext_encoder)
  1559. /* ddc_setup on the dp bridge */
  1560. amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder,
  1561. EXTERNAL_ENCODER_ACTION_V3_DDC_SETUP);
  1562. }
  1563. void
  1564. amdgpu_atombios_encoder_set_bios_scratch_regs(struct drm_connector *connector,
  1565. struct drm_encoder *encoder,
  1566. bool connected)
  1567. {
  1568. struct drm_device *dev = connector->dev;
  1569. struct amdgpu_device *adev = dev->dev_private;
  1570. struct amdgpu_connector *amdgpu_connector =
  1571. to_amdgpu_connector(connector);
  1572. struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
  1573. uint32_t bios_0_scratch, bios_3_scratch, bios_6_scratch;
  1574. bios_0_scratch = RREG32(mmBIOS_SCRATCH_0);
  1575. bios_3_scratch = RREG32(mmBIOS_SCRATCH_3);
  1576. bios_6_scratch = RREG32(mmBIOS_SCRATCH_6);
  1577. if ((amdgpu_encoder->devices & ATOM_DEVICE_LCD1_SUPPORT) &&
  1578. (amdgpu_connector->devices & ATOM_DEVICE_LCD1_SUPPORT)) {
  1579. if (connected) {
  1580. DRM_DEBUG_KMS("LCD1 connected\n");
  1581. bios_0_scratch |= ATOM_S0_LCD1;
  1582. bios_3_scratch |= ATOM_S3_LCD1_ACTIVE;
  1583. bios_6_scratch |= ATOM_S6_ACC_REQ_LCD1;
  1584. } else {
  1585. DRM_DEBUG_KMS("LCD1 disconnected\n");
  1586. bios_0_scratch &= ~ATOM_S0_LCD1;
  1587. bios_3_scratch &= ~ATOM_S3_LCD1_ACTIVE;
  1588. bios_6_scratch &= ~ATOM_S6_ACC_REQ_LCD1;
  1589. }
  1590. }
  1591. if ((amdgpu_encoder->devices & ATOM_DEVICE_CRT1_SUPPORT) &&
  1592. (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT)) {
  1593. if (connected) {
  1594. DRM_DEBUG_KMS("CRT1 connected\n");
  1595. bios_0_scratch |= ATOM_S0_CRT1_COLOR;
  1596. bios_3_scratch |= ATOM_S3_CRT1_ACTIVE;
  1597. bios_6_scratch |= ATOM_S6_ACC_REQ_CRT1;
  1598. } else {
  1599. DRM_DEBUG_KMS("CRT1 disconnected\n");
  1600. bios_0_scratch &= ~ATOM_S0_CRT1_MASK;
  1601. bios_3_scratch &= ~ATOM_S3_CRT1_ACTIVE;
  1602. bios_6_scratch &= ~ATOM_S6_ACC_REQ_CRT1;
  1603. }
  1604. }
  1605. if ((amdgpu_encoder->devices & ATOM_DEVICE_CRT2_SUPPORT) &&
  1606. (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT)) {
  1607. if (connected) {
  1608. DRM_DEBUG_KMS("CRT2 connected\n");
  1609. bios_0_scratch |= ATOM_S0_CRT2_COLOR;
  1610. bios_3_scratch |= ATOM_S3_CRT2_ACTIVE;
  1611. bios_6_scratch |= ATOM_S6_ACC_REQ_CRT2;
  1612. } else {
  1613. DRM_DEBUG_KMS("CRT2 disconnected\n");
  1614. bios_0_scratch &= ~ATOM_S0_CRT2_MASK;
  1615. bios_3_scratch &= ~ATOM_S3_CRT2_ACTIVE;
  1616. bios_6_scratch &= ~ATOM_S6_ACC_REQ_CRT2;
  1617. }
  1618. }
  1619. if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP1_SUPPORT) &&
  1620. (amdgpu_connector->devices & ATOM_DEVICE_DFP1_SUPPORT)) {
  1621. if (connected) {
  1622. DRM_DEBUG_KMS("DFP1 connected\n");
  1623. bios_0_scratch |= ATOM_S0_DFP1;
  1624. bios_3_scratch |= ATOM_S3_DFP1_ACTIVE;
  1625. bios_6_scratch |= ATOM_S6_ACC_REQ_DFP1;
  1626. } else {
  1627. DRM_DEBUG_KMS("DFP1 disconnected\n");
  1628. bios_0_scratch &= ~ATOM_S0_DFP1;
  1629. bios_3_scratch &= ~ATOM_S3_DFP1_ACTIVE;
  1630. bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP1;
  1631. }
  1632. }
  1633. if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP2_SUPPORT) &&
  1634. (amdgpu_connector->devices & ATOM_DEVICE_DFP2_SUPPORT)) {
  1635. if (connected) {
  1636. DRM_DEBUG_KMS("DFP2 connected\n");
  1637. bios_0_scratch |= ATOM_S0_DFP2;
  1638. bios_3_scratch |= ATOM_S3_DFP2_ACTIVE;
  1639. bios_6_scratch |= ATOM_S6_ACC_REQ_DFP2;
  1640. } else {
  1641. DRM_DEBUG_KMS("DFP2 disconnected\n");
  1642. bios_0_scratch &= ~ATOM_S0_DFP2;
  1643. bios_3_scratch &= ~ATOM_S3_DFP2_ACTIVE;
  1644. bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP2;
  1645. }
  1646. }
  1647. if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP3_SUPPORT) &&
  1648. (amdgpu_connector->devices & ATOM_DEVICE_DFP3_SUPPORT)) {
  1649. if (connected) {
  1650. DRM_DEBUG_KMS("DFP3 connected\n");
  1651. bios_0_scratch |= ATOM_S0_DFP3;
  1652. bios_3_scratch |= ATOM_S3_DFP3_ACTIVE;
  1653. bios_6_scratch |= ATOM_S6_ACC_REQ_DFP3;
  1654. } else {
  1655. DRM_DEBUG_KMS("DFP3 disconnected\n");
  1656. bios_0_scratch &= ~ATOM_S0_DFP3;
  1657. bios_3_scratch &= ~ATOM_S3_DFP3_ACTIVE;
  1658. bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP3;
  1659. }
  1660. }
  1661. if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP4_SUPPORT) &&
  1662. (amdgpu_connector->devices & ATOM_DEVICE_DFP4_SUPPORT)) {
  1663. if (connected) {
  1664. DRM_DEBUG_KMS("DFP4 connected\n");
  1665. bios_0_scratch |= ATOM_S0_DFP4;
  1666. bios_3_scratch |= ATOM_S3_DFP4_ACTIVE;
  1667. bios_6_scratch |= ATOM_S6_ACC_REQ_DFP4;
  1668. } else {
  1669. DRM_DEBUG_KMS("DFP4 disconnected\n");
  1670. bios_0_scratch &= ~ATOM_S0_DFP4;
  1671. bios_3_scratch &= ~ATOM_S3_DFP4_ACTIVE;
  1672. bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP4;
  1673. }
  1674. }
  1675. if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP5_SUPPORT) &&
  1676. (amdgpu_connector->devices & ATOM_DEVICE_DFP5_SUPPORT)) {
  1677. if (connected) {
  1678. DRM_DEBUG_KMS("DFP5 connected\n");
  1679. bios_0_scratch |= ATOM_S0_DFP5;
  1680. bios_3_scratch |= ATOM_S3_DFP5_ACTIVE;
  1681. bios_6_scratch |= ATOM_S6_ACC_REQ_DFP5;
  1682. } else {
  1683. DRM_DEBUG_KMS("DFP5 disconnected\n");
  1684. bios_0_scratch &= ~ATOM_S0_DFP5;
  1685. bios_3_scratch &= ~ATOM_S3_DFP5_ACTIVE;
  1686. bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP5;
  1687. }
  1688. }
  1689. if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP6_SUPPORT) &&
  1690. (amdgpu_connector->devices & ATOM_DEVICE_DFP6_SUPPORT)) {
  1691. if (connected) {
  1692. DRM_DEBUG_KMS("DFP6 connected\n");
  1693. bios_0_scratch |= ATOM_S0_DFP6;
  1694. bios_3_scratch |= ATOM_S3_DFP6_ACTIVE;
  1695. bios_6_scratch |= ATOM_S6_ACC_REQ_DFP6;
  1696. } else {
  1697. DRM_DEBUG_KMS("DFP6 disconnected\n");
  1698. bios_0_scratch &= ~ATOM_S0_DFP6;
  1699. bios_3_scratch &= ~ATOM_S3_DFP6_ACTIVE;
  1700. bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP6;
  1701. }
  1702. }
  1703. WREG32(mmBIOS_SCRATCH_0, bios_0_scratch);
  1704. WREG32(mmBIOS_SCRATCH_3, bios_3_scratch);
  1705. WREG32(mmBIOS_SCRATCH_6, bios_6_scratch);
  1706. }
/* Overlay of the AtomBIOS LVDS_Info data-table layouts; which view is
 * populated depends on the table revision reported by the vbios header.
 */
union lvds_info {
	struct _ATOM_LVDS_INFO info;
	struct _ATOM_LVDS_INFO_V12 info_12;
};
  1711. struct amdgpu_encoder_atom_dig *
  1712. amdgpu_atombios_encoder_get_lcd_info(struct amdgpu_encoder *encoder)
  1713. {
  1714. struct drm_device *dev = encoder->base.dev;
  1715. struct amdgpu_device *adev = dev->dev_private;
  1716. struct amdgpu_mode_info *mode_info = &adev->mode_info;
  1717. int index = GetIndexIntoMasterTable(DATA, LVDS_Info);
  1718. uint16_t data_offset, misc;
  1719. union lvds_info *lvds_info;
  1720. uint8_t frev, crev;
  1721. struct amdgpu_encoder_atom_dig *lvds = NULL;
  1722. int encoder_enum = (encoder->encoder_enum & ENUM_ID_MASK) >> ENUM_ID_SHIFT;
  1723. if (amdgpu_atom_parse_data_header(mode_info->atom_context, index, NULL,
  1724. &frev, &crev, &data_offset)) {
  1725. lvds_info =
  1726. (union lvds_info *)(mode_info->atom_context->bios + data_offset);
  1727. lvds =
  1728. kzalloc(sizeof(struct amdgpu_encoder_atom_dig), GFP_KERNEL);
  1729. if (!lvds)
  1730. return NULL;
  1731. lvds->native_mode.clock =
  1732. le16_to_cpu(lvds_info->info.sLCDTiming.usPixClk) * 10;
  1733. lvds->native_mode.hdisplay =
  1734. le16_to_cpu(lvds_info->info.sLCDTiming.usHActive);
  1735. lvds->native_mode.vdisplay =
  1736. le16_to_cpu(lvds_info->info.sLCDTiming.usVActive);
  1737. lvds->native_mode.htotal = lvds->native_mode.hdisplay +
  1738. le16_to_cpu(lvds_info->info.sLCDTiming.usHBlanking_Time);
  1739. lvds->native_mode.hsync_start = lvds->native_mode.hdisplay +
  1740. le16_to_cpu(lvds_info->info.sLCDTiming.usHSyncOffset);
  1741. lvds->native_mode.hsync_end = lvds->native_mode.hsync_start +
  1742. le16_to_cpu(lvds_info->info.sLCDTiming.usHSyncWidth);
  1743. lvds->native_mode.vtotal = lvds->native_mode.vdisplay +
  1744. le16_to_cpu(lvds_info->info.sLCDTiming.usVBlanking_Time);
  1745. lvds->native_mode.vsync_start = lvds->native_mode.vdisplay +
  1746. le16_to_cpu(lvds_info->info.sLCDTiming.usVSyncOffset);
  1747. lvds->native_mode.vsync_end = lvds->native_mode.vsync_start +
  1748. le16_to_cpu(lvds_info->info.sLCDTiming.usVSyncWidth);
  1749. lvds->panel_pwr_delay =
  1750. le16_to_cpu(lvds_info->info.usOffDelayInMs);
  1751. lvds->lcd_misc = lvds_info->info.ucLVDS_Misc;
  1752. misc = le16_to_cpu(lvds_info->info.sLCDTiming.susModeMiscInfo.usAccess);
  1753. if (misc & ATOM_VSYNC_POLARITY)
  1754. lvds->native_mode.flags |= DRM_MODE_FLAG_NVSYNC;
  1755. if (misc & ATOM_HSYNC_POLARITY)
  1756. lvds->native_mode.flags |= DRM_MODE_FLAG_NHSYNC;
  1757. if (misc & ATOM_COMPOSITESYNC)
  1758. lvds->native_mode.flags |= DRM_MODE_FLAG_CSYNC;
  1759. if (misc & ATOM_INTERLACE)
  1760. lvds->native_mode.flags |= DRM_MODE_FLAG_INTERLACE;
  1761. if (misc & ATOM_DOUBLE_CLOCK_MODE)
  1762. lvds->native_mode.flags |= DRM_MODE_FLAG_DBLSCAN;
  1763. lvds->native_mode.width_mm = le16_to_cpu(lvds_info->info.sLCDTiming.usImageHSize);
  1764. lvds->native_mode.height_mm = le16_to_cpu(lvds_info->info.sLCDTiming.usImageVSize);
  1765. /* set crtc values */
  1766. drm_mode_set_crtcinfo(&lvds->native_mode, CRTC_INTERLACE_HALVE_V);
  1767. lvds->lcd_ss_id = lvds_info->info.ucSS_Id;
  1768. encoder->native_mode = lvds->native_mode;
  1769. if (encoder_enum == 2)
  1770. lvds->linkb = true;
  1771. else
  1772. lvds->linkb = false;
  1773. /* parse the lcd record table */
  1774. if (le16_to_cpu(lvds_info->info.usModePatchTableOffset)) {
  1775. ATOM_FAKE_EDID_PATCH_RECORD *fake_edid_record;
  1776. ATOM_PANEL_RESOLUTION_PATCH_RECORD *panel_res_record;
  1777. bool bad_record = false;
  1778. u8 *record;
  1779. if ((frev == 1) && (crev < 2))
  1780. /* absolute */
  1781. record = (u8 *)(mode_info->atom_context->bios +
  1782. le16_to_cpu(lvds_info->info.usModePatchTableOffset));
  1783. else
  1784. /* relative */
  1785. record = (u8 *)(mode_info->atom_context->bios +
  1786. data_offset +
  1787. le16_to_cpu(lvds_info->info.usModePatchTableOffset));
  1788. while (*record != ATOM_RECORD_END_TYPE) {
  1789. switch (*record) {
  1790. case LCD_MODE_PATCH_RECORD_MODE_TYPE:
  1791. record += sizeof(ATOM_PATCH_RECORD_MODE);
  1792. break;
  1793. case LCD_RTS_RECORD_TYPE:
  1794. record += sizeof(ATOM_LCD_RTS_RECORD);
  1795. break;
  1796. case LCD_CAP_RECORD_TYPE:
  1797. record += sizeof(ATOM_LCD_MODE_CONTROL_CAP);
  1798. break;
  1799. case LCD_FAKE_EDID_PATCH_RECORD_TYPE:
  1800. fake_edid_record = (ATOM_FAKE_EDID_PATCH_RECORD *)record;
  1801. if (fake_edid_record->ucFakeEDIDLength) {
  1802. struct edid *edid;
  1803. int edid_size =
  1804. max((int)EDID_LENGTH, (int)fake_edid_record->ucFakeEDIDLength);
  1805. edid = kmalloc(edid_size, GFP_KERNEL);
  1806. if (edid) {
  1807. memcpy((u8 *)edid, (u8 *)&fake_edid_record->ucFakeEDIDString[0],
  1808. fake_edid_record->ucFakeEDIDLength);
  1809. if (drm_edid_is_valid(edid)) {
  1810. adev->mode_info.bios_hardcoded_edid = edid;
  1811. adev->mode_info.bios_hardcoded_edid_size = edid_size;
  1812. } else
  1813. kfree(edid);
  1814. }
  1815. }
  1816. record += fake_edid_record->ucFakeEDIDLength ?
  1817. fake_edid_record->ucFakeEDIDLength + 2 :
  1818. sizeof(ATOM_FAKE_EDID_PATCH_RECORD);
  1819. break;
  1820. case LCD_PANEL_RESOLUTION_RECORD_TYPE:
  1821. panel_res_record = (ATOM_PANEL_RESOLUTION_PATCH_RECORD *)record;
  1822. lvds->native_mode.width_mm = panel_res_record->usHSize;
  1823. lvds->native_mode.height_mm = panel_res_record->usVSize;
  1824. record += sizeof(ATOM_PANEL_RESOLUTION_PATCH_RECORD);
  1825. break;
  1826. default:
  1827. DRM_ERROR("Bad LCD record %d\n", *record);
  1828. bad_record = true;
  1829. break;
  1830. }
  1831. if (bad_record)
  1832. break;
  1833. }
  1834. }
  1835. }
  1836. return lvds;
  1837. }
  1838. struct amdgpu_encoder_atom_dig *
  1839. amdgpu_atombios_encoder_get_dig_info(struct amdgpu_encoder *amdgpu_encoder)
  1840. {
  1841. int encoder_enum = (amdgpu_encoder->encoder_enum & ENUM_ID_MASK) >> ENUM_ID_SHIFT;
  1842. struct amdgpu_encoder_atom_dig *dig = kzalloc(sizeof(struct amdgpu_encoder_atom_dig), GFP_KERNEL);
  1843. if (!dig)
  1844. return NULL;
  1845. /* coherent mode by default */
  1846. dig->coherent_mode = true;
  1847. dig->dig_encoder = -1;
  1848. if (encoder_enum == 2)
  1849. dig->linkb = true;
  1850. else
  1851. dig->linkb = false;
  1852. return dig;
  1853. }