cdv_intel_dp.c

  1. /*
  2. * Copyright © 2012 Intel Corporation
  3. *
  4. * Permission is hereby granted, free of charge, to any person obtaining a
  5. * copy of this software and associated documentation files (the "Software"),
  6. * to deal in the Software without restriction, including without limitation
  7. * the rights to use, copy, modify, merge, publish, distribute, sublicense,
  8. * and/or sell copies of the Software, and to permit persons to whom the
  9. * Software is furnished to do so, subject to the following conditions:
  10. *
  11. * The above copyright notice and this permission notice (including the next
  12. * paragraph) shall be included in all copies or substantial portions of the
  13. * Software.
  14. *
  15. * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
  16. * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
  17. * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
  18. * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
  19. * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
  20. * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
  21. * IN THE SOFTWARE.
  22. *
  23. * Authors:
  24. * Keith Packard <keithp@keithp.com>
  25. *
  26. */
  27. #include <linux/i2c.h>
  28. #include <linux/slab.h>
  29. #include <linux/module.h>
  30. #include <drm/drmP.h>
  31. #include <drm/drm_crtc.h>
  32. #include <drm/drm_crtc_helper.h>
  33. #include "psb_drv.h"
  34. #include "psb_intel_drv.h"
  35. #include "psb_intel_reg.h"
  36. #include "gma_display.h"
  37. #include <drm/drm_dp_helper.h>
  38. /**
  39. * struct i2c_algo_dp_aux_data - driver interface structure for i2c over dp
  40. * aux algorithm
  41. * @running: set by the algo indicating whether an i2c transfer is ongoing or
  42. * whether the i2c bus is quiescent
  43. * @address: i2c target address for the currently ongoing transfer
  44. * @aux_ch: driver callback to transfer a single byte of the i2c payload
  45. */
  46. struct i2c_algo_dp_aux_data {
  47. bool running;
  48. u16 address;
  49. int (*aux_ch) (struct i2c_adapter *adapter,
  50. int mode, uint8_t write_byte,
  51. uint8_t *read_byte);
  52. };
  53. /* Run a single AUX_CH I2C transaction, writing/reading data as necessary */
  54. static int
  55. i2c_algo_dp_aux_transaction(struct i2c_adapter *adapter, int mode,
  56. uint8_t write_byte, uint8_t *read_byte)
  57. {
  58. struct i2c_algo_dp_aux_data *algo_data = adapter->algo_data;
  59. int ret;
  60. ret = (*algo_data->aux_ch)(adapter, mode,
  61. write_byte, read_byte);
  62. return ret;
  63. }
  64. /*
  65. * I2C over AUX CH
  66. */
  67. /*
  68. * Send the address. If the I2C link is running, this 'restarts'
  69. * the connection with the new address; this is used for doing
  70. * a write followed by a read (as needed for DDC)
  71. */
  72. static int
  73. i2c_algo_dp_aux_address(struct i2c_adapter *adapter, u16 address, bool reading)
  74. {
  75. struct i2c_algo_dp_aux_data *algo_data = adapter->algo_data;
  76. int mode = MODE_I2C_START;
  77. int ret;
  78. if (reading)
  79. mode |= MODE_I2C_READ;
  80. else
  81. mode |= MODE_I2C_WRITE;
  82. algo_data->address = address;
  83. algo_data->running = true;
  84. ret = i2c_algo_dp_aux_transaction(adapter, mode, 0, NULL);
  85. return ret;
  86. }
  87. /*
  88. * Stop the I2C transaction. This closes out the link, sending
  89. * a bare address packet with the MOT bit turned off
  90. */
  91. static void
  92. i2c_algo_dp_aux_stop(struct i2c_adapter *adapter, bool reading)
  93. {
  94. struct i2c_algo_dp_aux_data *algo_data = adapter->algo_data;
  95. int mode = MODE_I2C_STOP;
  96. if (reading)
  97. mode |= MODE_I2C_READ;
  98. else
  99. mode |= MODE_I2C_WRITE;
  100. if (algo_data->running) {
  101. (void) i2c_algo_dp_aux_transaction(adapter, mode, 0, NULL);
  102. algo_data->running = false;
  103. }
  104. }
  105. /*
  106. * Write a single byte to the current I2C address; the
  107. * I2C link must be running or this returns -EIO
  108. */
  109. static int
  110. i2c_algo_dp_aux_put_byte(struct i2c_adapter *adapter, u8 byte)
  111. {
  112. struct i2c_algo_dp_aux_data *algo_data = adapter->algo_data;
  113. int ret;
  114. if (!algo_data->running)
  115. return -EIO;
  116. ret = i2c_algo_dp_aux_transaction(adapter, MODE_I2C_WRITE, byte, NULL);
  117. return ret;
  118. }
  119. /*
  120. * Read a single byte from the current I2C address; the
  121. * I2C link must be running or this returns -EIO
  122. */
  123. static int
  124. i2c_algo_dp_aux_get_byte(struct i2c_adapter *adapter, u8 *byte_ret)
  125. {
  126. struct i2c_algo_dp_aux_data *algo_data = adapter->algo_data;
  127. int ret;
  128. if (!algo_data->running)
  129. return -EIO;
  130. ret = i2c_algo_dp_aux_transaction(adapter, MODE_I2C_READ, 0, byte_ret);
  131. return ret;
  132. }
  133. static int
  134. i2c_algo_dp_aux_xfer(struct i2c_adapter *adapter,
  135. struct i2c_msg *msgs,
  136. int num)
  137. {
  138. int ret = 0;
  139. bool reading = false;
  140. int m;
  141. int b;
  142. for (m = 0; m < num; m++) {
  143. u16 len = msgs[m].len;
  144. u8 *buf = msgs[m].buf;
  145. reading = (msgs[m].flags & I2C_M_RD) != 0;
  146. ret = i2c_algo_dp_aux_address(adapter, msgs[m].addr, reading);
  147. if (ret < 0)
  148. break;
  149. if (reading) {
  150. for (b = 0; b < len; b++) {
  151. ret = i2c_algo_dp_aux_get_byte(adapter, &buf[b]);
  152. if (ret < 0)
  153. break;
  154. }
  155. } else {
  156. for (b = 0; b < len; b++) {
  157. ret = i2c_algo_dp_aux_put_byte(adapter, buf[b]);
  158. if (ret < 0)
  159. break;
  160. }
  161. }
  162. if (ret < 0)
  163. break;
  164. }
  165. if (ret >= 0)
  166. ret = num;
  167. i2c_algo_dp_aux_stop(adapter, reading);
  168. DRM_DEBUG_KMS("dp_aux_xfer return %d\n", ret);
  169. return ret;
  170. }
  171. static u32
  172. i2c_algo_dp_aux_functionality(struct i2c_adapter *adapter)
  173. {
  174. return I2C_FUNC_I2C | I2C_FUNC_SMBUS_EMUL |
  175. I2C_FUNC_SMBUS_READ_BLOCK_DATA |
  176. I2C_FUNC_SMBUS_BLOCK_PROC_CALL |
  177. I2C_FUNC_10BIT_ADDR;
  178. }
  179. static const struct i2c_algorithm i2c_dp_aux_algo = {
  180. .master_xfer = i2c_algo_dp_aux_xfer,
  181. .functionality = i2c_algo_dp_aux_functionality,
  182. };
  183. static void
  184. i2c_dp_aux_reset_bus(struct i2c_adapter *adapter)
  185. {
  186. (void) i2c_algo_dp_aux_address(adapter, 0, false);
  187. (void) i2c_algo_dp_aux_stop(adapter, false);
  188. }
  189. static int
  190. i2c_dp_aux_prepare_bus(struct i2c_adapter *adapter)
  191. {
  192. adapter->algo = &i2c_dp_aux_algo;
  193. adapter->retries = 3;
  194. i2c_dp_aux_reset_bus(adapter);
  195. return 0;
  196. }
  197. /*
  198. * FIXME: This is the old dp aux helper, gma500 is the last driver that needs to
  199. * be ported over to the new helper code in drm_dp_helper.c like i915 or radeon.
  200. */
  201. static int
  202. i2c_dp_aux_add_bus(struct i2c_adapter *adapter)
  203. {
  204. int error;
  205. error = i2c_dp_aux_prepare_bus(adapter);
  206. if (error)
  207. return error;
  208. error = i2c_add_adapter(adapter);
  209. return error;
  210. }
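/*
 * Poll COND for up to MS milliseconds, sleeping W ms between checks (unless a
 * debugger owns the CPU); evaluates to 0 on success or -ETIMEDOUT if COND never
 * became true. wait_for() below is the variant with a 1ms poll interval.
 */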
  211. #define _wait_for(COND, MS, W) ({ \
  212. unsigned long timeout__ = jiffies + msecs_to_jiffies(MS); \
  213. int ret__ = 0; \
  214. while (! (COND)) { \
  215. if (time_after(jiffies, timeout__)) { \
  216. ret__ = -ETIMEDOUT; \
  217. break; \
  218. } \
  219. if (W && !in_dbg_master()) msleep(W); \
  220. } \
  221. ret__; \
  222. })
  223. #define wait_for(COND, MS) _wait_for(COND, MS, 1)
  224. #define DP_LINK_CHECK_TIMEOUT (10 * 1000)
  225. #define DP_LINK_CONFIGURATION_SIZE 9
  226. #define CDV_FAST_LINK_TRAIN 1
  227. struct cdv_intel_dp {
  228. uint32_t output_reg;
  229. uint32_t DP;
  230. uint8_t link_configuration[DP_LINK_CONFIGURATION_SIZE];
  231. bool has_audio;
  232. int force_audio;
  233. uint32_t color_range;
  234. uint8_t link_bw;
  235. uint8_t lane_count;
  236. uint8_t dpcd[4];
  237. struct gma_encoder *encoder;
  238. struct i2c_adapter adapter;
  239. struct i2c_algo_dp_aux_data algo;
  240. uint8_t train_set[4];
  241. uint8_t link_status[DP_LINK_STATUS_SIZE];
  242. int panel_power_up_delay;
  243. int panel_power_down_delay;
  244. int panel_power_cycle_delay;
  245. int backlight_on_delay;
  246. int backlight_off_delay;
  247. struct drm_display_mode *panel_fixed_mode; /* for eDP */
  248. bool panel_on;
  249. };
  250. struct ddi_regoff {
  251. uint32_t PreEmph1;
  252. uint32_t PreEmph2;
  253. uint32_t VSwing1;
  254. uint32_t VSwing2;
  255. uint32_t VSwing3;
  256. uint32_t VSwing4;
  257. uint32_t VSwing5;
  258. };
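/*
 * Sideband (DPIO) register offsets for the two DP ports (entry 0 is used for
 * DP_B, entry 1 for the other port), followed by the values written to the
 * VSwing2/PreEmph2 registers for each drive level in
 * cdv_intel_dp_set_vswing_premph() below.
 */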
  259. static struct ddi_regoff ddi_DP_train_table[] = {
  260. {.PreEmph1 = 0x812c, .PreEmph2 = 0x8124, .VSwing1 = 0x8154,
  261. .VSwing2 = 0x8148, .VSwing3 = 0x814C, .VSwing4 = 0x8150,
  262. .VSwing5 = 0x8158,},
  263. {.PreEmph1 = 0x822c, .PreEmph2 = 0x8224, .VSwing1 = 0x8254,
  264. .VSwing2 = 0x8248, .VSwing3 = 0x824C, .VSwing4 = 0x8250,
  265. .VSwing5 = 0x8258,},
  266. };
  267. static uint32_t dp_vswing_premph_table[] = {
  268. 0x55338954, 0x4000,
  269. 0x554d8954, 0x2000,
  270. 0x55668954, 0,
  271. 0x559ac0d4, 0x6000,
  272. };
  273. /**
  274. * is_edp - is the given port attached to an eDP panel (either CPU or PCH)
  275. * @encoder: GMA encoder backing this DP/eDP port
  276. *
  277. * If a CPU or PCH DP output is attached to an eDP panel, this function
  278. * will return true, and false otherwise.
  279. */
  280. static bool is_edp(struct gma_encoder *encoder)
  281. {
  282. return encoder->type == INTEL_OUTPUT_EDP;
  283. }
  284. static void cdv_intel_dp_start_link_train(struct gma_encoder *encoder);
  285. static void cdv_intel_dp_complete_link_train(struct gma_encoder *encoder);
  286. static void cdv_intel_dp_link_down(struct gma_encoder *encoder);
  287. static int
  288. cdv_intel_dp_max_lane_count(struct gma_encoder *encoder)
  289. {
  290. struct cdv_intel_dp *intel_dp = encoder->dev_priv;
  291. int max_lane_count = 4;
  292. if (intel_dp->dpcd[DP_DPCD_REV] >= 0x11) {
  293. max_lane_count = intel_dp->dpcd[DP_MAX_LANE_COUNT] & 0x1f;
  294. switch (max_lane_count) {
  295. case 1: case 2: case 4:
  296. break;
  297. default:
  298. max_lane_count = 4;
  299. }
  300. }
  301. return max_lane_count;
  302. }
  303. static int
  304. cdv_intel_dp_max_link_bw(struct gma_encoder *encoder)
  305. {
  306. struct cdv_intel_dp *intel_dp = encoder->dev_priv;
  307. int max_link_bw = intel_dp->dpcd[DP_MAX_LINK_RATE];
  308. switch (max_link_bw) {
  309. case DP_LINK_BW_1_62:
  310. case DP_LINK_BW_2_7:
  311. break;
  312. default:
  313. max_link_bw = DP_LINK_BW_1_62;
  314. break;
  315. }
  316. return max_link_bw;
  317. }
  318. static int
  319. cdv_intel_dp_link_clock(uint8_t link_bw)
  320. {
  321. if (link_bw == DP_LINK_BW_2_7)
  322. return 270000;
  323. else
  324. return 162000;
  325. }
  326. static int
  327. cdv_intel_dp_link_required(int pixel_clock, int bpp)
  328. {
  329. return (pixel_clock * bpp + 7) / 8;
  330. }
  331. static int
  332. cdv_intel_dp_max_data_rate(int max_link_clock, int max_lanes)
  333. {
  334. return (max_link_clock * max_lanes * 19) / 20;
  335. }
  336. static void cdv_intel_edp_panel_vdd_on(struct gma_encoder *intel_encoder)
  337. {
  338. struct drm_device *dev = intel_encoder->base.dev;
  339. struct cdv_intel_dp *intel_dp = intel_encoder->dev_priv;
  340. u32 pp;
  341. if (intel_dp->panel_on) {
  342. DRM_DEBUG_KMS("Skip VDD on because of panel on\n");
  343. return;
  344. }
  345. DRM_DEBUG_KMS("\n");
  346. pp = REG_READ(PP_CONTROL);
  347. pp |= EDP_FORCE_VDD;
  348. REG_WRITE(PP_CONTROL, pp);
  349. REG_READ(PP_CONTROL);
  350. msleep(intel_dp->panel_power_up_delay);
  351. }
  352. static void cdv_intel_edp_panel_vdd_off(struct gma_encoder *intel_encoder)
  353. {
  354. struct drm_device *dev = intel_encoder->base.dev;
  355. u32 pp;
  356. DRM_DEBUG_KMS("\n");
  357. pp = REG_READ(PP_CONTROL);
  358. pp &= ~EDP_FORCE_VDD;
  359. REG_WRITE(PP_CONTROL, pp);
  360. REG_READ(PP_CONTROL);
  361. }
  362. /* Returns true if the panel was already on when called */
  363. static bool cdv_intel_edp_panel_on(struct gma_encoder *intel_encoder)
  364. {
  365. struct drm_device *dev = intel_encoder->base.dev;
  366. struct cdv_intel_dp *intel_dp = intel_encoder->dev_priv;
  367. u32 pp, idle_on_mask = PP_ON | PP_SEQUENCE_NONE;
  368. if (intel_dp->panel_on)
  369. return true;
  370. DRM_DEBUG_KMS("\n");
  371. pp = REG_READ(PP_CONTROL);
  372. pp &= ~PANEL_UNLOCK_MASK;
  373. pp |= (PANEL_UNLOCK_REGS | POWER_TARGET_ON);
  374. REG_WRITE(PP_CONTROL, pp);
  375. REG_READ(PP_CONTROL);
  376. if (wait_for(((REG_READ(PP_STATUS) & idle_on_mask) == idle_on_mask), 1000)) {
  377. DRM_DEBUG_KMS("Error in Powering up eDP panel, status %x\n", REG_READ(PP_STATUS));
  378. intel_dp->panel_on = false;
  379. } else
  380. intel_dp->panel_on = true;
  381. msleep(intel_dp->panel_power_up_delay);
  382. return false;
  383. }
  384. static void cdv_intel_edp_panel_off (struct gma_encoder *intel_encoder)
  385. {
  386. struct drm_device *dev = intel_encoder->base.dev;
  387. u32 pp, idle_off_mask = PP_ON ;
  388. struct cdv_intel_dp *intel_dp = intel_encoder->dev_priv;
  389. DRM_DEBUG_KMS("\n");
  390. pp = REG_READ(PP_CONTROL);
  391. if ((pp & POWER_TARGET_ON) == 0)
  392. return;
  393. intel_dp->panel_on = false;
  394. pp &= ~PANEL_UNLOCK_MASK;
  395. /* ILK workaround: disable reset around power sequence */
  396. pp &= ~POWER_TARGET_ON;
  397. pp &= ~EDP_FORCE_VDD;
  398. pp &= ~EDP_BLC_ENABLE;
  399. REG_WRITE(PP_CONTROL, pp);
  400. REG_READ(PP_CONTROL);
  401. DRM_DEBUG_KMS("PP_STATUS %x\n", REG_READ(PP_STATUS));
  402. if (wait_for((REG_READ(PP_STATUS) & idle_off_mask) == 0, 1000)) {
  403. DRM_DEBUG_KMS("Error in turning off Panel\n");
  404. }
  405. msleep(intel_dp->panel_power_cycle_delay);
  406. DRM_DEBUG_KMS("Over\n");
  407. }
  408. static void cdv_intel_edp_backlight_on (struct gma_encoder *intel_encoder)
  409. {
  410. struct drm_device *dev = intel_encoder->base.dev;
  411. u32 pp;
  412. DRM_DEBUG_KMS("\n");
  413. /*
  414. * If we enable the backlight right away following a panel power
  415. * on, we may see slight flicker as the panel syncs with the eDP
  416. * link. So delay a bit to make sure the image is solid before
  417. * allowing it to appear.
  418. */
  419. msleep(300);
  420. pp = REG_READ(PP_CONTROL);
  421. pp |= EDP_BLC_ENABLE;
  422. REG_WRITE(PP_CONTROL, pp);
  423. gma_backlight_enable(dev);
  424. }
  425. static void cdv_intel_edp_backlight_off (struct gma_encoder *intel_encoder)
  426. {
  427. struct drm_device *dev = intel_encoder->base.dev;
  428. struct cdv_intel_dp *intel_dp = intel_encoder->dev_priv;
  429. u32 pp;
  430. DRM_DEBUG_KMS("\n");
  431. gma_backlight_disable(dev);
  432. msleep(10);
  433. pp = REG_READ(PP_CONTROL);
  434. pp &= ~EDP_BLC_ENABLE;
  435. REG_WRITE(PP_CONTROL, pp);
  436. msleep(intel_dp->backlight_off_delay);
  437. }
  438. static int
  439. cdv_intel_dp_mode_valid(struct drm_connector *connector,
  440. struct drm_display_mode *mode)
  441. {
  442. struct gma_encoder *encoder = gma_attached_encoder(connector);
  443. struct cdv_intel_dp *intel_dp = encoder->dev_priv;
  444. int max_link_clock = cdv_intel_dp_link_clock(cdv_intel_dp_max_link_bw(encoder));
  445. int max_lanes = cdv_intel_dp_max_lane_count(encoder);
  446. struct drm_psb_private *dev_priv = connector->dev->dev_private;
  447. if (is_edp(encoder) && intel_dp->panel_fixed_mode) {
  448. if (mode->hdisplay > intel_dp->panel_fixed_mode->hdisplay)
  449. return MODE_PANEL;
  450. if (mode->vdisplay > intel_dp->panel_fixed_mode->vdisplay)
  451. return MODE_PANEL;
  452. }
  453. /* only refuse the mode on non eDP since we have seen some weird eDP panels
  454. which are outside spec tolerances but somehow work by magic */
  455. if (!is_edp(encoder) &&
  456. (cdv_intel_dp_link_required(mode->clock, dev_priv->edp.bpp)
  457. > cdv_intel_dp_max_data_rate(max_link_clock, max_lanes)))
  458. return MODE_CLOCK_HIGH;
  459. if (is_edp(encoder)) {
  460. if (cdv_intel_dp_link_required(mode->clock, 24)
  461. > cdv_intel_dp_max_data_rate(max_link_clock, max_lanes))
  462. return MODE_CLOCK_HIGH;
  463. }
  464. if (mode->clock < 10000)
  465. return MODE_CLOCK_LOW;
  466. return MODE_OK;
  467. }
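/*
 * The AUX channel data registers hold up to four payload bytes each, most
 * significant byte first; pack_aux()/unpack_aux() convert between a plain byte
 * buffer and that big-endian register layout.
 */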
  468. static uint32_t
  469. pack_aux(uint8_t *src, int src_bytes)
  470. {
  471. int i;
  472. uint32_t v = 0;
  473. if (src_bytes > 4)
  474. src_bytes = 4;
  475. for (i = 0; i < src_bytes; i++)
  476. v |= ((uint32_t) src[i]) << ((3-i) * 8);
  477. return v;
  478. }
  479. static void
  480. unpack_aux(uint32_t src, uint8_t *dst, int dst_bytes)
  481. {
  482. int i;
  483. if (dst_bytes > 4)
  484. dst_bytes = 4;
  485. for (i = 0; i < dst_bytes; i++)
  486. dst[i] = src >> ((3-i) * 8);
  487. }
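/*
 * Perform one raw AUX transaction on this port: load the request into the data
 * registers, kick off the transfer, busy-wait for SEND_BUSY to clear, check for
 * timeout/receive errors and unpack any reply bytes. Returns the number of
 * bytes received or a negative errno.
 */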
  488. static int
  489. cdv_intel_dp_aux_ch(struct gma_encoder *encoder,
  490. uint8_t *send, int send_bytes,
  491. uint8_t *recv, int recv_size)
  492. {
  493. struct cdv_intel_dp *intel_dp = encoder->dev_priv;
  494. uint32_t output_reg = intel_dp->output_reg;
  495. struct drm_device *dev = encoder->base.dev;
  496. uint32_t ch_ctl = output_reg + 0x10;
  497. uint32_t ch_data = ch_ctl + 4;
  498. int i;
  499. int recv_bytes;
  500. uint32_t status;
  501. uint32_t aux_clock_divider;
  502. int try, precharge;
  503. /* The clock divider is based off the hrawclk,
  504. * and would like to run at 2MHz. So, take the
  505. * hrawclk value and divide by 2 and use that.
  506. * On the CDV platform, hrawclk is 200MHz.
  507. *
  508. */
  509. aux_clock_divider = 200 / 2;
  510. precharge = 4;
  511. if (is_edp(encoder))
  512. precharge = 10;
  513. if (REG_READ(ch_ctl) & DP_AUX_CH_CTL_SEND_BUSY) {
  514. DRM_ERROR("dp_aux_ch not started status 0x%08x\n",
  515. REG_READ(ch_ctl));
  516. return -EBUSY;
  517. }
  518. /* Must try at least 3 times according to DP spec */
  519. for (try = 0; try < 5; try++) {
  520. /* Load the send data into the aux channel data registers */
  521. for (i = 0; i < send_bytes; i += 4)
  522. REG_WRITE(ch_data + i,
  523. pack_aux(send + i, send_bytes - i));
  524. /* Send the command and wait for it to complete */
  525. REG_WRITE(ch_ctl,
  526. DP_AUX_CH_CTL_SEND_BUSY |
  527. DP_AUX_CH_CTL_TIME_OUT_400us |
  528. (send_bytes << DP_AUX_CH_CTL_MESSAGE_SIZE_SHIFT) |
  529. (precharge << DP_AUX_CH_CTL_PRECHARGE_2US_SHIFT) |
  530. (aux_clock_divider << DP_AUX_CH_CTL_BIT_CLOCK_2X_SHIFT) |
  531. DP_AUX_CH_CTL_DONE |
  532. DP_AUX_CH_CTL_TIME_OUT_ERROR |
  533. DP_AUX_CH_CTL_RECEIVE_ERROR);
  534. for (;;) {
  535. status = REG_READ(ch_ctl);
  536. if ((status & DP_AUX_CH_CTL_SEND_BUSY) == 0)
  537. break;
  538. udelay(100);
  539. }
  540. /* Clear done status and any errors */
  541. REG_WRITE(ch_ctl,
  542. status |
  543. DP_AUX_CH_CTL_DONE |
  544. DP_AUX_CH_CTL_TIME_OUT_ERROR |
  545. DP_AUX_CH_CTL_RECEIVE_ERROR);
  546. if (status & DP_AUX_CH_CTL_DONE)
  547. break;
  548. }
  549. if ((status & DP_AUX_CH_CTL_DONE) == 0) {
  550. DRM_ERROR("dp_aux_ch not done status 0x%08x\n", status);
  551. return -EBUSY;
  552. }
  553. /* Check for timeout or receive error.
  554. * Timeouts occur when the sink is not connected
  555. */
  556. if (status & DP_AUX_CH_CTL_RECEIVE_ERROR) {
  557. DRM_ERROR("dp_aux_ch receive error status 0x%08x\n", status);
  558. return -EIO;
  559. }
  560. /* Timeouts occur when the device isn't connected, so they're
  561. * "normal" -- don't fill the kernel log with these */
  562. if (status & DP_AUX_CH_CTL_TIME_OUT_ERROR) {
  563. DRM_DEBUG_KMS("dp_aux_ch timeout status 0x%08x\n", status);
  564. return -ETIMEDOUT;
  565. }
  566. /* Unload any bytes sent back from the other side */
  567. recv_bytes = ((status & DP_AUX_CH_CTL_MESSAGE_SIZE_MASK) >>
  568. DP_AUX_CH_CTL_MESSAGE_SIZE_SHIFT);
  569. if (recv_bytes > recv_size)
  570. recv_bytes = recv_size;
  571. for (i = 0; i < recv_bytes; i += 4)
  572. unpack_aux(REG_READ(ch_data + i),
  573. recv + i, recv_bytes - i);
  574. return recv_bytes;
  575. }
  576. /* Write data to the aux channel in native mode */
  577. static int
  578. cdv_intel_dp_aux_native_write(struct gma_encoder *encoder,
  579. uint16_t address, uint8_t *send, int send_bytes)
  580. {
  581. int ret;
  582. uint8_t msg[20];
  583. int msg_bytes;
  584. uint8_t ack;
  585. if (send_bytes > 16)
  586. return -1;
  587. msg[0] = DP_AUX_NATIVE_WRITE << 4;
  588. msg[1] = address >> 8;
  589. msg[2] = address & 0xff;
  590. msg[3] = send_bytes - 1;
  591. memcpy(&msg[4], send, send_bytes);
  592. msg_bytes = send_bytes + 4;
  593. for (;;) {
  594. ret = cdv_intel_dp_aux_ch(encoder, msg, msg_bytes, &ack, 1);
  595. if (ret < 0)
  596. return ret;
  597. ack >>= 4;
  598. if ((ack & DP_AUX_NATIVE_REPLY_MASK) == DP_AUX_NATIVE_REPLY_ACK)
  599. break;
  600. else if ((ack & DP_AUX_NATIVE_REPLY_MASK) == DP_AUX_NATIVE_REPLY_DEFER)
  601. udelay(100);
  602. else
  603. return -EIO;
  604. }
  605. return send_bytes;
  606. }
  607. /* Write a single byte to the aux channel in native mode */
  608. static int
  609. cdv_intel_dp_aux_native_write_1(struct gma_encoder *encoder,
  610. uint16_t address, uint8_t byte)
  611. {
  612. return cdv_intel_dp_aux_native_write(encoder, address, &byte, 1);
  613. }
  614. /* read bytes from a native aux channel */
  615. static int
  616. cdv_intel_dp_aux_native_read(struct gma_encoder *encoder,
  617. uint16_t address, uint8_t *recv, int recv_bytes)
  618. {
  619. uint8_t msg[4];
  620. int msg_bytes;
  621. uint8_t reply[20];
  622. int reply_bytes;
  623. uint8_t ack;
  624. int ret;
  625. msg[0] = DP_AUX_NATIVE_READ << 4;
  626. msg[1] = address >> 8;
  627. msg[2] = address & 0xff;
  628. msg[3] = recv_bytes - 1;
  629. msg_bytes = 4;
  630. reply_bytes = recv_bytes + 1;
  631. for (;;) {
  632. ret = cdv_intel_dp_aux_ch(encoder, msg, msg_bytes,
  633. reply, reply_bytes);
  634. if (ret == 0)
  635. return -EPROTO;
  636. if (ret < 0)
  637. return ret;
  638. ack = reply[0] >> 4;
  639. if ((ack & DP_AUX_NATIVE_REPLY_MASK) == DP_AUX_NATIVE_REPLY_ACK) {
  640. memcpy(recv, reply + 1, ret - 1);
  641. return ret - 1;
  642. }
  643. else if ((ack & DP_AUX_NATIVE_REPLY_MASK) == DP_AUX_NATIVE_REPLY_DEFER)
  644. udelay(100);
  645. else
  646. return -EIO;
  647. }
  648. }
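/*
 * aux_ch callback for the old i2c-over-AUX algorithm above: issues a single
 * byte I2C read or write as an I2C-over-AUX request (keeping MOT set for
 * everything except the STOP transaction) and retries when the sink answers
 * with a native or I2C DEFER reply.
 */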
  649. static int
  650. cdv_intel_dp_i2c_aux_ch(struct i2c_adapter *adapter, int mode,
  651. uint8_t write_byte, uint8_t *read_byte)
  652. {
  653. struct i2c_algo_dp_aux_data *algo_data = adapter->algo_data;
  654. struct cdv_intel_dp *intel_dp = container_of(adapter,
  655. struct cdv_intel_dp,
  656. adapter);
  657. struct gma_encoder *encoder = intel_dp->encoder;
  658. uint16_t address = algo_data->address;
  659. uint8_t msg[5];
  660. uint8_t reply[2];
  661. unsigned retry;
  662. int msg_bytes;
  663. int reply_bytes;
  664. int ret;
  665. /* Set up the command byte */
  666. if (mode & MODE_I2C_READ)
  667. msg[0] = DP_AUX_I2C_READ << 4;
  668. else
  669. msg[0] = DP_AUX_I2C_WRITE << 4;
  670. if (!(mode & MODE_I2C_STOP))
  671. msg[0] |= DP_AUX_I2C_MOT << 4;
  672. msg[1] = address >> 8;
  673. msg[2] = address;
  674. switch (mode) {
  675. case MODE_I2C_WRITE:
  676. msg[3] = 0;
  677. msg[4] = write_byte;
  678. msg_bytes = 5;
  679. reply_bytes = 1;
  680. break;
  681. case MODE_I2C_READ:
  682. msg[3] = 0;
  683. msg_bytes = 4;
  684. reply_bytes = 2;
  685. break;
  686. default:
  687. msg_bytes = 3;
  688. reply_bytes = 1;
  689. break;
  690. }
  691. for (retry = 0; retry < 5; retry++) {
  692. ret = cdv_intel_dp_aux_ch(encoder,
  693. msg, msg_bytes,
  694. reply, reply_bytes);
  695. if (ret < 0) {
  696. DRM_DEBUG_KMS("aux_ch failed %d\n", ret);
  697. return ret;
  698. }
  699. switch ((reply[0] >> 4) & DP_AUX_NATIVE_REPLY_MASK) {
  700. case DP_AUX_NATIVE_REPLY_ACK:
  701. /* I2C-over-AUX Reply field is only valid
  702. * when paired with AUX ACK.
  703. */
  704. break;
  705. case DP_AUX_NATIVE_REPLY_NACK:
  706. DRM_DEBUG_KMS("aux_ch native nack\n");
  707. return -EREMOTEIO;
  708. case DP_AUX_NATIVE_REPLY_DEFER:
  709. udelay(100);
  710. continue;
  711. default:
  712. DRM_ERROR("aux_ch invalid native reply 0x%02x\n",
  713. reply[0]);
  714. return -EREMOTEIO;
  715. }
  716. switch ((reply[0] >> 4) & DP_AUX_I2C_REPLY_MASK) {
  717. case DP_AUX_I2C_REPLY_ACK:
  718. if (mode == MODE_I2C_READ) {
  719. *read_byte = reply[1];
  720. }
  721. return reply_bytes - 1;
  722. case DP_AUX_I2C_REPLY_NACK:
  723. DRM_DEBUG_KMS("aux_i2c nack\n");
  724. return -EREMOTEIO;
  725. case DP_AUX_I2C_REPLY_DEFER:
  726. DRM_DEBUG_KMS("aux_i2c defer\n");
  727. udelay(100);
  728. break;
  729. default:
  730. DRM_ERROR("aux_i2c invalid reply 0x%02x\n", reply[0]);
  731. return -EREMOTEIO;
  732. }
  733. }
  734. DRM_ERROR("too many retries, giving up\n");
  735. return -EREMOTEIO;
  736. }
  737. static int
  738. cdv_intel_dp_i2c_init(struct gma_connector *connector,
  739. struct gma_encoder *encoder, const char *name)
  740. {
  741. struct cdv_intel_dp *intel_dp = encoder->dev_priv;
  742. int ret;
  743. DRM_DEBUG_KMS("i2c_init %s\n", name);
  744. intel_dp->algo.running = false;
  745. intel_dp->algo.address = 0;
  746. intel_dp->algo.aux_ch = cdv_intel_dp_i2c_aux_ch;
  747. memset(&intel_dp->adapter, '\0', sizeof (intel_dp->adapter));
  748. intel_dp->adapter.owner = THIS_MODULE;
  749. intel_dp->adapter.class = I2C_CLASS_DDC;
  750. strncpy (intel_dp->adapter.name, name, sizeof(intel_dp->adapter.name) - 1);
  751. intel_dp->adapter.name[sizeof(intel_dp->adapter.name) - 1] = '\0';
  752. intel_dp->adapter.algo_data = &intel_dp->algo;
  753. intel_dp->adapter.dev.parent = connector->base.kdev;
  754. if (is_edp(encoder))
  755. cdv_intel_edp_panel_vdd_on(encoder);
  756. ret = i2c_dp_aux_add_bus(&intel_dp->adapter);
  757. if (is_edp(encoder))
  758. cdv_intel_edp_panel_vdd_off(encoder);
  759. return ret;
  760. }
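/*
 * For eDP the panel's fixed mode always wins: copy its timings into the
 * adjusted mode so the pipe runs at the panel's native timings, leaving any
 * scaling to the panel fitter set up in cdv_intel_dp_mode_set().
 */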
  761. static void cdv_intel_fixed_panel_mode(struct drm_display_mode *fixed_mode,
  762. struct drm_display_mode *adjusted_mode)
  763. {
  764. adjusted_mode->hdisplay = fixed_mode->hdisplay;
  765. adjusted_mode->hsync_start = fixed_mode->hsync_start;
  766. adjusted_mode->hsync_end = fixed_mode->hsync_end;
  767. adjusted_mode->htotal = fixed_mode->htotal;
  768. adjusted_mode->vdisplay = fixed_mode->vdisplay;
  769. adjusted_mode->vsync_start = fixed_mode->vsync_start;
  770. adjusted_mode->vsync_end = fixed_mode->vsync_end;
  771. adjusted_mode->vtotal = fixed_mode->vtotal;
  772. adjusted_mode->clock = fixed_mode->clock;
  773. drm_mode_set_crtcinfo(adjusted_mode, CRTC_INTERLACE_HALVE_V);
  774. }
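/*
 * Pick a link configuration with enough bandwidth for the requested mode,
 * preferring fewer lanes (and, within a lane count, trying the higher link
 * rate first); for eDP, if nothing fits, force the maximum configuration
 * rather than rejecting the mode.
 */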
  775. static bool
  776. cdv_intel_dp_mode_fixup(struct drm_encoder *encoder, const struct drm_display_mode *mode,
  777. struct drm_display_mode *adjusted_mode)
  778. {
  779. struct drm_psb_private *dev_priv = encoder->dev->dev_private;
  780. struct gma_encoder *intel_encoder = to_gma_encoder(encoder);
  781. struct cdv_intel_dp *intel_dp = intel_encoder->dev_priv;
  782. int lane_count, clock;
  783. int max_lane_count = cdv_intel_dp_max_lane_count(intel_encoder);
  784. int max_clock = cdv_intel_dp_max_link_bw(intel_encoder) == DP_LINK_BW_2_7 ? 1 : 0;
  785. static int bws[2] = { DP_LINK_BW_1_62, DP_LINK_BW_2_7 };
  786. int refclock = mode->clock;
  787. int bpp = 24;
  788. if (is_edp(intel_encoder) && intel_dp->panel_fixed_mode) {
  789. cdv_intel_fixed_panel_mode(intel_dp->panel_fixed_mode, adjusted_mode);
  790. refclock = intel_dp->panel_fixed_mode->clock;
  791. bpp = dev_priv->edp.bpp;
  792. }
  793. for (lane_count = 1; lane_count <= max_lane_count; lane_count <<= 1) {
  794. for (clock = max_clock; clock >= 0; clock--) {
  795. int link_avail = cdv_intel_dp_max_data_rate(cdv_intel_dp_link_clock(bws[clock]), lane_count);
  796. if (cdv_intel_dp_link_required(refclock, bpp) <= link_avail) {
  797. intel_dp->link_bw = bws[clock];
  798. intel_dp->lane_count = lane_count;
  799. adjusted_mode->clock = cdv_intel_dp_link_clock(intel_dp->link_bw);
  800. DRM_DEBUG_KMS("Display port link bw %02x lane "
  801. "count %d clock %d\n",
  802. intel_dp->link_bw, intel_dp->lane_count,
  803. adjusted_mode->clock);
  804. return true;
  805. }
  806. }
  807. }
  808. if (is_edp(intel_encoder)) {
  809. /* okay we failed just pick the highest */
  810. intel_dp->lane_count = max_lane_count;
  811. intel_dp->link_bw = bws[max_clock];
  812. adjusted_mode->clock = cdv_intel_dp_link_clock(intel_dp->link_bw);
  813. DRM_DEBUG_KMS("Force picking display port link bw %02x lane "
  814. "count %d clock %d\n",
  815. intel_dp->link_bw, intel_dp->lane_count,
  816. adjusted_mode->clock);
  817. return true;
  818. }
  819. return false;
  820. }
  821. struct cdv_intel_dp_m_n {
  822. uint32_t tu;
  823. uint32_t gmch_m;
  824. uint32_t gmch_n;
  825. uint32_t link_m;
  826. uint32_t link_n;
  827. };
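/*
 * Rescale num/den so the denominator is always 0x800000 (2^23): the ratio is
 * preserved while both values stay within the 24-bit M/N register fields.
 */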
  828. static void
  829. cdv_intel_reduce_ratio(uint32_t *num, uint32_t *den)
  830. {
  831. /*
  832. while (*num > 0xffffff || *den > 0xffffff) {
  833. *num >>= 1;
  834. *den >>= 1;
  835. }*/
  836. uint64_t value, m;
  837. m = *num;
  838. value = m * (0x800000);
  839. m = do_div(value, *den);
  840. *num = value;
  841. *den = 0x800000;
  842. }
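/*
 * Derive the data (GMCH) and link M/N pairs: gmch_m is the pixel data rate in
 * bytes (pixel_clock * bpp / 8), gmch_n the total link byte rate
 * (link_clock * lane count), and link_m/link_n the pixel versus link clocks.
 * Illustrative numbers: a 148500 kHz pixel clock at 24 bpp gives
 * gmch_m = 445500 against gmch_n = 1080000 for four 2.7GHz lanes, a ratio of
 * 0.4125 that cdv_intel_reduce_ratio() rescales onto the fixed 2^23 denominator.
 */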
  843. static void
  844. cdv_intel_dp_compute_m_n(int bpp,
  845. int nlanes,
  846. int pixel_clock,
  847. int link_clock,
  848. struct cdv_intel_dp_m_n *m_n)
  849. {
  850. m_n->tu = 64;
  851. m_n->gmch_m = (pixel_clock * bpp + 7) >> 3;
  852. m_n->gmch_n = link_clock * nlanes;
  853. cdv_intel_reduce_ratio(&m_n->gmch_m, &m_n->gmch_n);
  854. m_n->link_m = pixel_clock;
  855. m_n->link_n = link_clock;
  856. cdv_intel_reduce_ratio(&m_n->link_m, &m_n->link_n);
  857. }
  858. void
  859. cdv_intel_dp_set_m_n(struct drm_crtc *crtc, struct drm_display_mode *mode,
  860. struct drm_display_mode *adjusted_mode)
  861. {
  862. struct drm_device *dev = crtc->dev;
  863. struct drm_psb_private *dev_priv = dev->dev_private;
  864. struct drm_mode_config *mode_config = &dev->mode_config;
  865. struct drm_encoder *encoder;
  866. struct gma_crtc *gma_crtc = to_gma_crtc(crtc);
  867. int lane_count = 4, bpp = 24;
  868. struct cdv_intel_dp_m_n m_n;
  869. int pipe = gma_crtc->pipe;
  870. /*
  871. * Find the lane count in the intel_encoder private
  872. */
  873. list_for_each_entry(encoder, &mode_config->encoder_list, head) {
  874. struct gma_encoder *intel_encoder;
  875. struct cdv_intel_dp *intel_dp;
  876. if (encoder->crtc != crtc)
  877. continue;
  878. intel_encoder = to_gma_encoder(encoder);
  879. intel_dp = intel_encoder->dev_priv;
  880. if (intel_encoder->type == INTEL_OUTPUT_DISPLAYPORT) {
  881. lane_count = intel_dp->lane_count;
  882. break;
  883. } else if (is_edp(intel_encoder)) {
  884. lane_count = intel_dp->lane_count;
  885. bpp = dev_priv->edp.bpp;
  886. break;
  887. }
  888. }
  889. /*
  890. * Compute the GMCH and Link ratios. bpp defaults to 24, i.e.
  891. * 8 bits each of R/G/B post-LUT (3 bytes per pixel), but may be
  892. * overridden above by the eDP panel's bpp.
  893. */
  894. cdv_intel_dp_compute_m_n(bpp, lane_count,
  895. mode->clock, adjusted_mode->clock, &m_n);
  896. {
  897. REG_WRITE(PIPE_GMCH_DATA_M(pipe),
  898. ((m_n.tu - 1) << PIPE_GMCH_DATA_M_TU_SIZE_SHIFT) |
  899. m_n.gmch_m);
  900. REG_WRITE(PIPE_GMCH_DATA_N(pipe), m_n.gmch_n);
  901. REG_WRITE(PIPE_DP_LINK_M(pipe), m_n.link_m);
  902. REG_WRITE(PIPE_DP_LINK_N(pipe), m_n.link_n);
  903. }
  904. }
  905. static void
  906. cdv_intel_dp_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode,
  907. struct drm_display_mode *adjusted_mode)
  908. {
  909. struct gma_encoder *intel_encoder = to_gma_encoder(encoder);
  910. struct drm_crtc *crtc = encoder->crtc;
  911. struct gma_crtc *gma_crtc = to_gma_crtc(crtc);
  912. struct cdv_intel_dp *intel_dp = intel_encoder->dev_priv;
  913. struct drm_device *dev = encoder->dev;
  914. intel_dp->DP = DP_VOLTAGE_0_4 | DP_PRE_EMPHASIS_0;
  915. intel_dp->DP |= intel_dp->color_range;
  916. if (adjusted_mode->flags & DRM_MODE_FLAG_PHSYNC)
  917. intel_dp->DP |= DP_SYNC_HS_HIGH;
  918. if (adjusted_mode->flags & DRM_MODE_FLAG_PVSYNC)
  919. intel_dp->DP |= DP_SYNC_VS_HIGH;
  920. intel_dp->DP |= DP_LINK_TRAIN_OFF;
  921. switch (intel_dp->lane_count) {
  922. case 1:
  923. intel_dp->DP |= DP_PORT_WIDTH_1;
  924. break;
  925. case 2:
  926. intel_dp->DP |= DP_PORT_WIDTH_2;
  927. break;
  928. case 4:
  929. intel_dp->DP |= DP_PORT_WIDTH_4;
  930. break;
  931. }
  932. if (intel_dp->has_audio)
  933. intel_dp->DP |= DP_AUDIO_OUTPUT_ENABLE;
  934. memset(intel_dp->link_configuration, 0, DP_LINK_CONFIGURATION_SIZE);
  935. intel_dp->link_configuration[0] = intel_dp->link_bw;
  936. intel_dp->link_configuration[1] = intel_dp->lane_count;
  937. /*
  938. * Check for DPCD version >= 1.1 and enhanced framing support
  939. */
  940. if (intel_dp->dpcd[DP_DPCD_REV] >= 0x11 &&
  941. (intel_dp->dpcd[DP_MAX_LANE_COUNT] & DP_ENHANCED_FRAME_CAP)) {
  942. intel_dp->link_configuration[1] |= DP_LANE_COUNT_ENHANCED_FRAME_EN;
  943. intel_dp->DP |= DP_ENHANCED_FRAMING;
  944. }
  945. /* CPT DP's pipe select is decided in TRANS_DP_CTL */
  946. if (gma_crtc->pipe == 1)
  947. intel_dp->DP |= DP_PIPEB_SELECT;
  948. REG_WRITE(intel_dp->output_reg, (intel_dp->DP | DP_PORT_EN));
  949. DRM_DEBUG_KMS("DP expected reg is %x\n", intel_dp->DP);
  950. if (is_edp(intel_encoder)) {
  951. uint32_t pfit_control;
  952. cdv_intel_edp_panel_on(intel_encoder);
  953. if (mode->hdisplay != adjusted_mode->hdisplay ||
  954. mode->vdisplay != adjusted_mode->vdisplay)
  955. pfit_control = PFIT_ENABLE;
  956. else
  957. pfit_control = 0;
  958. pfit_control |= gma_crtc->pipe << PFIT_PIPE_SHIFT;
  959. REG_WRITE(PFIT_CONTROL, pfit_control);
  960. }
  961. }
  962. /* If the sink supports it, try to set the power state appropriately */
  963. static void cdv_intel_dp_sink_dpms(struct gma_encoder *encoder, int mode)
  964. {
  965. struct cdv_intel_dp *intel_dp = encoder->dev_priv;
  966. int ret, i;
  967. /* Should have a valid DPCD by this point */
  968. if (intel_dp->dpcd[DP_DPCD_REV] < 0x11)
  969. return;
  970. if (mode != DRM_MODE_DPMS_ON) {
  971. ret = cdv_intel_dp_aux_native_write_1(encoder, DP_SET_POWER,
  972. DP_SET_POWER_D3);
  973. if (ret != 1)
  974. DRM_DEBUG_DRIVER("failed to write sink power state\n");
  975. } else {
  976. /*
  977. * When turning on, we need to retry for 1ms to give the sink
  978. * time to wake up.
  979. */
  980. for (i = 0; i < 3; i++) {
  981. ret = cdv_intel_dp_aux_native_write_1(encoder,
  982. DP_SET_POWER,
  983. DP_SET_POWER_D0);
  984. if (ret == 1)
  985. break;
  986. udelay(1000);
  987. }
  988. }
  989. }
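/*
 * Encoder helper prepare/commit hooks: prepare powers things down (backlight,
 * panel, link) ahead of a mode set, while commit retrains the link and turns
 * the panel and backlight back on afterwards.
 */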
  990. static void cdv_intel_dp_prepare(struct drm_encoder *encoder)
  991. {
  992. struct gma_encoder *intel_encoder = to_gma_encoder(encoder);
  993. int edp = is_edp(intel_encoder);
  994. if (edp) {
  995. cdv_intel_edp_backlight_off(intel_encoder);
  996. cdv_intel_edp_panel_off(intel_encoder);
  997. cdv_intel_edp_panel_vdd_on(intel_encoder);
  998. }
  999. /* Wake up the sink first */
  1000. cdv_intel_dp_sink_dpms(intel_encoder, DRM_MODE_DPMS_ON);
  1001. cdv_intel_dp_link_down(intel_encoder);
  1002. if (edp)
  1003. cdv_intel_edp_panel_vdd_off(intel_encoder);
  1004. }
  1005. static void cdv_intel_dp_commit(struct drm_encoder *encoder)
  1006. {
  1007. struct gma_encoder *intel_encoder = to_gma_encoder(encoder);
  1008. int edp = is_edp(intel_encoder);
  1009. if (edp)
  1010. cdv_intel_edp_panel_on(intel_encoder);
  1011. cdv_intel_dp_start_link_train(intel_encoder);
  1012. cdv_intel_dp_complete_link_train(intel_encoder);
  1013. if (edp)
  1014. cdv_intel_edp_backlight_on(intel_encoder);
  1015. }
  1016. static void
  1017. cdv_intel_dp_dpms(struct drm_encoder *encoder, int mode)
  1018. {
  1019. struct gma_encoder *intel_encoder = to_gma_encoder(encoder);
  1020. struct cdv_intel_dp *intel_dp = intel_encoder->dev_priv;
  1021. struct drm_device *dev = encoder->dev;
  1022. uint32_t dp_reg = REG_READ(intel_dp->output_reg);
  1023. int edp = is_edp(intel_encoder);
  1024. if (mode != DRM_MODE_DPMS_ON) {
  1025. if (edp) {
  1026. cdv_intel_edp_backlight_off(intel_encoder);
  1027. cdv_intel_edp_panel_vdd_on(intel_encoder);
  1028. }
  1029. cdv_intel_dp_sink_dpms(intel_encoder, mode);
  1030. cdv_intel_dp_link_down(intel_encoder);
  1031. if (edp) {
  1032. cdv_intel_edp_panel_vdd_off(intel_encoder);
  1033. cdv_intel_edp_panel_off(intel_encoder);
  1034. }
  1035. } else {
  1036. if (edp)
  1037. cdv_intel_edp_panel_on(intel_encoder);
  1038. cdv_intel_dp_sink_dpms(intel_encoder, mode);
  1039. if (!(dp_reg & DP_PORT_EN)) {
  1040. cdv_intel_dp_start_link_train(intel_encoder);
  1041. cdv_intel_dp_complete_link_train(intel_encoder);
  1042. }
  1043. if (edp)
  1044. cdv_intel_edp_backlight_on(intel_encoder);
  1045. }
  1046. }
  1047. /*
  1048. * Native read with retry for link status and receiver capability reads for
  1049. * cases where the sink may still be asleep.
  1050. */
  1051. static bool
  1052. cdv_intel_dp_aux_native_read_retry(struct gma_encoder *encoder, uint16_t address,
  1053. uint8_t *recv, int recv_bytes)
  1054. {
  1055. int ret, i;
  1056. /*
  1057. * Sinks are *supposed* to come up within 1ms from an off state,
  1058. * but we're also supposed to retry 3 times per the spec.
  1059. */
  1060. for (i = 0; i < 3; i++) {
  1061. ret = cdv_intel_dp_aux_native_read(encoder, address, recv,
  1062. recv_bytes);
  1063. if (ret == recv_bytes)
  1064. return true;
  1065. udelay(1000);
  1066. }
  1067. return false;
  1068. }
  1069. /*
  1070. * Fetch AUX CH registers 0x202 - 0x207 which contain
  1071. * link status information
  1072. */
  1073. static bool
  1074. cdv_intel_dp_get_link_status(struct gma_encoder *encoder)
  1075. {
  1076. struct cdv_intel_dp *intel_dp = encoder->dev_priv;
  1077. return cdv_intel_dp_aux_native_read_retry(encoder,
  1078. DP_LANE0_1_STATUS,
  1079. intel_dp->link_status,
  1080. DP_LINK_STATUS_SIZE);
  1081. }
  1082. static uint8_t
  1083. cdv_intel_dp_link_status(uint8_t link_status[DP_LINK_STATUS_SIZE],
  1084. int r)
  1085. {
  1086. return link_status[r - DP_LANE0_1_STATUS];
  1087. }
  1088. static uint8_t
  1089. cdv_intel_get_adjust_request_voltage(uint8_t link_status[DP_LINK_STATUS_SIZE],
  1090. int lane)
  1091. {
  1092. int i = DP_ADJUST_REQUEST_LANE0_1 + (lane >> 1);
  1093. int s = ((lane & 1) ?
  1094. DP_ADJUST_VOLTAGE_SWING_LANE1_SHIFT :
  1095. DP_ADJUST_VOLTAGE_SWING_LANE0_SHIFT);
  1096. uint8_t l = cdv_intel_dp_link_status(link_status, i);
  1097. return ((l >> s) & 3) << DP_TRAIN_VOLTAGE_SWING_SHIFT;
  1098. }
  1099. static uint8_t
  1100. cdv_intel_get_adjust_request_pre_emphasis(uint8_t link_status[DP_LINK_STATUS_SIZE],
  1101. int lane)
  1102. {
  1103. int i = DP_ADJUST_REQUEST_LANE0_1 + (lane >> 1);
  1104. int s = ((lane & 1) ?
  1105. DP_ADJUST_PRE_EMPHASIS_LANE1_SHIFT :
  1106. DP_ADJUST_PRE_EMPHASIS_LANE0_SHIFT);
  1107. uint8_t l = cdv_intel_dp_link_status(link_status, i);
  1108. return ((l >> s) & 3) << DP_TRAIN_PRE_EMPHASIS_SHIFT;
  1109. }
  1110. #if 0
  1111. static char *voltage_names[] = {
  1112. "0.4V", "0.6V", "0.8V", "1.2V"
  1113. };
  1114. static char *pre_emph_names[] = {
  1115. "0dB", "3.5dB", "6dB", "9.5dB"
  1116. };
  1117. static char *link_train_names[] = {
  1118. "pattern 1", "pattern 2", "idle", "off"
  1119. };
  1120. #endif
  1121. #define CDV_DP_VOLTAGE_MAX DP_TRAIN_VOLTAGE_SWING_LEVEL_3
  1122. /*
  1123. static uint8_t
  1124. cdv_intel_dp_pre_emphasis_max(uint8_t voltage_swing)
  1125. {
  1126. switch (voltage_swing & DP_TRAIN_VOLTAGE_SWING_MASK) {
  1127. case DP_TRAIN_VOLTAGE_SWING_400:
  1128. return DP_TRAIN_PRE_EMPHASIS_6;
  1129. case DP_TRAIN_VOLTAGE_SWING_600:
  1130. return DP_TRAIN_PRE_EMPHASIS_6;
  1131. case DP_TRAIN_VOLTAGE_SWING_800:
  1132. return DP_TRAIN_PRE_EMPHASIS_3_5;
  1133. case DP_TRAIN_VOLTAGE_SWING_1200:
  1134. default:
  1135. return DP_TRAIN_PRE_EMPHASIS_0;
  1136. }
  1137. }
  1138. */
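/*
 * Collapse the per-lane voltage swing / pre-emphasis requests from the link
 * status into a single worst-case value and apply it to every entry of
 * train_set.
 */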
  1139. static void
  1140. cdv_intel_get_adjust_train(struct gma_encoder *encoder)
  1141. {
  1142. struct cdv_intel_dp *intel_dp = encoder->dev_priv;
  1143. uint8_t v = 0;
  1144. uint8_t p = 0;
  1145. int lane;
  1146. for (lane = 0; lane < intel_dp->lane_count; lane++) {
  1147. uint8_t this_v = cdv_intel_get_adjust_request_voltage(intel_dp->link_status, lane);
  1148. uint8_t this_p = cdv_intel_get_adjust_request_pre_emphasis(intel_dp->link_status, lane);
  1149. if (this_v > v)
  1150. v = this_v;
  1151. if (this_p > p)
  1152. p = this_p;
  1153. }
  1154. if (v >= CDV_DP_VOLTAGE_MAX)
  1155. v = CDV_DP_VOLTAGE_MAX | DP_TRAIN_MAX_SWING_REACHED;
  1156. if (p == DP_TRAIN_PRE_EMPHASIS_MASK)
  1157. p |= DP_TRAIN_MAX_PRE_EMPHASIS_REACHED;
  1158. for (lane = 0; lane < 4; lane++)
  1159. intel_dp->train_set[lane] = v | p;
  1160. }
  1161. static uint8_t
  1162. cdv_intel_get_lane_status(uint8_t link_status[DP_LINK_STATUS_SIZE],
  1163. int lane)
  1164. {
  1165. int i = DP_LANE0_1_STATUS + (lane >> 1);
  1166. int s = (lane & 1) * 4;
  1167. uint8_t l = cdv_intel_dp_link_status(link_status, i);
  1168. return (l >> s) & 0xf;
  1169. }
  1170. /* Check for clock recovery is done on all channels */
  1171. static bool
  1172. cdv_intel_clock_recovery_ok(uint8_t link_status[DP_LINK_STATUS_SIZE], int lane_count)
  1173. {
  1174. int lane;
  1175. uint8_t lane_status;
  1176. for (lane = 0; lane < lane_count; lane++) {
  1177. lane_status = cdv_intel_get_lane_status(link_status, lane);
  1178. if ((lane_status & DP_LANE_CR_DONE) == 0)
  1179. return false;
  1180. }
  1181. return true;
  1182. }
  1183. /* Check to see if channel eq is done on all channels */
  1184. #define CHANNEL_EQ_BITS (DP_LANE_CR_DONE|\
  1185. DP_LANE_CHANNEL_EQ_DONE|\
  1186. DP_LANE_SYMBOL_LOCKED)
  1187. static bool
  1188. cdv_intel_channel_eq_ok(struct gma_encoder *encoder)
  1189. {
  1190. struct cdv_intel_dp *intel_dp = encoder->dev_priv;
  1191. uint8_t lane_align;
  1192. uint8_t lane_status;
  1193. int lane;
  1194. lane_align = cdv_intel_dp_link_status(intel_dp->link_status,
  1195. DP_LANE_ALIGN_STATUS_UPDATED);
  1196. if ((lane_align & DP_INTERLANE_ALIGN_DONE) == 0)
  1197. return false;
  1198. for (lane = 0; lane < intel_dp->lane_count; lane++) {
  1199. lane_status = cdv_intel_get_lane_status(intel_dp->link_status, lane);
  1200. if ((lane_status & CHANNEL_EQ_BITS) != CHANNEL_EQ_BITS)
  1201. return false;
  1202. }
  1203. return true;
  1204. }
  1205. static bool
  1206. cdv_intel_dp_set_link_train(struct gma_encoder *encoder,
  1207. uint32_t dp_reg_value,
  1208. uint8_t dp_train_pat)
  1209. {
  1210. struct drm_device *dev = encoder->base.dev;
  1211. int ret;
  1212. struct cdv_intel_dp *intel_dp = encoder->dev_priv;
  1213. REG_WRITE(intel_dp->output_reg, dp_reg_value);
  1214. REG_READ(intel_dp->output_reg);
  1215. ret = cdv_intel_dp_aux_native_write_1(encoder,
  1216. DP_TRAINING_PATTERN_SET,
  1217. dp_train_pat);
  1218. if (ret != 1) {
  1219. DRM_DEBUG_KMS("Failure in setting link pattern %x\n",
  1220. dp_train_pat);
  1221. return false;
  1222. }
  1223. return true;
  1224. }
  1225. static bool
  1226. cdv_intel_dplink_set_level(struct gma_encoder *encoder,
  1227. uint8_t dp_train_pat)
  1228. {
  1229. int ret;
  1230. struct cdv_intel_dp *intel_dp = encoder->dev_priv;
  1231. ret = cdv_intel_dp_aux_native_write(encoder,
  1232. DP_TRAINING_LANE0_SET,
  1233. intel_dp->train_set,
  1234. intel_dp->lane_count);
  1235. if (ret != intel_dp->lane_count) {
  1236. DRM_DEBUG_KMS("Failure in setting level %d, lane_cnt= %d\n",
  1237. intel_dp->train_set[0], intel_dp->lane_count);
  1238. return false;
  1239. }
  1240. return true;
  1241. }
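/*
 * Program the sideband PHY registers for the requested voltage swing and
 * pre-emphasis level. Note that with CDV_FAST_LINK_TRAIN defined (it is,
 * above) this returns early and keeps the power-on defaults.
 */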
  1242. static void
  1243. cdv_intel_dp_set_vswing_premph(struct gma_encoder *encoder, uint8_t signal_level)
  1244. {
  1245. struct drm_device *dev = encoder->base.dev;
  1246. struct cdv_intel_dp *intel_dp = encoder->dev_priv;
  1247. struct ddi_regoff *ddi_reg;
  1248. int vswing, premph, index;
  1249. if (intel_dp->output_reg == DP_B)
  1250. ddi_reg = &ddi_DP_train_table[0];
  1251. else
  1252. ddi_reg = &ddi_DP_train_table[1];
  1253. vswing = (signal_level & DP_TRAIN_VOLTAGE_SWING_MASK);
  1254. premph = ((signal_level & DP_TRAIN_PRE_EMPHASIS_MASK)) >>
  1255. DP_TRAIN_PRE_EMPHASIS_SHIFT;
  1256. if (vswing + premph > 3)
  1257. return;
  1258. #ifdef CDV_FAST_LINK_TRAIN
  1259. return;
  1260. #endif
  1261. DRM_DEBUG_KMS("Test2\n");
  1262. //return ;
  1263. cdv_sb_reset(dev);
  1264. /* ;Swing voltage programming
  1265. ;gfx_dpio_set_reg(0xc058, 0x0505313A) */
  1266. cdv_sb_write(dev, ddi_reg->VSwing5, 0x0505313A);
  1267. /* ;gfx_dpio_set_reg(0x8154, 0x43406055) */
  1268. cdv_sb_write(dev, ddi_reg->VSwing1, 0x43406055);
  1269. /* ;gfx_dpio_set_reg(0x8148, 0x55338954)
  1270. * The VSwing_PreEmph table is also considered based on the vswing/premph
  1271. */
  1272. index = (vswing + premph) * 2;
  1273. if (premph == 1 && vswing == 1) {
  1274. cdv_sb_write(dev, ddi_reg->VSwing2, 0x055738954);
  1275. } else
  1276. cdv_sb_write(dev, ddi_reg->VSwing2, dp_vswing_premph_table[index]);
  1277. /* ;gfx_dpio_set_reg(0x814c, 0x40802040) */
  1278. if ((vswing + premph) == DP_TRAIN_VOLTAGE_SWING_LEVEL_3)
  1279. cdv_sb_write(dev, ddi_reg->VSwing3, 0x70802040);
  1280. else
  1281. cdv_sb_write(dev, ddi_reg->VSwing3, 0x40802040);
  1282. /* ;gfx_dpio_set_reg(0x8150, 0x2b405555) */
  1283. /* cdv_sb_write(dev, ddi_reg->VSwing4, 0x2b405555); */
  1284. /* ;gfx_dpio_set_reg(0x8154, 0xc3406055) */
  1285. cdv_sb_write(dev, ddi_reg->VSwing1, 0xc3406055);
  1286. /* ;Pre emphasis programming
  1287. * ;gfx_dpio_set_reg(0xc02c, 0x1f030040)
  1288. */
  1289. cdv_sb_write(dev, ddi_reg->PreEmph1, 0x1f030040);
  1290. /* ;gfx_dpio_set_reg(0x8124, 0x00004000) */
  1291. index = 2 * premph + 1;
  1292. cdv_sb_write(dev, ddi_reg->PreEmph2, dp_vswing_premph_table[index]);
  1293. return;
  1294. }
  1295. /* Enable corresponding port and start training pattern 1 */
  1296. static void
  1297. cdv_intel_dp_start_link_train(struct gma_encoder *encoder)
  1298. {
  1299. struct drm_device *dev = encoder->base.dev;
  1300. struct cdv_intel_dp *intel_dp = encoder->dev_priv;
  1301. int i;
  1302. uint8_t voltage;
  1303. bool clock_recovery = false;
  1304. int tries;
  1305. u32 reg;
  1306. uint32_t DP = intel_dp->DP;
  1307. DP |= DP_PORT_EN;
  1308. DP &= ~DP_LINK_TRAIN_MASK;
  1309. reg = DP;
  1310. reg |= DP_LINK_TRAIN_PAT_1;
  1311. /* Enable output, wait for it to become active */
  1312. REG_WRITE(intel_dp->output_reg, reg);
  1313. REG_READ(intel_dp->output_reg);
  1314. gma_wait_for_vblank(dev);
  1315. DRM_DEBUG_KMS("Link config\n");
  1316. /* Write the link configuration data */
  1317. cdv_intel_dp_aux_native_write(encoder, DP_LINK_BW_SET,
  1318. intel_dp->link_configuration,
  1319. 2);
  1320. memset(intel_dp->train_set, 0, 4);
  1321. voltage = 0;
  1322. tries = 0;
  1323. clock_recovery = false;
  1324. DRM_DEBUG_KMS("Start train\n");
  1325. reg = DP | DP_LINK_TRAIN_PAT_1;
  1326. for (;;) {
  1327. /* Use intel_dp->train_set[0] to set the voltage and pre emphasis values */
  1328. DRM_DEBUG_KMS("DP Link Train Set %x, Link_config %x, %x\n",
  1329. intel_dp->train_set[0],
  1330. intel_dp->link_configuration[0],
  1331. intel_dp->link_configuration[1]);
  1332. if (!cdv_intel_dp_set_link_train(encoder, reg, DP_TRAINING_PATTERN_1)) {
  1333. DRM_DEBUG_KMS("Failure in aux-transfer setting pattern 1\n");
  1334. }
  1335. cdv_intel_dp_set_vswing_premph(encoder, intel_dp->train_set[0]);
  1336. /* Set training pattern 1 */
  1337. cdv_intel_dplink_set_level(encoder, DP_TRAINING_PATTERN_1);
  1338. udelay(200);
  1339. if (!cdv_intel_dp_get_link_status(encoder))
  1340. break;
  1341. DRM_DEBUG_KMS("DP Link status %x, %x, %x, %x, %x, %x\n",
  1342. intel_dp->link_status[0], intel_dp->link_status[1], intel_dp->link_status[2],
  1343. intel_dp->link_status[3], intel_dp->link_status[4], intel_dp->link_status[5]);
  1344. if (cdv_intel_clock_recovery_ok(intel_dp->link_status, intel_dp->lane_count)) {
  1345. DRM_DEBUG_KMS("PT1 train is done\n");
  1346. clock_recovery = true;
  1347. break;
  1348. }
  1349. /* Check to see if we've tried the max voltage */
  1350. for (i = 0; i < intel_dp->lane_count; i++)
  1351. if ((intel_dp->train_set[i] & DP_TRAIN_MAX_SWING_REACHED) == 0)
  1352. break;
  1353. if (i == intel_dp->lane_count)
  1354. break;
  1355. /* Check to see if we've tried the same voltage 5 times */
  1356. if ((intel_dp->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK) == voltage) {
  1357. ++tries;
  1358. if (tries == 5)
  1359. break;
  1360. } else
  1361. tries = 0;
  1362. voltage = intel_dp->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK;
  1363. /* Compute new intel_dp->train_set as requested by target */
  1364. cdv_intel_get_adjust_train(encoder);
  1365. }
  1366. if (!clock_recovery) {
  1367. DRM_DEBUG_KMS("failure in DP pattern 1 training, train set %x\n", intel_dp->train_set[0]);
  1368. }
  1369. intel_dp->DP = DP;
  1370. }
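/*
 * Channel equalization: transmit training pattern 2, updating drive levels as
 * the sink requests, restarting clock recovery if it is lost and giving up
 * after a handful of attempts; finally take the port and the sink out of
 * training.
 */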
static void
cdv_intel_dp_complete_link_train(struct gma_encoder *encoder)
{
	struct drm_device *dev = encoder->base.dev;
	struct cdv_intel_dp *intel_dp = encoder->dev_priv;
	bool channel_eq = false;
	int tries, cr_tries;
	u32 reg;
	uint32_t DP = intel_dp->DP;

	/* channel equalization */
	tries = 0;
	cr_tries = 0;
	channel_eq = false;

	DRM_DEBUG_KMS("\n");
	reg = DP | DP_LINK_TRAIN_PAT_2;

	for (;;) {
		DRM_DEBUG_KMS("DP Link Train Set %x, Link_config %x, %x\n",
			      intel_dp->train_set[0],
			      intel_dp->link_configuration[0],
			      intel_dp->link_configuration[1]);
		/* channel eq pattern */
		if (!cdv_intel_dp_set_link_train(encoder, reg, DP_TRAINING_PATTERN_2))
			DRM_DEBUG_KMS("Failure in aux-transfer setting pattern 2\n");

		/* Use intel_dp->train_set[0] to set the voltage and pre emphasis values */
		if (cr_tries > 5) {
			DRM_ERROR("failed to train DP, aborting\n");
			cdv_intel_dp_link_down(encoder);
			break;
		}

		cdv_intel_dp_set_vswing_premph(encoder, intel_dp->train_set[0]);
		cdv_intel_dplink_set_level(encoder, DP_TRAINING_PATTERN_2);

		udelay(1000);
		if (!cdv_intel_dp_get_link_status(encoder))
			break;

		DRM_DEBUG_KMS("DP Link status %x, %x, %x, %x, %x, %x\n",
			      intel_dp->link_status[0], intel_dp->link_status[1],
			      intel_dp->link_status[2], intel_dp->link_status[3],
			      intel_dp->link_status[4], intel_dp->link_status[5]);

		/* Make sure clock is still ok */
		if (!cdv_intel_clock_recovery_ok(intel_dp->link_status, intel_dp->lane_count)) {
			cdv_intel_dp_start_link_train(encoder);
			cr_tries++;
			continue;
		}

		if (cdv_intel_channel_eq_ok(encoder)) {
			DRM_DEBUG_KMS("PT2 train is done\n");
			channel_eq = true;
			break;
		}

		/* Try 5 times, then try clock recovery if that fails */
		if (tries > 5) {
			cdv_intel_dp_link_down(encoder);
			cdv_intel_dp_start_link_train(encoder);
			tries = 0;
			cr_tries++;
			continue;
		}

		/* Compute new intel_dp->train_set as requested by target */
		cdv_intel_get_adjust_train(encoder);
		++tries;
	}

	reg = DP | DP_LINK_TRAIN_OFF;

	REG_WRITE(intel_dp->output_reg, reg);
	REG_READ(intel_dp->output_reg);
	cdv_intel_dp_aux_native_write_1(encoder, DP_TRAINING_PATTERN_SET,
					DP_TRAINING_PATTERN_DISABLE);
}

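/*
 * Shut the link down: switch the port to the idle training pattern, wait
 * ~17 ms (about one frame), then clear DP_PORT_EN to disable the output.
 */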
static void
cdv_intel_dp_link_down(struct gma_encoder *encoder)
{
	struct drm_device *dev = encoder->base.dev;
	struct cdv_intel_dp *intel_dp = encoder->dev_priv;
	uint32_t DP = intel_dp->DP;

	if ((REG_READ(intel_dp->output_reg) & DP_PORT_EN) == 0)
		return;

	DRM_DEBUG_KMS("\n");

	DP &= ~DP_LINK_TRAIN_MASK;
	REG_WRITE(intel_dp->output_reg, DP | DP_LINK_TRAIN_PAT_IDLE);
	REG_READ(intel_dp->output_reg);

	msleep(17);

	REG_WRITE(intel_dp->output_reg, DP & ~DP_PORT_EN);
	REG_READ(intel_dp->output_reg);
}

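/*
 * Probe for a sink by reading the start of the DPCD (receiver capability
 * field) over the AUX channel; a non-zero DPCD revision means something is
 * connected.
 */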
static enum drm_connector_status cdv_dp_detect(struct gma_encoder *encoder)
{
	struct cdv_intel_dp *intel_dp = encoder->dev_priv;
	enum drm_connector_status status;

	status = connector_status_disconnected;
	if (cdv_intel_dp_aux_native_read(encoder, 0x000, intel_dp->dpcd,
					 sizeof(intel_dp->dpcd)) == sizeof(intel_dp->dpcd)) {
		if (intel_dp->dpcd[DP_DPCD_REV] != 0)
			status = connector_status_connected;
	}
	if (status == connector_status_connected)
		DRM_DEBUG_KMS("DPCD: Rev=%x LN_Rate=%x LN_CNT=%x LN_DOWNSP=%x\n",
			      intel_dp->dpcd[0], intel_dp->dpcd[1],
			      intel_dp->dpcd[2], intel_dp->dpcd[3]);
	return status;
}

/**
 * cdv_intel_dp_detect - detect whether a sink is attached to the DP/eDP port
 *
 * Reads the DPCD over the AUX channel (via cdv_dp_detect()) and, when a sink
 * is present, probes the EDID to decide whether it supports audio.
 *
 * Returns connector_status_connected if a sink is attached,
 * connector_status_disconnected otherwise.
 */
static enum drm_connector_status
cdv_intel_dp_detect(struct drm_connector *connector, bool force)
{
	struct gma_encoder *encoder = gma_attached_encoder(connector);
	struct cdv_intel_dp *intel_dp = encoder->dev_priv;
	enum drm_connector_status status;
	struct edid *edid = NULL;
	int edp = is_edp(encoder);

	intel_dp->has_audio = false;

	if (edp)
		cdv_intel_edp_panel_vdd_on(encoder);
	status = cdv_dp_detect(encoder);
	if (status != connector_status_connected) {
		if (edp)
			cdv_intel_edp_panel_vdd_off(encoder);
		return status;
	}

	if (intel_dp->force_audio) {
		intel_dp->has_audio = intel_dp->force_audio > 0;
	} else {
		edid = drm_get_edid(connector, &intel_dp->adapter);
		if (edid) {
			intel_dp->has_audio = drm_detect_monitor_audio(edid);
			kfree(edid);
		}
	}

	if (edp)
		cdv_intel_edp_panel_vdd_off(encoder);

	return connector_status_connected;
}

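/*
 * Build the connector's mode list from the sink's EDID. For eDP panels the
 * preferred EDID mode (or, failing that, the VBT LVDS mode) is cached as the
 * fixed panel mode and re-used when no EDID modes are available.
 */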
static int cdv_intel_dp_get_modes(struct drm_connector *connector)
{
	struct gma_encoder *intel_encoder = gma_attached_encoder(connector);
	struct cdv_intel_dp *intel_dp = intel_encoder->dev_priv;
	struct edid *edid = NULL;
	int ret = 0;
	int edp = is_edp(intel_encoder);

	edid = drm_get_edid(connector, &intel_dp->adapter);
	if (edid) {
		drm_mode_connector_update_edid_property(connector, edid);
		ret = drm_add_edid_modes(connector, edid);
		kfree(edid);
	}

	if (is_edp(intel_encoder)) {
		struct drm_device *dev = connector->dev;
		struct drm_psb_private *dev_priv = dev->dev_private;

		cdv_intel_edp_panel_vdd_off(intel_encoder);
		if (ret) {
			if (edp && !intel_dp->panel_fixed_mode) {
				struct drm_display_mode *newmode;

				list_for_each_entry(newmode, &connector->probed_modes,
						    head) {
					if (newmode->type & DRM_MODE_TYPE_PREFERRED) {
						intel_dp->panel_fixed_mode =
							drm_mode_duplicate(dev, newmode);
						break;
					}
				}
			}
			return ret;
		}

		if (!intel_dp->panel_fixed_mode && dev_priv->lfp_lvds_vbt_mode) {
			intel_dp->panel_fixed_mode =
				drm_mode_duplicate(dev, dev_priv->lfp_lvds_vbt_mode);
			if (intel_dp->panel_fixed_mode) {
				intel_dp->panel_fixed_mode->type |=
					DRM_MODE_TYPE_PREFERRED;
			}
		}
		if (intel_dp->panel_fixed_mode != NULL) {
			struct drm_display_mode *mode;

			mode = drm_mode_duplicate(dev, intel_dp->panel_fixed_mode);
			drm_mode_probed_add(connector, mode);
			return 1;
		}
	}

	return ret;
}

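/*
 * Re-probe the EDID to see whether the attached sink advertises audio
 * support; used when the force-audio property is set back to automatic
 * detection.
 */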
static bool
cdv_intel_dp_detect_audio(struct drm_connector *connector)
{
	struct gma_encoder *encoder = gma_attached_encoder(connector);
	struct cdv_intel_dp *intel_dp = encoder->dev_priv;
	struct edid *edid;
	bool has_audio = false;
	int edp = is_edp(encoder);

	if (edp)
		cdv_intel_edp_panel_vdd_on(encoder);

	edid = drm_get_edid(connector, &intel_dp->adapter);
	if (edid) {
		has_audio = drm_detect_monitor_audio(edid);
		kfree(edid);
	}
	if (edp)
		cdv_intel_edp_panel_vdd_off(encoder);

	return has_audio;
}

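/*
 * Handle the connector properties this driver exposes: force-audio and
 * broadcast-RGB (colour range). Any effective change triggers a modeset on
 * the attached CRTC so the new setting takes effect.
 */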
static int
cdv_intel_dp_set_property(struct drm_connector *connector,
			  struct drm_property *property,
			  uint64_t val)
{
	struct drm_psb_private *dev_priv = connector->dev->dev_private;
	struct gma_encoder *encoder = gma_attached_encoder(connector);
	struct cdv_intel_dp *intel_dp = encoder->dev_priv;
	int ret;

	ret = drm_object_property_set_value(&connector->base, property, val);
	if (ret)
		return ret;

	if (property == dev_priv->force_audio_property) {
		int i = val;
		bool has_audio;

		if (i == intel_dp->force_audio)
			return 0;

		intel_dp->force_audio = i;

		if (i == 0)
			has_audio = cdv_intel_dp_detect_audio(connector);
		else
			has_audio = i > 0;

		if (has_audio == intel_dp->has_audio)
			return 0;

		intel_dp->has_audio = has_audio;
		goto done;
	}

	if (property == dev_priv->broadcast_rgb_property) {
		if (val == !!intel_dp->color_range)
			return 0;

		intel_dp->color_range = val ? DP_COLOR_RANGE_16_235 : 0;
		goto done;
	}

	return -EINVAL;

done:
	if (encoder->base.crtc) {
		struct drm_crtc *crtc = encoder->base.crtc;

		drm_crtc_helper_set_mode(crtc, &crtc->mode,
					 crtc->x, crtc->y,
					 crtc->primary->fb);
	}

	return 0;
}

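/*
 * Connector teardown: free any cached eDP fixed mode, remove the DDC/AUX
 * i2c adapter and unregister the connector.
 */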
static void
cdv_intel_dp_destroy(struct drm_connector *connector)
{
	struct gma_encoder *gma_encoder = gma_attached_encoder(connector);
	struct cdv_intel_dp *intel_dp = gma_encoder->dev_priv;

	if (is_edp(gma_encoder)) {
		/* cdv_intel_panel_destroy_backlight(connector->dev); */
		if (intel_dp->panel_fixed_mode) {
			kfree(intel_dp->panel_fixed_mode);
			intel_dp->panel_fixed_mode = NULL;
		}
	}
	i2c_del_adapter(&intel_dp->adapter);
	drm_connector_unregister(connector);
	drm_connector_cleanup(connector);
	kfree(connector);
}

static void cdv_intel_dp_encoder_destroy(struct drm_encoder *encoder)
{
	drm_encoder_cleanup(encoder);
}

static const struct drm_encoder_helper_funcs cdv_intel_dp_helper_funcs = {
	.dpms = cdv_intel_dp_dpms,
	.mode_fixup = cdv_intel_dp_mode_fixup,
	.prepare = cdv_intel_dp_prepare,
	.mode_set = cdv_intel_dp_mode_set,
	.commit = cdv_intel_dp_commit,
};

static const struct drm_connector_funcs cdv_intel_dp_connector_funcs = {
	.dpms = drm_helper_connector_dpms,
	.detect = cdv_intel_dp_detect,
	.fill_modes = drm_helper_probe_single_connector_modes,
	.set_property = cdv_intel_dp_set_property,
	.destroy = cdv_intel_dp_destroy,
};

static const struct drm_connector_helper_funcs cdv_intel_dp_connector_helper_funcs = {
	.get_modes = cdv_intel_dp_get_modes,
	.mode_valid = cdv_intel_dp_mode_valid,
	.best_encoder = gma_best_encoder,
};

static const struct drm_encoder_funcs cdv_intel_dp_enc_funcs = {
	.destroy = cdv_intel_dp_encoder_destroy,
};

static void cdv_intel_dp_add_properties(struct drm_connector *connector)
{
	cdv_intel_attach_force_audio_property(connector);
	cdv_intel_attach_broadcast_rgb_property(connector);
}

/* check the VBT to see whether the eDP is on DP-D port */
static bool cdv_intel_dpc_is_edp(struct drm_device *dev)
{
	struct drm_psb_private *dev_priv = dev->dev_private;
	struct child_device_config *p_child;
	int i;

	if (!dev_priv->child_dev_num)
		return false;

	for (i = 0; i < dev_priv->child_dev_num; i++) {
		p_child = dev_priv->child_dev + i;

		if (p_child->dvo_port == PORT_IDPC &&
		    p_child->device_type == DEVICE_TYPE_eDP)
			return true;
	}
	return false;
}

/*
 * Cedarview display clock gating
 *
 * We need to disable this to get correct behaviour while enabling
 * DP/eDP. TODO - investigate whether gating can be turned back on
 * once the port is up.
 */
static void cdv_disable_intel_clock_gating(struct drm_device *dev)
{
	u32 reg_value;

	reg_value = REG_READ(DSPCLK_GATE_D);

	reg_value |= (DPUNIT_PIPEB_GATE_DISABLE |
		      DPUNIT_PIPEA_GATE_DISABLE |
		      DPCUNIT_CLOCK_GATE_DISABLE |
		      DPLSUNIT_CLOCK_GATE_DISABLE |
		      DPOUNIT_CLOCK_GATE_DISABLE |
		      DPIOUNIT_CLOCK_GATE_DISABLE);

	REG_WRITE(DSPCLK_GATE_D, reg_value);

	udelay(500);
}

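/*
 * cdv_intel_dp_init() creates and registers the encoder/connector pair for
 * one DP output, sets up the AUX-channel i2c adapter ("DPDDC-B"/"DPDDC-C"),
 * and, for eDP, reads the panel power sequencing delays and the sink's DPCD
 * before the port is first used.
 */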
void
cdv_intel_dp_init(struct drm_device *dev, struct psb_intel_mode_device *mode_dev, int output_reg)
{
	struct gma_encoder *gma_encoder;
	struct gma_connector *gma_connector;
	struct drm_connector *connector;
	struct drm_encoder *encoder;
	struct cdv_intel_dp *intel_dp;
	const char *name = NULL;
	int type = DRM_MODE_CONNECTOR_DisplayPort;

	gma_encoder = kzalloc(sizeof(struct gma_encoder), GFP_KERNEL);
	if (!gma_encoder)
		return;
	gma_connector = kzalloc(sizeof(struct gma_connector), GFP_KERNEL);
	if (!gma_connector)
		goto err_connector;
	intel_dp = kzalloc(sizeof(struct cdv_intel_dp), GFP_KERNEL);
	if (!intel_dp)
		goto err_priv;

	if ((output_reg == DP_C) && cdv_intel_dpc_is_edp(dev))
		type = DRM_MODE_CONNECTOR_eDP;

	connector = &gma_connector->base;
	encoder = &gma_encoder->base;

	drm_connector_init(dev, connector, &cdv_intel_dp_connector_funcs, type);
	drm_encoder_init(dev, encoder, &cdv_intel_dp_enc_funcs, DRM_MODE_ENCODER_TMDS);

	gma_connector_attach_encoder(gma_connector, gma_encoder);

	if (type == DRM_MODE_CONNECTOR_DisplayPort)
		gma_encoder->type = INTEL_OUTPUT_DISPLAYPORT;
	else
		gma_encoder->type = INTEL_OUTPUT_EDP;

	gma_encoder->dev_priv = intel_dp;
	intel_dp->encoder = gma_encoder;
	intel_dp->output_reg = output_reg;

	drm_encoder_helper_add(encoder, &cdv_intel_dp_helper_funcs);
	drm_connector_helper_add(connector, &cdv_intel_dp_connector_helper_funcs);

	connector->polled = DRM_CONNECTOR_POLL_HPD;
	connector->interlace_allowed = false;
	connector->doublescan_allowed = false;

	drm_connector_register(connector);

	/* Set up the DDC bus. */
	switch (output_reg) {
	case DP_B:
		name = "DPDDC-B";
		gma_encoder->ddi_select = (DP_MASK | DDI0_SELECT);
		break;
	case DP_C:
		name = "DPDDC-C";
		gma_encoder->ddi_select = (DP_MASK | DDI1_SELECT);
		break;
	}

	cdv_disable_intel_clock_gating(dev);

	cdv_intel_dp_i2c_init(gma_connector, gma_encoder, name);
	/* FIXME: check for failure */
	cdv_intel_dp_add_properties(connector);

	if (is_edp(gma_encoder)) {
		int ret;
		struct edp_power_seq cur;
		u32 pp_on, pp_off, pp_div;
		u32 pwm_ctrl;

		pp_on = REG_READ(PP_CONTROL);
		pp_on &= ~PANEL_UNLOCK_MASK;
		pp_on |= PANEL_UNLOCK_REGS;

		REG_WRITE(PP_CONTROL, pp_on);

		pwm_ctrl = REG_READ(BLC_PWM_CTL2);
		pwm_ctrl |= PWM_PIPE_B;
		REG_WRITE(BLC_PWM_CTL2, pwm_ctrl);

		pp_on = REG_READ(PP_ON_DELAYS);
		pp_off = REG_READ(PP_OFF_DELAYS);
		pp_div = REG_READ(PP_DIVISOR);

		/* Pull timing values out of registers */
		cur.t1_t3 = (pp_on & PANEL_POWER_UP_DELAY_MASK) >>
			PANEL_POWER_UP_DELAY_SHIFT;

		cur.t8 = (pp_on & PANEL_LIGHT_ON_DELAY_MASK) >>
			PANEL_LIGHT_ON_DELAY_SHIFT;

		cur.t9 = (pp_off & PANEL_LIGHT_OFF_DELAY_MASK) >>
			PANEL_LIGHT_OFF_DELAY_SHIFT;

		cur.t10 = (pp_off & PANEL_POWER_DOWN_DELAY_MASK) >>
			PANEL_POWER_DOWN_DELAY_SHIFT;

		cur.t11_t12 = ((pp_div & PANEL_POWER_CYCLE_DELAY_MASK) >>
			       PANEL_POWER_CYCLE_DELAY_SHIFT);

		DRM_DEBUG_KMS("cur t1_t3 %d t8 %d t9 %d t10 %d t11_t12 %d\n",
			      cur.t1_t3, cur.t8, cur.t9, cur.t10, cur.t11_t12);

		/* Convert the raw delays (the register fields appear to be in
		 * 100us units) into the millisecond values used elsewhere. */
		intel_dp->panel_power_up_delay = cur.t1_t3 / 10;
		intel_dp->backlight_on_delay = cur.t8 / 10;
		intel_dp->backlight_off_delay = cur.t9 / 10;
		intel_dp->panel_power_down_delay = cur.t10 / 10;
		intel_dp->panel_power_cycle_delay = (cur.t11_t12 - 1) * 100;

		DRM_DEBUG_KMS("panel power up delay %d, power down delay %d, power cycle delay %d\n",
			      intel_dp->panel_power_up_delay,
			      intel_dp->panel_power_down_delay,
			      intel_dp->panel_power_cycle_delay);
		DRM_DEBUG_KMS("backlight on delay %d, off delay %d\n",
			      intel_dp->backlight_on_delay,
			      intel_dp->backlight_off_delay);

		cdv_intel_edp_panel_vdd_on(gma_encoder);
		ret = cdv_intel_dp_aux_native_read(gma_encoder, DP_DPCD_REV,
						   intel_dp->dpcd,
						   sizeof(intel_dp->dpcd));
		cdv_intel_edp_panel_vdd_off(gma_encoder);
		if (ret == 0) {
			/* if this fails, presume the device is a ghost */
			DRM_INFO("failed to retrieve link info, disabling eDP\n");
			cdv_intel_dp_encoder_destroy(encoder);
			cdv_intel_dp_destroy(connector);
			goto err_priv;
		} else {
			DRM_DEBUG_KMS("DPCD: Rev=%x LN_Rate=%x LN_CNT=%x LN_DOWNSP=%x\n",
				      intel_dp->dpcd[0], intel_dp->dpcd[1],
				      intel_dp->dpcd[2], intel_dp->dpcd[3]);
		}
		/* The CDV reference driver moves panel backlight setup into the
		 * displays that have a backlight: this is a good idea and one we
		 * should probably adopt, however we need to migrate all the
		 * drivers before we can do that */
		/* cdv_intel_panel_setup_backlight(dev); */
	}

	return;

err_priv:
	kfree(gma_connector);
err_connector:
	kfree(gma_encoder);
}