exynos_dp_core.c

/*
 * Samsung SoC DP (Display Port) interface driver.
 *
 * Copyright (C) 2012 Samsung Electronics Co., Ltd.
 * Author: Jingoo Han <jg1.han@samsung.com>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the
 * Free Software Foundation; either version 2 of the License, or (at your
 * option) any later version.
 */

#include <linux/module.h>
#include <linux/platform_device.h>
#include <linux/err.h>
#include <linux/clk.h>
#include <linux/io.h>
#include <linux/interrupt.h>
#include <linux/of.h>
#include <linux/of_gpio.h>
#include <linux/of_graph.h>
#include <linux/gpio.h>
#include <linux/component.h>
#include <linux/phy/phy.h>
#include <video/of_display_timing.h>
#include <video/of_videomode.h>

#include <drm/drmP.h>
#include <drm/drm_crtc.h>
#include <drm/drm_crtc_helper.h>
#include <drm/drm_atomic_helper.h>
#include <drm/drm_panel.h>

#include "exynos_dp_core.h"
#include "exynos_drm_crtc.h"

#define ctx_from_connector(c)	container_of(c, struct exynos_dp_device, \
					connector)

static inline struct exynos_drm_crtc *dp_to_crtc(struct exynos_dp_device *dp)
{
	return to_exynos_crtc(dp->encoder.crtc);
}

static inline struct exynos_dp_device *encoder_to_dp(
						struct drm_encoder *e)
{
	return container_of(e, struct exynos_dp_device, encoder);
}

struct bridge_init {
	struct i2c_client *client;
	struct device_node *node;
};
static void exynos_dp_init_dp(struct exynos_dp_device *dp)
{
	exynos_dp_reset(dp);

	exynos_dp_swreset(dp);

	exynos_dp_init_analog_param(dp);
	exynos_dp_init_interrupt(dp);

	/* SW defined function Normal operation */
	exynos_dp_enable_sw_function(dp);

	exynos_dp_config_interrupt(dp);
	exynos_dp_init_analog_func(dp);

	exynos_dp_init_hpd(dp);
	exynos_dp_init_aux(dp);
}
static int exynos_dp_detect_hpd(struct exynos_dp_device *dp)
{
	int timeout_loop = 0;

	while (exynos_dp_get_plug_in_status(dp) != 0) {
		timeout_loop++;
		if (DP_TIMEOUT_LOOP_COUNT < timeout_loop) {
			dev_err(dp->dev, "failed to get hpd plug status\n");
			return -ETIMEDOUT;
		}
		usleep_range(10, 11);
	}

	return 0;
}
static unsigned char exynos_dp_calc_edid_check_sum(unsigned char *edid_data)
{
	int i;
	unsigned char sum = 0;

	for (i = 0; i < EDID_BLOCK_LENGTH; i++)
		sum = sum + edid_data[i];

	return sum;
}
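/*
 * Read the sink's EDID over the AUX channel (I2C-over-AUX at address 0x50),
 * verify the block checksums, and, when the DPCD TEST_REQUEST asks for it,
 * write the checksum back via TEST_EDID_CHECKSUM / TEST_RESPONSE.
 */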
static int exynos_dp_read_edid(struct exynos_dp_device *dp)
{
	unsigned char edid[EDID_BLOCK_LENGTH * 2];
	unsigned int extend_block = 0;
	unsigned char sum;
	unsigned char test_vector;
	int retval;

	/*
	 * The EDID device address is 0x50. If the upper extension
	 * blocks need to be read, the E-EDID segment pointer at I2C
	 * address 0x30 must be set first.
	 */

	/* Read Extension Flag, Number of 128-byte EDID extension blocks */
	retval = exynos_dp_read_byte_from_i2c(dp, I2C_EDID_DEVICE_ADDR,
				EDID_EXTENSION_FLAG,
				&extend_block);
	if (retval)
		return retval;

	if (extend_block > 0) {
		dev_dbg(dp->dev, "EDID data includes a single extension!\n");

		/* Read EDID data */
		retval = exynos_dp_read_bytes_from_i2c(dp, I2C_EDID_DEVICE_ADDR,
						EDID_HEADER_PATTERN,
						EDID_BLOCK_LENGTH,
						&edid[EDID_HEADER_PATTERN]);
		if (retval != 0) {
			dev_err(dp->dev, "EDID Read failed!\n");
			return -EIO;
		}
		sum = exynos_dp_calc_edid_check_sum(edid);
		if (sum != 0) {
			dev_err(dp->dev, "EDID bad checksum!\n");
			return -EIO;
		}

		/* Read additional EDID data */
		retval = exynos_dp_read_bytes_from_i2c(dp,
				I2C_EDID_DEVICE_ADDR,
				EDID_BLOCK_LENGTH,
				EDID_BLOCK_LENGTH,
				&edid[EDID_BLOCK_LENGTH]);
		if (retval != 0) {
			dev_err(dp->dev, "EDID Read failed!\n");
			return -EIO;
		}
		sum = exynos_dp_calc_edid_check_sum(&edid[EDID_BLOCK_LENGTH]);
		if (sum != 0) {
			dev_err(dp->dev, "EDID bad checksum!\n");
			return -EIO;
		}

		exynos_dp_read_byte_from_dpcd(dp, DP_TEST_REQUEST,
					&test_vector);
		if (test_vector & DP_TEST_LINK_EDID_READ) {
			exynos_dp_write_byte_to_dpcd(dp,
				DP_TEST_EDID_CHECKSUM,
				edid[EDID_BLOCK_LENGTH + EDID_CHECKSUM]);
			exynos_dp_write_byte_to_dpcd(dp,
				DP_TEST_RESPONSE,
				DP_TEST_EDID_CHECKSUM_WRITE);
		}
	} else {
		dev_info(dp->dev, "EDID data does not include any extensions.\n");

		/* Read EDID data */
		retval = exynos_dp_read_bytes_from_i2c(dp,
				I2C_EDID_DEVICE_ADDR,
				EDID_HEADER_PATTERN,
				EDID_BLOCK_LENGTH,
				&edid[EDID_HEADER_PATTERN]);
		if (retval != 0) {
			dev_err(dp->dev, "EDID Read failed!\n");
			return -EIO;
		}
		sum = exynos_dp_calc_edid_check_sum(edid);
		if (sum != 0) {
			dev_err(dp->dev, "EDID bad checksum!\n");
			return -EIO;
		}

		exynos_dp_read_byte_from_dpcd(dp,
			DP_TEST_REQUEST,
			&test_vector);
		if (test_vector & DP_TEST_LINK_EDID_READ) {
			exynos_dp_write_byte_to_dpcd(dp,
				DP_TEST_EDID_CHECKSUM,
				edid[EDID_CHECKSUM]);
			exynos_dp_write_byte_to_dpcd(dp,
				DP_TEST_RESPONSE,
				DP_TEST_EDID_CHECKSUM_WRITE);
		}
	}

	dev_dbg(dp->dev, "EDID Read success!\n");
	return 0;
}
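/*
 * Read the receiver capability registers (DPCD DP_DPCD_REV onwards) and then
 * retry the EDID read up to three times before giving up.
 */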
static int exynos_dp_handle_edid(struct exynos_dp_device *dp)
{
	u8 buf[12];
	int i;
	int retval;

	/* Read DPCD DP_DPCD_REV~RECEIVE_PORT1_CAP_1 */
	retval = exynos_dp_read_bytes_from_dpcd(dp, DP_DPCD_REV,
				12, buf);
	if (retval)
		return retval;

	/* Read EDID */
	for (i = 0; i < 3; i++) {
		retval = exynos_dp_read_edid(dp);
		if (!retval)
			break;
	}

	return retval;
}
static void exynos_dp_enable_rx_to_enhanced_mode(struct exynos_dp_device *dp,
						bool enable)
{
	u8 data;

	exynos_dp_read_byte_from_dpcd(dp, DP_LANE_COUNT_SET, &data);

	if (enable)
		exynos_dp_write_byte_to_dpcd(dp, DP_LANE_COUNT_SET,
			DP_LANE_COUNT_ENHANCED_FRAME_EN |
			DPCD_LANE_COUNT_SET(data));
	else
		exynos_dp_write_byte_to_dpcd(dp, DP_LANE_COUNT_SET,
			DPCD_LANE_COUNT_SET(data));
}

static int exynos_dp_is_enhanced_mode_available(struct exynos_dp_device *dp)
{
	u8 data;
	int retval;

	exynos_dp_read_byte_from_dpcd(dp, DP_MAX_LANE_COUNT, &data);
	retval = DPCD_ENHANCED_FRAME_CAP(data);

	return retval;
}

static void exynos_dp_set_enhanced_mode(struct exynos_dp_device *dp)
{
	u8 data;

	data = exynos_dp_is_enhanced_mode_available(dp);
	exynos_dp_enable_rx_to_enhanced_mode(dp, data);
	exynos_dp_enable_enhanced_mode(dp, data);
}

static void exynos_dp_training_pattern_dis(struct exynos_dp_device *dp)
{
	exynos_dp_set_training_pattern(dp, DP_NONE);

	exynos_dp_write_byte_to_dpcd(dp,
		DP_TRAINING_PATTERN_SET,
		DP_TRAINING_PATTERN_DISABLE);
}
static void exynos_dp_set_lane_lane_pre_emphasis(struct exynos_dp_device *dp,
					int pre_emphasis, int lane)
{
	switch (lane) {
	case 0:
		exynos_dp_set_lane0_pre_emphasis(dp, pre_emphasis);
		break;
	case 1:
		exynos_dp_set_lane1_pre_emphasis(dp, pre_emphasis);
		break;
	case 2:
		exynos_dp_set_lane2_pre_emphasis(dp, pre_emphasis);
		break;
	case 3:
		exynos_dp_set_lane3_pre_emphasis(dp, pre_emphasis);
		break;
	}
}
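/*
 * Start the clock-recovery phase of link training: program the link rate and
 * lane count on both ends, drive the lanes at minimum pre-emphasis, wait for
 * the PLL to lock, and switch the TX and RX to training pattern 1.
 */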
static int exynos_dp_link_start(struct exynos_dp_device *dp)
{
	u8 buf[4];
	int lane, lane_count, pll_tries, retval;

	lane_count = dp->link_train.lane_count;

	dp->link_train.lt_state = CLOCK_RECOVERY;
	dp->link_train.eq_loop = 0;

	for (lane = 0; lane < lane_count; lane++)
		dp->link_train.cr_loop[lane] = 0;

	/* Set link rate and count as you want to establish */
	exynos_dp_set_link_bandwidth(dp, dp->link_train.link_rate);
	exynos_dp_set_lane_count(dp, dp->link_train.lane_count);

	/* Setup RX configuration */
	buf[0] = dp->link_train.link_rate;
	buf[1] = dp->link_train.lane_count;
	retval = exynos_dp_write_bytes_to_dpcd(dp, DP_LINK_BW_SET,
				2, buf);
	if (retval)
		return retval;

	/* Set TX pre-emphasis to minimum */
	for (lane = 0; lane < lane_count; lane++)
		exynos_dp_set_lane_lane_pre_emphasis(dp,
			PRE_EMPHASIS_LEVEL_0, lane);

	/* Wait for PLL lock */
	pll_tries = 0;
	while (exynos_dp_get_pll_lock_status(dp) == PLL_UNLOCKED) {
		if (pll_tries == DP_TIMEOUT_LOOP_COUNT) {
			dev_err(dp->dev, "Wait for PLL lock timed out\n");
			return -ETIMEDOUT;
		}

		pll_tries++;
		usleep_range(90, 120);
	}

	/* Set training pattern 1 */
	exynos_dp_set_training_pattern(dp, TRAINING_PTN1);

	/* Set RX training pattern */
	retval = exynos_dp_write_byte_to_dpcd(dp,
			DP_TRAINING_PATTERN_SET,
			DP_LINK_SCRAMBLING_DISABLE | DP_TRAINING_PATTERN_1);
	if (retval)
		return retval;

	for (lane = 0; lane < lane_count; lane++)
		buf[lane] = DP_TRAIN_PRE_EMPH_LEVEL_0 |
			    DP_TRAIN_VOLTAGE_SWING_LEVEL_0;

	retval = exynos_dp_write_bytes_to_dpcd(dp, DP_TRAINING_LANE0_SET,
			lane_count, buf);

	return retval;
}
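/*
 * The DPCD lane-status and adjust-request registers pack two lanes per byte,
 * one nibble each; the helpers below extract the per-lane fields.
 */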
static unsigned char exynos_dp_get_lane_status(u8 link_status[2], int lane)
{
	int shift = (lane & 1) * 4;
	u8 link_value = link_status[lane>>1];

	return (link_value >> shift) & 0xf;
}

static int exynos_dp_clock_recovery_ok(u8 link_status[2], int lane_count)
{
	int lane;
	u8 lane_status;

	for (lane = 0; lane < lane_count; lane++) {
		lane_status = exynos_dp_get_lane_status(link_status, lane);
		if ((lane_status & DP_LANE_CR_DONE) == 0)
			return -EINVAL;
	}
	return 0;
}

static int exynos_dp_channel_eq_ok(u8 link_status[2], u8 link_align,
				int lane_count)
{
	int lane;
	u8 lane_status;

	if ((link_align & DP_INTERLANE_ALIGN_DONE) == 0)
		return -EINVAL;

	for (lane = 0; lane < lane_count; lane++) {
		lane_status = exynos_dp_get_lane_status(link_status, lane);
		lane_status &= DP_CHANNEL_EQ_BITS;
		if (lane_status != DP_CHANNEL_EQ_BITS)
			return -EINVAL;
	}

	return 0;
}

static unsigned char exynos_dp_get_adjust_request_voltage(u8 adjust_request[2],
							int lane)
{
	int shift = (lane & 1) * 4;
	u8 link_value = adjust_request[lane>>1];

	return (link_value >> shift) & 0x3;
}

static unsigned char exynos_dp_get_adjust_request_pre_emphasis(
					u8 adjust_request[2],
					int lane)
{
	int shift = (lane & 1) * 4;
	u8 link_value = adjust_request[lane>>1];

	return ((link_value >> shift) & 0xc) >> 2;
}
static void exynos_dp_set_lane_link_training(struct exynos_dp_device *dp,
					u8 training_lane_set, int lane)
{
	switch (lane) {
	case 0:
		exynos_dp_set_lane0_link_training(dp, training_lane_set);
		break;
	case 1:
		exynos_dp_set_lane1_link_training(dp, training_lane_set);
		break;
	case 2:
		exynos_dp_set_lane2_link_training(dp, training_lane_set);
		break;
	case 3:
		exynos_dp_set_lane3_link_training(dp, training_lane_set);
		break;
	}
}

static unsigned int exynos_dp_get_lane_link_training(
				struct exynos_dp_device *dp,
				int lane)
{
	u32 reg;

	switch (lane) {
	case 0:
		reg = exynos_dp_get_lane0_link_training(dp);
		break;
	case 1:
		reg = exynos_dp_get_lane1_link_training(dp);
		break;
	case 2:
		reg = exynos_dp_get_lane2_link_training(dp);
		break;
	case 3:
		reg = exynos_dp_get_lane3_link_training(dp);
		break;
	default:
		WARN_ON(1);
		return 0;
	}

	return reg;
}

static void exynos_dp_reduce_link_rate(struct exynos_dp_device *dp)
{
	exynos_dp_training_pattern_dis(dp);

	exynos_dp_set_enhanced_mode(dp);

	dp->link_train.lt_state = FAILED;
}
static void exynos_dp_get_adjust_training_lane(struct exynos_dp_device *dp,
					u8 adjust_request[2])
{
	int lane, lane_count;
	u8 voltage_swing, pre_emphasis, training_lane;

	lane_count = dp->link_train.lane_count;
	for (lane = 0; lane < lane_count; lane++) {
		voltage_swing = exynos_dp_get_adjust_request_voltage(
						adjust_request, lane);
		pre_emphasis = exynos_dp_get_adjust_request_pre_emphasis(
						adjust_request, lane);
		training_lane = DPCD_VOLTAGE_SWING_SET(voltage_swing) |
				DPCD_PRE_EMPHASIS_SET(pre_emphasis);

		if (voltage_swing == VOLTAGE_LEVEL_3)
			training_lane |= DP_TRAIN_MAX_SWING_REACHED;
		if (pre_emphasis == PRE_EMPHASIS_LEVEL_3)
			training_lane |= DP_TRAIN_MAX_PRE_EMPHASIS_REACHED;

		dp->link_train.training_lane[lane] = training_lane;
	}
}
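/*
 * One iteration of the clock-recovery phase: if every lane reports CR_DONE,
 * move on to channel equalization with training pattern 2; otherwise apply
 * the sink's requested voltage swing / pre-emphasis and try again, failing
 * the training once MAX_CR_LOOP or the maximum drive levels are reached.
 */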
static int exynos_dp_process_clock_recovery(struct exynos_dp_device *dp)
{
	int lane, lane_count, retval;
	u8 voltage_swing, pre_emphasis, training_lane;
	u8 link_status[2], adjust_request[2];

	usleep_range(100, 101);

	lane_count = dp->link_train.lane_count;

	retval = exynos_dp_read_bytes_from_dpcd(dp,
			DP_LANE0_1_STATUS, 2, link_status);
	if (retval)
		return retval;

	retval = exynos_dp_read_bytes_from_dpcd(dp,
			DP_ADJUST_REQUEST_LANE0_1, 2, adjust_request);
	if (retval)
		return retval;

	if (exynos_dp_clock_recovery_ok(link_status, lane_count) == 0) {
		/* set training pattern 2 for EQ */
		exynos_dp_set_training_pattern(dp, TRAINING_PTN2);

		retval = exynos_dp_write_byte_to_dpcd(dp,
				DP_TRAINING_PATTERN_SET,
				DP_LINK_SCRAMBLING_DISABLE |
				DP_TRAINING_PATTERN_2);
		if (retval)
			return retval;

		dev_info(dp->dev, "Link Training Clock Recovery success\n");
		dp->link_train.lt_state = EQUALIZER_TRAINING;
	} else {
		for (lane = 0; lane < lane_count; lane++) {
			training_lane = exynos_dp_get_lane_link_training(
							dp, lane);
			voltage_swing = exynos_dp_get_adjust_request_voltage(
							adjust_request, lane);
			pre_emphasis = exynos_dp_get_adjust_request_pre_emphasis(
							adjust_request, lane);

			if (DPCD_VOLTAGE_SWING_GET(training_lane) ==
					voltage_swing &&
			    DPCD_PRE_EMPHASIS_GET(training_lane) ==
					pre_emphasis)
				dp->link_train.cr_loop[lane]++;

			if (dp->link_train.cr_loop[lane] == MAX_CR_LOOP ||
			    voltage_swing == VOLTAGE_LEVEL_3 ||
			    pre_emphasis == PRE_EMPHASIS_LEVEL_3) {
				dev_err(dp->dev, "CR Max reached (%d,%d,%d)\n",
					dp->link_train.cr_loop[lane],
					voltage_swing, pre_emphasis);
				exynos_dp_reduce_link_rate(dp);
				return -EIO;
			}
		}
	}

	exynos_dp_get_adjust_training_lane(dp, adjust_request);

	for (lane = 0; lane < lane_count; lane++)
		exynos_dp_set_lane_link_training(dp,
			dp->link_train.training_lane[lane], lane);

	retval = exynos_dp_write_bytes_to_dpcd(dp,
			DP_TRAINING_LANE0_SET, lane_count,
			dp->link_train.training_lane);
	if (retval)
		return retval;

	return retval;
}
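/*
 * One iteration of the channel-equalization phase: fail the training if
 * clock recovery was lost, finish once EQ, symbol lock and inter-lane
 * alignment are all reported, and otherwise keep adjusting the drive
 * settings until MAX_EQ_LOOP is exceeded.
 */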
static int exynos_dp_process_equalizer_training(struct exynos_dp_device *dp)
{
	int lane, lane_count, retval;
	u32 reg;
	u8 link_align, link_status[2], adjust_request[2];

	usleep_range(400, 401);

	lane_count = dp->link_train.lane_count;

	retval = exynos_dp_read_bytes_from_dpcd(dp,
			DP_LANE0_1_STATUS, 2, link_status);
	if (retval)
		return retval;

	if (exynos_dp_clock_recovery_ok(link_status, lane_count)) {
		exynos_dp_reduce_link_rate(dp);
		return -EIO;
	}

	retval = exynos_dp_read_bytes_from_dpcd(dp,
			DP_ADJUST_REQUEST_LANE0_1, 2, adjust_request);
	if (retval)
		return retval;

	retval = exynos_dp_read_byte_from_dpcd(dp,
			DP_LANE_ALIGN_STATUS_UPDATED, &link_align);
	if (retval)
		return retval;

	exynos_dp_get_adjust_training_lane(dp, adjust_request);

	if (!exynos_dp_channel_eq_ok(link_status, link_align, lane_count)) {
		/* training pattern set to normal */
		exynos_dp_training_pattern_dis(dp);

		dev_info(dp->dev, "Link Training success!\n");

		exynos_dp_get_link_bandwidth(dp, &reg);
		dp->link_train.link_rate = reg;
		dev_dbg(dp->dev, "final bandwidth = %.2x\n",
			dp->link_train.link_rate);

		exynos_dp_get_lane_count(dp, &reg);
		dp->link_train.lane_count = reg;
		dev_dbg(dp->dev, "final lane count = %.2x\n",
			dp->link_train.lane_count);

		/* set enhanced mode if available */
		exynos_dp_set_enhanced_mode(dp);
		dp->link_train.lt_state = FINISHED;

		return 0;
	}

	/* not all locked */
	dp->link_train.eq_loop++;

	if (dp->link_train.eq_loop > MAX_EQ_LOOP) {
		dev_err(dp->dev, "EQ Max loop\n");
		exynos_dp_reduce_link_rate(dp);
		return -EIO;
	}

	for (lane = 0; lane < lane_count; lane++)
		exynos_dp_set_lane_link_training(dp,
			dp->link_train.training_lane[lane], lane);

	retval = exynos_dp_write_bytes_to_dpcd(dp, DP_TRAINING_LANE0_SET,
			lane_count, dp->link_train.training_lane);

	return retval;
}
static void exynos_dp_get_max_rx_bandwidth(struct exynos_dp_device *dp,
					u8 *bandwidth)
{
	u8 data;

	/*
	 * For DP rev.1.1, Maximum link rate of Main Link lanes
	 * 0x06 = 1.62 Gbps, 0x0a = 2.7 Gbps
	 */
	exynos_dp_read_byte_from_dpcd(dp, DP_MAX_LINK_RATE, &data);
	*bandwidth = data;
}

static void exynos_dp_get_max_rx_lane_count(struct exynos_dp_device *dp,
					u8 *lane_count)
{
	u8 data;

	/*
	 * For DP rev.1.1, Maximum number of Main Link lanes
	 * 0x01 = 1 lane, 0x02 = 2 lanes, 0x04 = 4 lanes
	 */
	exynos_dp_read_byte_from_dpcd(dp, DP_MAX_LANE_COUNT, &data);
	*lane_count = DPCD_MAX_LANE_COUNT(data);
}
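/*
 * Prepare for link training: reset the macro block, read the sink's maximum
 * link rate and lane count from the DPCD, clamp them to the values requested
 * by the platform, and power up the analog blocks.
 */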
static void exynos_dp_init_training(struct exynos_dp_device *dp,
			enum link_lane_count_type max_lane,
			enum link_rate_type max_rate)
{
	/*
	 * MACRO_RST must be applied after the PLL_LOCK to avoid
	 * the DP inter pair skew issue for at least 10 us
	 */
	exynos_dp_reset_macro(dp);

	/* Initialize by reading RX's DPCD */
	exynos_dp_get_max_rx_bandwidth(dp, &dp->link_train.link_rate);
	exynos_dp_get_max_rx_lane_count(dp, &dp->link_train.lane_count);

	if ((dp->link_train.link_rate != LINK_RATE_1_62GBPS) &&
	    (dp->link_train.link_rate != LINK_RATE_2_70GBPS)) {
		dev_err(dp->dev, "Rx Max Link Rate is abnormal :%x !\n",
			dp->link_train.link_rate);
		dp->link_train.link_rate = LINK_RATE_1_62GBPS;
	}

	if (dp->link_train.lane_count == 0) {
		dev_err(dp->dev, "Rx Max Lane count is abnormal :%x !\n",
			dp->link_train.lane_count);
		dp->link_train.lane_count = (u8)LANE_COUNT1;
	}

	/* Setup TX lane count & rate */
	if (dp->link_train.lane_count > max_lane)
		dp->link_train.lane_count = max_lane;
	if (dp->link_train.link_rate > max_rate)
		dp->link_train.link_rate = max_rate;

	/* All DP analog module power up */
	exynos_dp_set_analog_power_down(dp, POWER_ALL, 0);
}
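/*
 * Software link-training state machine: START -> CLOCK_RECOVERY ->
 * EQUALIZER_TRAINING -> FINISHED, with FAILED entered when either
 * training phase gives up.
 */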
static int exynos_dp_sw_link_training(struct exynos_dp_device *dp)
{
	int retval = 0, training_finished = 0;

	dp->link_train.lt_state = START;

	/* Process here */
	while (!retval && !training_finished) {
		switch (dp->link_train.lt_state) {
		case START:
			retval = exynos_dp_link_start(dp);
			if (retval)
				dev_err(dp->dev, "LT link start failed!\n");
			break;
		case CLOCK_RECOVERY:
			retval = exynos_dp_process_clock_recovery(dp);
			if (retval)
				dev_err(dp->dev, "LT CR failed!\n");
			break;
		case EQUALIZER_TRAINING:
			retval = exynos_dp_process_equalizer_training(dp);
			if (retval)
				dev_err(dp->dev, "LT EQ failed!\n");
			break;
		case FINISHED:
			training_finished = 1;
			break;
		case FAILED:
			return -EREMOTEIO;
		}
	}
	if (retval)
		dev_err(dp->dev, "eDP link training failed (%d)\n", retval);

	return retval;
}
static int exynos_dp_set_link_train(struct exynos_dp_device *dp,
				u32 count,
				u32 bwtype)
{
	int i;
	int retval;

	for (i = 0; i < DP_TIMEOUT_LOOP_COUNT; i++) {
		exynos_dp_init_training(dp, count, bwtype);
		retval = exynos_dp_sw_link_training(dp);
		if (retval == 0)
			break;

		usleep_range(100, 110);
	}

	return retval;
}
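/*
 * Configure the main video stream: select slave mode and the color format,
 * wait for the PLL and the incoming stream clock, use the hardware-calculated
 * M/N values, un-mute video and then poll until a stable video stream is
 * detected.
 */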
static int exynos_dp_config_video(struct exynos_dp_device *dp)
{
	int retval = 0;
	int timeout_loop = 0;
	int done_count = 0;

	exynos_dp_config_video_slave_mode(dp);

	exynos_dp_set_video_color_format(dp);

	if (exynos_dp_get_pll_lock_status(dp) == PLL_UNLOCKED) {
		dev_err(dp->dev, "PLL is not locked yet.\n");
		return -EINVAL;
	}

	for (;;) {
		timeout_loop++;
		if (exynos_dp_is_slave_video_stream_clock_on(dp) == 0)
			break;
		if (DP_TIMEOUT_LOOP_COUNT < timeout_loop) {
			dev_err(dp->dev, "Timeout of video streamclk ok\n");
			return -ETIMEDOUT;
		}

		usleep_range(1, 2);
	}

	/* Set to use the register calculated M/N video */
	exynos_dp_set_video_cr_mn(dp, CALCULATED_M, 0, 0);

	/* For video bist, Video timing must be generated by register */
	exynos_dp_set_video_timing_mode(dp, VIDEO_TIMING_FROM_CAPTURE);

	/* Disable video mute */
	exynos_dp_enable_video_mute(dp, 0);

	/* Configure video slave mode */
	exynos_dp_enable_video_master(dp, 0);

	timeout_loop = 0;
	for (;;) {
		timeout_loop++;
		if (exynos_dp_is_video_stream_on(dp) == 0) {
			done_count++;
			if (done_count > 10)
				break;
		} else if (done_count) {
			done_count = 0;
		}
		if (DP_TIMEOUT_LOOP_COUNT < timeout_loop) {
			dev_err(dp->dev, "Timeout of video streamclk ok\n");
			return -ETIMEDOUT;
		}

		usleep_range(1000, 1001);
	}

	if (retval != 0)
		dev_err(dp->dev, "Video stream is not detected!\n");

	return retval;
}
static void exynos_dp_enable_scramble(struct exynos_dp_device *dp, bool enable)
{
	u8 data;

	if (enable) {
		exynos_dp_enable_scrambling(dp);

		exynos_dp_read_byte_from_dpcd(dp,
			DP_TRAINING_PATTERN_SET,
			&data);
		exynos_dp_write_byte_to_dpcd(dp,
			DP_TRAINING_PATTERN_SET,
			(u8)(data & ~DP_LINK_SCRAMBLING_DISABLE));
	} else {
		exynos_dp_disable_scrambling(dp);

		exynos_dp_read_byte_from_dpcd(dp,
			DP_TRAINING_PATTERN_SET,
			&data);
		exynos_dp_write_byte_to_dpcd(dp,
			DP_TRAINING_PATTERN_SET,
			(u8)(data | DP_LINK_SCRAMBLING_DISABLE));
	}
}
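/*
 * Hotplug interrupt handler: cable-in events are deferred to the hotplug
 * work so that drm_helper_hpd_irq_event() runs in process context; the
 * other interrupt types are simply acknowledged.
 */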
static irqreturn_t exynos_dp_irq_handler(int irq, void *arg)
{
	struct exynos_dp_device *dp = arg;

	enum dp_irq_type irq_type;

	irq_type = exynos_dp_get_irq_type(dp);
	switch (irq_type) {
	case DP_IRQ_TYPE_HP_CABLE_IN:
		dev_dbg(dp->dev, "Received irq - cable in\n");
		schedule_work(&dp->hotplug_work);
		exynos_dp_clear_hotplug_interrupts(dp);
		break;
	case DP_IRQ_TYPE_HP_CABLE_OUT:
		dev_dbg(dp->dev, "Received irq - cable out\n");
		exynos_dp_clear_hotplug_interrupts(dp);
		break;
	case DP_IRQ_TYPE_HP_CHANGE:
		/*
		 * We get these change notifications once in a while, but there
		 * is nothing we can do with them. Just ignore it for now and
		 * only handle cable changes.
		 */
		dev_dbg(dp->dev, "Received irq - hotplug change; ignoring.\n");
		exynos_dp_clear_hotplug_interrupts(dp);
		break;
	default:
		dev_err(dp->dev, "Received irq - unknown type!\n");
		break;
	}
	return IRQ_HANDLED;
}

static void exynos_dp_hotplug(struct work_struct *work)
{
	struct exynos_dp_device *dp;

	dp = container_of(work, struct exynos_dp_device, hotplug_work);

	if (dp->drm_dev)
		drm_helper_hpd_irq_event(dp->drm_dev);
}
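/*
 * Bring the link up end to end: with the panel kept off, wait for hotplug,
 * read the EDID, train the link, re-apply the requested lane count and
 * bandwidth, configure the video stream, and only then enable the panel and
 * start video transmission.
 */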
static void exynos_dp_commit(struct drm_encoder *encoder)
{
	struct exynos_dp_device *dp = encoder_to_dp(encoder);
	int ret;

	/* Keep the panel disabled while we configure video */
	if (dp->panel) {
		if (drm_panel_disable(dp->panel))
			DRM_ERROR("failed to disable the panel\n");
	}

	ret = exynos_dp_detect_hpd(dp);
	if (ret) {
		/* Cable has been disconnected, we're done */
		return;
	}

	ret = exynos_dp_handle_edid(dp);
	if (ret) {
		dev_err(dp->dev, "unable to handle edid\n");
		return;
	}

	ret = exynos_dp_set_link_train(dp, dp->video_info->lane_count,
				dp->video_info->link_rate);
	if (ret) {
		dev_err(dp->dev, "unable to do link train\n");
		return;
	}

	exynos_dp_enable_scramble(dp, 1);
	exynos_dp_enable_rx_to_enhanced_mode(dp, 1);
	exynos_dp_enable_enhanced_mode(dp, 1);

	exynos_dp_set_lane_count(dp, dp->video_info->lane_count);
	exynos_dp_set_link_bandwidth(dp, dp->video_info->link_rate);

	exynos_dp_init_video(dp);
	ret = exynos_dp_config_video(dp);
	if (ret)
		dev_err(dp->dev, "unable to config video\n");

	/* Safe to enable the panel now */
	if (dp->panel) {
		if (drm_panel_enable(dp->panel))
			DRM_ERROR("failed to enable the panel\n");
	}

	/* Enable video */
	exynos_dp_start_video(dp);
}
static enum drm_connector_status exynos_dp_detect(
				struct drm_connector *connector, bool force)
{
	return connector_status_connected;
}

static void exynos_dp_connector_destroy(struct drm_connector *connector)
{
	drm_connector_unregister(connector);
	drm_connector_cleanup(connector);
}

static struct drm_connector_funcs exynos_dp_connector_funcs = {
	.dpms = drm_atomic_helper_connector_dpms,
	.fill_modes = drm_helper_probe_single_connector_modes,
	.detect = exynos_dp_detect,
	.destroy = exynos_dp_connector_destroy,
	.reset = drm_atomic_helper_connector_reset,
	.atomic_duplicate_state = drm_atomic_helper_connector_duplicate_state,
	.atomic_destroy_state = drm_atomic_helper_connector_destroy_state,
};

static int exynos_dp_get_modes(struct drm_connector *connector)
{
	struct exynos_dp_device *dp = ctx_from_connector(connector);
	struct drm_display_mode *mode;

	if (dp->panel)
		return drm_panel_get_modes(dp->panel);

	mode = drm_mode_create(connector->dev);
	if (!mode) {
		DRM_ERROR("failed to create a new display mode.\n");
		return 0;
	}

	drm_display_mode_from_videomode(&dp->priv.vm, mode);
	mode->width_mm = dp->priv.width_mm;
	mode->height_mm = dp->priv.height_mm;
	connector->display_info.width_mm = mode->width_mm;
	connector->display_info.height_mm = mode->height_mm;

	mode->type = DRM_MODE_TYPE_DRIVER | DRM_MODE_TYPE_PREFERRED;
	drm_mode_set_name(mode);
	drm_mode_probed_add(connector, mode);

	return 1;
}

static struct drm_encoder *exynos_dp_best_encoder(
			struct drm_connector *connector)
{
	struct exynos_dp_device *dp = ctx_from_connector(connector);

	return &dp->encoder;
}

static struct drm_connector_helper_funcs exynos_dp_connector_helper_funcs = {
	.get_modes = exynos_dp_get_modes,
	.best_encoder = exynos_dp_best_encoder,
};
/* Attach the DRM bridge to the encoder; returns 0 on success or a negative error code. */
static int exynos_drm_attach_lcd_bridge(struct exynos_dp_device *dp,
		struct drm_encoder *encoder)
{
	int ret;

	encoder->bridge = dp->bridge;
	dp->bridge->encoder = encoder;
	ret = drm_bridge_attach(encoder->dev, dp->bridge);
	if (ret) {
		DRM_ERROR("Failed to attach bridge to drm\n");
		return ret;
	}

	return 0;
}

static int exynos_dp_create_connector(struct drm_encoder *encoder)
{
	struct exynos_dp_device *dp = encoder_to_dp(encoder);
	struct drm_connector *connector = &dp->connector;
	int ret;

	/* Pre-empt DP connector creation if there's a bridge */
	if (dp->bridge) {
		ret = exynos_drm_attach_lcd_bridge(dp, encoder);
		if (!ret)
			return 0;
	}

	connector->polled = DRM_CONNECTOR_POLL_HPD;

	ret = drm_connector_init(dp->drm_dev, connector,
			&exynos_dp_connector_funcs, DRM_MODE_CONNECTOR_eDP);
	if (ret) {
		DRM_ERROR("Failed to initialize connector with drm\n");
		return ret;
	}

	drm_connector_helper_add(connector, &exynos_dp_connector_helper_funcs);
	drm_connector_register(connector);
	drm_mode_connector_attach_encoder(connector, encoder);

	if (dp->panel)
		ret = drm_panel_attach(dp->panel, &dp->connector);

	return ret;
}
static bool exynos_dp_mode_fixup(struct drm_encoder *encoder,
				 const struct drm_display_mode *mode,
				 struct drm_display_mode *adjusted_mode)
{
	return true;
}

static void exynos_dp_mode_set(struct drm_encoder *encoder,
			       struct drm_display_mode *mode,
			       struct drm_display_mode *adjusted_mode)
{
}
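/*
 * Encoder enable/disable: the power sequence is panel prepare -> CRTC clock
 * -> DP clock -> PHY -> controller init -> IRQ -> link and video bring-up,
 * and the reverse order on disable.
 */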
static void exynos_dp_enable(struct drm_encoder *encoder)
{
	struct exynos_dp_device *dp = encoder_to_dp(encoder);
	struct exynos_drm_crtc *crtc = dp_to_crtc(dp);

	if (dp->dpms_mode == DRM_MODE_DPMS_ON)
		return;

	if (dp->panel) {
		if (drm_panel_prepare(dp->panel)) {
			DRM_ERROR("failed to setup the panel\n");
			return;
		}
	}

	if (crtc->ops->clock_enable)
		crtc->ops->clock_enable(dp_to_crtc(dp), true);

	clk_prepare_enable(dp->clock);
	phy_power_on(dp->phy);
	exynos_dp_init_dp(dp);
	enable_irq(dp->irq);
	exynos_dp_commit(&dp->encoder);

	dp->dpms_mode = DRM_MODE_DPMS_ON;
}

static void exynos_dp_disable(struct drm_encoder *encoder)
{
	struct exynos_dp_device *dp = encoder_to_dp(encoder);
	struct exynos_drm_crtc *crtc = dp_to_crtc(dp);

	if (dp->dpms_mode != DRM_MODE_DPMS_ON)
		return;

	if (dp->panel) {
		if (drm_panel_disable(dp->panel)) {
			DRM_ERROR("failed to disable the panel\n");
			return;
		}
	}

	disable_irq(dp->irq);
	flush_work(&dp->hotplug_work);
	phy_power_off(dp->phy);
	clk_disable_unprepare(dp->clock);

	if (crtc->ops->clock_enable)
		crtc->ops->clock_enable(dp_to_crtc(dp), false);

	if (dp->panel) {
		if (drm_panel_unprepare(dp->panel))
			DRM_ERROR("failed to turn off the panel\n");
	}

	dp->dpms_mode = DRM_MODE_DPMS_OFF;
}

static struct drm_encoder_helper_funcs exynos_dp_encoder_helper_funcs = {
	.mode_fixup = exynos_dp_mode_fixup,
	.mode_set = exynos_dp_mode_set,
	.enable = exynos_dp_enable,
	.disable = exynos_dp_disable,
};

static struct drm_encoder_funcs exynos_dp_encoder_funcs = {
	.destroy = drm_encoder_cleanup,
};
static struct video_info *exynos_dp_dt_parse_pdata(struct device *dev)
{
	struct device_node *dp_node = dev->of_node;
	struct video_info *dp_video_config;

	dp_video_config = devm_kzalloc(dev,
				sizeof(*dp_video_config), GFP_KERNEL);
	if (!dp_video_config)
		return ERR_PTR(-ENOMEM);

	dp_video_config->h_sync_polarity =
		of_property_read_bool(dp_node, "hsync-active-high");

	dp_video_config->v_sync_polarity =
		of_property_read_bool(dp_node, "vsync-active-high");

	dp_video_config->interlaced =
		of_property_read_bool(dp_node, "interlaced");

	if (of_property_read_u32(dp_node, "samsung,color-space",
				&dp_video_config->color_space)) {
		dev_err(dev, "failed to get color-space\n");
		return ERR_PTR(-EINVAL);
	}

	if (of_property_read_u32(dp_node, "samsung,dynamic-range",
				&dp_video_config->dynamic_range)) {
		dev_err(dev, "failed to get dynamic-range\n");
		return ERR_PTR(-EINVAL);
	}

	if (of_property_read_u32(dp_node, "samsung,ycbcr-coeff",
				&dp_video_config->ycbcr_coeff)) {
		dev_err(dev, "failed to get ycbcr-coeff\n");
		return ERR_PTR(-EINVAL);
	}

	if (of_property_read_u32(dp_node, "samsung,color-depth",
				&dp_video_config->color_depth)) {
		dev_err(dev, "failed to get color-depth\n");
		return ERR_PTR(-EINVAL);
	}

	if (of_property_read_u32(dp_node, "samsung,link-rate",
				&dp_video_config->link_rate)) {
		dev_err(dev, "failed to get link-rate\n");
		return ERR_PTR(-EINVAL);
	}

	if (of_property_read_u32(dp_node, "samsung,lane-count",
				&dp_video_config->lane_count)) {
		dev_err(dev, "failed to get lane-count\n");
		return ERR_PTR(-EINVAL);
	}

	return dp_video_config;
}
static int exynos_dp_dt_parse_panel(struct exynos_dp_device *dp)
{
	int ret;

	ret = of_get_videomode(dp->dev->of_node, &dp->priv.vm,
			OF_USE_NATIVE_MODE);
	if (ret) {
		DRM_ERROR("failed: of_get_videomode() : %d\n", ret);
		return ret;
	}
	return 0;
}
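/*
 * Component bind callback: parse the DT configuration, acquire the PHY,
 * clock, register space and hotplug IRQ (GPIO-based when "samsung,hpd-gpio"
 * is present), then register the encoder and its connector with the DRM core.
 */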
static int exynos_dp_bind(struct device *dev, struct device *master, void *data)
{
	struct exynos_dp_device *dp = dev_get_drvdata(dev);
	struct platform_device *pdev = to_platform_device(dev);
	struct drm_device *drm_dev = data;
	struct drm_encoder *encoder = &dp->encoder;
	struct resource *res;
	unsigned int irq_flags;
	int pipe, ret = 0;

	dp->dev = &pdev->dev;
	dp->dpms_mode = DRM_MODE_DPMS_OFF;

	dp->video_info = exynos_dp_dt_parse_pdata(&pdev->dev);
	if (IS_ERR(dp->video_info))
		return PTR_ERR(dp->video_info);

	dp->phy = devm_phy_get(dp->dev, "dp");
	if (IS_ERR(dp->phy)) {
		dev_err(dp->dev, "no DP phy configured\n");
		ret = PTR_ERR(dp->phy);
		if (ret) {
			/*
			 * phy itself is not enabled, so we can move forward
			 * assigning NULL to phy pointer.
			 */
			if (ret == -ENOSYS || ret == -ENODEV)
				dp->phy = NULL;
			else
				return ret;
		}
	}

	if (!dp->panel && !dp->bridge) {
		ret = exynos_dp_dt_parse_panel(dp);
		if (ret)
			return ret;
	}

	dp->clock = devm_clk_get(&pdev->dev, "dp");
	if (IS_ERR(dp->clock)) {
		dev_err(&pdev->dev, "failed to get clock\n");
		return PTR_ERR(dp->clock);
	}

	clk_prepare_enable(dp->clock);

	res = platform_get_resource(pdev, IORESOURCE_MEM, 0);

	dp->reg_base = devm_ioremap_resource(&pdev->dev, res);
	if (IS_ERR(dp->reg_base))
		return PTR_ERR(dp->reg_base);

	dp->hpd_gpio = of_get_named_gpio(dev->of_node, "samsung,hpd-gpio", 0);

	if (gpio_is_valid(dp->hpd_gpio)) {
		/*
		 * Set up the hotplug GPIO from the device tree as an interrupt.
		 * Simply specifying a different interrupt in the device tree
		 * doesn't work since we handle hotplug rather differently when
		 * using a GPIO. We also need the actual GPIO specifier so
		 * that we can get the current state of the GPIO.
		 */
		ret = devm_gpio_request_one(&pdev->dev, dp->hpd_gpio, GPIOF_IN,
					    "hpd_gpio");
		if (ret) {
			dev_err(&pdev->dev, "failed to get hpd gpio\n");
			return ret;
		}
		dp->irq = gpio_to_irq(dp->hpd_gpio);
		irq_flags = IRQF_TRIGGER_RISING | IRQF_TRIGGER_FALLING;
	} else {
		dp->hpd_gpio = -ENODEV;
		dp->irq = platform_get_irq(pdev, 0);
		irq_flags = 0;
	}

	if (dp->irq == -ENXIO) {
		dev_err(&pdev->dev, "failed to get irq\n");
		return -ENODEV;
	}

	INIT_WORK(&dp->hotplug_work, exynos_dp_hotplug);

	phy_power_on(dp->phy);

	exynos_dp_init_dp(dp);

	ret = devm_request_irq(&pdev->dev, dp->irq, exynos_dp_irq_handler,
			irq_flags, "exynos-dp", dp);
	if (ret) {
		dev_err(&pdev->dev, "failed to request irq\n");
		return ret;
	}
	disable_irq(dp->irq);

	dp->drm_dev = drm_dev;

	pipe = exynos_drm_crtc_get_pipe_from_type(drm_dev,
						  EXYNOS_DISPLAY_TYPE_LCD);
	if (pipe < 0)
		return pipe;

	encoder->possible_crtcs = 1 << pipe;

	DRM_DEBUG_KMS("possible_crtcs = 0x%x\n", encoder->possible_crtcs);

	drm_encoder_init(drm_dev, encoder, &exynos_dp_encoder_funcs,
			 DRM_MODE_ENCODER_TMDS);

	drm_encoder_helper_add(encoder, &exynos_dp_encoder_helper_funcs);

	ret = exynos_dp_create_connector(encoder);
	if (ret) {
		DRM_ERROR("failed to create connector ret = %d\n", ret);
		drm_encoder_cleanup(encoder);
		return ret;
	}

	return 0;
}
static void exynos_dp_unbind(struct device *dev, struct device *master,
				void *data)
{
	struct exynos_dp_device *dp = dev_get_drvdata(dev);

	exynos_dp_disable(&dp->encoder);
}

static const struct component_ops exynos_dp_ops = {
	.bind	= exynos_dp_bind,
	.unbind	= exynos_dp_unbind,
};
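/*
 * Platform probe: look up an optional "panel" phandle or an OF-graph bridge
 * and defer probing until they are available, then register with the
 * component framework; the real setup happens in exynos_dp_bind().
 */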
static int exynos_dp_probe(struct platform_device *pdev)
{
	struct device *dev = &pdev->dev;
	struct device_node *panel_node, *bridge_node, *endpoint;
	struct exynos_dp_device *dp;

	dp = devm_kzalloc(&pdev->dev, sizeof(struct exynos_dp_device),
				GFP_KERNEL);
	if (!dp)
		return -ENOMEM;

	platform_set_drvdata(pdev, dp);

	panel_node = of_parse_phandle(dev->of_node, "panel", 0);
	if (panel_node) {
		dp->panel = of_drm_find_panel(panel_node);
		of_node_put(panel_node);
		if (!dp->panel)
			return -EPROBE_DEFER;
	}

	endpoint = of_graph_get_next_endpoint(dev->of_node, NULL);
	if (endpoint) {
		bridge_node = of_graph_get_remote_port_parent(endpoint);
		if (bridge_node) {
			dp->bridge = of_drm_find_bridge(bridge_node);
			of_node_put(bridge_node);
			if (!dp->bridge)
				return -EPROBE_DEFER;
		} else
			return -EPROBE_DEFER;
	}

	return component_add(&pdev->dev, &exynos_dp_ops);
}

static int exynos_dp_remove(struct platform_device *pdev)
{
	component_del(&pdev->dev, &exynos_dp_ops);

	return 0;
}

static const struct of_device_id exynos_dp_match[] = {
	{ .compatible = "samsung,exynos5-dp" },
	{},
};
MODULE_DEVICE_TABLE(of, exynos_dp_match);

struct platform_driver dp_driver = {
	.probe		= exynos_dp_probe,
	.remove		= exynos_dp_remove,
	.driver		= {
		.name	= "exynos-dp",
		.owner	= THIS_MODULE,
		.of_match_table = exynos_dp_match,
	},
};

MODULE_AUTHOR("Jingoo Han <jg1.han@samsung.com>");
MODULE_DESCRIPTION("Samsung SoC DP Driver");
MODULE_LICENSE("GPL v2");