atombios_encoders.c 70 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153215421552156
  1. /*
  2. * Copyright 2007-11 Advanced Micro Devices, Inc.
  3. * Copyright 2008 Red Hat Inc.
  4. *
  5. * Permission is hereby granted, free of charge, to any person obtaining a
  6. * copy of this software and associated documentation files (the "Software"),
  7. * to deal in the Software without restriction, including without limitation
  8. * the rights to use, copy, modify, merge, publish, distribute, sublicense,
  9. * and/or sell copies of the Software, and to permit persons to whom the
  10. * Software is furnished to do so, subject to the following conditions:
  11. *
  12. * The above copyright notice and this permission notice shall be included in
  13. * all copies or substantial portions of the Software.
  14. *
  15. * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
  16. * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
  17. * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
  18. * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
  19. * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
  20. * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
  21. * OTHER DEALINGS IN THE SOFTWARE.
  22. *
  23. * Authors: Dave Airlie
  24. * Alex Deucher
  25. */
  26. #include <drm/drmP.h>
  27. #include <drm/drm_crtc_helper.h>
  28. #include <drm/amdgpu_drm.h>
  29. #include "amdgpu.h"
  30. #include "amdgpu_connectors.h"
  31. #include "atom.h"
  32. #include "atombios_encoders.h"
  33. #include "atombios_dp.h"
  34. #include <linux/backlight.h>
  35. #include "bif/bif_4_1_d.h"
  36. u8
  37. amdgpu_atombios_encoder_get_backlight_level_from_reg(struct amdgpu_device *adev)
  38. {
  39. u8 backlight_level;
  40. u32 bios_2_scratch;
  41. bios_2_scratch = RREG32(mmBIOS_SCRATCH_2);
  42. backlight_level = ((bios_2_scratch & ATOM_S2_CURRENT_BL_LEVEL_MASK) >>
  43. ATOM_S2_CURRENT_BL_LEVEL_SHIFT);
  44. return backlight_level;
  45. }
  46. void
  47. amdgpu_atombios_encoder_set_backlight_level_to_reg(struct amdgpu_device *adev,
  48. u8 backlight_level)
  49. {
  50. u32 bios_2_scratch;
  51. bios_2_scratch = RREG32(mmBIOS_SCRATCH_2);
  52. bios_2_scratch &= ~ATOM_S2_CURRENT_BL_LEVEL_MASK;
  53. bios_2_scratch |= ((backlight_level << ATOM_S2_CURRENT_BL_LEVEL_SHIFT) &
  54. ATOM_S2_CURRENT_BL_LEVEL_MASK);
  55. WREG32(mmBIOS_SCRATCH_2, bios_2_scratch);
  56. }
  57. u8
  58. amdgpu_atombios_encoder_get_backlight_level(struct amdgpu_encoder *amdgpu_encoder)
  59. {
  60. struct drm_device *dev = amdgpu_encoder->base.dev;
  61. struct amdgpu_device *adev = dev->dev_private;
  62. if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU))
  63. return 0;
  64. return amdgpu_atombios_encoder_get_backlight_level_from_reg(adev);
  65. }
/* Program a new backlight level for an LCD panel.
 *
 * Records the level in the encoder's dig private data, mirrors it into
 * the BIOS scratch register, and for DIG encoders kicks the transmitter
 * so the change takes effect.  No-op unless the firmware reports
 * GPU-controlled backlight and the encoder drives an LCD.
 */
void
amdgpu_atombios_encoder_set_backlight_level(struct amdgpu_encoder *amdgpu_encoder,
					    u8 level)
{
	struct drm_encoder *encoder = &amdgpu_encoder->base;
	struct drm_device *dev = amdgpu_encoder->base.dev;
	struct amdgpu_device *adev = dev->dev_private;
	struct amdgpu_encoder_atom_dig *dig;

	if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU))
		return;

	if ((amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) &&
	    amdgpu_encoder->enc_priv) {
		dig = amdgpu_encoder->enc_priv;
		dig->backlight_level = level;
		amdgpu_atombios_encoder_set_backlight_level_to_reg(adev, dig->backlight_level);

		switch (amdgpu_encoder->encoder_id) {
		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
		case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
			if (dig->backlight_level == 0)
				/* level 0: turn the panel backlight off entirely */
				amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
									      ATOM_TRANSMITTER_ACTION_LCD_BLOFF, 0, 0);
			else {
				/* otherwise program the brightness, then (re)enable */
				amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
									      ATOM_TRANSMITTER_ACTION_BL_BRIGHTNESS_CONTROL, 0, 0);
				amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
									      ATOM_TRANSMITTER_ACTION_LCD_BLON, 0, 0);
			}
			break;
		default:
			break;
		}
	}
}
  102. #if defined(CONFIG_BACKLIGHT_CLASS_DEVICE) || defined(CONFIG_BACKLIGHT_CLASS_DEVICE_MODULE)
  103. static u8 amdgpu_atombios_encoder_backlight_level(struct backlight_device *bd)
  104. {
  105. u8 level;
  106. /* Convert brightness to hardware level */
  107. if (bd->props.brightness < 0)
  108. level = 0;
  109. else if (bd->props.brightness > AMDGPU_MAX_BL_LEVEL)
  110. level = AMDGPU_MAX_BL_LEVEL;
  111. else
  112. level = bd->props.brightness;
  113. return level;
  114. }
  115. static int amdgpu_atombios_encoder_update_backlight_status(struct backlight_device *bd)
  116. {
  117. struct amdgpu_backlight_privdata *pdata = bl_get_data(bd);
  118. struct amdgpu_encoder *amdgpu_encoder = pdata->encoder;
  119. amdgpu_atombios_encoder_set_backlight_level(amdgpu_encoder,
  120. amdgpu_atombios_encoder_backlight_level(bd));
  121. return 0;
  122. }
  123. static int
  124. amdgpu_atombios_encoder_get_backlight_brightness(struct backlight_device *bd)
  125. {
  126. struct amdgpu_backlight_privdata *pdata = bl_get_data(bd);
  127. struct amdgpu_encoder *amdgpu_encoder = pdata->encoder;
  128. struct drm_device *dev = amdgpu_encoder->base.dev;
  129. struct amdgpu_device *adev = dev->dev_private;
  130. return amdgpu_atombios_encoder_get_backlight_level_from_reg(adev);
  131. }
/* Hooks the backlight class core uses to query/set panel brightness. */
static const struct backlight_ops amdgpu_atombios_encoder_backlight_ops = {
	.get_brightness = amdgpu_atombios_encoder_get_backlight_brightness,
	.update_status = amdgpu_atombios_encoder_update_backlight_status,
};
  136. void amdgpu_atombios_encoder_init_backlight(struct amdgpu_encoder *amdgpu_encoder,
  137. struct drm_connector *drm_connector)
  138. {
  139. struct drm_device *dev = amdgpu_encoder->base.dev;
  140. struct amdgpu_device *adev = dev->dev_private;
  141. struct backlight_device *bd;
  142. struct backlight_properties props;
  143. struct amdgpu_backlight_privdata *pdata;
  144. struct amdgpu_encoder_atom_dig *dig;
  145. u8 backlight_level;
  146. char bl_name[16];
  147. /* Mac laptops with multiple GPUs use the gmux driver for backlight
  148. * so don't register a backlight device
  149. */
  150. if ((adev->pdev->subsystem_vendor == PCI_VENDOR_ID_APPLE) &&
  151. (adev->pdev->device == 0x6741))
  152. return;
  153. if (!amdgpu_encoder->enc_priv)
  154. return;
  155. if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU))
  156. return;
  157. pdata = kmalloc(sizeof(struct amdgpu_backlight_privdata), GFP_KERNEL);
  158. if (!pdata) {
  159. DRM_ERROR("Memory allocation failed\n");
  160. goto error;
  161. }
  162. memset(&props, 0, sizeof(props));
  163. props.max_brightness = AMDGPU_MAX_BL_LEVEL;
  164. props.type = BACKLIGHT_RAW;
  165. snprintf(bl_name, sizeof(bl_name),
  166. "amdgpu_bl%d", dev->primary->index);
  167. bd = backlight_device_register(bl_name, drm_connector->kdev,
  168. pdata, &amdgpu_atombios_encoder_backlight_ops, &props);
  169. if (IS_ERR(bd)) {
  170. DRM_ERROR("Backlight registration failed\n");
  171. goto error;
  172. }
  173. pdata->encoder = amdgpu_encoder;
  174. backlight_level = amdgpu_atombios_encoder_get_backlight_level_from_reg(adev);
  175. dig = amdgpu_encoder->enc_priv;
  176. dig->bl_dev = bd;
  177. bd->props.brightness = amdgpu_atombios_encoder_get_backlight_brightness(bd);
  178. bd->props.power = FB_BLANK_UNBLANK;
  179. backlight_update_status(bd);
  180. DRM_INFO("amdgpu atom DIG backlight initialized\n");
  181. return;
  182. error:
  183. kfree(pdata);
  184. return;
  185. }
  186. void
  187. amdgpu_atombios_encoder_fini_backlight(struct amdgpu_encoder *amdgpu_encoder)
  188. {
  189. struct drm_device *dev = amdgpu_encoder->base.dev;
  190. struct amdgpu_device *adev = dev->dev_private;
  191. struct backlight_device *bd = NULL;
  192. struct amdgpu_encoder_atom_dig *dig;
  193. if (!amdgpu_encoder->enc_priv)
  194. return;
  195. if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU))
  196. return;
  197. dig = amdgpu_encoder->enc_priv;
  198. bd = dig->bl_dev;
  199. dig->bl_dev = NULL;
  200. if (bd) {
  201. struct amdgpu_legacy_backlight_privdata *pdata;
  202. pdata = bl_get_data(bd);
  203. backlight_device_unregister(bd);
  204. kfree(pdata);
  205. DRM_INFO("amdgpu atom LVDS backlight unloaded\n");
  206. }
  207. }
  208. #else /* !CONFIG_BACKLIGHT_CLASS_DEVICE */
  209. void amdgpu_atombios_encoder_init_backlight(struct amdgpu_encoder *encoder)
  210. {
  211. }
  212. void amdgpu_atombios_encoder_fini_backlight(struct amdgpu_encoder *encoder)
  213. {
  214. }
  215. #endif
  216. bool amdgpu_atombios_encoder_is_digital(struct drm_encoder *encoder)
  217. {
  218. struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
  219. switch (amdgpu_encoder->encoder_id) {
  220. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
  221. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
  222. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
  223. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
  224. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
  225. return true;
  226. default:
  227. return false;
  228. }
  229. }
  230. bool amdgpu_atombios_encoder_mode_fixup(struct drm_encoder *encoder,
  231. const struct drm_display_mode *mode,
  232. struct drm_display_mode *adjusted_mode)
  233. {
  234. struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
  235. /* set the active encoder to connector routing */
  236. amdgpu_encoder_set_active_device(encoder);
  237. drm_mode_set_crtcinfo(adjusted_mode, 0);
  238. /* hw bug */
  239. if ((mode->flags & DRM_MODE_FLAG_INTERLACE)
  240. && (mode->crtc_vsync_start < (mode->crtc_vdisplay + 2)))
  241. adjusted_mode->crtc_vsync_start = adjusted_mode->crtc_vdisplay + 2;
  242. /* vertical FP must be at least 1 */
  243. if (mode->crtc_vsync_start == mode->crtc_vdisplay)
  244. adjusted_mode->crtc_vsync_start++;
  245. /* get the native mode for scaling */
  246. if (amdgpu_encoder->active_device & (ATOM_DEVICE_LCD_SUPPORT))
  247. amdgpu_panel_mode_fixup(encoder, adjusted_mode);
  248. else if (amdgpu_encoder->rmx_type != RMX_OFF)
  249. amdgpu_panel_mode_fixup(encoder, adjusted_mode);
  250. if ((amdgpu_encoder->active_device & (ATOM_DEVICE_DFP_SUPPORT | ATOM_DEVICE_LCD_SUPPORT)) ||
  251. (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE)) {
  252. struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
  253. amdgpu_atombios_dp_set_link_config(connector, adjusted_mode);
  254. }
  255. return true;
  256. }
/* Enable/disable an analog DAC encoder via the DAC1/DAC2EncoderControl
 * atom command table.  @action is the table's ucAction value. */
static void
amdgpu_atombios_encoder_setup_dac(struct drm_encoder *encoder, int action)
{
	struct drm_device *dev = encoder->dev;
	struct amdgpu_device *adev = dev->dev_private;
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
	DAC_ENCODER_CONTROL_PS_ALLOCATION args;
	/* NOTE(review): index stays 0 for non-DAC encoder ids — presumably
	 * callers only pass DAC encoders here; confirm against call sites. */
	int index = 0;

	memset(&args, 0, sizeof(args));

	switch (amdgpu_encoder->encoder_id) {
	case ENCODER_OBJECT_ID_INTERNAL_DAC1:
	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
		index = GetIndexIntoMasterTable(COMMAND, DAC1EncoderControl);
		break;
	case ENCODER_OBJECT_ID_INTERNAL_DAC2:
	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2:
		index = GetIndexIntoMasterTable(COMMAND, DAC2EncoderControl);
		break;
	}

	args.ucAction = action;
	/* always programs the PS2 (VGA) DAC standard */
	args.ucDacStandard = ATOM_DAC1_PS2;
	/* table takes the pixel clock in 10 kHz units */
	args.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);

	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
}
  281. static u8 amdgpu_atombios_encoder_get_bpc(struct drm_encoder *encoder)
  282. {
  283. int bpc = 8;
  284. if (encoder->crtc) {
  285. struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(encoder->crtc);
  286. bpc = amdgpu_crtc->bpc;
  287. }
  288. switch (bpc) {
  289. case 0:
  290. return PANEL_BPC_UNDEFINE;
  291. case 6:
  292. return PANEL_6BIT_PER_COLOR;
  293. case 8:
  294. default:
  295. return PANEL_8BIT_PER_COLOR;
  296. case 10:
  297. return PANEL_10BIT_PER_COLOR;
  298. case 12:
  299. return PANEL_12BIT_PER_COLOR;
  300. case 16:
  301. return PANEL_16BIT_PER_COLOR;
  302. }
  303. }
/* Per-revision parameter layouts for the DVOEncoderControl atom table;
 * the member used depends on the frev/crev parsed from the command
 * header. */
union dvo_encoder_control {
	ENABLE_EXTERNAL_TMDS_ENCODER_PS_ALLOCATION ext_tmds;
	DVO_ENCODER_CONTROL_PS_ALLOCATION dvo;
	DVO_ENCODER_CONTROL_PS_ALLOCATION_V3 dvo_v3;
	DVO_ENCODER_CONTROL_PS_ALLOCATION_V1_4 dvo_v4;
};
/* Program the DVO encoder via the DVOEncoderControl atom table.
 *
 * The parameter struct layout depends on the table revision reported
 * by the VBIOS, so the command header is parsed first and the matching
 * union member is filled in before executing the table.
 */
static void
amdgpu_atombios_encoder_setup_dvo(struct drm_encoder *encoder, int action)
{
	struct drm_device *dev = encoder->dev;
	struct amdgpu_device *adev = dev->dev_private;
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
	union dvo_encoder_control args;
	int index = GetIndexIntoMasterTable(COMMAND, DVOEncoderControl);
	uint8_t frev, crev;

	memset(&args, 0, sizeof(args));

	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
		return;

	switch (frev) {
	case 1:
		switch (crev) {
		case 1:
			/* R4xx, R5xx */
			args.ext_tmds.sXTmdsEncoder.ucEnable = action;
			/* dual-link needed above the single-link clock limit */
			if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.ext_tmds.sXTmdsEncoder.ucMisc |= PANEL_ENCODER_MISC_DUAL;
			args.ext_tmds.sXTmdsEncoder.ucMisc |= ATOM_PANEL_MISC_888RGB;
			break;
		case 2:
			/* RS600/690/740 */
			args.dvo.sDVOEncoder.ucAction = action;
			/* tables take the pixel clock in 10 kHz units */
			args.dvo.sDVOEncoder.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			/* DFP1, CRT1, TV1 depending on the type of port */
			args.dvo.sDVOEncoder.ucDeviceType = ATOM_DEVICE_DFP1_INDEX;
			if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.dvo.sDVOEncoder.usDevAttr.sDigAttrib.ucAttribute |= PANEL_ENCODER_MISC_DUAL;
			break;
		case 3:
			/* R6xx */
			args.dvo_v3.ucAction = action;
			args.dvo_v3.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			args.dvo_v3.ucDVOConfig = 0; /* XXX */
			break;
		case 4:
			/* DCE8 */
			args.dvo_v4.ucAction = action;
			args.dvo_v4.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			args.dvo_v4.ucDVOConfig = 0; /* XXX */
			args.dvo_v4.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
			break;
		default:
			DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
			break;
		}
		break;
	default:
		DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
		break;
	}

	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
}
/* Work out the ATOM encoder mode (DP/HDMI/DVI/LVDS/CRT/TV) to program
 * for this encoder, based on connector type, sink detection, and the
 * amdgpu_audio module option. */
int amdgpu_atombios_encoder_get_encoder_mode(struct drm_encoder *encoder)
{
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
	struct drm_connector *connector;
	struct amdgpu_connector *amdgpu_connector;
	struct amdgpu_connector_atom_dig *dig_connector;

	/* dp bridges are always DP */
	if (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE)
		return ATOM_ENCODER_MODE_DP;

	/* DVO is always DVO */
	if ((amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_DVO1) ||
	    (amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1))
		return ATOM_ENCODER_MODE_DVO;

	connector = amdgpu_get_connector_for_encoder(encoder);
	/* if we don't have an active device yet, just use one of
	 * the connectors tied to the encoder.
	 */
	if (!connector)
		connector = amdgpu_get_connector_for_encoder_init(encoder);
	/* NOTE(review): connector is dereferenced below; if both lookups
	 * can return NULL this would oops — confirm against callers. */
	amdgpu_connector = to_amdgpu_connector(connector);

	switch (connector->connector_type) {
	case DRM_MODE_CONNECTOR_DVII:
	case DRM_MODE_CONNECTOR_HDMIB: /* HDMI-B is basically DL-DVI; analog works fine */
		if (amdgpu_audio != 0) {
			/* prefer HDMI when the user forced audio on, or when
			 * auto-detect finds an HDMI-capable sink */
			if (amdgpu_connector->use_digital &&
			    (amdgpu_connector->audio == AMDGPU_AUDIO_ENABLE))
				return ATOM_ENCODER_MODE_HDMI;
			else if (drm_detect_hdmi_monitor(amdgpu_connector_edid(connector)) &&
				 (amdgpu_connector->audio == AMDGPU_AUDIO_AUTO))
				return ATOM_ENCODER_MODE_HDMI;
			else if (amdgpu_connector->use_digital)
				return ATOM_ENCODER_MODE_DVI;
			else
				return ATOM_ENCODER_MODE_CRT;
		} else if (amdgpu_connector->use_digital) {
			return ATOM_ENCODER_MODE_DVI;
		} else {
			return ATOM_ENCODER_MODE_CRT;
		}
		break;
	case DRM_MODE_CONNECTOR_DVID:
	case DRM_MODE_CONNECTOR_HDMIA:
	default:
		if (amdgpu_audio != 0) {
			if (amdgpu_connector->audio == AMDGPU_AUDIO_ENABLE)
				return ATOM_ENCODER_MODE_HDMI;
			else if (drm_detect_hdmi_monitor(amdgpu_connector_edid(connector)) &&
				 (amdgpu_connector->audio == AMDGPU_AUDIO_AUTO))
				return ATOM_ENCODER_MODE_HDMI;
			else
				return ATOM_ENCODER_MODE_DVI;
		} else {
			return ATOM_ENCODER_MODE_DVI;
		}
		break;
	case DRM_MODE_CONNECTOR_LVDS:
		return ATOM_ENCODER_MODE_LVDS;
		break;
	case DRM_MODE_CONNECTOR_DisplayPort:
		dig_connector = amdgpu_connector->con_priv;
		/* real DP/eDP sinks get DP mode; a DP connector driving a
		 * passive HDMI/DVI adapter falls through to HDMI/DVI logic */
		if ((dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_DISPLAYPORT) ||
		    (dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_eDP)) {
			return ATOM_ENCODER_MODE_DP;
		} else if (amdgpu_audio != 0) {
			if (amdgpu_connector->audio == AMDGPU_AUDIO_ENABLE)
				return ATOM_ENCODER_MODE_HDMI;
			else if (drm_detect_hdmi_monitor(amdgpu_connector_edid(connector)) &&
				 (amdgpu_connector->audio == AMDGPU_AUDIO_AUTO))
				return ATOM_ENCODER_MODE_HDMI;
			else
				return ATOM_ENCODER_MODE_DVI;
		} else {
			return ATOM_ENCODER_MODE_DVI;
		}
		break;
	case DRM_MODE_CONNECTOR_eDP:
		return ATOM_ENCODER_MODE_DP;
	case DRM_MODE_CONNECTOR_DVIA:
	case DRM_MODE_CONNECTOR_VGA:
		return ATOM_ENCODER_MODE_CRT;
		break;
	case DRM_MODE_CONNECTOR_Composite:
	case DRM_MODE_CONNECTOR_SVIDEO:
	case DRM_MODE_CONNECTOR_9PinDIN:
		/* fix me */
		return ATOM_ENCODER_MODE_TV;
		/*return ATOM_ENCODER_MODE_CV;*/
		break;
	}
}
  455. /*
  456. * DIG Encoder/Transmitter Setup
  457. *
  458. * DCE 6.0
  459. * - 3 DIG transmitter blocks UNIPHY0/1/2 (links A and B).
  460. * Supports up to 6 digital outputs
  461. * - 6 DIG encoder blocks.
  462. * - DIG to PHY mapping is hardcoded
  463. * DIG1 drives UNIPHY0 link A, A+B
  464. * DIG2 drives UNIPHY0 link B
  465. * DIG3 drives UNIPHY1 link A, A+B
  466. * DIG4 drives UNIPHY1 link B
  467. * DIG5 drives UNIPHY2 link A, A+B
  468. * DIG6 drives UNIPHY2 link B
  469. *
  470. * Routing
  471. * crtc -> dig encoder -> UNIPHY/LVTMA (1 or 2 links)
  472. * Examples:
  473. * crtc0 -> dig2 -> LVTMA links A+B -> TMDS/HDMI
  474. * crtc1 -> dig1 -> UNIPHY0 link B -> DP
  475. * crtc0 -> dig1 -> UNIPHY2 link A -> LVDS
  476. * crtc1 -> dig2 -> UNIPHY1 link B+A -> TMDS/HDMI
  477. */
/* Per-revision parameter layouts for the DIGxEncoderControl atom
 * table; the member used depends on the frev/crev parsed from the
 * command header. */
union dig_encoder_control {
	DIG_ENCODER_CONTROL_PS_ALLOCATION v1;
	DIG_ENCODER_CONTROL_PARAMETERS_V2 v2;
	DIG_ENCODER_CONTROL_PARAMETERS_V3 v3;
	DIG_ENCODER_CONTROL_PARAMETERS_V4 v4;
	DIG_ENCODER_CONTROL_PARAMETERS_V5 v5;
};
  485. void
  486. amdgpu_atombios_encoder_setup_dig_encoder(struct drm_encoder *encoder,
  487. int action, int panel_mode)
  488. {
  489. struct drm_device *dev = encoder->dev;
  490. struct amdgpu_device *adev = dev->dev_private;
  491. struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
  492. struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv;
  493. struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
  494. union dig_encoder_control args;
  495. int index = GetIndexIntoMasterTable(COMMAND, DIGxEncoderControl);
  496. uint8_t frev, crev;
  497. int dp_clock = 0;
  498. int dp_lane_count = 0;
  499. int hpd_id = AMDGPU_HPD_NONE;
  500. if (connector) {
  501. struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
  502. struct amdgpu_connector_atom_dig *dig_connector =
  503. amdgpu_connector->con_priv;
  504. dp_clock = dig_connector->dp_clock;
  505. dp_lane_count = dig_connector->dp_lane_count;
  506. hpd_id = amdgpu_connector->hpd.hpd;
  507. }
  508. /* no dig encoder assigned */
  509. if (dig->dig_encoder == -1)
  510. return;
  511. memset(&args, 0, sizeof(args));
  512. if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
  513. return;
  514. switch (frev) {
  515. case 1:
  516. switch (crev) {
  517. case 1:
  518. args.v1.ucAction = action;
  519. args.v1.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
  520. if (action == ATOM_ENCODER_CMD_SETUP_PANEL_MODE)
  521. args.v3.ucPanelMode = panel_mode;
  522. else
  523. args.v1.ucEncoderMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
  524. if (ENCODER_MODE_IS_DP(args.v1.ucEncoderMode))
  525. args.v1.ucLaneNum = dp_lane_count;
  526. else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
  527. args.v1.ucLaneNum = 8;
  528. else
  529. args.v1.ucLaneNum = 4;
  530. if (ENCODER_MODE_IS_DP(args.v1.ucEncoderMode) && (dp_clock == 270000))
  531. args.v1.ucConfig |= ATOM_ENCODER_CONFIG_DPLINKRATE_2_70GHZ;
  532. switch (amdgpu_encoder->encoder_id) {
  533. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
  534. args.v1.ucConfig = ATOM_ENCODER_CONFIG_V2_TRANSMITTER1;
  535. break;
  536. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
  537. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
  538. args.v1.ucConfig = ATOM_ENCODER_CONFIG_V2_TRANSMITTER2;
  539. break;
  540. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
  541. args.v1.ucConfig = ATOM_ENCODER_CONFIG_V2_TRANSMITTER3;
  542. break;
  543. }
  544. if (dig->linkb)
  545. args.v1.ucConfig |= ATOM_ENCODER_CONFIG_LINKB;
  546. else
  547. args.v1.ucConfig |= ATOM_ENCODER_CONFIG_LINKA;
  548. break;
  549. case 2:
  550. case 3:
  551. args.v3.ucAction = action;
  552. args.v3.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
  553. if (action == ATOM_ENCODER_CMD_SETUP_PANEL_MODE)
  554. args.v3.ucPanelMode = panel_mode;
  555. else
  556. args.v3.ucEncoderMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
  557. if (ENCODER_MODE_IS_DP(args.v3.ucEncoderMode))
  558. args.v3.ucLaneNum = dp_lane_count;
  559. else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
  560. args.v3.ucLaneNum = 8;
  561. else
  562. args.v3.ucLaneNum = 4;
  563. if (ENCODER_MODE_IS_DP(args.v3.ucEncoderMode) && (dp_clock == 270000))
  564. args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V3_DPLINKRATE_2_70GHZ;
  565. args.v3.acConfig.ucDigSel = dig->dig_encoder;
  566. args.v3.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
  567. break;
  568. case 4:
  569. args.v4.ucAction = action;
  570. args.v4.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
  571. if (action == ATOM_ENCODER_CMD_SETUP_PANEL_MODE)
  572. args.v4.ucPanelMode = panel_mode;
  573. else
  574. args.v4.ucEncoderMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
  575. if (ENCODER_MODE_IS_DP(args.v4.ucEncoderMode))
  576. args.v4.ucLaneNum = dp_lane_count;
  577. else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
  578. args.v4.ucLaneNum = 8;
  579. else
  580. args.v4.ucLaneNum = 4;
  581. if (ENCODER_MODE_IS_DP(args.v4.ucEncoderMode)) {
  582. if (dp_clock == 540000)
  583. args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_5_40GHZ;
  584. else if (dp_clock == 324000)
  585. args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_3_24GHZ;
  586. else if (dp_clock == 270000)
  587. args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_2_70GHZ;
  588. else
  589. args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_1_62GHZ;
  590. }
  591. args.v4.acConfig.ucDigSel = dig->dig_encoder;
  592. args.v4.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
  593. if (hpd_id == AMDGPU_HPD_NONE)
  594. args.v4.ucHPD_ID = 0;
  595. else
  596. args.v4.ucHPD_ID = hpd_id + 1;
  597. break;
  598. case 5:
  599. switch (action) {
  600. case ATOM_ENCODER_CMD_SETUP_PANEL_MODE:
  601. args.v5.asDPPanelModeParam.ucAction = action;
  602. args.v5.asDPPanelModeParam.ucPanelMode = panel_mode;
  603. args.v5.asDPPanelModeParam.ucDigId = dig->dig_encoder;
  604. break;
  605. case ATOM_ENCODER_CMD_STREAM_SETUP:
  606. args.v5.asStreamParam.ucAction = action;
  607. args.v5.asStreamParam.ucDigId = dig->dig_encoder;
  608. args.v5.asStreamParam.ucDigMode =
  609. amdgpu_atombios_encoder_get_encoder_mode(encoder);
  610. if (ENCODER_MODE_IS_DP(args.v5.asStreamParam.ucDigMode))
  611. args.v5.asStreamParam.ucLaneNum = dp_lane_count;
  612. else if (amdgpu_dig_monitor_is_duallink(encoder,
  613. amdgpu_encoder->pixel_clock))
  614. args.v5.asStreamParam.ucLaneNum = 8;
  615. else
  616. args.v5.asStreamParam.ucLaneNum = 4;
  617. args.v5.asStreamParam.ulPixelClock =
  618. cpu_to_le32(amdgpu_encoder->pixel_clock / 10);
  619. args.v5.asStreamParam.ucBitPerColor =
  620. amdgpu_atombios_encoder_get_bpc(encoder);
  621. args.v5.asStreamParam.ucLinkRateIn270Mhz = dp_clock / 27000;
  622. break;
  623. case ATOM_ENCODER_CMD_DP_LINK_TRAINING_START:
  624. case ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN1:
  625. case ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN2:
  626. case ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN3:
  627. case ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN4:
  628. case ATOM_ENCODER_CMD_DP_LINK_TRAINING_COMPLETE:
  629. case ATOM_ENCODER_CMD_DP_VIDEO_OFF:
  630. case ATOM_ENCODER_CMD_DP_VIDEO_ON:
  631. args.v5.asCmdParam.ucAction = action;
  632. args.v5.asCmdParam.ucDigId = dig->dig_encoder;
  633. break;
  634. default:
  635. DRM_ERROR("Unsupported action 0x%x\n", action);
  636. break;
  637. }
  638. break;
  639. default:
  640. DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
  641. break;
  642. }
  643. break;
  644. default:
  645. DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
  646. break;
  647. }
  648. amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
  649. }
/* Parameter block for the DIG transmitter control AtomBIOS command table.
 * The BIOS reports a frev/crev pair and the matching member layout is used;
 * all members overlay the same storage.
 */
union dig_transmitter_control {
	DIG_TRANSMITTER_CONTROL_PS_ALLOCATION v1;
	DIG_TRANSMITTER_CONTROL_PARAMETERS_V2 v2;
	DIG_TRANSMITTER_CONTROL_PARAMETERS_V3 v3;
	DIG_TRANSMITTER_CONTROL_PARAMETERS_V4 v4;
	DIG_TRANSMITTER_CONTROL_PARAMETERS_V1_5 v5;
	DIG_TRANSMITTER_CONTROL_PARAMETERS_V1_6 v6;
};
/*
 * amdgpu_atombios_encoder_setup_dig_transmitter - program a DIG transmitter
 * through the UNIPHY/LVTMA/DVO AtomBIOS command table.
 * @encoder: DRM encoder whose transmitter is being programmed
 * @action: ATOM_TRANSMITTER_ACTION_* opcode (INIT, ENABLE, DISABLE,
 *          SETUP_VSEMPH, ...)
 * @lane_num: lane select, consumed only for SETUP_VSEMPH on v1-v4 tables
 * @lane_set: voltage swing / pre-emphasis setting for SETUP_VSEMPH
 *
 * Fills the dig_transmitter_control union according to the table revision
 * reported by the BIOS and executes the table.  Returns early (doing
 * nothing) when no DIG encoder is assigned or the table header cannot be
 * parsed.
 */
void
amdgpu_atombios_encoder_setup_dig_transmitter(struct drm_encoder *encoder, int action,
					      uint8_t lane_num, uint8_t lane_set)
{
	struct drm_device *dev = encoder->dev;
	struct amdgpu_device *adev = dev->dev_private;
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
	struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv;
	struct drm_connector *connector;
	union dig_transmitter_control args;
	int index = 0;
	uint8_t frev, crev;
	bool is_dp = false;
	int pll_id = 0;
	int dp_clock = 0;
	int dp_lane_count = 0;
	int connector_object_id = 0;
	/* NOTE(review): igp_lane_info is never assigned in this function, so the
	 * APU lane-config branch in the crev 1 case below never sets any lane
	 * bits — appears to be dead code inherited from radeon; confirm before
	 * relying on it. */
	int igp_lane_info = 0;
	int dig_encoder = dig->dig_encoder;
	int hpd_id = AMDGPU_HPD_NONE;

	if (action == ATOM_TRANSMITTER_ACTION_INIT) {
		connector = amdgpu_get_connector_for_encoder_init(encoder);
		/* just needed to avoid bailing in the encoder check. the encoder
		 * isn't used for init
		 */
		dig_encoder = 0;
	} else
		connector = amdgpu_get_connector_for_encoder(encoder);

	if (connector) {
		struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
		struct amdgpu_connector_atom_dig *dig_connector =
			amdgpu_connector->con_priv;

		hpd_id = amdgpu_connector->hpd.hpd;
		dp_clock = dig_connector->dp_clock;
		dp_lane_count = dig_connector->dp_lane_count;
		connector_object_id =
			(amdgpu_connector->connector_object_id & OBJECT_ID_MASK) >> OBJECT_ID_SHIFT;
	}

	if (encoder->crtc) {
		struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(encoder->crtc);

		pll_id = amdgpu_crtc->pll_id;
	}

	/* no dig encoder assigned */
	if (dig_encoder == -1)
		return;

	if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)))
		is_dp = true;

	memset(&args, 0, sizeof(args));

	/* pick the command table matching the physical transmitter block */
	switch (amdgpu_encoder->encoder_id) {
	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
		index = GetIndexIntoMasterTable(COMMAND, DVOOutputControl);
		break;
	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
		index = GetIndexIntoMasterTable(COMMAND, UNIPHYTransmitterControl);
		break;
	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
		index = GetIndexIntoMasterTable(COMMAND, LVTMATransmitterControl);
		break;
	}

	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
		return;

	switch (frev) {
	case 1:
		switch (crev) {
		case 1:
			args.v1.ucAction = action;
			if (action == ATOM_TRANSMITTER_ACTION_INIT) {
				args.v1.usInitInfo = cpu_to_le16(connector_object_id);
			} else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) {
				args.v1.asMode.ucLaneSel = lane_num;
				args.v1.asMode.ucLaneSet = lane_set;
			} else {
				/* dual link carries half the pixel clock per link */
				if (is_dp)
					args.v1.usPixelClock = cpu_to_le16(dp_clock / 10);
				else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
					args.v1.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10);
				else
					args.v1.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			}
			args.v1.ucConfig = ATOM_TRANSMITTER_CONFIG_CLKSRC_PPLL;
			if (dig_encoder)
				args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_DIG2_ENCODER;
			else
				args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_DIG1_ENCODER;
			if ((adev->flags & AMD_IS_APU) &&
			    (amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_UNIPHY)) {
				if (is_dp ||
				    !amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock)) {
					if (igp_lane_info & 0x1)
						args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_0_3;
					else if (igp_lane_info & 0x2)
						args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_4_7;
					else if (igp_lane_info & 0x4)
						args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_8_11;
					else if (igp_lane_info & 0x8)
						args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_12_15;
				} else {
					if (igp_lane_info & 0x3)
						args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_0_7;
					else if (igp_lane_info & 0xc)
						args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_8_15;
				}
			}
			if (dig->linkb)
				args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LINKB;
			else
				args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LINKA;
			if (is_dp)
				args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_COHERENT;
			else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
				if (dig->coherent_mode)
					args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_COHERENT;
				if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
					args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_8LANE_LINK;
			}
			break;
		case 2:
			args.v2.ucAction = action;
			if (action == ATOM_TRANSMITTER_ACTION_INIT) {
				args.v2.usInitInfo = cpu_to_le16(connector_object_id);
			} else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) {
				args.v2.asMode.ucLaneSel = lane_num;
				args.v2.asMode.ucLaneSet = lane_set;
			} else {
				if (is_dp)
					args.v2.usPixelClock = cpu_to_le16(dp_clock / 10);
				else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
					args.v2.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10);
				else
					args.v2.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			}
			args.v2.acConfig.ucEncoderSel = dig_encoder;
			if (dig->linkb)
				args.v2.acConfig.ucLinkSel = 1;
			/* map the UNIPHY instance to the transmitter select field */
			switch (amdgpu_encoder->encoder_id) {
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
				args.v2.acConfig.ucTransmitterSel = 0;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
				args.v2.acConfig.ucTransmitterSel = 1;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
				args.v2.acConfig.ucTransmitterSel = 2;
				break;
			}
			if (is_dp) {
				args.v2.acConfig.fCoherentMode = 1;
				args.v2.acConfig.fDPConnector = 1;
			} else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
				if (dig->coherent_mode)
					args.v2.acConfig.fCoherentMode = 1;
				if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
					args.v2.acConfig.fDualLinkConnector = 1;
			}
			break;
		case 3:
			args.v3.ucAction = action;
			if (action == ATOM_TRANSMITTER_ACTION_INIT) {
				args.v3.usInitInfo = cpu_to_le16(connector_object_id);
			} else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) {
				args.v3.asMode.ucLaneSel = lane_num;
				args.v3.asMode.ucLaneSet = lane_set;
			} else {
				if (is_dp)
					args.v3.usPixelClock = cpu_to_le16(dp_clock / 10);
				else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
					args.v3.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10);
				else
					args.v3.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			}
			if (is_dp)
				args.v3.ucLaneNum = dp_lane_count;
			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.v3.ucLaneNum = 8;
			else
				args.v3.ucLaneNum = 4;
			if (dig->linkb)
				args.v3.acConfig.ucLinkSel = 1;
			if (dig_encoder & 1)
				args.v3.acConfig.ucEncoderSel = 1;
			/* Select the PLL for the PHY
			 * DP PHY should be clocked from external src if there is
			 * one.
			 */
			/* On DCE4, if there is an external clock, it generates the DP ref clock */
			if (is_dp && adev->clock.dp_extclk)
				args.v3.acConfig.ucRefClkSource = 2; /* external src */
			else
				args.v3.acConfig.ucRefClkSource = pll_id;
			switch (amdgpu_encoder->encoder_id) {
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
				args.v3.acConfig.ucTransmitterSel = 0;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
				args.v3.acConfig.ucTransmitterSel = 1;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
				args.v3.acConfig.ucTransmitterSel = 2;
				break;
			}
			if (is_dp)
				args.v3.acConfig.fCoherentMode = 1; /* DP requires coherent */
			else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
				if (dig->coherent_mode)
					args.v3.acConfig.fCoherentMode = 1;
				if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
					args.v3.acConfig.fDualLinkConnector = 1;
			}
			break;
		case 4:
			args.v4.ucAction = action;
			if (action == ATOM_TRANSMITTER_ACTION_INIT) {
				args.v4.usInitInfo = cpu_to_le16(connector_object_id);
			} else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) {
				args.v4.asMode.ucLaneSel = lane_num;
				args.v4.asMode.ucLaneSet = lane_set;
			} else {
				if (is_dp)
					args.v4.usPixelClock = cpu_to_le16(dp_clock / 10);
				else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
					args.v4.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10);
				else
					args.v4.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			}
			if (is_dp)
				args.v4.ucLaneNum = dp_lane_count;
			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.v4.ucLaneNum = 8;
			else
				args.v4.ucLaneNum = 4;
			if (dig->linkb)
				args.v4.acConfig.ucLinkSel = 1;
			if (dig_encoder & 1)
				args.v4.acConfig.ucEncoderSel = 1;
			/* Select the PLL for the PHY
			 * DP PHY should be clocked from external src if there is
			 * one.
			 */
			/* On DCE5 DCPLL usually generates the DP ref clock */
			if (is_dp) {
				if (adev->clock.dp_extclk)
					args.v4.acConfig.ucRefClkSource = ENCODER_REFCLK_SRC_EXTCLK;
				else
					args.v4.acConfig.ucRefClkSource = ENCODER_REFCLK_SRC_DCPLL;
			} else
				args.v4.acConfig.ucRefClkSource = pll_id;
			switch (amdgpu_encoder->encoder_id) {
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
				args.v4.acConfig.ucTransmitterSel = 0;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
				args.v4.acConfig.ucTransmitterSel = 1;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
				args.v4.acConfig.ucTransmitterSel = 2;
				break;
			}
			if (is_dp)
				args.v4.acConfig.fCoherentMode = 1; /* DP requires coherent */
			else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
				if (dig->coherent_mode)
					args.v4.acConfig.fCoherentMode = 1;
				if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
					args.v4.acConfig.fDualLinkConnector = 1;
			}
			break;
		case 5:
			args.v5.ucAction = action;
			/* v5 takes the symbol clock rather than the pixel clock */
			if (is_dp)
				args.v5.usSymClock = cpu_to_le16(dp_clock / 10);
			else
				args.v5.usSymClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			/* each UNIPHY block exposes two PHYs (A/B, C/D, E/F) */
			switch (amdgpu_encoder->encoder_id) {
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
				if (dig->linkb)
					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYB;
				else
					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYA;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
				if (dig->linkb)
					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYD;
				else
					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYC;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
				if (dig->linkb)
					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYF;
				else
					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYE;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
				args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYG;
				break;
			}
			if (is_dp)
				args.v5.ucLaneNum = dp_lane_count;
			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.v5.ucLaneNum = 8;
			else
				args.v5.ucLaneNum = 4;
			args.v5.ucConnObjId = connector_object_id;
			args.v5.ucDigMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
			if (is_dp && adev->clock.dp_extclk)
				args.v5.asConfig.ucPhyClkSrcId = ENCODER_REFCLK_SRC_EXTCLK;
			else
				args.v5.asConfig.ucPhyClkSrcId = pll_id;
			if (is_dp)
				args.v5.asConfig.ucCoherentMode = 1; /* DP requires coherent */
			else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
				if (dig->coherent_mode)
					args.v5.asConfig.ucCoherentMode = 1;
			}
			/* HPD pins are 1-based in the table; 0 means none */
			if (hpd_id == AMDGPU_HPD_NONE)
				args.v5.asConfig.ucHPDSel = 0;
			else
				args.v5.asConfig.ucHPDSel = hpd_id + 1;
			args.v5.ucDigEncoderSel = 1 << dig_encoder;
			args.v5.ucDPLaneSet = lane_set;
			break;
		case 6:
			args.v6.ucAction = action;
			if (is_dp)
				args.v6.ulSymClock = cpu_to_le32(dp_clock / 10);
			else
				args.v6.ulSymClock = cpu_to_le32(amdgpu_encoder->pixel_clock / 10);
			switch (amdgpu_encoder->encoder_id) {
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
				if (dig->linkb)
					args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYB;
				else
					args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYA;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
				if (dig->linkb)
					args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYD;
				else
					args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYC;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
				if (dig->linkb)
					args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYF;
				else
					args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYE;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
				args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYG;
				break;
			}
			if (is_dp)
				args.v6.ucLaneNum = dp_lane_count;
			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.v6.ucLaneNum = 8;
			else
				args.v6.ucLaneNum = 4;
			args.v6.ucConnObjId = connector_object_id;
			/* v6 overlays ucDPLaneSet and ucDigMode by action */
			if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH)
				args.v6.ucDPLaneSet = lane_set;
			else
				args.v6.ucDigMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
			if (hpd_id == AMDGPU_HPD_NONE)
				args.v6.ucHPDSel = 0;
			else
				args.v6.ucHPDSel = hpd_id + 1;
			args.v6.ucDigEncoderSel = 1 << dig_encoder;
			break;
		default:
			DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
			break;
		}
		break;
	default:
		DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
		break;
	}

	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
}
  1038. bool
  1039. amdgpu_atombios_encoder_set_edp_panel_power(struct drm_connector *connector,
  1040. int action)
  1041. {
  1042. struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
  1043. struct drm_device *dev = amdgpu_connector->base.dev;
  1044. struct amdgpu_device *adev = dev->dev_private;
  1045. union dig_transmitter_control args;
  1046. int index = GetIndexIntoMasterTable(COMMAND, UNIPHYTransmitterControl);
  1047. uint8_t frev, crev;
  1048. if (connector->connector_type != DRM_MODE_CONNECTOR_eDP)
  1049. goto done;
  1050. if ((action != ATOM_TRANSMITTER_ACTION_POWER_ON) &&
  1051. (action != ATOM_TRANSMITTER_ACTION_POWER_OFF))
  1052. goto done;
  1053. if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
  1054. goto done;
  1055. memset(&args, 0, sizeof(args));
  1056. args.v1.ucAction = action;
  1057. amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
  1058. /* wait for the panel to power up */
  1059. if (action == ATOM_TRANSMITTER_ACTION_POWER_ON) {
  1060. int i;
  1061. for (i = 0; i < 300; i++) {
  1062. if (amdgpu_display_hpd_sense(adev, amdgpu_connector->hpd.hpd))
  1063. return true;
  1064. mdelay(1);
  1065. }
  1066. return false;
  1067. }
  1068. done:
  1069. return true;
  1070. }
/* Parameter block for the ExternalEncoderControl AtomBIOS command table;
 * member chosen by the frev/crev the BIOS reports. */
union external_encoder_control {
	EXTERNAL_ENCODER_CONTROL_PS_ALLOCATION v1;
	EXTERNAL_ENCODER_CONTROL_PS_ALLOCATION_V3 v3;
};
/*
 * amdgpu_atombios_encoder_setup_external_encoder - program an external
 * (bridge) encoder attached to @encoder via the ExternalEncoderControl table.
 * @encoder: internal DRM encoder feeding the bridge
 * @ext_encoder: the external encoder object being configured
 * @action: EXTERNAL_ENCODER_ACTION_V3_* / ATOM_ENABLE-style opcode
 *
 * Fills the external_encoder_control union according to the table revision
 * and executes it; silently returns if the table header cannot be parsed.
 */
static void
amdgpu_atombios_encoder_setup_external_encoder(struct drm_encoder *encoder,
					       struct drm_encoder *ext_encoder,
					       int action)
{
	struct drm_device *dev = encoder->dev;
	struct amdgpu_device *adev = dev->dev_private;
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
	struct amdgpu_encoder *ext_amdgpu_encoder = to_amdgpu_encoder(ext_encoder);
	union external_encoder_control args;
	struct drm_connector *connector;
	int index = GetIndexIntoMasterTable(COMMAND, ExternalEncoderControl);
	u8 frev, crev;
	int dp_clock = 0;
	int dp_lane_count = 0;
	int connector_object_id = 0;
	u32 ext_enum = (ext_amdgpu_encoder->encoder_enum & ENUM_ID_MASK) >> ENUM_ID_SHIFT;

	if (action == EXTERNAL_ENCODER_ACTION_V3_ENCODER_INIT)
		connector = amdgpu_get_connector_for_encoder_init(encoder);
	else
		connector = amdgpu_get_connector_for_encoder(encoder);

	if (connector) {
		struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
		struct amdgpu_connector_atom_dig *dig_connector =
			amdgpu_connector->con_priv;

		dp_clock = dig_connector->dp_clock;
		dp_lane_count = dig_connector->dp_lane_count;
		connector_object_id =
			(amdgpu_connector->connector_object_id & OBJECT_ID_MASK) >> OBJECT_ID_SHIFT;
	}

	memset(&args, 0, sizeof(args));

	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
		return;

	switch (frev) {
	case 1:
		/* no params on frev 1 */
		break;
	case 2:
		switch (crev) {
		case 1:
		case 2:
			args.v1.sDigEncoder.ucAction = action;
			args.v1.sDigEncoder.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			args.v1.sDigEncoder.ucEncoderMode =
				amdgpu_atombios_encoder_get_encoder_mode(encoder);
			if (ENCODER_MODE_IS_DP(args.v1.sDigEncoder.ucEncoderMode)) {
				if (dp_clock == 270000)
					args.v1.sDigEncoder.ucConfig |= ATOM_ENCODER_CONFIG_DPLINKRATE_2_70GHZ;
				args.v1.sDigEncoder.ucLaneNum = dp_lane_count;
			} else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.v1.sDigEncoder.ucLaneNum = 8;
			else
				args.v1.sDigEncoder.ucLaneNum = 4;
			break;
		case 3:
			args.v3.sExtEncoder.ucAction = action;
			/* INIT takes the connector id; other actions take the pixel clock */
			if (action == EXTERNAL_ENCODER_ACTION_V3_ENCODER_INIT)
				args.v3.sExtEncoder.usConnectorId = cpu_to_le16(connector_object_id);
			else
				args.v3.sExtEncoder.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			args.v3.sExtEncoder.ucEncoderMode =
				amdgpu_atombios_encoder_get_encoder_mode(encoder);
			if (ENCODER_MODE_IS_DP(args.v3.sExtEncoder.ucEncoderMode)) {
				if (dp_clock == 270000)
					args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_DPLINKRATE_2_70GHZ;
				else if (dp_clock == 540000)
					args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_DPLINKRATE_5_40GHZ;
				args.v3.sExtEncoder.ucLaneNum = dp_lane_count;
			} else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.v3.sExtEncoder.ucLaneNum = 8;
			else
				args.v3.sExtEncoder.ucLaneNum = 4;
			/* select which external encoder instance on the bridge */
			switch (ext_enum) {
			case GRAPH_OBJECT_ENUM_ID1:
				args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_ENCODER1;
				break;
			case GRAPH_OBJECT_ENUM_ID2:
				args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_ENCODER2;
				break;
			case GRAPH_OBJECT_ENUM_ID3:
				args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_ENCODER3;
				break;
			}
			args.v3.sExtEncoder.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
			break;
		default:
			DRM_ERROR("Unknown table version: %d, %d\n", frev, crev);
			return;
		}
		break;
	default:
		DRM_ERROR("Unknown table version: %d, %d\n", frev, crev);
		return;
	}

	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
}
/*
 * amdgpu_atombios_encoder_setup_dig - bring the whole DIG path (encoder +
 * transmitter, plus any external bridge) up or down.
 * @encoder: DRM encoder to enable or disable
 * @action: ATOM_ENABLE to light the output; anything else tears it down
 *
 * The enable path follows a strict order: encoder setup and panel mode,
 * external encoder setup, eDP panel power on, transmitter enable, DP link
 * training, video on, backlight.  The disable path reverses it.
 */
static void
amdgpu_atombios_encoder_setup_dig(struct drm_encoder *encoder, int action)
{
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
	struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder);
	struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv;
	struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
	struct amdgpu_connector *amdgpu_connector = NULL;
	struct amdgpu_connector_atom_dig *amdgpu_dig_connector = NULL;

	if (connector) {
		amdgpu_connector = to_amdgpu_connector(connector);
		amdgpu_dig_connector = amdgpu_connector->con_priv;
	}

	if (action == ATOM_ENABLE) {
		if (!connector)
			dig->panel_mode = DP_PANEL_MODE_EXTERNAL_DP_MODE;
		else
			dig->panel_mode = amdgpu_atombios_dp_get_panel_mode(encoder, connector);

		/* setup and enable the encoder */
		amdgpu_atombios_encoder_setup_dig_encoder(encoder, ATOM_ENCODER_CMD_SETUP, 0);
		amdgpu_atombios_encoder_setup_dig_encoder(encoder,
							  ATOM_ENCODER_CMD_SETUP_PANEL_MODE,
							  dig->panel_mode);
		if (ext_encoder)
			amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder,
								       EXTERNAL_ENCODER_ACTION_V3_ENCODER_SETUP);
		/* eDP panels must be powered before the transmitter is enabled */
		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
		    connector) {
			if (connector->connector_type == DRM_MODE_CONNECTOR_eDP) {
				amdgpu_atombios_encoder_set_edp_panel_power(connector,
									    ATOM_TRANSMITTER_ACTION_POWER_ON);
				amdgpu_dig_connector->edp_on = true;
			}
		}
		/* enable the transmitter */
		amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
							      ATOM_TRANSMITTER_ACTION_ENABLE,
							      0, 0);
		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
		    connector) {
			/* DP_SET_POWER_D0 is set in amdgpu_atombios_dp_link_train */
			amdgpu_atombios_dp_link_train(encoder, connector);
			amdgpu_atombios_encoder_setup_dig_encoder(encoder, ATOM_ENCODER_CMD_DP_VIDEO_ON, 0);
		}
		if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT))
			amdgpu_atombios_encoder_set_backlight_level(amdgpu_encoder, dig->backlight_level);
		if (ext_encoder)
			amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder, ATOM_ENABLE);
	} else {
		/* stop the DP video stream before touching the link */
		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
		    connector)
			amdgpu_atombios_encoder_setup_dig_encoder(encoder,
								  ATOM_ENCODER_CMD_DP_VIDEO_OFF, 0);
		if (ext_encoder)
			amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder, ATOM_DISABLE);
		if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT))
			amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
								      ATOM_TRANSMITTER_ACTION_LCD_BLOFF, 0, 0);
		/* put the DP sink into D3 while the link is still up */
		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
		    connector)
			amdgpu_atombios_dp_set_rx_power_state(connector, DP_SET_POWER_D3);
		/* disable the transmitter */
		amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
							      ATOM_TRANSMITTER_ACTION_DISABLE, 0, 0);
		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
		    connector) {
			if (connector->connector_type == DRM_MODE_CONNECTOR_eDP) {
				amdgpu_atombios_encoder_set_edp_panel_power(connector,
									    ATOM_TRANSMITTER_ACTION_POWER_OFF);
				amdgpu_dig_connector->edp_on = false;
			}
		}
	}
}
  1245. void
  1246. amdgpu_atombios_encoder_dpms(struct drm_encoder *encoder, int mode)
  1247. {
  1248. struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
  1249. DRM_DEBUG_KMS("encoder dpms %d to mode %d, devices %08x, active_devices %08x\n",
  1250. amdgpu_encoder->encoder_id, mode, amdgpu_encoder->devices,
  1251. amdgpu_encoder->active_device);
  1252. switch (amdgpu_encoder->encoder_id) {
  1253. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
  1254. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
  1255. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
  1256. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
  1257. switch (mode) {
  1258. case DRM_MODE_DPMS_ON:
  1259. amdgpu_atombios_encoder_setup_dig(encoder, ATOM_ENABLE);
  1260. break;
  1261. case DRM_MODE_DPMS_STANDBY:
  1262. case DRM_MODE_DPMS_SUSPEND:
  1263. case DRM_MODE_DPMS_OFF:
  1264. amdgpu_atombios_encoder_setup_dig(encoder, ATOM_DISABLE);
  1265. break;
  1266. }
  1267. break;
  1268. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
  1269. switch (mode) {
  1270. case DRM_MODE_DPMS_ON:
  1271. amdgpu_atombios_encoder_setup_dvo(encoder, ATOM_ENABLE);
  1272. break;
  1273. case DRM_MODE_DPMS_STANDBY:
  1274. case DRM_MODE_DPMS_SUSPEND:
  1275. case DRM_MODE_DPMS_OFF:
  1276. amdgpu_atombios_encoder_setup_dvo(encoder, ATOM_DISABLE);
  1277. break;
  1278. }
  1279. break;
  1280. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
  1281. switch (mode) {
  1282. case DRM_MODE_DPMS_ON:
  1283. amdgpu_atombios_encoder_setup_dac(encoder, ATOM_ENABLE);
  1284. break;
  1285. case DRM_MODE_DPMS_STANDBY:
  1286. case DRM_MODE_DPMS_SUSPEND:
  1287. case DRM_MODE_DPMS_OFF:
  1288. amdgpu_atombios_encoder_setup_dac(encoder, ATOM_DISABLE);
  1289. break;
  1290. }
  1291. break;
  1292. default:
  1293. return;
  1294. }
  1295. }
/* Parameter block for the SelectCRTC_Source AtomBIOS command table;
 * member chosen by the frev/crev the BIOS reports. */
union crtc_source_param {
	SELECT_CRTC_SOURCE_PS_ALLOCATION v1;
	SELECT_CRTC_SOURCE_PARAMETERS_V2 v2;
	SELECT_CRTC_SOURCE_PARAMETERS_V3 v3;
};
  1301. void
  1302. amdgpu_atombios_encoder_set_crtc_source(struct drm_encoder *encoder)
  1303. {
  1304. struct drm_device *dev = encoder->dev;
  1305. struct amdgpu_device *adev = dev->dev_private;
  1306. struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
  1307. struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(encoder->crtc);
  1308. union crtc_source_param args;
  1309. int index = GetIndexIntoMasterTable(COMMAND, SelectCRTC_Source);
  1310. uint8_t frev, crev;
  1311. struct amdgpu_encoder_atom_dig *dig;
  1312. memset(&args, 0, sizeof(args));
  1313. if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
  1314. return;
  1315. switch (frev) {
  1316. case 1:
  1317. switch (crev) {
  1318. case 1:
  1319. default:
  1320. args.v1.ucCRTC = amdgpu_crtc->crtc_id;
  1321. switch (amdgpu_encoder->encoder_id) {
  1322. case ENCODER_OBJECT_ID_INTERNAL_TMDS1:
  1323. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_TMDS1:
  1324. args.v1.ucDevice = ATOM_DEVICE_DFP1_INDEX;
  1325. break;
  1326. case ENCODER_OBJECT_ID_INTERNAL_LVDS:
  1327. case ENCODER_OBJECT_ID_INTERNAL_LVTM1:
  1328. if (amdgpu_encoder->devices & ATOM_DEVICE_LCD1_SUPPORT)
  1329. args.v1.ucDevice = ATOM_DEVICE_LCD1_INDEX;
  1330. else
  1331. args.v1.ucDevice = ATOM_DEVICE_DFP3_INDEX;
  1332. break;
  1333. case ENCODER_OBJECT_ID_INTERNAL_DVO1:
  1334. case ENCODER_OBJECT_ID_INTERNAL_DDI:
  1335. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
  1336. args.v1.ucDevice = ATOM_DEVICE_DFP2_INDEX;
  1337. break;
  1338. case ENCODER_OBJECT_ID_INTERNAL_DAC1:
  1339. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
  1340. if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
  1341. args.v1.ucDevice = ATOM_DEVICE_TV1_INDEX;
  1342. else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
  1343. args.v1.ucDevice = ATOM_DEVICE_CV_INDEX;
  1344. else
  1345. args.v1.ucDevice = ATOM_DEVICE_CRT1_INDEX;
  1346. break;
  1347. case ENCODER_OBJECT_ID_INTERNAL_DAC2:
  1348. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2:
  1349. if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
  1350. args.v1.ucDevice = ATOM_DEVICE_TV1_INDEX;
  1351. else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
  1352. args.v1.ucDevice = ATOM_DEVICE_CV_INDEX;
  1353. else
  1354. args.v1.ucDevice = ATOM_DEVICE_CRT2_INDEX;
  1355. break;
  1356. }
  1357. break;
  1358. case 2:
  1359. args.v2.ucCRTC = amdgpu_crtc->crtc_id;
  1360. if (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE) {
  1361. struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
  1362. if (connector->connector_type == DRM_MODE_CONNECTOR_LVDS)
  1363. args.v2.ucEncodeMode = ATOM_ENCODER_MODE_LVDS;
  1364. else if (connector->connector_type == DRM_MODE_CONNECTOR_VGA)
  1365. args.v2.ucEncodeMode = ATOM_ENCODER_MODE_CRT;
  1366. else
  1367. args.v2.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
  1368. } else if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) {
  1369. args.v2.ucEncodeMode = ATOM_ENCODER_MODE_LVDS;
  1370. } else {
  1371. args.v2.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
  1372. }
  1373. switch (amdgpu_encoder->encoder_id) {
  1374. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
  1375. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
  1376. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
  1377. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
  1378. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
  1379. dig = amdgpu_encoder->enc_priv;
  1380. switch (dig->dig_encoder) {
  1381. case 0:
  1382. args.v2.ucEncoderID = ASIC_INT_DIG1_ENCODER_ID;
  1383. break;
  1384. case 1:
  1385. args.v2.ucEncoderID = ASIC_INT_DIG2_ENCODER_ID;
  1386. break;
  1387. case 2:
  1388. args.v2.ucEncoderID = ASIC_INT_DIG3_ENCODER_ID;
  1389. break;
  1390. case 3:
  1391. args.v2.ucEncoderID = ASIC_INT_DIG4_ENCODER_ID;
  1392. break;
  1393. case 4:
  1394. args.v2.ucEncoderID = ASIC_INT_DIG5_ENCODER_ID;
  1395. break;
  1396. case 5:
  1397. args.v2.ucEncoderID = ASIC_INT_DIG6_ENCODER_ID;
  1398. break;
  1399. case 6:
  1400. args.v2.ucEncoderID = ASIC_INT_DIG7_ENCODER_ID;
  1401. break;
  1402. }
  1403. break;
  1404. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
  1405. args.v2.ucEncoderID = ASIC_INT_DVO_ENCODER_ID;
  1406. break;
  1407. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
  1408. if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
  1409. args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
  1410. else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
  1411. args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
  1412. else
  1413. args.v2.ucEncoderID = ASIC_INT_DAC1_ENCODER_ID;
  1414. break;
  1415. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2:
  1416. if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
  1417. args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
  1418. else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
  1419. args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
  1420. else
  1421. args.v2.ucEncoderID = ASIC_INT_DAC2_ENCODER_ID;
  1422. break;
  1423. }
  1424. break;
  1425. case 3:
  1426. args.v3.ucCRTC = amdgpu_crtc->crtc_id;
  1427. if (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE) {
  1428. struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
  1429. if (connector->connector_type == DRM_MODE_CONNECTOR_LVDS)
  1430. args.v2.ucEncodeMode = ATOM_ENCODER_MODE_LVDS;
  1431. else if (connector->connector_type == DRM_MODE_CONNECTOR_VGA)
  1432. args.v2.ucEncodeMode = ATOM_ENCODER_MODE_CRT;
  1433. else
  1434. args.v2.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
  1435. } else if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) {
  1436. args.v2.ucEncodeMode = ATOM_ENCODER_MODE_LVDS;
  1437. } else {
  1438. args.v2.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
  1439. }
  1440. args.v3.ucDstBpc = amdgpu_atombios_encoder_get_bpc(encoder);
  1441. switch (amdgpu_encoder->encoder_id) {
  1442. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
  1443. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
  1444. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
  1445. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
  1446. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
  1447. dig = amdgpu_encoder->enc_priv;
  1448. switch (dig->dig_encoder) {
  1449. case 0:
  1450. args.v3.ucEncoderID = ASIC_INT_DIG1_ENCODER_ID;
  1451. break;
  1452. case 1:
  1453. args.v3.ucEncoderID = ASIC_INT_DIG2_ENCODER_ID;
  1454. break;
  1455. case 2:
  1456. args.v3.ucEncoderID = ASIC_INT_DIG3_ENCODER_ID;
  1457. break;
  1458. case 3:
  1459. args.v3.ucEncoderID = ASIC_INT_DIG4_ENCODER_ID;
  1460. break;
  1461. case 4:
  1462. args.v3.ucEncoderID = ASIC_INT_DIG5_ENCODER_ID;
  1463. break;
  1464. case 5:
  1465. args.v3.ucEncoderID = ASIC_INT_DIG6_ENCODER_ID;
  1466. break;
  1467. case 6:
  1468. args.v3.ucEncoderID = ASIC_INT_DIG7_ENCODER_ID;
  1469. break;
  1470. }
  1471. break;
  1472. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
  1473. args.v3.ucEncoderID = ASIC_INT_DVO_ENCODER_ID;
  1474. break;
  1475. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
  1476. if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
  1477. args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
  1478. else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
  1479. args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
  1480. else
  1481. args.v3.ucEncoderID = ASIC_INT_DAC1_ENCODER_ID;
  1482. break;
  1483. case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2:
  1484. if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
  1485. args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
  1486. else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
  1487. args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
  1488. else
  1489. args.v3.ucEncoderID = ASIC_INT_DAC2_ENCODER_ID;
  1490. break;
  1491. }
  1492. break;
  1493. }
  1494. break;
  1495. default:
  1496. DRM_ERROR("Unknown table version: %d, %d\n", frev, crev);
  1497. return;
  1498. }
  1499. amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
  1500. }
  1501. /* This only needs to be called once at startup */
  1502. void
  1503. amdgpu_atombios_encoder_init_dig(struct amdgpu_device *adev)
  1504. {
  1505. struct drm_device *dev = adev->ddev;
  1506. struct drm_encoder *encoder;
  1507. list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
  1508. struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
  1509. struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder);
  1510. switch (amdgpu_encoder->encoder_id) {
  1511. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
  1512. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
  1513. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
  1514. case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
  1515. amdgpu_atombios_encoder_setup_dig_transmitter(encoder, ATOM_TRANSMITTER_ACTION_INIT,
  1516. 0, 0);
  1517. break;
  1518. }
  1519. if (ext_encoder)
  1520. amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder,
  1521. EXTERNAL_ENCODER_ACTION_V3_ENCODER_INIT);
  1522. }
  1523. }
  1524. static bool
  1525. amdgpu_atombios_encoder_dac_load_detect(struct drm_encoder *encoder,
  1526. struct drm_connector *connector)
  1527. {
  1528. struct drm_device *dev = encoder->dev;
  1529. struct amdgpu_device *adev = dev->dev_private;
  1530. struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
  1531. struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
  1532. if (amdgpu_encoder->devices & (ATOM_DEVICE_TV_SUPPORT |
  1533. ATOM_DEVICE_CV_SUPPORT |
  1534. ATOM_DEVICE_CRT_SUPPORT)) {
  1535. DAC_LOAD_DETECTION_PS_ALLOCATION args;
  1536. int index = GetIndexIntoMasterTable(COMMAND, DAC_LoadDetection);
  1537. uint8_t frev, crev;
  1538. memset(&args, 0, sizeof(args));
  1539. if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
  1540. return false;
  1541. args.sDacload.ucMisc = 0;
  1542. if ((amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_DAC1) ||
  1543. (amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1))
  1544. args.sDacload.ucDacType = ATOM_DAC_A;
  1545. else
  1546. args.sDacload.ucDacType = ATOM_DAC_B;
  1547. if (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT)
  1548. args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_CRT1_SUPPORT);
  1549. else if (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT)
  1550. args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_CRT2_SUPPORT);
  1551. else if (amdgpu_connector->devices & ATOM_DEVICE_CV_SUPPORT) {
  1552. args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_CV_SUPPORT);
  1553. if (crev >= 3)
  1554. args.sDacload.ucMisc = DAC_LOAD_MISC_YPrPb;
  1555. } else if (amdgpu_connector->devices & ATOM_DEVICE_TV1_SUPPORT) {
  1556. args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_TV1_SUPPORT);
  1557. if (crev >= 3)
  1558. args.sDacload.ucMisc = DAC_LOAD_MISC_YPrPb;
  1559. }
  1560. amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
  1561. return true;
  1562. } else
  1563. return false;
  1564. }
  1565. enum drm_connector_status
  1566. amdgpu_atombios_encoder_dac_detect(struct drm_encoder *encoder,
  1567. struct drm_connector *connector)
  1568. {
  1569. struct drm_device *dev = encoder->dev;
  1570. struct amdgpu_device *adev = dev->dev_private;
  1571. struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
  1572. struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
  1573. uint32_t bios_0_scratch;
  1574. if (!amdgpu_atombios_encoder_dac_load_detect(encoder, connector)) {
  1575. DRM_DEBUG_KMS("detect returned false \n");
  1576. return connector_status_unknown;
  1577. }
  1578. bios_0_scratch = RREG32(mmBIOS_SCRATCH_0);
  1579. DRM_DEBUG_KMS("Bios 0 scratch %x %08x\n", bios_0_scratch, amdgpu_encoder->devices);
  1580. if (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT) {
  1581. if (bios_0_scratch & ATOM_S0_CRT1_MASK)
  1582. return connector_status_connected;
  1583. }
  1584. if (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT) {
  1585. if (bios_0_scratch & ATOM_S0_CRT2_MASK)
  1586. return connector_status_connected;
  1587. }
  1588. if (amdgpu_connector->devices & ATOM_DEVICE_CV_SUPPORT) {
  1589. if (bios_0_scratch & (ATOM_S0_CV_MASK|ATOM_S0_CV_MASK_A))
  1590. return connector_status_connected;
  1591. }
  1592. if (amdgpu_connector->devices & ATOM_DEVICE_TV1_SUPPORT) {
  1593. if (bios_0_scratch & (ATOM_S0_TV1_COMPOSITE | ATOM_S0_TV1_COMPOSITE_A))
  1594. return connector_status_connected; /* CTV */
  1595. else if (bios_0_scratch & (ATOM_S0_TV1_SVIDEO | ATOM_S0_TV1_SVIDEO_A))
  1596. return connector_status_connected; /* STV */
  1597. }
  1598. return connector_status_disconnected;
  1599. }
  1600. enum drm_connector_status
  1601. amdgpu_atombios_encoder_dig_detect(struct drm_encoder *encoder,
  1602. struct drm_connector *connector)
  1603. {
  1604. struct drm_device *dev = encoder->dev;
  1605. struct amdgpu_device *adev = dev->dev_private;
  1606. struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
  1607. struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
  1608. struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder);
  1609. u32 bios_0_scratch;
  1610. if (!ext_encoder)
  1611. return connector_status_unknown;
  1612. if ((amdgpu_connector->devices & ATOM_DEVICE_CRT_SUPPORT) == 0)
  1613. return connector_status_unknown;
  1614. /* load detect on the dp bridge */
  1615. amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder,
  1616. EXTERNAL_ENCODER_ACTION_V3_DACLOAD_DETECTION);
  1617. bios_0_scratch = RREG32(mmBIOS_SCRATCH_0);
  1618. DRM_DEBUG_KMS("Bios 0 scratch %x %08x\n", bios_0_scratch, amdgpu_encoder->devices);
  1619. if (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT) {
  1620. if (bios_0_scratch & ATOM_S0_CRT1_MASK)
  1621. return connector_status_connected;
  1622. }
  1623. if (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT) {
  1624. if (bios_0_scratch & ATOM_S0_CRT2_MASK)
  1625. return connector_status_connected;
  1626. }
  1627. if (amdgpu_connector->devices & ATOM_DEVICE_CV_SUPPORT) {
  1628. if (bios_0_scratch & (ATOM_S0_CV_MASK|ATOM_S0_CV_MASK_A))
  1629. return connector_status_connected;
  1630. }
  1631. if (amdgpu_connector->devices & ATOM_DEVICE_TV1_SUPPORT) {
  1632. if (bios_0_scratch & (ATOM_S0_TV1_COMPOSITE | ATOM_S0_TV1_COMPOSITE_A))
  1633. return connector_status_connected; /* CTV */
  1634. else if (bios_0_scratch & (ATOM_S0_TV1_SVIDEO | ATOM_S0_TV1_SVIDEO_A))
  1635. return connector_status_connected; /* STV */
  1636. }
  1637. return connector_status_disconnected;
  1638. }
  1639. void
  1640. amdgpu_atombios_encoder_setup_ext_encoder_ddc(struct drm_encoder *encoder)
  1641. {
  1642. struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder);
  1643. if (ext_encoder)
  1644. /* ddc_setup on the dp bridge */
  1645. amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder,
  1646. EXTERNAL_ENCODER_ACTION_V3_DDC_SETUP);
  1647. }
  1648. void
  1649. amdgpu_atombios_encoder_set_bios_scratch_regs(struct drm_connector *connector,
  1650. struct drm_encoder *encoder,
  1651. bool connected)
  1652. {
  1653. struct drm_device *dev = connector->dev;
  1654. struct amdgpu_device *adev = dev->dev_private;
  1655. struct amdgpu_connector *amdgpu_connector =
  1656. to_amdgpu_connector(connector);
  1657. struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
  1658. uint32_t bios_0_scratch, bios_3_scratch, bios_6_scratch;
  1659. bios_0_scratch = RREG32(mmBIOS_SCRATCH_0);
  1660. bios_3_scratch = RREG32(mmBIOS_SCRATCH_3);
  1661. bios_6_scratch = RREG32(mmBIOS_SCRATCH_6);
  1662. if ((amdgpu_encoder->devices & ATOM_DEVICE_LCD1_SUPPORT) &&
  1663. (amdgpu_connector->devices & ATOM_DEVICE_LCD1_SUPPORT)) {
  1664. if (connected) {
  1665. DRM_DEBUG_KMS("LCD1 connected\n");
  1666. bios_0_scratch |= ATOM_S0_LCD1;
  1667. bios_3_scratch |= ATOM_S3_LCD1_ACTIVE;
  1668. bios_6_scratch |= ATOM_S6_ACC_REQ_LCD1;
  1669. } else {
  1670. DRM_DEBUG_KMS("LCD1 disconnected\n");
  1671. bios_0_scratch &= ~ATOM_S0_LCD1;
  1672. bios_3_scratch &= ~ATOM_S3_LCD1_ACTIVE;
  1673. bios_6_scratch &= ~ATOM_S6_ACC_REQ_LCD1;
  1674. }
  1675. }
  1676. if ((amdgpu_encoder->devices & ATOM_DEVICE_CRT1_SUPPORT) &&
  1677. (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT)) {
  1678. if (connected) {
  1679. DRM_DEBUG_KMS("CRT1 connected\n");
  1680. bios_0_scratch |= ATOM_S0_CRT1_COLOR;
  1681. bios_3_scratch |= ATOM_S3_CRT1_ACTIVE;
  1682. bios_6_scratch |= ATOM_S6_ACC_REQ_CRT1;
  1683. } else {
  1684. DRM_DEBUG_KMS("CRT1 disconnected\n");
  1685. bios_0_scratch &= ~ATOM_S0_CRT1_MASK;
  1686. bios_3_scratch &= ~ATOM_S3_CRT1_ACTIVE;
  1687. bios_6_scratch &= ~ATOM_S6_ACC_REQ_CRT1;
  1688. }
  1689. }
  1690. if ((amdgpu_encoder->devices & ATOM_DEVICE_CRT2_SUPPORT) &&
  1691. (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT)) {
  1692. if (connected) {
  1693. DRM_DEBUG_KMS("CRT2 connected\n");
  1694. bios_0_scratch |= ATOM_S0_CRT2_COLOR;
  1695. bios_3_scratch |= ATOM_S3_CRT2_ACTIVE;
  1696. bios_6_scratch |= ATOM_S6_ACC_REQ_CRT2;
  1697. } else {
  1698. DRM_DEBUG_KMS("CRT2 disconnected\n");
  1699. bios_0_scratch &= ~ATOM_S0_CRT2_MASK;
  1700. bios_3_scratch &= ~ATOM_S3_CRT2_ACTIVE;
  1701. bios_6_scratch &= ~ATOM_S6_ACC_REQ_CRT2;
  1702. }
  1703. }
  1704. if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP1_SUPPORT) &&
  1705. (amdgpu_connector->devices & ATOM_DEVICE_DFP1_SUPPORT)) {
  1706. if (connected) {
  1707. DRM_DEBUG_KMS("DFP1 connected\n");
  1708. bios_0_scratch |= ATOM_S0_DFP1;
  1709. bios_3_scratch |= ATOM_S3_DFP1_ACTIVE;
  1710. bios_6_scratch |= ATOM_S6_ACC_REQ_DFP1;
  1711. } else {
  1712. DRM_DEBUG_KMS("DFP1 disconnected\n");
  1713. bios_0_scratch &= ~ATOM_S0_DFP1;
  1714. bios_3_scratch &= ~ATOM_S3_DFP1_ACTIVE;
  1715. bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP1;
  1716. }
  1717. }
  1718. if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP2_SUPPORT) &&
  1719. (amdgpu_connector->devices & ATOM_DEVICE_DFP2_SUPPORT)) {
  1720. if (connected) {
  1721. DRM_DEBUG_KMS("DFP2 connected\n");
  1722. bios_0_scratch |= ATOM_S0_DFP2;
  1723. bios_3_scratch |= ATOM_S3_DFP2_ACTIVE;
  1724. bios_6_scratch |= ATOM_S6_ACC_REQ_DFP2;
  1725. } else {
  1726. DRM_DEBUG_KMS("DFP2 disconnected\n");
  1727. bios_0_scratch &= ~ATOM_S0_DFP2;
  1728. bios_3_scratch &= ~ATOM_S3_DFP2_ACTIVE;
  1729. bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP2;
  1730. }
  1731. }
  1732. if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP3_SUPPORT) &&
  1733. (amdgpu_connector->devices & ATOM_DEVICE_DFP3_SUPPORT)) {
  1734. if (connected) {
  1735. DRM_DEBUG_KMS("DFP3 connected\n");
  1736. bios_0_scratch |= ATOM_S0_DFP3;
  1737. bios_3_scratch |= ATOM_S3_DFP3_ACTIVE;
  1738. bios_6_scratch |= ATOM_S6_ACC_REQ_DFP3;
  1739. } else {
  1740. DRM_DEBUG_KMS("DFP3 disconnected\n");
  1741. bios_0_scratch &= ~ATOM_S0_DFP3;
  1742. bios_3_scratch &= ~ATOM_S3_DFP3_ACTIVE;
  1743. bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP3;
  1744. }
  1745. }
  1746. if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP4_SUPPORT) &&
  1747. (amdgpu_connector->devices & ATOM_DEVICE_DFP4_SUPPORT)) {
  1748. if (connected) {
  1749. DRM_DEBUG_KMS("DFP4 connected\n");
  1750. bios_0_scratch |= ATOM_S0_DFP4;
  1751. bios_3_scratch |= ATOM_S3_DFP4_ACTIVE;
  1752. bios_6_scratch |= ATOM_S6_ACC_REQ_DFP4;
  1753. } else {
  1754. DRM_DEBUG_KMS("DFP4 disconnected\n");
  1755. bios_0_scratch &= ~ATOM_S0_DFP4;
  1756. bios_3_scratch &= ~ATOM_S3_DFP4_ACTIVE;
  1757. bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP4;
  1758. }
  1759. }
  1760. if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP5_SUPPORT) &&
  1761. (amdgpu_connector->devices & ATOM_DEVICE_DFP5_SUPPORT)) {
  1762. if (connected) {
  1763. DRM_DEBUG_KMS("DFP5 connected\n");
  1764. bios_0_scratch |= ATOM_S0_DFP5;
  1765. bios_3_scratch |= ATOM_S3_DFP5_ACTIVE;
  1766. bios_6_scratch |= ATOM_S6_ACC_REQ_DFP5;
  1767. } else {
  1768. DRM_DEBUG_KMS("DFP5 disconnected\n");
  1769. bios_0_scratch &= ~ATOM_S0_DFP5;
  1770. bios_3_scratch &= ~ATOM_S3_DFP5_ACTIVE;
  1771. bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP5;
  1772. }
  1773. }
  1774. if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP6_SUPPORT) &&
  1775. (amdgpu_connector->devices & ATOM_DEVICE_DFP6_SUPPORT)) {
  1776. if (connected) {
  1777. DRM_DEBUG_KMS("DFP6 connected\n");
  1778. bios_0_scratch |= ATOM_S0_DFP6;
  1779. bios_3_scratch |= ATOM_S3_DFP6_ACTIVE;
  1780. bios_6_scratch |= ATOM_S6_ACC_REQ_DFP6;
  1781. } else {
  1782. DRM_DEBUG_KMS("DFP6 disconnected\n");
  1783. bios_0_scratch &= ~ATOM_S0_DFP6;
  1784. bios_3_scratch &= ~ATOM_S3_DFP6_ACTIVE;
  1785. bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP6;
  1786. }
  1787. }
  1788. WREG32(mmBIOS_SCRATCH_0, bios_0_scratch);
  1789. WREG32(mmBIOS_SCRATCH_3, bios_3_scratch);
  1790. WREG32(mmBIOS_SCRATCH_6, bios_6_scratch);
  1791. }
/* Overlay of the LVDS_Info data-table layouts found in the VBIOS;
 * the table revision returned by the header parse decides which member
 * is valid. */
union lvds_info {
	struct _ATOM_LVDS_INFO info;
	struct _ATOM_LVDS_INFO_V12 info_12;
};
/*
 * Parse the VBIOS LVDS_Info data table and build the panel description
 * for an LCD encoder: native mode timings, power-sequencing delay, misc
 * flags, spread-spectrum id, and any patch records (including a fake EDID
 * the BIOS may hard-code).
 *
 * Returns a kzalloc'ed amdgpu_encoder_atom_dig (caller owns/frees it),
 * or NULL if the data table header cannot be parsed or allocation fails.
 */
struct amdgpu_encoder_atom_dig *
amdgpu_atombios_encoder_get_lcd_info(struct amdgpu_encoder *encoder)
{
	struct drm_device *dev = encoder->base.dev;
	struct amdgpu_device *adev = dev->dev_private;
	struct amdgpu_mode_info *mode_info = &adev->mode_info;
	int index = GetIndexIntoMasterTable(DATA, LVDS_Info);
	uint16_t data_offset, misc;
	union lvds_info *lvds_info;
	uint8_t frev, crev;
	struct amdgpu_encoder_atom_dig *lvds = NULL;
	/* enum id extracted from the encoder enum; 2 selects link B below */
	int encoder_enum = (encoder->encoder_enum & ENUM_ID_MASK) >> ENUM_ID_SHIFT;

	if (amdgpu_atom_parse_data_header(mode_info->atom_context, index, NULL,
					  &frev, &crev, &data_offset)) {
		lvds_info =
			(union lvds_info *)(mode_info->atom_context->bios + data_offset);
		lvds =
			kzalloc(sizeof(struct amdgpu_encoder_atom_dig), GFP_KERNEL);

		if (!lvds)
			return NULL;

		/* VBIOS stores the pixel clock in 10 kHz units; DRM wants kHz */
		lvds->native_mode.clock =
			le16_to_cpu(lvds_info->info.sLCDTiming.usPixClk) * 10;
		lvds->native_mode.hdisplay =
			le16_to_cpu(lvds_info->info.sLCDTiming.usHActive);
		lvds->native_mode.vdisplay =
			le16_to_cpu(lvds_info->info.sLCDTiming.usVActive);
		/* blanking, sync offset and sync width are all relative to
		 * the active region, so accumulate them */
		lvds->native_mode.htotal = lvds->native_mode.hdisplay +
			le16_to_cpu(lvds_info->info.sLCDTiming.usHBlanking_Time);
		lvds->native_mode.hsync_start = lvds->native_mode.hdisplay +
			le16_to_cpu(lvds_info->info.sLCDTiming.usHSyncOffset);
		lvds->native_mode.hsync_end = lvds->native_mode.hsync_start +
			le16_to_cpu(lvds_info->info.sLCDTiming.usHSyncWidth);
		lvds->native_mode.vtotal = lvds->native_mode.vdisplay +
			le16_to_cpu(lvds_info->info.sLCDTiming.usVBlanking_Time);
		lvds->native_mode.vsync_start = lvds->native_mode.vdisplay +
			le16_to_cpu(lvds_info->info.sLCDTiming.usVSyncOffset);
		lvds->native_mode.vsync_end = lvds->native_mode.vsync_start +
			le16_to_cpu(lvds_info->info.sLCDTiming.usVSyncWidth);
		/* panel power sequencing delay, in ms */
		lvds->panel_pwr_delay =
			le16_to_cpu(lvds_info->info.usOffDelayInMs);
		lvds->lcd_misc = lvds_info->info.ucLVDS_Misc;

		/* translate VBIOS mode-misc flags to DRM mode flags */
		misc = le16_to_cpu(lvds_info->info.sLCDTiming.susModeMiscInfo.usAccess);
		if (misc & ATOM_VSYNC_POLARITY)
			lvds->native_mode.flags |= DRM_MODE_FLAG_NVSYNC;
		if (misc & ATOM_HSYNC_POLARITY)
			lvds->native_mode.flags |= DRM_MODE_FLAG_NHSYNC;
		if (misc & ATOM_COMPOSITESYNC)
			lvds->native_mode.flags |= DRM_MODE_FLAG_CSYNC;
		if (misc & ATOM_INTERLACE)
			lvds->native_mode.flags |= DRM_MODE_FLAG_INTERLACE;
		if (misc & ATOM_DOUBLE_CLOCK_MODE)
			lvds->native_mode.flags |= DRM_MODE_FLAG_DBLSCAN;

		lvds->native_mode.width_mm = le16_to_cpu(lvds_info->info.sLCDTiming.usImageHSize);
		lvds->native_mode.height_mm = le16_to_cpu(lvds_info->info.sLCDTiming.usImageVSize);

		/* set crtc values */
		drm_mode_set_crtcinfo(&lvds->native_mode, CRTC_INTERLACE_HALVE_V);

		lvds->lcd_ss_id = lvds_info->info.ucSS_Id;

		encoder->native_mode = lvds->native_mode;

		/* enum id 2 means the encoder drives link B */
		if (encoder_enum == 2)
			lvds->linkb = true;
		else
			lvds->linkb = false;

		/* parse the lcd record table */
		if (le16_to_cpu(lvds_info->info.usModePatchTableOffset)) {
			ATOM_FAKE_EDID_PATCH_RECORD *fake_edid_record;
			ATOM_PANEL_RESOLUTION_PATCH_RECORD *panel_res_record;
			bool bad_record = false;
			u8 *record;

			/* pre-r2 tables store an absolute BIOS offset,
			 * later revisions store it relative to the table */
			if ((frev == 1) && (crev < 2))
				/* absolute */
				record = (u8 *)(mode_info->atom_context->bios +
						le16_to_cpu(lvds_info->info.usModePatchTableOffset));
			else
				/* relative */
				record = (u8 *)(mode_info->atom_context->bios +
						data_offset +
						le16_to_cpu(lvds_info->info.usModePatchTableOffset));
			/* walk the records until the end marker; each case
			 * advances `record` past its own record size */
			while (*record != ATOM_RECORD_END_TYPE) {
				switch (*record) {
				case LCD_MODE_PATCH_RECORD_MODE_TYPE:
					record += sizeof(ATOM_PATCH_RECORD_MODE);
					break;
				case LCD_RTS_RECORD_TYPE:
					record += sizeof(ATOM_LCD_RTS_RECORD);
					break;
				case LCD_CAP_RECORD_TYPE:
					record += sizeof(ATOM_LCD_MODE_CONTROL_CAP);
					break;
				case LCD_FAKE_EDID_PATCH_RECORD_TYPE:
					fake_edid_record = (ATOM_FAKE_EDID_PATCH_RECORD *)record;
					if (fake_edid_record->ucFakeEDIDLength) {
						struct edid *edid;
						/* never allocate less than one EDID block */
						int edid_size =
							max((int)EDID_LENGTH, (int)fake_edid_record->ucFakeEDIDLength);
						edid = kmalloc(edid_size, GFP_KERNEL);
						if (edid) {
							memcpy((u8 *)edid, (u8 *)&fake_edid_record->ucFakeEDIDString[0],
							       fake_edid_record->ucFakeEDIDLength);

							if (drm_edid_is_valid(edid)) {
								/* ownership moves to mode_info; freed elsewhere */
								adev->mode_info.bios_hardcoded_edid = edid;
								adev->mode_info.bios_hardcoded_edid_size = edid_size;
							} else
								kfree(edid);
						}
					}
					/* +2 covers the record type and length bytes
					 * preceding the variable-length EDID payload */
					record += fake_edid_record->ucFakeEDIDLength ?
						  fake_edid_record->ucFakeEDIDLength + 2 :
						  sizeof(ATOM_FAKE_EDID_PATCH_RECORD);
					break;
				case LCD_PANEL_RESOLUTION_RECORD_TYPE:
					panel_res_record = (ATOM_PANEL_RESOLUTION_PATCH_RECORD *)record;
					/* NOTE(review): usHSize/usVSize are read without
					 * le16_to_cpu, unlike the timing fields above —
					 * verify on big-endian hosts */
					lvds->native_mode.width_mm = panel_res_record->usHSize;
					lvds->native_mode.height_mm = panel_res_record->usVSize;
					record += sizeof(ATOM_PANEL_RESOLUTION_PATCH_RECORD);
					break;
				default:
					DRM_ERROR("Bad LCD record %d\n", *record);
					bad_record = true;
					break;
				}
				/* stop walking on an unrecognized record; the
				 * stride past it is unknown */
				if (bad_record)
					break;
			}
		}
	}
	return lvds;
}
  1923. struct amdgpu_encoder_atom_dig *
  1924. amdgpu_atombios_encoder_get_dig_info(struct amdgpu_encoder *amdgpu_encoder)
  1925. {
  1926. int encoder_enum = (amdgpu_encoder->encoder_enum & ENUM_ID_MASK) >> ENUM_ID_SHIFT;
  1927. struct amdgpu_encoder_atom_dig *dig = kzalloc(sizeof(struct amdgpu_encoder_atom_dig), GFP_KERNEL);
  1928. if (!dig)
  1929. return NULL;
  1930. /* coherent mode by default */
  1931. dig->coherent_mode = true;
  1932. dig->dig_encoder = -1;
  1933. if (encoder_enum == 2)
  1934. dig->linkb = true;
  1935. else
  1936. dig->linkb = false;
  1937. return dig;
  1938. }