/*
 * Copyright 2007-8 Advanced Micro Devices, Inc.
 * Copyright 2008 Red Hat Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Dave Airlie
 *          Alex Deucher
 *          Jerome Glisse
 */
#include <drm/drmP.h>
#include <drm/radeon_drm.h>
#include "radeon.h"

#include "atom.h"
#include "atom-bits.h"
#include <drm/drm_dp_helper.h>

/* move these to drm_dp_helper.c/h */
#define DP_LINK_CONFIGURATION_SIZE 9
#define DP_DPCD_SIZE DP_RECEIVER_CAP_SIZE

static const char *voltage_names[] = {
	"0.4V", "0.6V", "0.8V", "1.2V"
};
static const char *pre_emph_names[] = {
	"0dB", "3.5dB", "6dB", "9.5dB"
};

/***** radeon AUX functions *****/

/* ATOM needs data in little-endian format, so swap as appropriate
 * when copying data to or from ATOM. Note that ATOM operates on
 * dword (32-bit) units.
 */
void radeon_atom_copy_swap(u8 *dst, u8 *src, u8 num_bytes, bool to_le)
{
#ifdef __BIG_ENDIAN
	u8 src_tmp[20], dst_tmp[20]; /* used for byteswapping */
	u32 *dst32, *src32;
	int i;

	memcpy(src_tmp, src, num_bytes);
	src32 = (u32 *)src_tmp;
	dst32 = (u32 *)dst_tmp;
	if (to_le) {
		for (i = 0; i < ((num_bytes + 3) / 4); i++)
			dst32[i] = cpu_to_le32(src32[i]);
		memcpy(dst, dst_tmp, num_bytes);
	} else {
		u8 dws = num_bytes & ~3;
		for (i = 0; i < ((num_bytes + 3) / 4); i++)
			dst32[i] = le32_to_cpu(src32[i]);
		memcpy(dst, dst_tmp, dws);
		if (num_bytes % 4) {
			for (i = 0; i < (num_bytes % 4); i++)
				dst[dws+i] = dst_tmp[dws+i];
		}
	}
#else
	memcpy(dst, src, num_bytes);
#endif
}

union aux_channel_transaction {
	PROCESS_AUX_CHANNEL_TRANSACTION_PS_ALLOCATION v1;
	PROCESS_AUX_CHANNEL_TRANSACTION_PARAMETERS_V2 v2;
};

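/* Hand an AUX request to the ATOM ProcessAuxChannelTransaction table.
 * The request bytes are copied into the ATOM scratch area, the table is
 * executed, and the reply data is copied back out. ucReplyStatus encodes
 * the result: 1 = timeout, 2 = AUX flags not zero, 3 = AUX error. On
 * success the number of bytes received from the sink is returned.
 */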
static int radeon_process_aux_ch(struct radeon_i2c_chan *chan,
				 u8 *send, int send_bytes,
				 u8 *recv, int recv_size,
				 u8 delay, u8 *ack)
{
	struct drm_device *dev = chan->dev;
	struct radeon_device *rdev = dev->dev_private;
	union aux_channel_transaction args;
	int index = GetIndexIntoMasterTable(COMMAND, ProcessAuxChannelTransaction);
	unsigned char *base;
	int recv_bytes;

	memset(&args, 0, sizeof(args));

	base = (unsigned char *)(rdev->mode_info.atom_context->scratch + 1);

	radeon_atom_copy_swap(base, send, send_bytes, true);

	args.v1.lpAuxRequest = cpu_to_le16((u16)(0 + 4));
	args.v1.lpDataOut = cpu_to_le16((u16)(16 + 4));
	args.v1.ucDataOutLen = 0;
	args.v1.ucChannelID = chan->rec.i2c_id;
	args.v1.ucDelay = delay / 10;
	if (ASIC_IS_DCE4(rdev))
		args.v2.ucHPD_ID = chan->rec.hpd;

	atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);

	*ack = args.v1.ucReplyStatus;

	/* timeout */
	if (args.v1.ucReplyStatus == 1) {
		DRM_DEBUG_KMS("dp_aux_ch timeout\n");
		return -ETIMEDOUT;
	}

	/* flags not zero */
	if (args.v1.ucReplyStatus == 2) {
		DRM_DEBUG_KMS("dp_aux_ch flags not zero\n");
		return -EBUSY;
	}

	/* error */
	if (args.v1.ucReplyStatus == 3) {
		DRM_DEBUG_KMS("dp_aux_ch error\n");
		return -EIO;
	}

	recv_bytes = args.v1.ucDataOutLen;
	if (recv_bytes > recv_size)
		recv_bytes = recv_size;

	if (recv && recv_size)
		radeon_atom_copy_swap(recv, base + 16, recv_bytes, false);

	return recv_bytes;
}

#define BARE_ADDRESS_SIZE 3
#define HEADER_SIZE (BARE_ADDRESS_SIZE + 1)

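/* drm_dp_aux transfer hook. Packs the DP AUX header the ATOM table
 * expects into tx_buf[0..3]: bytes 0/1 carry the sink address, byte 2
 * the request type in its high nibble, and byte 3 the transfer length
 * minus one in its low nibble with the total packet size in its high
 * nibble. Native and I2C-over-AUX reads and writes are then handed to
 * radeon_process_aux_ch().
 */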
static ssize_t
radeon_dp_aux_transfer(struct drm_dp_aux *aux, struct drm_dp_aux_msg *msg)
{
	struct radeon_i2c_chan *chan =
		container_of(aux, struct radeon_i2c_chan, aux);
	int ret;
	u8 tx_buf[20];
	size_t tx_size;
	u8 ack, delay = 0;

	if (WARN_ON(msg->size > 16))
		return -E2BIG;

	tx_buf[0] = msg->address & 0xff;
	tx_buf[1] = msg->address >> 8;
	tx_buf[2] = msg->request << 4;
	tx_buf[3] = msg->size ? (msg->size - 1) : 0;

	switch (msg->request & ~DP_AUX_I2C_MOT) {
	case DP_AUX_NATIVE_WRITE:
	case DP_AUX_I2C_WRITE:
		/* tx_size needs to be 4 even for bare address packets since the atom
		 * table needs the info in tx_buf[3].
		 */
		tx_size = HEADER_SIZE + msg->size;
		if (msg->size == 0)
			tx_buf[3] |= BARE_ADDRESS_SIZE << 4;
		else
			tx_buf[3] |= tx_size << 4;
		memcpy(tx_buf + HEADER_SIZE, msg->buffer, msg->size);
		ret = radeon_process_aux_ch(chan,
					    tx_buf, tx_size, NULL, 0, delay, &ack);
		if (ret >= 0)
			/* Return payload size. */
			ret = msg->size;
		break;
	case DP_AUX_NATIVE_READ:
	case DP_AUX_I2C_READ:
		/* tx_size needs to be 4 even for bare address packets since the atom
		 * table needs the info in tx_buf[3].
		 */
		tx_size = HEADER_SIZE;
		if (msg->size == 0)
			tx_buf[3] |= BARE_ADDRESS_SIZE << 4;
		else
			tx_buf[3] |= tx_size << 4;
		ret = radeon_process_aux_ch(chan,
					    tx_buf, tx_size, msg->buffer, msg->size, delay, &ack);
		break;
	default:
		ret = -EINVAL;
		break;
	}

	if (ret >= 0)
		msg->reply = ack >> 4;

	return ret;
}

void radeon_dp_aux_init(struct radeon_connector *radeon_connector)
{
	int ret;

	radeon_connector->ddc_bus->rec.hpd = radeon_connector->hpd.hpd;
	radeon_connector->ddc_bus->aux.dev = radeon_connector->base.kdev;
	radeon_connector->ddc_bus->aux.transfer = radeon_dp_aux_transfer;
#ifdef __NetBSD__
	/* XXX derived from sysfs/i2c on Linux. */
	radeon_connector->ddc_bus->aux.name = "radeon_dp_aux";
#endif
	ret = drm_dp_aux_register_i2c_bus(&radeon_connector->ddc_bus->aux);
	if (!ret)
		radeon_connector->ddc_bus->has_aux = true;

	WARN(ret, "drm_dp_aux_register_i2c_bus() failed with error %d\n", ret);
}

/***** general DP utility functions *****/

#define DP_VOLTAGE_MAX DP_TRAIN_VOLTAGE_SWING_1200
#define DP_PRE_EMPHASIS_MAX DP_TRAIN_PRE_EMPHASIS_9_5

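/* Scan the per-lane adjustment requests in the DPCD link status and
 * take the highest voltage swing and pre-emphasis level requested on
 * any active lane. When the maximum supported level is selected, set
 * the corresponding "max reached" flag so the sink stops asking for
 * more, then program the same drive settings on all four lanes.
 */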
static void dp_get_adjust_train(u8 link_status[DP_LINK_STATUS_SIZE],
				int lane_count,
				u8 train_set[4])
{
	u8 v = 0;
	u8 p = 0;
	int lane;

	for (lane = 0; lane < lane_count; lane++) {
		u8 this_v = drm_dp_get_adjust_request_voltage(link_status, lane);
		u8 this_p = drm_dp_get_adjust_request_pre_emphasis(link_status, lane);

		DRM_DEBUG_KMS("requested signal parameters: lane %d voltage %s pre_emph %s\n",
			      lane,
			      voltage_names[this_v >> DP_TRAIN_VOLTAGE_SWING_SHIFT],
			      pre_emph_names[this_p >> DP_TRAIN_PRE_EMPHASIS_SHIFT]);

		if (this_v > v)
			v = this_v;
		if (this_p > p)
			p = this_p;
	}

	if (v >= DP_VOLTAGE_MAX)
		v |= DP_TRAIN_MAX_SWING_REACHED;

	if (p >= DP_PRE_EMPHASIS_MAX)
		p |= DP_TRAIN_MAX_PRE_EMPHASIS_REACHED;

	DRM_DEBUG_KMS("using signal parameters: voltage %s pre_emph %s\n",
		      voltage_names[(v & DP_TRAIN_VOLTAGE_SWING_MASK) >> DP_TRAIN_VOLTAGE_SWING_SHIFT],
		      pre_emph_names[(p & DP_TRAIN_PRE_EMPHASIS_MASK) >> DP_TRAIN_PRE_EMPHASIS_SHIFT]);

	for (lane = 0; lane < 4; lane++)
		train_set[lane] = v | p;
}

/* convert bits per color (as reported in the EDID) to bits per pixel;
 * a bpc of 0 means "unknown" and is treated as 8 bpc (24 bpp)
 */
static int convert_bpc_to_bpp(int bpc)
{
	if (bpc == 0)
		return 24;
	else
		return bpc * 3;
}

/* get the max pixel clock (in kHz) supported by the link rate (kHz per
 * lane) and lane count; each link symbol carries 8 bits of pixel data
 */
static int dp_get_max_dp_pix_clock(int link_rate,
				   int lane_num,
				   int bpp)
{
	return (link_rate * lane_num * 8) / bpp;
}

/***** radeon specific DP functions *****/

/* Find the minimum lane count that can carry the requested pixel clock
 * at the sink's maximum link rate, capped at the maximum lane count
 * supported by the DP panel.
 */
static int radeon_dp_get_dp_lane_number(struct drm_connector *connector,
					u8 dpcd[DP_DPCD_SIZE],
					int pix_clock)
{
	int bpp = convert_bpc_to_bpp(radeon_get_monitor_bpc(connector));
	int max_link_rate = drm_dp_max_link_rate(dpcd);
	int max_lane_num = drm_dp_max_lane_count(dpcd);
	int lane_num;
	int max_dp_pix_clock;

	for (lane_num = 1; lane_num < max_lane_num; lane_num <<= 1) {
		max_dp_pix_clock = dp_get_max_dp_pix_clock(max_link_rate, lane_num, bpp);
		if (pix_clock <= max_dp_pix_clock)
			break;
	}

	return lane_num;
}

static int radeon_dp_get_dp_link_clock(struct drm_connector *connector,
				       u8 dpcd[DP_DPCD_SIZE],
				       int pix_clock)
{
	int bpp = convert_bpc_to_bpp(radeon_get_monitor_bpc(connector));
	int lane_num, max_pix_clock;

	if (radeon_connector_encoder_get_dp_bridge_encoder_id(connector) ==
	    ENCODER_OBJECT_ID_NUTMEG)
		return 270000;

	lane_num = radeon_dp_get_dp_lane_number(connector, dpcd, pix_clock);
	max_pix_clock = dp_get_max_dp_pix_clock(162000, lane_num, bpp);
	if (pix_clock <= max_pix_clock)
		return 162000;
	max_pix_clock = dp_get_max_dp_pix_clock(270000, lane_num, bpp);
	if (pix_clock <= max_pix_clock)
		return 270000;
	if (radeon_connector_is_dp12_capable(connector)) {
		max_pix_clock = dp_get_max_dp_pix_clock(540000, lane_num, bpp);
		if (pix_clock <= max_pix_clock)
			return 540000;
	}

	return drm_dp_max_link_rate(dpcd);
}

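/* Thin wrapper around the ATOM DPEncoderService command table. Used to
 * query the sink type and, on older command table revisions, to drive
 * link training from radeon_dp_link_train().
 */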
static u8 radeon_dp_encoder_service(struct radeon_device *rdev,
				    int action, int dp_clock,
				    u8 ucconfig, u8 lane_num)
{
	DP_ENCODER_SERVICE_PARAMETERS args;
	int index = GetIndexIntoMasterTable(COMMAND, DPEncoderService);

	memset(&args, 0, sizeof(args));
	args.ucLinkClock = dp_clock / 10;
	args.ucConfig = ucconfig;
	args.ucAction = action;
	args.ucLaneNum = lane_num;
	args.ucStatus = 0;

	atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
	return args.ucStatus;
}

u8 radeon_dp_getsinktype(struct radeon_connector *radeon_connector)
{
	struct drm_device *dev = radeon_connector->base.dev;
	struct radeon_device *rdev = dev->dev_private;

	return radeon_dp_encoder_service(rdev, ATOM_DP_ACTION_GET_SINK_TYPE, 0,
					 radeon_connector->ddc_bus->rec.i2c_id, 0);
}

static void radeon_dp_probe_oui(struct radeon_connector *radeon_connector)
{
	struct radeon_connector_atom_dig *dig_connector = radeon_connector->con_priv;
	u8 buf[3];

	if (!(dig_connector->dpcd[DP_DOWN_STREAM_PORT_COUNT] & DP_OUI_SUPPORT))
		return;

	if (drm_dp_dpcd_read(&radeon_connector->ddc_bus->aux, DP_SINK_OUI, buf, 3) == 3)
		DRM_DEBUG_KMS("Sink OUI: %02hhx%02hhx%02hhx\n",
			      buf[0], buf[1], buf[2]);

	if (drm_dp_dpcd_read(&radeon_connector->ddc_bus->aux, DP_BRANCH_OUI, buf, 3) == 3)
		DRM_DEBUG_KMS("Branch OUI: %02hhx%02hhx%02hhx\n",
			      buf[0], buf[1], buf[2]);
}

bool radeon_dp_getdpcd(struct radeon_connector *radeon_connector)
{
	struct radeon_connector_atom_dig *dig_connector = radeon_connector->con_priv;
	u8 msg[DP_DPCD_SIZE];
	int ret, i;

	ret = drm_dp_dpcd_read(&radeon_connector->ddc_bus->aux, DP_DPCD_REV, msg,
			       DP_DPCD_SIZE);
	if (ret > 0) {
		memcpy(dig_connector->dpcd, msg, DP_DPCD_SIZE);
		DRM_DEBUG_KMS("DPCD: ");
		for (i = 0; i < DP_DPCD_SIZE; i++)
			DRM_DEBUG_KMS("%02x ", msg[i]);
		DRM_DEBUG_KMS("\n");

		radeon_dp_probe_oui(radeon_connector);

		return true;
	}
	dig_connector->dpcd[0] = 0;
	return false;
}

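/* Select the panel mode for the encoder: internal DP1/DP2 mode for DP
 * bridge chips (NUTMEG/TRAVIS) and for eDP panels that advertise it in
 * DP_EDP_CONFIGURATION_CAP, external DP mode otherwise.
 */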
int radeon_dp_get_panel_mode(struct drm_encoder *encoder,
			     struct drm_connector *connector)
{
	struct drm_device *dev = encoder->dev;
	struct radeon_device *rdev = dev->dev_private;
	struct radeon_connector *radeon_connector = to_radeon_connector(connector);
	int panel_mode = DP_PANEL_MODE_EXTERNAL_DP_MODE;
	u16 dp_bridge = radeon_connector_encoder_get_dp_bridge_encoder_id(connector);
	u8 tmp;

	if (!ASIC_IS_DCE4(rdev))
		return panel_mode;

	if (!radeon_connector->con_priv)
		return panel_mode;

	if (dp_bridge != ENCODER_OBJECT_ID_NONE) {
		/* DP bridge chips */
		if (drm_dp_dpcd_readb(&radeon_connector->ddc_bus->aux,
				      DP_EDP_CONFIGURATION_CAP, &tmp) == 1) {
			if (tmp & 1)
				panel_mode = DP_PANEL_MODE_INTERNAL_DP2_MODE;
			else if ((dp_bridge == ENCODER_OBJECT_ID_NUTMEG) ||
				 (dp_bridge == ENCODER_OBJECT_ID_TRAVIS))
				panel_mode = DP_PANEL_MODE_INTERNAL_DP1_MODE;
			else
				panel_mode = DP_PANEL_MODE_EXTERNAL_DP_MODE;
		}
	} else if (connector->connector_type == DRM_MODE_CONNECTOR_eDP) {
		/* eDP */
		if (drm_dp_dpcd_readb(&radeon_connector->ddc_bus->aux,
				      DP_EDP_CONFIGURATION_CAP, &tmp) == 1) {
			if (tmp & 1)
				panel_mode = DP_PANEL_MODE_INTERNAL_DP2_MODE;
		}
	}

	return panel_mode;
}

void radeon_dp_set_link_config(struct drm_connector *connector,
			       const struct drm_display_mode *mode)
{
	struct radeon_connector *radeon_connector = to_radeon_connector(connector);
	struct radeon_connector_atom_dig *dig_connector;

	if (!radeon_connector->con_priv)
		return;
	dig_connector = radeon_connector->con_priv;

	if ((dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_DISPLAYPORT) ||
	    (dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_eDP)) {
		dig_connector->dp_clock =
			radeon_dp_get_dp_link_clock(connector, dig_connector->dpcd, mode->clock);
		dig_connector->dp_lane_count =
			radeon_dp_get_dp_lane_number(connector, dig_connector->dpcd, mode->clock);
	}
}

int radeon_dp_mode_valid_helper(struct drm_connector *connector,
				struct drm_display_mode *mode)
{
	struct radeon_connector *radeon_connector = to_radeon_connector(connector);
	struct radeon_connector_atom_dig *dig_connector;
	int dp_clock;

	if (!radeon_connector->con_priv)
		return MODE_CLOCK_HIGH;
	dig_connector = radeon_connector->con_priv;

	dp_clock =
		radeon_dp_get_dp_link_clock(connector, dig_connector->dpcd, mode->clock);

	if ((dp_clock == 540000) &&
	    (!radeon_connector_is_dp12_capable(connector)))
		return MODE_CLOCK_HIGH;

	return MODE_OK;
}

bool radeon_dp_needs_link_train(struct radeon_connector *radeon_connector)
{
	u8 link_status[DP_LINK_STATUS_SIZE];
	struct radeon_connector_atom_dig *dig = radeon_connector->con_priv;

	if (drm_dp_dpcd_read_link_status(&radeon_connector->ddc_bus->aux, link_status)
	    <= 0)
		return false;
	if (drm_dp_channel_eq_ok(link_status, dig->dp_lane_count))
		return false;
	return true;
}

void radeon_dp_set_rx_power_state(struct drm_connector *connector,
				  u8 power_state)
{
	struct radeon_connector *radeon_connector = to_radeon_connector(connector);
	struct radeon_connector_atom_dig *dig_connector;

	if (!radeon_connector->con_priv)
		return;

	dig_connector = radeon_connector->con_priv;

	/* power up/down the sink (DPCD rev 1.1 and newer only) */
	if (dig_connector->dpcd[0] >= 0x11) {
		drm_dp_dpcd_writeb(&radeon_connector->ddc_bus->aux,
				   DP_SET_POWER, power_state);
		usleep_range(1000, 2000);
	}
}


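/* Per-training-session state gathered from the connector and encoder
 * before running the clock recovery and channel equalization phases.
 */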
struct radeon_dp_link_train_info {
	struct radeon_device *rdev;
	struct drm_encoder *encoder;
	struct drm_connector *connector;
	int enc_id;
	int dp_clock;
	int dp_lane_count;
	bool tp3_supported;
	u8 dpcd[DP_RECEIVER_CAP_SIZE];
	u8 train_set[4];
	u8 link_status[DP_LINK_STATUS_SIZE];
	u8 tries;
	bool use_dpencoder;
	struct drm_dp_aux *aux;
};

static void radeon_dp_update_vs_emph(struct radeon_dp_link_train_info *dp_info)
{
	/* set the initial vs/emph on the source */
	atombios_dig_transmitter_setup(dp_info->encoder,
				       ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH,
				       0, dp_info->train_set[0]); /* sets all lanes at once */

	/* set the vs/emph on the sink */
	drm_dp_dpcd_write(dp_info->aux, DP_TRAINING_LANE0_SET,
			  dp_info->train_set, dp_info->dp_lane_count);
}

static void radeon_dp_set_tp(struct radeon_dp_link_train_info *dp_info, int tp)
{
	int rtp = 0;

	/* set training pattern on the source */
	if (ASIC_IS_DCE4(dp_info->rdev) || !dp_info->use_dpencoder) {
		switch (tp) {
		case DP_TRAINING_PATTERN_1:
			rtp = ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN1;
			break;
		case DP_TRAINING_PATTERN_2:
			rtp = ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN2;
			break;
		case DP_TRAINING_PATTERN_3:
			rtp = ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN3;
			break;
		}
		atombios_dig_encoder_setup(dp_info->encoder, rtp, 0);
	} else {
		switch (tp) {
		case DP_TRAINING_PATTERN_1:
			rtp = 0;
			break;
		case DP_TRAINING_PATTERN_2:
			rtp = 1;
			break;
		}
		radeon_dp_encoder_service(dp_info->rdev, ATOM_DP_ACTION_TRAINING_PATTERN_SEL,
					  dp_info->dp_clock, dp_info->enc_id, rtp);
	}

	/* enable training pattern on the sink */
	drm_dp_dpcd_writeb(dp_info->aux, DP_TRAINING_PATTERN_SET, tp);
}

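/* Prepare the link for training: wake the sink, program downspread,
 * lane count and link rate in the DPCD, kick off training on the
 * source encoder, and make sure no training pattern is set on the
 * sink yet.
 */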
static int radeon_dp_link_train_init(struct radeon_dp_link_train_info *dp_info)
{
	struct radeon_encoder *radeon_encoder = to_radeon_encoder(dp_info->encoder);
	struct radeon_encoder_atom_dig *dig = radeon_encoder->enc_priv;
	u8 tmp;

	/* power up the sink */
	radeon_dp_set_rx_power_state(dp_info->connector, DP_SET_POWER_D0);

	/* possibly enable downspread on the sink */
	if (dp_info->dpcd[3] & 0x1)
		drm_dp_dpcd_writeb(dp_info->aux,
				   DP_DOWNSPREAD_CTRL, DP_SPREAD_AMP_0_5);
	else
		drm_dp_dpcd_writeb(dp_info->aux,
				   DP_DOWNSPREAD_CTRL, 0);

	if ((dp_info->connector->connector_type == DRM_MODE_CONNECTOR_eDP) &&
	    (dig->panel_mode == DP_PANEL_MODE_INTERNAL_DP2_MODE)) {
		drm_dp_dpcd_writeb(dp_info->aux, DP_EDP_CONFIGURATION_SET, 1);
	}

	/* set the lane count on the sink */
	tmp = dp_info->dp_lane_count;
	if (drm_dp_enhanced_frame_cap(dp_info->dpcd))
		tmp |= DP_LANE_COUNT_ENHANCED_FRAME_EN;
	drm_dp_dpcd_writeb(dp_info->aux, DP_LANE_COUNT_SET, tmp);

	/* set the link rate on the sink */
	tmp = drm_dp_link_rate_to_bw_code(dp_info->dp_clock);
	drm_dp_dpcd_writeb(dp_info->aux, DP_LINK_BW_SET, tmp);

	/* start training on the source */
	if (ASIC_IS_DCE4(dp_info->rdev) || !dp_info->use_dpencoder)
		atombios_dig_encoder_setup(dp_info->encoder,
					   ATOM_ENCODER_CMD_DP_LINK_TRAINING_START, 0);
	else
		radeon_dp_encoder_service(dp_info->rdev, ATOM_DP_ACTION_TRAINING_START,
					  dp_info->dp_clock, dp_info->enc_id, 0);

	/* disable the training pattern on the sink */
	drm_dp_dpcd_writeb(dp_info->aux,
			   DP_TRAINING_PATTERN_SET,
			   DP_TRAINING_PATTERN_DISABLE);

	return 0;
}

static int radeon_dp_link_train_finish(struct radeon_dp_link_train_info *dp_info)
{
	udelay(400);

	/* disable the training pattern on the sink */
	drm_dp_dpcd_writeb(dp_info->aux,
			   DP_TRAINING_PATTERN_SET,
			   DP_TRAINING_PATTERN_DISABLE);

	/* disable the training pattern on the source */
	if (ASIC_IS_DCE4(dp_info->rdev) || !dp_info->use_dpencoder)
		atombios_dig_encoder_setup(dp_info->encoder,
					   ATOM_ENCODER_CMD_DP_LINK_TRAINING_COMPLETE, 0);
	else
		radeon_dp_encoder_service(dp_info->rdev, ATOM_DP_ACTION_TRAINING_COMPLETE,
					  dp_info->dp_clock, dp_info->enc_id, 0);

	return 0;
}

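/* Clock recovery phase: transmit training pattern 1 and keep adjusting
 * voltage swing / pre-emphasis as requested by the sink until clock
 * recovery is reported on all lanes, the maximum swing is reached, or
 * the same voltage has been retried five times.
 */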
static int radeon_dp_link_train_cr(struct radeon_dp_link_train_info *dp_info)
{
	bool clock_recovery;
	u8 voltage;
	int i;

	radeon_dp_set_tp(dp_info, DP_TRAINING_PATTERN_1);
	memset(dp_info->train_set, 0, 4);
	radeon_dp_update_vs_emph(dp_info);

	udelay(400);

	/* clock recovery loop */
	clock_recovery = false;
	dp_info->tries = 0;
	voltage = 0xff;
	while (1) {
		drm_dp_link_train_clock_recovery_delay(dp_info->dpcd);

		if (drm_dp_dpcd_read_link_status(dp_info->aux,
						 dp_info->link_status) <= 0) {
			DRM_ERROR("displayport link status failed\n");
			break;
		}

		if (drm_dp_clock_recovery_ok(dp_info->link_status, dp_info->dp_lane_count)) {
			clock_recovery = true;
			break;
		}

		for (i = 0; i < dp_info->dp_lane_count; i++) {
			if ((dp_info->train_set[i] & DP_TRAIN_MAX_SWING_REACHED) == 0)
				break;
		}
		if (i == dp_info->dp_lane_count) {
			DRM_ERROR("clock recovery reached max voltage\n");
			break;
		}

		if ((dp_info->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK) == voltage) {
			++dp_info->tries;
			if (dp_info->tries == 5) {
				DRM_ERROR("clock recovery tried 5 times\n");
				break;
			}
		} else
			dp_info->tries = 0;

		voltage = dp_info->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK;

		/* Compute new train_set as requested by sink */
		dp_get_adjust_train(dp_info->link_status, dp_info->dp_lane_count, dp_info->train_set);

		radeon_dp_update_vs_emph(dp_info);
	}
	if (!clock_recovery) {
		DRM_ERROR("clock recovery failed\n");
		return -1;
	} else {
		DRM_DEBUG_KMS("clock recovery at voltage %d pre-emphasis %d\n",
			      dp_info->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK,
			      (dp_info->train_set[0] & DP_TRAIN_PRE_EMPHASIS_MASK) >>
			      DP_TRAIN_PRE_EMPHASIS_SHIFT);
		return 0;
	}
}

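/* Channel equalization phase: transmit training pattern 3 when the sink
 * supports it (pattern 2 otherwise) and adjust the drive settings until
 * channel equalization is reported, giving up after a limited number of
 * adjustment attempts.
 */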
static int radeon_dp_link_train_ce(struct radeon_dp_link_train_info *dp_info)
{
	bool channel_eq;

	if (dp_info->tp3_supported)
		radeon_dp_set_tp(dp_info, DP_TRAINING_PATTERN_3);
	else
		radeon_dp_set_tp(dp_info, DP_TRAINING_PATTERN_2);

	/* channel equalization loop */
	dp_info->tries = 0;
	channel_eq = false;
	while (1) {
		drm_dp_link_train_channel_eq_delay(dp_info->dpcd);

		if (drm_dp_dpcd_read_link_status(dp_info->aux,
						 dp_info->link_status) <= 0) {
			DRM_ERROR("displayport link status failed\n");
			break;
		}

		if (drm_dp_channel_eq_ok(dp_info->link_status, dp_info->dp_lane_count)) {
			channel_eq = true;
			break;
		}

		/* Try 5 times */
		if (dp_info->tries > 5) {
			DRM_ERROR("channel eq failed: 5 tries\n");
			break;
		}

		/* Compute new train_set as requested by sink */
		dp_get_adjust_train(dp_info->link_status, dp_info->dp_lane_count, dp_info->train_set);

		radeon_dp_update_vs_emph(dp_info);
		dp_info->tries++;
	}

	if (!channel_eq) {
		DRM_ERROR("channel eq failed\n");
		return -1;
	} else {
		DRM_DEBUG_KMS("channel eq at voltage %d pre-emphasis %d\n",
			      dp_info->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK,
			      (dp_info->train_set[0] & DP_TRAIN_PRE_EMPHASIS_MASK)
			      >> DP_TRAIN_PRE_EMPHASIS_SHIFT);
		return 0;
	}
}

void radeon_dp_link_train(struct drm_encoder *encoder,
			  struct drm_connector *connector)
{
	struct drm_device *dev = encoder->dev;
	struct radeon_device *rdev = dev->dev_private;
	struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
	struct radeon_encoder_atom_dig *dig;
	struct radeon_connector *radeon_connector;
	struct radeon_connector_atom_dig *dig_connector;
	struct radeon_dp_link_train_info dp_info;
	int index;
	u8 tmp, frev, crev;

	if (!radeon_encoder->enc_priv)
		return;
	dig = radeon_encoder->enc_priv;

	radeon_connector = to_radeon_connector(connector);
	if (!radeon_connector->con_priv)
		return;
	dig_connector = radeon_connector->con_priv;

	if ((dig_connector->dp_sink_type != CONNECTOR_OBJECT_ID_DISPLAYPORT) &&
	    (dig_connector->dp_sink_type != CONNECTOR_OBJECT_ID_eDP))
		return;

	/* DPEncoderService revisions newer than 1.1 can't program the
	 * training pattern properly; when facing such a revision, use
	 * DIGxEncoderControl (x == 1 | 2) instead.
	 */
	dp_info.use_dpencoder = true;
	index = GetIndexIntoMasterTable(COMMAND, DPEncoderService);
	if (atom_parse_cmd_header(rdev->mode_info.atom_context, index, &frev, &crev)) {
		if (crev > 1) {
			dp_info.use_dpencoder = false;
		}
	}

	dp_info.enc_id = 0;
	if (dig->dig_encoder)
		dp_info.enc_id |= ATOM_DP_CONFIG_DIG2_ENCODER;
	else
		dp_info.enc_id |= ATOM_DP_CONFIG_DIG1_ENCODER;
	if (dig->linkb)
		dp_info.enc_id |= ATOM_DP_CONFIG_LINK_B;
	else
		dp_info.enc_id |= ATOM_DP_CONFIG_LINK_A;

	if (drm_dp_dpcd_readb(&radeon_connector->ddc_bus->aux, DP_MAX_LANE_COUNT, &tmp)
	    == 1) {
		if (ASIC_IS_DCE5(rdev) && (tmp & DP_TPS3_SUPPORTED))
			dp_info.tp3_supported = true;
		else
			dp_info.tp3_supported = false;
	} else {
		dp_info.tp3_supported = false;
	}

	memcpy(dp_info.dpcd, dig_connector->dpcd, DP_RECEIVER_CAP_SIZE);
	dp_info.rdev = rdev;
	dp_info.encoder = encoder;
	dp_info.connector = connector;
	dp_info.dp_lane_count = dig_connector->dp_lane_count;
	dp_info.dp_clock = dig_connector->dp_clock;
	dp_info.aux = &radeon_connector->ddc_bus->aux;

	if (radeon_dp_link_train_init(&dp_info))
		goto done;
	if (radeon_dp_link_train_cr(&dp_info))
		goto done;
	if (radeon_dp_link_train_ce(&dp_info))
		goto done;
done:
	if (radeon_dp_link_train_finish(&dp_info))
		return;
}