udelay(500);
}
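+/*
+ * Cache the current link rate and lane count in intel_dp so that the
+ * link training code below can use them without reaching into
+ * crtc->config.
+ */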
+void intel_dp_set_link_params(struct intel_dp *intel_dp,
+ const struct intel_crtc_state *pipe_config)
+{
+ intel_dp->link_rate = pipe_config->port_clock;
+ intel_dp->lane_count = pipe_config->lane_count;
+}
+
static void intel_dp_prepare(struct intel_encoder *encoder)
{
struct drm_device *dev = encoder->base.dev;
struct intel_crtc *crtc = to_intel_crtc(encoder->base.crtc);
struct drm_display_mode *adjusted_mode = &crtc->config->base.adjusted_mode;
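+ /* Record the link parameters for this pipe config before programming */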
+ intel_dp_set_link_params(intel_dp, crtc->config);
+
/*
* There are four kinds of DP registers:
*
intel_get_adjust_train(struct intel_dp *intel_dp,
const uint8_t link_status[DP_LINK_STATUS_SIZE])
{
- struct intel_crtc *crtc =
- to_intel_crtc(dp_to_dig_port(intel_dp)->base.base.crtc);
uint8_t v = 0;
uint8_t p = 0;
int lane;
uint8_t voltage_max;
uint8_t preemph_max;
- for (lane = 0; lane < crtc->config->lane_count; lane++) {
+ for (lane = 0; lane < intel_dp->lane_count; lane++) {
uint8_t this_v = drm_dp_get_adjust_request_voltage(link_status, lane);
uint8_t this_p = drm_dp_get_adjust_request_pre_emphasis(link_status, lane);
struct intel_digital_port *intel_dig_port = dp_to_dig_port(intel_dp);
struct drm_i915_private *dev_priv =
to_i915(intel_dig_port->base.base.dev);
- struct intel_crtc *crtc =
- to_intel_crtc(intel_dig_port->base.base.crtc);
uint8_t buf[sizeof(intel_dp->train_set) + 1];
int ret, len;
len = 1;
} else {
/* DP_TRAINING_LANEx_SET follow DP_TRAINING_PATTERN_SET */
- memcpy(buf + 1, intel_dp->train_set, crtc->config->lane_count);
- len = crtc->config->lane_count + 1;
+ memcpy(buf + 1, intel_dp->train_set, intel_dp->lane_count);
+ len = intel_dp->lane_count + 1;
}
ret = drm_dp_dpcd_write(&intel_dp->aux, DP_TRAINING_PATTERN_SET,
struct intel_digital_port *intel_dig_port = dp_to_dig_port(intel_dp);
struct drm_i915_private *dev_priv =
to_i915(intel_dig_port->base.base.dev);
- struct intel_crtc *crtc =
- to_intel_crtc(intel_dig_port->base.base.crtc);
int ret;
intel_get_adjust_train(intel_dp, link_status);
POSTING_READ(intel_dp->output_reg);
ret = drm_dp_dpcd_write(&intel_dp->aux, DP_TRAINING_LANE0_SET,
- intel_dp->train_set, crtc->config->lane_count);
+ intel_dp->train_set, intel_dp->lane_count);
- return ret == crtc->config->lane_count;
+ return ret == intel_dp->lane_count;
}
static void intel_dp_set_idle_link_train(struct intel_dp *intel_dp)
intel_dp_start_link_train(struct intel_dp *intel_dp)
{
struct drm_encoder *encoder = &dp_to_dig_port(intel_dp)->base.base;
- struct intel_crtc *crtc =
- to_intel_crtc(dp_to_dig_port(intel_dp)->base.base.crtc);
struct drm_device *dev = encoder->dev;
int i;
uint8_t voltage;
if (HAS_DDI(dev))
intel_ddi_prepare_link_retrain(encoder);
- intel_dp_compute_rate(intel_dp, crtc->config->port_clock,
+ intel_dp_compute_rate(intel_dp, intel_dp->link_rate,
&link_bw, &rate_select);
/* Write the link configuration data */
link_config[0] = link_bw;
- link_config[1] = crtc->config->lane_count;
+ link_config[1] = intel_dp->lane_count;
if (drm_dp_enhanced_frame_cap(intel_dp->dpcd))
link_config[1] |= DP_LANE_COUNT_ENHANCED_FRAME_EN;
drm_dp_dpcd_write(&intel_dp->aux, DP_LINK_BW_SET, link_config, 2);
break;
}
- if (drm_dp_clock_recovery_ok(link_status, crtc->config->lane_count)) {
+ if (drm_dp_clock_recovery_ok(link_status, intel_dp->lane_count)) {
DRM_DEBUG_KMS("clock recovery OK\n");
break;
}
}
/* Check to see if we've tried the max voltage */
- for (i = 0; i < crtc->config->lane_count; i++)
+ for (i = 0; i < intel_dp->lane_count; i++)
if ((intel_dp->train_set[i] & DP_TRAIN_MAX_SWING_REACHED) == 0)
break;
- if (i == crtc->config->lane_count) {
+ if (i == intel_dp->lane_count) {
++loop_tries;
if (loop_tries == 5) {
DRM_ERROR("too many full retries, give up\n");
void
intel_dp_complete_link_train(struct intel_dp *intel_dp)
{
- struct intel_crtc *crtc =
- to_intel_crtc(dp_to_dig_port(intel_dp)->base.base.crtc);
bool channel_eq = false;
int tries, cr_tries;
uint32_t DP = intel_dp->DP;
uint32_t training_pattern = DP_TRAINING_PATTERN_2;
/* Training Pattern 3 for HBR2 or 1.2 devices that support it */
- if (crtc->config->port_clock == 540000 || intel_dp->use_tps3)
+ if (intel_dp->link_rate == 540000 || intel_dp->use_tps3)
training_pattern = DP_TRAINING_PATTERN_3;
/* channel equalization */
/* Make sure clock is still ok */
if (!drm_dp_clock_recovery_ok(link_status,
- crtc->config->lane_count)) {
+ intel_dp->lane_count)) {
intel_dp->train_set_valid = false;
intel_dp_start_link_train(intel_dp);
intel_dp_set_link_train(intel_dp, &DP,
}
if (drm_dp_channel_eq_ok(link_status,
- crtc->config->lane_count)) {
+ intel_dp->lane_count)) {
channel_eq = true;
break;
}
static int
intel_dp_check_mst_status(struct intel_dp *intel_dp)
{
- struct intel_crtc *crtc =
- to_intel_crtc(dp_to_dig_port(intel_dp)->base.base.crtc);
bool bret;
if (intel_dp->is_mst) {
/* check link status - esi[10] = 0x200c */
if (intel_dp->active_mst_links &&
- !drm_dp_channel_eq_ok(&esi[10], crtc->config->lane_count)) {
+ !drm_dp_channel_eq_ok(&esi[10], intel_dp->lane_count)) {
DRM_DEBUG_KMS("channel EQ not ok, retraining\n");
intel_dp_start_link_train(intel_dp);
intel_dp_complete_link_train(intel_dp);
{
struct drm_device *dev = intel_dp_to_dev(intel_dp);
struct intel_encoder *intel_encoder = &dp_to_dig_port(intel_dp)->base;
- struct intel_crtc *crtc =
- to_intel_crtc(dp_to_dig_port(intel_dp)->base.base.crtc);
u8 sink_irq_vector;
u8 link_status[DP_LINK_STATUS_SIZE];
DRM_DEBUG_DRIVER("CP or sink specific irq unhandled\n");
}
- if (!drm_dp_channel_eq_ok(link_status, crtc->config->lane_count)) {
+ if (!drm_dp_channel_eq_ok(link_status, intel_dp->lane_count)) {
DRM_DEBUG_KMS("%s: channel EQ not ok, retraining\n",
intel_encoder->base.name);
intel_dp_start_link_train(intel_dp);