author | Daniel Baumann <daniel.baumann@progress-linux.org> | 2024-04-07 18:49:45 +0000
committer | Daniel Baumann <daniel.baumann@progress-linux.org> | 2024-04-07 18:49:45 +0000
commit | 2c3c1048746a4622d8c89a29670120dc8fab93c4 (patch)
tree | 848558de17fb3008cdf4d861b01ac7781903ce39 /drivers/gpu/drm/msm/dp
parent | Initial commit. (diff)
download | linux-2c3c1048746a4622d8c89a29670120dc8fab93c4.tar.xz, linux-2c3c1048746a4622d8c89a29670120dc8fab93c4.zip
Adding upstream version 6.1.76. (tags: upstream/6.1.76, upstream)
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'drivers/gpu/drm/msm/dp')
25 files changed, 10247 insertions, 0 deletions
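The dp_audio.c added in the diff below derives each SDP header parity byte by feeding the header nibbles through a pair of G0/G1 transforms and folding them into a running (x0, x1) state. As a rough standalone illustration of that scheme (a user-space sketch mirroring dp_audio_get_g0_value(), dp_audio_get_g1_value() and dp_audio_calculate_parity() from the diff; not driver code):

```c
/* Minimal user-space sketch of the SDP header parity scheme used below. */
#include <stdint.h>
#include <stdio.h>

static uint8_t g0(uint8_t d)
{
	uint8_t c[4], g[4], r = 0;
	for (int i = 0; i < 4; i++)
		c[i] = (d >> i) & 1;
	g[0] = c[3];
	g[1] = c[0] ^ c[3];
	g[2] = c[1];
	g[3] = c[2];
	for (int i = 0; i < 4; i++)
		r |= (g[i] & 1) << i;
	return r;
}

static uint8_t g1(uint8_t d)
{
	uint8_t c[4], g[4], r = 0;
	for (int i = 0; i < 4; i++)
		c[i] = (d >> i) & 1;
	g[0] = c[0] ^ c[3];
	g[1] = c[0] ^ c[1] ^ c[3];
	g[2] = c[1] ^ c[2];
	g[3] = c[2] ^ c[3];
	for (int i = 0; i < 4; i++)
		r |= (g[i] & 1) << i;
	return r;
}

static uint8_t parity(uint32_t data)
{
	uint8_t x0 = 0, x1 = 0;
	/* single header byte -> two nibbles, wider data -> eight nibbles */
	int nibbles = (data & 0xFF00) ? 8 : 2;

	for (int i = 0; i < nibbles; i++) {
		uint8_t ci = ((data >> (i * 4)) & 0xF) ^ x1;

		x1 = x0 ^ g1(ci);
		x0 = g0(ci);
	}
	return x1 | (x0 << 4);
}

int main(void)
{
	/* 0x84 is the Audio InfoFrame SDP header byte 1 used in the diff */
	printf("parity(0x84) = 0x%02x\n", parity(0x84));
	return 0;
}
```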
diff --git a/drivers/gpu/drm/msm/dp/dp_audio.c b/drivers/gpu/drm/msm/dp/dp_audio.c new file mode 100644 index 000000000..1245c7aa4 --- /dev/null +++ b/drivers/gpu/drm/msm/dp/dp_audio.c @@ -0,0 +1,667 @@ +// SPDX-License-Identifier: GPL-2.0-only +/* + * Copyright (c) 2016-2020, The Linux Foundation. All rights reserved. + */ + + +#define pr_fmt(fmt) "[drm-dp] %s: " fmt, __func__ + +#include <linux/of_platform.h> + +#include <drm/display/drm_dp_helper.h> +#include <drm/drm_edid.h> + +#include "dp_catalog.h" +#include "dp_audio.h" +#include "dp_panel.h" +#include "dp_display.h" + +#define HEADER_BYTE_2_BIT 0 +#define PARITY_BYTE_2_BIT 8 +#define HEADER_BYTE_1_BIT 16 +#define PARITY_BYTE_1_BIT 24 +#define HEADER_BYTE_3_BIT 16 +#define PARITY_BYTE_3_BIT 24 + +struct dp_audio_private { + struct platform_device *audio_pdev; + struct platform_device *pdev; + struct drm_device *drm_dev; + struct dp_catalog *catalog; + struct dp_panel *panel; + + bool engine_on; + u32 channels; + + struct dp_audio dp_audio; +}; + +static u8 dp_audio_get_g0_value(u8 data) +{ + u8 c[4]; + u8 g[4]; + u8 ret_data = 0; + u8 i; + + for (i = 0; i < 4; i++) + c[i] = (data >> i) & 0x01; + + g[0] = c[3]; + g[1] = c[0] ^ c[3]; + g[2] = c[1]; + g[3] = c[2]; + + for (i = 0; i < 4; i++) + ret_data = ((g[i] & 0x01) << i) | ret_data; + + return ret_data; +} + +static u8 dp_audio_get_g1_value(u8 data) +{ + u8 c[4]; + u8 g[4]; + u8 ret_data = 0; + u8 i; + + for (i = 0; i < 4; i++) + c[i] = (data >> i) & 0x01; + + g[0] = c[0] ^ c[3]; + g[1] = c[0] ^ c[1] ^ c[3]; + g[2] = c[1] ^ c[2]; + g[3] = c[2] ^ c[3]; + + for (i = 0; i < 4; i++) + ret_data = ((g[i] & 0x01) << i) | ret_data; + + return ret_data; +} + +static u8 dp_audio_calculate_parity(u32 data) +{ + u8 x0 = 0; + u8 x1 = 0; + u8 ci = 0; + u8 iData = 0; + u8 i = 0; + u8 parity_byte; + u8 num_byte = (data & 0xFF00) > 0 ? 
8 : 2; + + for (i = 0; i < num_byte; i++) { + iData = (data >> i*4) & 0xF; + + ci = iData ^ x1; + x1 = x0 ^ dp_audio_get_g1_value(ci); + x0 = dp_audio_get_g0_value(ci); + } + + parity_byte = x1 | (x0 << 4); + + return parity_byte; +} + +static u32 dp_audio_get_header(struct dp_catalog *catalog, + enum dp_catalog_audio_sdp_type sdp, + enum dp_catalog_audio_header_type header) +{ + catalog->sdp_type = sdp; + catalog->sdp_header = header; + dp_catalog_audio_get_header(catalog); + + return catalog->audio_data; +} + +static void dp_audio_set_header(struct dp_catalog *catalog, + u32 data, + enum dp_catalog_audio_sdp_type sdp, + enum dp_catalog_audio_header_type header) +{ + catalog->sdp_type = sdp; + catalog->sdp_header = header; + catalog->audio_data = data; + dp_catalog_audio_set_header(catalog); +} + +static void dp_audio_stream_sdp(struct dp_audio_private *audio) +{ + struct dp_catalog *catalog = audio->catalog; + u32 value, new_value; + u8 parity_byte; + + /* Config header and parity byte 1 */ + value = dp_audio_get_header(catalog, + DP_AUDIO_SDP_STREAM, DP_AUDIO_SDP_HEADER_1); + + new_value = 0x02; + parity_byte = dp_audio_calculate_parity(new_value); + value |= ((new_value << HEADER_BYTE_1_BIT) + | (parity_byte << PARITY_BYTE_1_BIT)); + drm_dbg_dp(audio->drm_dev, + "Header Byte 1: value = 0x%x, parity_byte = 0x%x\n", + value, parity_byte); + dp_audio_set_header(catalog, value, + DP_AUDIO_SDP_STREAM, DP_AUDIO_SDP_HEADER_1); + + /* Config header and parity byte 2 */ + value = dp_audio_get_header(catalog, + DP_AUDIO_SDP_STREAM, DP_AUDIO_SDP_HEADER_2); + new_value = value; + parity_byte = dp_audio_calculate_parity(new_value); + value |= ((new_value << HEADER_BYTE_2_BIT) + | (parity_byte << PARITY_BYTE_2_BIT)); + drm_dbg_dp(audio->drm_dev, + "Header Byte 2: value = 0x%x, parity_byte = 0x%x\n", + value, parity_byte); + + dp_audio_set_header(catalog, value, + DP_AUDIO_SDP_STREAM, DP_AUDIO_SDP_HEADER_2); + + /* Config header and parity byte 3 */ + value = dp_audio_get_header(catalog, + DP_AUDIO_SDP_STREAM, DP_AUDIO_SDP_HEADER_3); + + new_value = audio->channels - 1; + parity_byte = dp_audio_calculate_parity(new_value); + value |= ((new_value << HEADER_BYTE_3_BIT) + | (parity_byte << PARITY_BYTE_3_BIT)); + drm_dbg_dp(audio->drm_dev, + "Header Byte 3: value = 0x%x, parity_byte = 0x%x\n", + value, parity_byte); + + dp_audio_set_header(catalog, value, + DP_AUDIO_SDP_STREAM, DP_AUDIO_SDP_HEADER_3); +} + +static void dp_audio_timestamp_sdp(struct dp_audio_private *audio) +{ + struct dp_catalog *catalog = audio->catalog; + u32 value, new_value; + u8 parity_byte; + + /* Config header and parity byte 1 */ + value = dp_audio_get_header(catalog, + DP_AUDIO_SDP_TIMESTAMP, DP_AUDIO_SDP_HEADER_1); + + new_value = 0x1; + parity_byte = dp_audio_calculate_parity(new_value); + value |= ((new_value << HEADER_BYTE_1_BIT) + | (parity_byte << PARITY_BYTE_1_BIT)); + drm_dbg_dp(audio->drm_dev, + "Header Byte 1: value = 0x%x, parity_byte = 0x%x\n", + value, parity_byte); + dp_audio_set_header(catalog, value, + DP_AUDIO_SDP_TIMESTAMP, DP_AUDIO_SDP_HEADER_1); + + /* Config header and parity byte 2 */ + value = dp_audio_get_header(catalog, + DP_AUDIO_SDP_TIMESTAMP, DP_AUDIO_SDP_HEADER_2); + + new_value = 0x17; + parity_byte = dp_audio_calculate_parity(new_value); + value |= ((new_value << HEADER_BYTE_2_BIT) + | (parity_byte << PARITY_BYTE_2_BIT)); + drm_dbg_dp(audio->drm_dev, + "Header Byte 2: value = 0x%x, parity_byte = 0x%x\n", + value, parity_byte); + dp_audio_set_header(catalog, value, + DP_AUDIO_SDP_TIMESTAMP, 
DP_AUDIO_SDP_HEADER_2); + + /* Config header and parity byte 3 */ + value = dp_audio_get_header(catalog, + DP_AUDIO_SDP_TIMESTAMP, DP_AUDIO_SDP_HEADER_3); + + new_value = (0x0 | (0x11 << 2)); + parity_byte = dp_audio_calculate_parity(new_value); + value |= ((new_value << HEADER_BYTE_3_BIT) + | (parity_byte << PARITY_BYTE_3_BIT)); + drm_dbg_dp(audio->drm_dev, + "Header Byte 3: value = 0x%x, parity_byte = 0x%x\n", + value, parity_byte); + dp_audio_set_header(catalog, value, + DP_AUDIO_SDP_TIMESTAMP, DP_AUDIO_SDP_HEADER_3); +} + +static void dp_audio_infoframe_sdp(struct dp_audio_private *audio) +{ + struct dp_catalog *catalog = audio->catalog; + u32 value, new_value; + u8 parity_byte; + + /* Config header and parity byte 1 */ + value = dp_audio_get_header(catalog, + DP_AUDIO_SDP_INFOFRAME, DP_AUDIO_SDP_HEADER_1); + + new_value = 0x84; + parity_byte = dp_audio_calculate_parity(new_value); + value |= ((new_value << HEADER_BYTE_1_BIT) + | (parity_byte << PARITY_BYTE_1_BIT)); + drm_dbg_dp(audio->drm_dev, + "Header Byte 1: value = 0x%x, parity_byte = 0x%x\n", + value, parity_byte); + dp_audio_set_header(catalog, value, + DP_AUDIO_SDP_INFOFRAME, DP_AUDIO_SDP_HEADER_1); + + /* Config header and parity byte 2 */ + value = dp_audio_get_header(catalog, + DP_AUDIO_SDP_INFOFRAME, DP_AUDIO_SDP_HEADER_2); + + new_value = 0x1b; + parity_byte = dp_audio_calculate_parity(new_value); + value |= ((new_value << HEADER_BYTE_2_BIT) + | (parity_byte << PARITY_BYTE_2_BIT)); + drm_dbg_dp(audio->drm_dev, + "Header Byte 2: value = 0x%x, parity_byte = 0x%x\n", + value, parity_byte); + dp_audio_set_header(catalog, value, + DP_AUDIO_SDP_INFOFRAME, DP_AUDIO_SDP_HEADER_2); + + /* Config header and parity byte 3 */ + value = dp_audio_get_header(catalog, + DP_AUDIO_SDP_INFOFRAME, DP_AUDIO_SDP_HEADER_3); + + new_value = (0x0 | (0x11 << 2)); + parity_byte = dp_audio_calculate_parity(new_value); + value |= ((new_value << HEADER_BYTE_3_BIT) + | (parity_byte << PARITY_BYTE_3_BIT)); + drm_dbg_dp(audio->drm_dev, + "Header Byte 3: value = 0x%x, parity_byte = 0x%x\n", + new_value, parity_byte); + dp_audio_set_header(catalog, value, + DP_AUDIO_SDP_INFOFRAME, DP_AUDIO_SDP_HEADER_3); +} + +static void dp_audio_copy_management_sdp(struct dp_audio_private *audio) +{ + struct dp_catalog *catalog = audio->catalog; + u32 value, new_value; + u8 parity_byte; + + /* Config header and parity byte 1 */ + value = dp_audio_get_header(catalog, + DP_AUDIO_SDP_COPYMANAGEMENT, DP_AUDIO_SDP_HEADER_1); + + new_value = 0x05; + parity_byte = dp_audio_calculate_parity(new_value); + value |= ((new_value << HEADER_BYTE_1_BIT) + | (parity_byte << PARITY_BYTE_1_BIT)); + drm_dbg_dp(audio->drm_dev, + "Header Byte 1: value = 0x%x, parity_byte = 0x%x\n", + value, parity_byte); + dp_audio_set_header(catalog, value, + DP_AUDIO_SDP_COPYMANAGEMENT, DP_AUDIO_SDP_HEADER_1); + + /* Config header and parity byte 2 */ + value = dp_audio_get_header(catalog, + DP_AUDIO_SDP_COPYMANAGEMENT, DP_AUDIO_SDP_HEADER_2); + + new_value = 0x0F; + parity_byte = dp_audio_calculate_parity(new_value); + value |= ((new_value << HEADER_BYTE_2_BIT) + | (parity_byte << PARITY_BYTE_2_BIT)); + drm_dbg_dp(audio->drm_dev, + "Header Byte 2: value = 0x%x, parity_byte = 0x%x\n", + value, parity_byte); + dp_audio_set_header(catalog, value, + DP_AUDIO_SDP_COPYMANAGEMENT, DP_AUDIO_SDP_HEADER_2); + + /* Config header and parity byte 3 */ + value = dp_audio_get_header(catalog, + DP_AUDIO_SDP_COPYMANAGEMENT, DP_AUDIO_SDP_HEADER_3); + + new_value = 0x0; + parity_byte = dp_audio_calculate_parity(new_value); + 
value |= ((new_value << HEADER_BYTE_3_BIT) + | (parity_byte << PARITY_BYTE_3_BIT)); + drm_dbg_dp(audio->drm_dev, + "Header Byte 3: value = 0x%x, parity_byte = 0x%x\n", + value, parity_byte); + dp_audio_set_header(catalog, value, + DP_AUDIO_SDP_COPYMANAGEMENT, DP_AUDIO_SDP_HEADER_3); +} + +static void dp_audio_isrc_sdp(struct dp_audio_private *audio) +{ + struct dp_catalog *catalog = audio->catalog; + u32 value, new_value; + u8 parity_byte; + + /* Config header and parity byte 1 */ + value = dp_audio_get_header(catalog, + DP_AUDIO_SDP_ISRC, DP_AUDIO_SDP_HEADER_1); + + new_value = 0x06; + parity_byte = dp_audio_calculate_parity(new_value); + value |= ((new_value << HEADER_BYTE_1_BIT) + | (parity_byte << PARITY_BYTE_1_BIT)); + drm_dbg_dp(audio->drm_dev, + "Header Byte 1: value = 0x%x, parity_byte = 0x%x\n", + value, parity_byte); + dp_audio_set_header(catalog, value, + DP_AUDIO_SDP_ISRC, DP_AUDIO_SDP_HEADER_1); + + /* Config header and parity byte 2 */ + value = dp_audio_get_header(catalog, + DP_AUDIO_SDP_ISRC, DP_AUDIO_SDP_HEADER_2); + + new_value = 0x0F; + parity_byte = dp_audio_calculate_parity(new_value); + value |= ((new_value << HEADER_BYTE_2_BIT) + | (parity_byte << PARITY_BYTE_2_BIT)); + drm_dbg_dp(audio->drm_dev, + "Header Byte 2: value = 0x%x, parity_byte = 0x%x\n", + value, parity_byte); + dp_audio_set_header(catalog, value, + DP_AUDIO_SDP_ISRC, DP_AUDIO_SDP_HEADER_2); +} + +static void dp_audio_setup_sdp(struct dp_audio_private *audio) +{ + dp_catalog_audio_config_sdp(audio->catalog); + + dp_audio_stream_sdp(audio); + dp_audio_timestamp_sdp(audio); + dp_audio_infoframe_sdp(audio); + dp_audio_copy_management_sdp(audio); + dp_audio_isrc_sdp(audio); +} + +static void dp_audio_setup_acr(struct dp_audio_private *audio) +{ + u32 select = 0; + struct dp_catalog *catalog = audio->catalog; + + switch (audio->dp_audio.bw_code) { + case DP_LINK_BW_1_62: + select = 0; + break; + case DP_LINK_BW_2_7: + select = 1; + break; + case DP_LINK_BW_5_4: + select = 2; + break; + case DP_LINK_BW_8_1: + select = 3; + break; + default: + drm_dbg_dp(audio->drm_dev, "Unknown link rate\n"); + select = 0; + break; + } + + catalog->audio_data = select; + dp_catalog_audio_config_acr(catalog); +} + +static void dp_audio_safe_to_exit_level(struct dp_audio_private *audio) +{ + struct dp_catalog *catalog = audio->catalog; + u32 safe_to_exit_level = 0; + + switch (audio->dp_audio.lane_count) { + case 1: + safe_to_exit_level = 14; + break; + case 2: + safe_to_exit_level = 8; + break; + case 4: + safe_to_exit_level = 5; + break; + default: + drm_dbg_dp(audio->drm_dev, + "setting the default safe_to_exit_level = %u\n", + safe_to_exit_level); + safe_to_exit_level = 14; + break; + } + + catalog->audio_data = safe_to_exit_level; + dp_catalog_audio_sfe_level(catalog); +} + +static void dp_audio_enable(struct dp_audio_private *audio, bool enable) +{ + struct dp_catalog *catalog = audio->catalog; + + catalog->audio_data = enable; + dp_catalog_audio_enable(catalog); + + audio->engine_on = enable; +} + +static struct dp_audio_private *dp_audio_get_data(struct platform_device *pdev) +{ + struct dp_audio *dp_audio; + struct msm_dp *dp_display; + + if (!pdev) { + DRM_ERROR("invalid input\n"); + return ERR_PTR(-ENODEV); + } + + dp_display = platform_get_drvdata(pdev); + if (!dp_display) { + DRM_ERROR("invalid input\n"); + return ERR_PTR(-ENODEV); + } + + dp_audio = dp_display->dp_audio; + + if (!dp_audio) { + DRM_ERROR("invalid dp_audio data\n"); + return ERR_PTR(-EINVAL); + } + + return container_of(dp_audio, struct 
dp_audio_private, dp_audio); +} + +static int dp_audio_hook_plugged_cb(struct device *dev, void *data, + hdmi_codec_plugged_cb fn, + struct device *codec_dev) +{ + + struct platform_device *pdev; + struct msm_dp *dp_display; + + pdev = to_platform_device(dev); + if (!pdev) { + pr_err("invalid input\n"); + return -ENODEV; + } + + dp_display = platform_get_drvdata(pdev); + if (!dp_display) { + pr_err("invalid input\n"); + return -ENODEV; + } + + return dp_display_set_plugged_cb(dp_display, fn, codec_dev); +} + +static int dp_audio_get_eld(struct device *dev, + void *data, uint8_t *buf, size_t len) +{ + struct platform_device *pdev; + struct msm_dp *dp_display; + + pdev = to_platform_device(dev); + + if (!pdev) { + DRM_ERROR("invalid input\n"); + return -ENODEV; + } + + dp_display = platform_get_drvdata(pdev); + if (!dp_display) { + DRM_ERROR("invalid input\n"); + return -ENODEV; + } + + memcpy(buf, dp_display->connector->eld, + min(sizeof(dp_display->connector->eld), len)); + + return 0; +} + +int dp_audio_hw_params(struct device *dev, + void *data, + struct hdmi_codec_daifmt *daifmt, + struct hdmi_codec_params *params) +{ + int rc = 0; + struct dp_audio_private *audio; + struct platform_device *pdev; + struct msm_dp *dp_display; + + pdev = to_platform_device(dev); + dp_display = platform_get_drvdata(pdev); + + /* + * there could be cases where sound card can be opened even + * before OR even when DP is not connected . This can cause + * unclocked access as the audio subsystem relies on the DP + * driver to maintain the correct state of clocks. To protect + * such cases check for connection status and bail out if not + * connected. + */ + if (!dp_display->power_on) { + rc = -EINVAL; + goto end; + } + + audio = dp_audio_get_data(pdev); + if (IS_ERR(audio)) { + rc = PTR_ERR(audio); + goto end; + } + + audio->channels = params->channels; + + dp_audio_setup_sdp(audio); + dp_audio_setup_acr(audio); + dp_audio_safe_to_exit_level(audio); + dp_audio_enable(audio, true); + dp_display_signal_audio_start(dp_display); + dp_display->audio_enabled = true; + +end: + return rc; +} + +static void dp_audio_shutdown(struct device *dev, void *data) +{ + struct dp_audio_private *audio; + struct platform_device *pdev; + struct msm_dp *dp_display; + + pdev = to_platform_device(dev); + dp_display = platform_get_drvdata(pdev); + audio = dp_audio_get_data(pdev); + if (IS_ERR(audio)) { + DRM_ERROR("failed to get audio data\n"); + return; + } + + /* + * if audio was not enabled there is no need + * to execute the shutdown and we can bail out early. + * This also makes sure that we dont cause an unclocked + * access when audio subsystem calls this without DP being + * connected. 
is_connected cannot be used here as its set + * to false earlier than this call + */ + if (!dp_display->audio_enabled) + return; + + dp_audio_enable(audio, false); + /* signal the dp display to safely shutdown clocks */ + dp_display_signal_audio_complete(dp_display); +} + +static const struct hdmi_codec_ops dp_audio_codec_ops = { + .hw_params = dp_audio_hw_params, + .audio_shutdown = dp_audio_shutdown, + .get_eld = dp_audio_get_eld, + .hook_plugged_cb = dp_audio_hook_plugged_cb, +}; + +static struct hdmi_codec_pdata codec_data = { + .ops = &dp_audio_codec_ops, + .max_i2s_channels = 8, + .i2s = 1, +}; + +void dp_unregister_audio_driver(struct device *dev, struct dp_audio *dp_audio) +{ + struct dp_audio_private *audio_priv; + + audio_priv = container_of(dp_audio, struct dp_audio_private, dp_audio); + + if (audio_priv->audio_pdev) { + platform_device_unregister(audio_priv->audio_pdev); + audio_priv->audio_pdev = NULL; + } +} + +int dp_register_audio_driver(struct device *dev, + struct dp_audio *dp_audio) +{ + struct dp_audio_private *audio_priv; + + audio_priv = container_of(dp_audio, + struct dp_audio_private, dp_audio); + + audio_priv->audio_pdev = platform_device_register_data(dev, + HDMI_CODEC_DRV_NAME, + PLATFORM_DEVID_AUTO, + &codec_data, + sizeof(codec_data)); + return PTR_ERR_OR_ZERO(audio_priv->audio_pdev); +} + +struct dp_audio *dp_audio_get(struct platform_device *pdev, + struct dp_panel *panel, + struct dp_catalog *catalog) +{ + int rc = 0; + struct dp_audio_private *audio; + struct dp_audio *dp_audio; + + if (!pdev || !panel || !catalog) { + DRM_ERROR("invalid input\n"); + rc = -EINVAL; + goto error; + } + + audio = devm_kzalloc(&pdev->dev, sizeof(*audio), GFP_KERNEL); + if (!audio) { + rc = -ENOMEM; + goto error; + } + + audio->pdev = pdev; + audio->panel = panel; + audio->catalog = catalog; + + dp_audio = &audio->dp_audio; + + dp_catalog_audio_init(catalog); + + return dp_audio; +error: + return ERR_PTR(rc); +} + +void dp_audio_put(struct dp_audio *dp_audio) +{ + struct dp_audio_private *audio; + + if (!dp_audio) + return; + + audio = container_of(dp_audio, struct dp_audio_private, dp_audio); + + devm_kfree(&audio->pdev->dev, audio); +} diff --git a/drivers/gpu/drm/msm/dp/dp_audio.h b/drivers/gpu/drm/msm/dp/dp_audio.h new file mode 100644 index 000000000..4ab78880a --- /dev/null +++ b/drivers/gpu/drm/msm/dp/dp_audio.h @@ -0,0 +1,74 @@ +/* SPDX-License-Identifier: GPL-2.0-only */ +/* + * Copyright (c) 2017-2020, The Linux Foundation. All rights reserved. + */ + +#ifndef _DP_AUDIO_H_ +#define _DP_AUDIO_H_ + +#include <linux/platform_device.h> + +#include "dp_panel.h" +#include "dp_catalog.h" +#include <sound/hdmi-codec.h> + +/** + * struct dp_audio + * @lane_count: number of lanes configured in current session + * @bw_code: link rate's bandwidth code for current session + */ +struct dp_audio { + u32 lane_count; + u32 bw_code; +}; + +/** + * dp_audio_get() + * + * Creates and instance of dp audio. + * + * @pdev: caller's platform device instance. + * @panel: an instance of dp_panel module. + * @catalog: an instance of dp_catalog module. + * + * Returns the error code in case of failure, otherwize + * an instance of newly created dp_module. + */ +struct dp_audio *dp_audio_get(struct platform_device *pdev, + struct dp_panel *panel, + struct dp_catalog *catalog); + +/** + * dp_register_audio_driver() + * + * Registers DP device with hdmi_codec interface. + * + * @dev: DP device instance. + * @dp_audio: an instance of dp_audio module. 
+ * + * + * Returns the error code in case of failure, otherwise + * zero on success. + */ +int dp_register_audio_driver(struct device *dev, + struct dp_audio *dp_audio); + +void dp_unregister_audio_driver(struct device *dev, struct dp_audio *dp_audio); + +/** + * dp_audio_put() + * + * Cleans the dp_audio instance. + * + * @dp_audio: an instance of dp_audio. + */ +void dp_audio_put(struct dp_audio *dp_audio); + +int dp_audio_hw_params(struct device *dev, + void *data, + struct hdmi_codec_daifmt *daifmt, + struct hdmi_codec_params *params); + +#endif /* _DP_AUDIO_H_ */ + + diff --git a/drivers/gpu/drm/msm/dp/dp_aux.c b/drivers/gpu/drm/msm/dp/dp_aux.c new file mode 100644 index 000000000..84f9e3e5f --- /dev/null +++ b/drivers/gpu/drm/msm/dp/dp_aux.c @@ -0,0 +1,541 @@ +// SPDX-License-Identifier: GPL-2.0-only +/* + * Copyright (c) 2012-2020, The Linux Foundation. All rights reserved. + */ + +#include <linux/delay.h> +#include <drm/drm_print.h> + +#include "dp_reg.h" +#include "dp_aux.h" + +enum msm_dp_aux_err { + DP_AUX_ERR_NONE, + DP_AUX_ERR_ADDR, + DP_AUX_ERR_TOUT, + DP_AUX_ERR_NACK, + DP_AUX_ERR_DEFER, + DP_AUX_ERR_NACK_DEFER, + DP_AUX_ERR_PHY, +}; + +struct dp_aux_private { + struct device *dev; + struct dp_catalog *catalog; + + struct mutex mutex; + struct completion comp; + + enum msm_dp_aux_err aux_error_num; + u32 retry_cnt; + bool cmd_busy; + bool native; + bool read; + bool no_send_addr; + bool no_send_stop; + bool initted; + bool is_edp; + u32 offset; + u32 segment; + + struct drm_dp_aux dp_aux; +}; + +#define MAX_AUX_RETRIES 5 + +static ssize_t dp_aux_write(struct dp_aux_private *aux, + struct drm_dp_aux_msg *msg) +{ + u8 data[4]; + u32 reg; + ssize_t len; + u8 *msgdata = msg->buffer; + int const AUX_CMD_FIFO_LEN = 128; + int i = 0; + + if (aux->read) + len = 0; + else + len = msg->size; + + /* + * cmd fifo only has depth of 144 bytes + * limit buf length to 128 bytes here + */ + if (len > AUX_CMD_FIFO_LEN - 4) { + DRM_ERROR("buf size greater than allowed size of 128 bytes\n"); + return -EINVAL; + } + + /* Pack cmd and write to HW */ + data[0] = (msg->address >> 16) & 0xf; /* addr[19:16] */ + if (aux->read) + data[0] |= BIT(4); /* R/W */ + + data[1] = msg->address >> 8; /* addr[15:8] */ + data[2] = msg->address; /* addr[7:0] */ + data[3] = msg->size - 1; /* len[7:0] */ + + for (i = 0; i < len + 4; i++) { + reg = (i < 4) ? 
data[i] : msgdata[i - 4]; + reg <<= DP_AUX_DATA_OFFSET; + reg &= DP_AUX_DATA_MASK; + reg |= DP_AUX_DATA_WRITE; + /* index = 0, write */ + if (i == 0) + reg |= DP_AUX_DATA_INDEX_WRITE; + aux->catalog->aux_data = reg; + dp_catalog_aux_write_data(aux->catalog); + } + + dp_catalog_aux_clear_trans(aux->catalog, false); + dp_catalog_aux_clear_hw_interrupts(aux->catalog); + + reg = 0; /* Transaction number == 1 */ + if (!aux->native) { /* i2c */ + reg |= DP_AUX_TRANS_CTRL_I2C; + + if (aux->no_send_addr) + reg |= DP_AUX_TRANS_CTRL_NO_SEND_ADDR; + + if (aux->no_send_stop) + reg |= DP_AUX_TRANS_CTRL_NO_SEND_STOP; + } + + reg |= DP_AUX_TRANS_CTRL_GO; + aux->catalog->aux_data = reg; + dp_catalog_aux_write_trans(aux->catalog); + + return len; +} + +static ssize_t dp_aux_cmd_fifo_tx(struct dp_aux_private *aux, + struct drm_dp_aux_msg *msg) +{ + ssize_t ret; + unsigned long time_left; + + reinit_completion(&aux->comp); + + ret = dp_aux_write(aux, msg); + if (ret < 0) + return ret; + + time_left = wait_for_completion_timeout(&aux->comp, + msecs_to_jiffies(250)); + if (!time_left) + return -ETIMEDOUT; + + return ret; +} + +static ssize_t dp_aux_cmd_fifo_rx(struct dp_aux_private *aux, + struct drm_dp_aux_msg *msg) +{ + u32 data; + u8 *dp; + u32 i, actual_i; + u32 len = msg->size; + + dp_catalog_aux_clear_trans(aux->catalog, true); + + data = DP_AUX_DATA_INDEX_WRITE; /* INDEX_WRITE */ + data |= DP_AUX_DATA_READ; /* read */ + + aux->catalog->aux_data = data; + dp_catalog_aux_write_data(aux->catalog); + + dp = msg->buffer; + + /* discard first byte */ + data = dp_catalog_aux_read_data(aux->catalog); + + for (i = 0; i < len; i++) { + data = dp_catalog_aux_read_data(aux->catalog); + *dp++ = (u8)((data >> DP_AUX_DATA_OFFSET) & 0xff); + + actual_i = (data >> DP_AUX_DATA_INDEX_OFFSET) & 0xFF; + if (i != actual_i) + break; + } + + return i; +} + +static void dp_aux_update_offset_and_segment(struct dp_aux_private *aux, + struct drm_dp_aux_msg *input_msg) +{ + u32 edid_address = 0x50; + u32 segment_address = 0x30; + bool i2c_read = input_msg->request & + (DP_AUX_I2C_READ & DP_AUX_NATIVE_READ); + u8 *data; + + if (aux->native || i2c_read || ((input_msg->address != edid_address) && + (input_msg->address != segment_address))) + return; + + + data = input_msg->buffer; + if (input_msg->address == segment_address) + aux->segment = *data; + else + aux->offset = *data; +} + +/** + * dp_aux_transfer_helper() - helper function for EDID read transactions + * + * @aux: DP AUX private structure + * @input_msg: input message from DRM upstream APIs + * @send_seg: send the segment to sink + * + * return: void + * + * This helper function is used to fix EDID reads for non-compliant + * sinks that do not handle the i2c middle-of-transaction flag correctly. + */ +static void dp_aux_transfer_helper(struct dp_aux_private *aux, + struct drm_dp_aux_msg *input_msg, + bool send_seg) +{ + struct drm_dp_aux_msg helper_msg; + u32 message_size = 0x10; + u32 segment_address = 0x30; + u32 const edid_block_length = 0x80; + bool i2c_mot = input_msg->request & DP_AUX_I2C_MOT; + bool i2c_read = input_msg->request & + (DP_AUX_I2C_READ & DP_AUX_NATIVE_READ); + + if (!i2c_mot || !i2c_read || (input_msg->size == 0)) + return; + + /* + * Sending the segment value and EDID offset will be performed + * from the DRM upstream EDID driver for each block. Avoid + * duplicate AUX transactions related to this while reading the + * first 16 bytes of each block. 
+ */ + if (!(aux->offset % edid_block_length) || !send_seg) + goto end; + + aux->read = false; + aux->cmd_busy = true; + aux->no_send_addr = true; + aux->no_send_stop = true; + + /* + * Send the segment address for every i2c read in which the + * middle-of-tranaction flag is set. This is required to support EDID + * reads of more than 2 blocks as the segment address is reset to 0 + * since we are overriding the middle-of-transaction flag for read + * transactions. + */ + + if (aux->segment) { + memset(&helper_msg, 0, sizeof(helper_msg)); + helper_msg.address = segment_address; + helper_msg.buffer = &aux->segment; + helper_msg.size = 1; + dp_aux_cmd_fifo_tx(aux, &helper_msg); + } + + /* + * Send the offset address for every i2c read in which the + * middle-of-transaction flag is set. This will ensure that the sink + * will update its read pointer and return the correct portion of the + * EDID buffer in the subsequent i2c read trasntion triggered in the + * native AUX transfer function. + */ + memset(&helper_msg, 0, sizeof(helper_msg)); + helper_msg.address = input_msg->address; + helper_msg.buffer = &aux->offset; + helper_msg.size = 1; + dp_aux_cmd_fifo_tx(aux, &helper_msg); + +end: + aux->offset += message_size; + if (aux->offset == 0x80 || aux->offset == 0x100) + aux->segment = 0x0; /* reset segment at end of block */ +} + +/* + * This function does the real job to process an AUX transaction. + * It will call aux_reset() function to reset the AUX channel, + * if the waiting is timeout. + */ +static ssize_t dp_aux_transfer(struct drm_dp_aux *dp_aux, + struct drm_dp_aux_msg *msg) +{ + ssize_t ret; + int const aux_cmd_native_max = 16; + int const aux_cmd_i2c_max = 128; + struct dp_aux_private *aux; + + aux = container_of(dp_aux, struct dp_aux_private, dp_aux); + + aux->native = msg->request & (DP_AUX_NATIVE_WRITE & DP_AUX_NATIVE_READ); + + /* Ignore address only message */ + if (msg->size == 0 || !msg->buffer) { + msg->reply = aux->native ? + DP_AUX_NATIVE_REPLY_ACK : DP_AUX_I2C_REPLY_ACK; + return msg->size; + } + + /* msg sanity check */ + if ((aux->native && msg->size > aux_cmd_native_max) || + msg->size > aux_cmd_i2c_max) { + DRM_ERROR("%s: invalid msg: size(%zu), request(%x)\n", + __func__, msg->size, msg->request); + return -EINVAL; + } + + mutex_lock(&aux->mutex); + if (!aux->initted) { + ret = -EIO; + goto exit; + } + + /* + * For eDP it's important to give a reasonably long wait here for HPD + * to be asserted. This is because the panel driver may have _just_ + * turned on the panel and then tried to do an AUX transfer. The panel + * driver has no way of knowing when the panel is ready, so it's up + * to us to wait. For DP we never get into this situation so let's + * avoid ever doing the extra long wait for DP. 
+ */ + if (aux->is_edp) { + ret = dp_catalog_aux_wait_for_hpd_connect_state(aux->catalog); + if (ret) { + DRM_DEBUG_DP("Panel not ready for aux transactions\n"); + goto exit; + } + } + + dp_aux_update_offset_and_segment(aux, msg); + dp_aux_transfer_helper(aux, msg, true); + + aux->read = msg->request & (DP_AUX_I2C_READ & DP_AUX_NATIVE_READ); + aux->cmd_busy = true; + + if (aux->read) { + aux->no_send_addr = true; + aux->no_send_stop = false; + } else { + aux->no_send_addr = true; + aux->no_send_stop = true; + } + + ret = dp_aux_cmd_fifo_tx(aux, msg); + if (ret < 0) { + if (aux->native) { + aux->retry_cnt++; + if (!(aux->retry_cnt % MAX_AUX_RETRIES)) + dp_catalog_aux_update_cfg(aux->catalog); + } + /* reset aux if link is in connected state */ + if (dp_catalog_link_is_connected(aux->catalog)) + dp_catalog_aux_reset(aux->catalog); + } else { + aux->retry_cnt = 0; + switch (aux->aux_error_num) { + case DP_AUX_ERR_NONE: + if (aux->read) + ret = dp_aux_cmd_fifo_rx(aux, msg); + msg->reply = aux->native ? DP_AUX_NATIVE_REPLY_ACK : DP_AUX_I2C_REPLY_ACK; + break; + case DP_AUX_ERR_DEFER: + msg->reply = aux->native ? DP_AUX_NATIVE_REPLY_DEFER : DP_AUX_I2C_REPLY_DEFER; + break; + case DP_AUX_ERR_PHY: + case DP_AUX_ERR_ADDR: + case DP_AUX_ERR_NACK: + case DP_AUX_ERR_NACK_DEFER: + msg->reply = aux->native ? DP_AUX_NATIVE_REPLY_NACK : DP_AUX_I2C_REPLY_NACK; + break; + case DP_AUX_ERR_TOUT: + ret = -ETIMEDOUT; + break; + } + } + + aux->cmd_busy = false; + +exit: + mutex_unlock(&aux->mutex); + + return ret; +} + +void dp_aux_isr(struct drm_dp_aux *dp_aux) +{ + u32 isr; + struct dp_aux_private *aux; + + if (!dp_aux) { + DRM_ERROR("invalid input\n"); + return; + } + + aux = container_of(dp_aux, struct dp_aux_private, dp_aux); + + isr = dp_catalog_aux_get_irq(aux->catalog); + + /* no interrupts pending, return immediately */ + if (!isr) + return; + + if (!aux->cmd_busy) { + DRM_ERROR("Unexpected DP AUX IRQ %#010x when not busy\n", isr); + return; + } + + /* + * The logic below assumes only one error bit is set (other than "done" + * which can apparently be set at the same time as some of the other + * bits). Warn if more than one get set so we know we need to improve + * the logic. 
+ */ + if (hweight32(isr & ~DP_INTR_AUX_XFER_DONE) > 1) + DRM_WARN("Some DP AUX interrupts unhandled: %#010x\n", isr); + + if (isr & DP_INTR_AUX_ERROR) { + aux->aux_error_num = DP_AUX_ERR_PHY; + dp_catalog_aux_clear_hw_interrupts(aux->catalog); + } else if (isr & DP_INTR_NACK_DEFER) { + aux->aux_error_num = DP_AUX_ERR_NACK_DEFER; + } else if (isr & DP_INTR_WRONG_ADDR) { + aux->aux_error_num = DP_AUX_ERR_ADDR; + } else if (isr & DP_INTR_TIMEOUT) { + aux->aux_error_num = DP_AUX_ERR_TOUT; + } else if (!aux->native && (isr & DP_INTR_I2C_NACK)) { + aux->aux_error_num = DP_AUX_ERR_NACK; + } else if (!aux->native && (isr & DP_INTR_I2C_DEFER)) { + if (isr & DP_INTR_AUX_XFER_DONE) + aux->aux_error_num = DP_AUX_ERR_NACK; + else + aux->aux_error_num = DP_AUX_ERR_DEFER; + } else if (isr & DP_INTR_AUX_XFER_DONE) { + aux->aux_error_num = DP_AUX_ERR_NONE; + } else { + DRM_WARN("Unexpected interrupt: %#010x\n", isr); + return; + } + + complete(&aux->comp); +} + +void dp_aux_reconfig(struct drm_dp_aux *dp_aux) +{ + struct dp_aux_private *aux; + + aux = container_of(dp_aux, struct dp_aux_private, dp_aux); + + dp_catalog_aux_update_cfg(aux->catalog); + dp_catalog_aux_reset(aux->catalog); +} + +void dp_aux_init(struct drm_dp_aux *dp_aux) +{ + struct dp_aux_private *aux; + + if (!dp_aux) { + DRM_ERROR("invalid input\n"); + return; + } + + aux = container_of(dp_aux, struct dp_aux_private, dp_aux); + + mutex_lock(&aux->mutex); + + dp_catalog_aux_enable(aux->catalog, true); + aux->retry_cnt = 0; + aux->initted = true; + + mutex_unlock(&aux->mutex); +} + +void dp_aux_deinit(struct drm_dp_aux *dp_aux) +{ + struct dp_aux_private *aux; + + aux = container_of(dp_aux, struct dp_aux_private, dp_aux); + + mutex_lock(&aux->mutex); + + aux->initted = false; + dp_catalog_aux_enable(aux->catalog, false); + + mutex_unlock(&aux->mutex); +} + +int dp_aux_register(struct drm_dp_aux *dp_aux) +{ + struct dp_aux_private *aux; + int ret; + + if (!dp_aux) { + DRM_ERROR("invalid input\n"); + return -EINVAL; + } + + aux = container_of(dp_aux, struct dp_aux_private, dp_aux); + + aux->dp_aux.name = "dpu_dp_aux"; + aux->dp_aux.dev = aux->dev; + aux->dp_aux.transfer = dp_aux_transfer; + ret = drm_dp_aux_register(&aux->dp_aux); + if (ret) { + DRM_ERROR("%s: failed to register drm aux: %d\n", __func__, + ret); + return ret; + } + + return 0; +} + +void dp_aux_unregister(struct drm_dp_aux *dp_aux) +{ + drm_dp_aux_unregister(dp_aux); +} + +struct drm_dp_aux *dp_aux_get(struct device *dev, struct dp_catalog *catalog, + bool is_edp) +{ + struct dp_aux_private *aux; + + if (!catalog) { + DRM_ERROR("invalid input\n"); + return ERR_PTR(-ENODEV); + } + + aux = devm_kzalloc(dev, sizeof(*aux), GFP_KERNEL); + if (!aux) + return ERR_PTR(-ENOMEM); + + init_completion(&aux->comp); + aux->cmd_busy = false; + aux->is_edp = is_edp; + mutex_init(&aux->mutex); + + aux->dev = dev; + aux->catalog = catalog; + aux->retry_cnt = 0; + + return &aux->dp_aux; +} + +void dp_aux_put(struct drm_dp_aux *dp_aux) +{ + struct dp_aux_private *aux; + + if (!dp_aux) + return; + + aux = container_of(dp_aux, struct dp_aux_private, dp_aux); + + mutex_destroy(&aux->mutex); + + devm_kfree(aux->dev, aux); +} diff --git a/drivers/gpu/drm/msm/dp/dp_aux.h b/drivers/gpu/drm/msm/dp/dp_aux.h new file mode 100644 index 000000000..e930974bc --- /dev/null +++ b/drivers/gpu/drm/msm/dp/dp_aux.h @@ -0,0 +1,23 @@ +/* SPDX-License-Identifier: GPL-2.0-only */ +/* + * Copyright (c) 2012-2020, The Linux Foundation. All rights reserved. 
+ */ + +#ifndef _DP_AUX_H_ +#define _DP_AUX_H_ + +#include "dp_catalog.h" +#include <drm/display/drm_dp_helper.h> + +int dp_aux_register(struct drm_dp_aux *dp_aux); +void dp_aux_unregister(struct drm_dp_aux *dp_aux); +void dp_aux_isr(struct drm_dp_aux *dp_aux); +void dp_aux_init(struct drm_dp_aux *dp_aux); +void dp_aux_deinit(struct drm_dp_aux *dp_aux); +void dp_aux_reconfig(struct drm_dp_aux *dp_aux); + +struct drm_dp_aux *dp_aux_get(struct device *dev, struct dp_catalog *catalog, + bool is_edp); +void dp_aux_put(struct drm_dp_aux *aux); + +#endif /*__DP_AUX_H_*/ diff --git a/drivers/gpu/drm/msm/dp/dp_catalog.c b/drivers/gpu/drm/msm/dp/dp_catalog.c new file mode 100644 index 000000000..421391755 --- /dev/null +++ b/drivers/gpu/drm/msm/dp/dp_catalog.c @@ -0,0 +1,1096 @@ +// SPDX-License-Identifier: GPL-2.0-only +/* + * Copyright (c) 2017-2020, The Linux Foundation. All rights reserved. + */ + +#define pr_fmt(fmt) "[drm-dp] %s: " fmt, __func__ + +#include <linux/delay.h> +#include <linux/iopoll.h> +#include <linux/phy/phy.h> +#include <linux/phy/phy-dp.h> +#include <linux/rational.h> +#include <drm/display/drm_dp_helper.h> +#include <drm/drm_print.h> + +#include "dp_catalog.h" +#include "dp_reg.h" + +#define POLLING_SLEEP_US 1000 +#define POLLING_TIMEOUT_US 10000 + +#define SCRAMBLER_RESET_COUNT_VALUE 0xFC + +#define DP_INTERRUPT_STATUS_ACK_SHIFT 1 +#define DP_INTERRUPT_STATUS_MASK_SHIFT 2 + +#define DP_INTF_CONFIG_DATABUS_WIDEN BIT(4) + +#define DP_INTERRUPT_STATUS1 \ + (DP_INTR_AUX_XFER_DONE| \ + DP_INTR_WRONG_ADDR | DP_INTR_TIMEOUT | \ + DP_INTR_NACK_DEFER | DP_INTR_WRONG_DATA_CNT | \ + DP_INTR_I2C_NACK | DP_INTR_I2C_DEFER | \ + DP_INTR_PLL_UNLOCKED | DP_INTR_AUX_ERROR) + +#define DP_INTERRUPT_STATUS1_ACK \ + (DP_INTERRUPT_STATUS1 << DP_INTERRUPT_STATUS_ACK_SHIFT) +#define DP_INTERRUPT_STATUS1_MASK \ + (DP_INTERRUPT_STATUS1 << DP_INTERRUPT_STATUS_MASK_SHIFT) + +#define DP_INTERRUPT_STATUS2 \ + (DP_INTR_READY_FOR_VIDEO | DP_INTR_IDLE_PATTERN_SENT | \ + DP_INTR_FRAME_END | DP_INTR_CRC_UPDATED) + +#define DP_INTERRUPT_STATUS2_ACK \ + (DP_INTERRUPT_STATUS2 << DP_INTERRUPT_STATUS_ACK_SHIFT) +#define DP_INTERRUPT_STATUS2_MASK \ + (DP_INTERRUPT_STATUS2 << DP_INTERRUPT_STATUS_MASK_SHIFT) + +struct dp_catalog_private { + struct device *dev; + struct drm_device *drm_dev; + struct dp_io *io; + u32 (*audio_map)[DP_AUDIO_SDP_HEADER_MAX]; + struct dp_catalog dp_catalog; + u8 aux_lut_cfg_index[PHY_AUX_CFG_MAX]; +}; + +void dp_catalog_snapshot(struct dp_catalog *dp_catalog, struct msm_disp_state *disp_state) +{ + struct dp_catalog_private *catalog = container_of(dp_catalog, + struct dp_catalog_private, dp_catalog); + struct dss_io_data *dss = &catalog->io->dp_controller; + + msm_disp_snapshot_add_block(disp_state, dss->ahb.len, dss->ahb.base, "dp_ahb"); + msm_disp_snapshot_add_block(disp_state, dss->aux.len, dss->aux.base, "dp_aux"); + msm_disp_snapshot_add_block(disp_state, dss->link.len, dss->link.base, "dp_link"); + msm_disp_snapshot_add_block(disp_state, dss->p0.len, dss->p0.base, "dp_p0"); +} + +static inline u32 dp_read_aux(struct dp_catalog_private *catalog, u32 offset) +{ + return readl_relaxed(catalog->io->dp_controller.aux.base + offset); +} + +static inline void dp_write_aux(struct dp_catalog_private *catalog, + u32 offset, u32 data) +{ + /* + * To make sure aux reg writes happens before any other operation, + * this function uses writel() instread of writel_relaxed() + */ + writel(data, catalog->io->dp_controller.aux.base + offset); +} + +static inline u32 dp_read_ahb(const struct 
dp_catalog_private *catalog, u32 offset) +{ + return readl_relaxed(catalog->io->dp_controller.ahb.base + offset); +} + +static inline void dp_write_ahb(struct dp_catalog_private *catalog, + u32 offset, u32 data) +{ + /* + * To make sure phy reg writes happens before any other operation, + * this function uses writel() instread of writel_relaxed() + */ + writel(data, catalog->io->dp_controller.ahb.base + offset); +} + +static inline void dp_write_p0(struct dp_catalog_private *catalog, + u32 offset, u32 data) +{ + /* + * To make sure interface reg writes happens before any other operation, + * this function uses writel() instread of writel_relaxed() + */ + writel(data, catalog->io->dp_controller.p0.base + offset); +} + +static inline u32 dp_read_p0(struct dp_catalog_private *catalog, + u32 offset) +{ + /* + * To make sure interface reg writes happens before any other operation, + * this function uses writel() instread of writel_relaxed() + */ + return readl_relaxed(catalog->io->dp_controller.p0.base + offset); +} + +static inline u32 dp_read_link(struct dp_catalog_private *catalog, u32 offset) +{ + return readl_relaxed(catalog->io->dp_controller.link.base + offset); +} + +static inline void dp_write_link(struct dp_catalog_private *catalog, + u32 offset, u32 data) +{ + /* + * To make sure link reg writes happens before any other operation, + * this function uses writel() instread of writel_relaxed() + */ + writel(data, catalog->io->dp_controller.link.base + offset); +} + +/* aux related catalog functions */ +u32 dp_catalog_aux_read_data(struct dp_catalog *dp_catalog) +{ + struct dp_catalog_private *catalog = container_of(dp_catalog, + struct dp_catalog_private, dp_catalog); + + return dp_read_aux(catalog, REG_DP_AUX_DATA); +} + +int dp_catalog_aux_write_data(struct dp_catalog *dp_catalog) +{ + struct dp_catalog_private *catalog = container_of(dp_catalog, + struct dp_catalog_private, dp_catalog); + + dp_write_aux(catalog, REG_DP_AUX_DATA, dp_catalog->aux_data); + return 0; +} + +int dp_catalog_aux_write_trans(struct dp_catalog *dp_catalog) +{ + struct dp_catalog_private *catalog = container_of(dp_catalog, + struct dp_catalog_private, dp_catalog); + + dp_write_aux(catalog, REG_DP_AUX_TRANS_CTRL, dp_catalog->aux_data); + return 0; +} + +int dp_catalog_aux_clear_trans(struct dp_catalog *dp_catalog, bool read) +{ + u32 data; + struct dp_catalog_private *catalog = container_of(dp_catalog, + struct dp_catalog_private, dp_catalog); + + if (read) { + data = dp_read_aux(catalog, REG_DP_AUX_TRANS_CTRL); + data &= ~DP_AUX_TRANS_CTRL_GO; + dp_write_aux(catalog, REG_DP_AUX_TRANS_CTRL, data); + } else { + dp_write_aux(catalog, REG_DP_AUX_TRANS_CTRL, 0); + } + return 0; +} + +int dp_catalog_aux_clear_hw_interrupts(struct dp_catalog *dp_catalog) +{ + struct dp_catalog_private *catalog = container_of(dp_catalog, + struct dp_catalog_private, dp_catalog); + + dp_read_aux(catalog, REG_DP_PHY_AUX_INTERRUPT_STATUS); + dp_write_aux(catalog, REG_DP_PHY_AUX_INTERRUPT_CLEAR, 0x1f); + dp_write_aux(catalog, REG_DP_PHY_AUX_INTERRUPT_CLEAR, 0x9f); + dp_write_aux(catalog, REG_DP_PHY_AUX_INTERRUPT_CLEAR, 0); + return 0; +} + +/** + * dp_catalog_aux_reset() - reset AUX controller + * + * @dp_catalog: DP catalog structure + * + * return: void + * + * This function reset AUX controller + * + * NOTE: reset AUX controller will also clear any pending HPD related interrupts + * + */ +void dp_catalog_aux_reset(struct dp_catalog *dp_catalog) +{ + u32 aux_ctrl; + struct dp_catalog_private *catalog = container_of(dp_catalog, + struct 
dp_catalog_private, dp_catalog); + + aux_ctrl = dp_read_aux(catalog, REG_DP_AUX_CTRL); + + aux_ctrl |= DP_AUX_CTRL_RESET; + dp_write_aux(catalog, REG_DP_AUX_CTRL, aux_ctrl); + usleep_range(1000, 1100); /* h/w recommended delay */ + + aux_ctrl &= ~DP_AUX_CTRL_RESET; + dp_write_aux(catalog, REG_DP_AUX_CTRL, aux_ctrl); +} + +void dp_catalog_aux_enable(struct dp_catalog *dp_catalog, bool enable) +{ + u32 aux_ctrl; + struct dp_catalog_private *catalog = container_of(dp_catalog, + struct dp_catalog_private, dp_catalog); + + aux_ctrl = dp_read_aux(catalog, REG_DP_AUX_CTRL); + + if (enable) { + dp_write_aux(catalog, REG_DP_TIMEOUT_COUNT, 0xffff); + dp_write_aux(catalog, REG_DP_AUX_LIMITS, 0xffff); + aux_ctrl |= DP_AUX_CTRL_ENABLE; + } else { + aux_ctrl &= ~DP_AUX_CTRL_ENABLE; + } + + dp_write_aux(catalog, REG_DP_AUX_CTRL, aux_ctrl); +} + +void dp_catalog_aux_update_cfg(struct dp_catalog *dp_catalog) +{ + struct dp_catalog_private *catalog = container_of(dp_catalog, + struct dp_catalog_private, dp_catalog); + struct dp_io *dp_io = catalog->io; + struct phy *phy = dp_io->phy; + + phy_calibrate(phy); +} + +int dp_catalog_aux_wait_for_hpd_connect_state(struct dp_catalog *dp_catalog) +{ + u32 state; + struct dp_catalog_private *catalog = container_of(dp_catalog, + struct dp_catalog_private, dp_catalog); + + /* poll for hpd connected status every 2ms and timeout after 500ms */ + return readl_poll_timeout(catalog->io->dp_controller.aux.base + + REG_DP_DP_HPD_INT_STATUS, + state, state & DP_DP_HPD_STATE_STATUS_CONNECTED, + 2000, 500000); +} + +static void dump_regs(void __iomem *base, int len) +{ + int i; + u32 x0, x4, x8, xc; + u32 addr_off = 0; + + len = DIV_ROUND_UP(len, 16); + for (i = 0; i < len; i++) { + x0 = readl_relaxed(base + addr_off); + x4 = readl_relaxed(base + addr_off + 0x04); + x8 = readl_relaxed(base + addr_off + 0x08); + xc = readl_relaxed(base + addr_off + 0x0c); + + pr_info("%08x: %08x %08x %08x %08x", addr_off, x0, x4, x8, xc); + addr_off += 16; + } +} + +void dp_catalog_dump_regs(struct dp_catalog *dp_catalog) +{ + struct dp_catalog_private *catalog = container_of(dp_catalog, + struct dp_catalog_private, dp_catalog); + struct dss_io_data *io = &catalog->io->dp_controller; + + pr_info("AHB regs\n"); + dump_regs(io->ahb.base, io->ahb.len); + + pr_info("AUXCLK regs\n"); + dump_regs(io->aux.base, io->aux.len); + + pr_info("LCLK regs\n"); + dump_regs(io->link.base, io->link.len); + + pr_info("P0CLK regs\n"); + dump_regs(io->p0.base, io->p0.len); +} + +u32 dp_catalog_aux_get_irq(struct dp_catalog *dp_catalog) +{ + struct dp_catalog_private *catalog = container_of(dp_catalog, + struct dp_catalog_private, dp_catalog); + u32 intr, intr_ack; + + intr = dp_read_ahb(catalog, REG_DP_INTR_STATUS); + intr &= ~DP_INTERRUPT_STATUS1_MASK; + intr_ack = (intr & DP_INTERRUPT_STATUS1) + << DP_INTERRUPT_STATUS_ACK_SHIFT; + dp_write_ahb(catalog, REG_DP_INTR_STATUS, intr_ack | + DP_INTERRUPT_STATUS1_MASK); + + return intr; + +} + +/* controller related catalog functions */ +void dp_catalog_ctrl_update_transfer_unit(struct dp_catalog *dp_catalog, + u32 dp_tu, u32 valid_boundary, + u32 valid_boundary2) +{ + struct dp_catalog_private *catalog = container_of(dp_catalog, + struct dp_catalog_private, dp_catalog); + + dp_write_link(catalog, REG_DP_VALID_BOUNDARY, valid_boundary); + dp_write_link(catalog, REG_DP_TU, dp_tu); + dp_write_link(catalog, REG_DP_VALID_BOUNDARY_2, valid_boundary2); +} + +void dp_catalog_ctrl_state_ctrl(struct dp_catalog *dp_catalog, u32 state) +{ + struct dp_catalog_private *catalog = 
container_of(dp_catalog, + struct dp_catalog_private, dp_catalog); + + dp_write_link(catalog, REG_DP_STATE_CTRL, state); +} + +void dp_catalog_ctrl_config_ctrl(struct dp_catalog *dp_catalog, u32 cfg) +{ + struct dp_catalog_private *catalog = container_of(dp_catalog, + struct dp_catalog_private, dp_catalog); + + drm_dbg_dp(catalog->drm_dev, "DP_CONFIGURATION_CTRL=0x%x\n", cfg); + + dp_write_link(catalog, REG_DP_CONFIGURATION_CTRL, cfg); +} + +void dp_catalog_ctrl_lane_mapping(struct dp_catalog *dp_catalog) +{ + struct dp_catalog_private *catalog = container_of(dp_catalog, + struct dp_catalog_private, dp_catalog); + u32 ln_0 = 0, ln_1 = 1, ln_2 = 2, ln_3 = 3; /* One-to-One mapping */ + u32 ln_mapping; + + ln_mapping = ln_0 << LANE0_MAPPING_SHIFT; + ln_mapping |= ln_1 << LANE1_MAPPING_SHIFT; + ln_mapping |= ln_2 << LANE2_MAPPING_SHIFT; + ln_mapping |= ln_3 << LANE3_MAPPING_SHIFT; + + dp_write_link(catalog, REG_DP_LOGICAL2PHYSICAL_LANE_MAPPING, + ln_mapping); +} + +void dp_catalog_ctrl_mainlink_ctrl(struct dp_catalog *dp_catalog, + bool enable) +{ + u32 mainlink_ctrl; + struct dp_catalog_private *catalog = container_of(dp_catalog, + struct dp_catalog_private, dp_catalog); + + drm_dbg_dp(catalog->drm_dev, "enable=%d\n", enable); + if (enable) { + /* + * To make sure link reg writes happens before other operation, + * dp_write_link() function uses writel() + */ + mainlink_ctrl = dp_read_link(catalog, REG_DP_MAINLINK_CTRL); + + mainlink_ctrl &= ~(DP_MAINLINK_CTRL_RESET | + DP_MAINLINK_CTRL_ENABLE); + dp_write_link(catalog, REG_DP_MAINLINK_CTRL, mainlink_ctrl); + + mainlink_ctrl |= DP_MAINLINK_CTRL_RESET; + dp_write_link(catalog, REG_DP_MAINLINK_CTRL, mainlink_ctrl); + + mainlink_ctrl &= ~DP_MAINLINK_CTRL_RESET; + dp_write_link(catalog, REG_DP_MAINLINK_CTRL, mainlink_ctrl); + + mainlink_ctrl |= (DP_MAINLINK_CTRL_ENABLE | + DP_MAINLINK_FB_BOUNDARY_SEL); + dp_write_link(catalog, REG_DP_MAINLINK_CTRL, mainlink_ctrl); + } else { + mainlink_ctrl = dp_read_link(catalog, REG_DP_MAINLINK_CTRL); + mainlink_ctrl &= ~DP_MAINLINK_CTRL_ENABLE; + dp_write_link(catalog, REG_DP_MAINLINK_CTRL, mainlink_ctrl); + } +} + +void dp_catalog_ctrl_config_misc(struct dp_catalog *dp_catalog, + u32 colorimetry_cfg, + u32 test_bits_depth) +{ + u32 misc_val; + struct dp_catalog_private *catalog = container_of(dp_catalog, + struct dp_catalog_private, dp_catalog); + + misc_val = dp_read_link(catalog, REG_DP_MISC1_MISC0); + + /* clear bpp bits */ + misc_val &= ~(0x07 << DP_MISC0_TEST_BITS_DEPTH_SHIFT); + misc_val |= colorimetry_cfg << DP_MISC0_COLORIMETRY_CFG_SHIFT; + misc_val |= test_bits_depth << DP_MISC0_TEST_BITS_DEPTH_SHIFT; + /* Configure clock to synchronous mode */ + misc_val |= DP_MISC0_SYNCHRONOUS_CLK; + + drm_dbg_dp(catalog->drm_dev, "misc settings = 0x%x\n", misc_val); + dp_write_link(catalog, REG_DP_MISC1_MISC0, misc_val); +} + +void dp_catalog_ctrl_config_msa(struct dp_catalog *dp_catalog, + u32 rate, u32 stream_rate_khz, + bool fixed_nvid) +{ + u32 pixel_m, pixel_n; + u32 mvid, nvid, pixel_div = 0, dispcc_input_rate; + u32 const nvid_fixed = DP_LINK_CONSTANT_N_VALUE; + u32 const link_rate_hbr2 = 540000; + u32 const link_rate_hbr3 = 810000; + unsigned long den, num; + + struct dp_catalog_private *catalog = container_of(dp_catalog, + struct dp_catalog_private, dp_catalog); + + if (rate == link_rate_hbr3) + pixel_div = 6; + else if (rate == 162000 || rate == 270000) + pixel_div = 2; + else if (rate == link_rate_hbr2) + pixel_div = 4; + else + DRM_ERROR("Invalid pixel mux divider\n"); + + dispcc_input_rate = (rate * 
10) / pixel_div; + + rational_best_approximation(dispcc_input_rate, stream_rate_khz, + (unsigned long)(1 << 16) - 1, + (unsigned long)(1 << 16) - 1, &den, &num); + + den = ~(den - num); + den = den & 0xFFFF; + pixel_m = num; + pixel_n = den; + + mvid = (pixel_m & 0xFFFF) * 5; + nvid = (0xFFFF & (~pixel_n)) + (pixel_m & 0xFFFF); + + if (nvid < nvid_fixed) { + u32 temp; + + temp = (nvid_fixed / nvid) * nvid; + mvid = (nvid_fixed / nvid) * mvid; + nvid = temp; + } + + if (link_rate_hbr2 == rate) + nvid *= 2; + + if (link_rate_hbr3 == rate) + nvid *= 3; + + drm_dbg_dp(catalog->drm_dev, "mvid=0x%x, nvid=0x%x\n", mvid, nvid); + dp_write_link(catalog, REG_DP_SOFTWARE_MVID, mvid); + dp_write_link(catalog, REG_DP_SOFTWARE_NVID, nvid); + dp_write_p0(catalog, MMSS_DP_DSC_DTO, 0x0); +} + +int dp_catalog_ctrl_set_pattern_state_bit(struct dp_catalog *dp_catalog, + u32 state_bit) +{ + int bit, ret; + u32 data; + struct dp_catalog_private *catalog = container_of(dp_catalog, + struct dp_catalog_private, dp_catalog); + + bit = BIT(state_bit - 1); + drm_dbg_dp(catalog->drm_dev, "hw: bit=%d train=%d\n", bit, state_bit); + dp_catalog_ctrl_state_ctrl(dp_catalog, bit); + + bit = BIT(state_bit - 1) << DP_MAINLINK_READY_LINK_TRAINING_SHIFT; + + /* Poll for mainlink ready status */ + ret = readx_poll_timeout(readl, catalog->io->dp_controller.link.base + + REG_DP_MAINLINK_READY, + data, data & bit, + POLLING_SLEEP_US, POLLING_TIMEOUT_US); + if (ret < 0) { + DRM_ERROR("set state_bit for link_train=%d failed\n", state_bit); + return ret; + } + return 0; +} + +/** + * dp_catalog_hw_revision() - retrieve DP hw revision + * + * @dp_catalog: DP catalog structure + * + * Return: DP controller hw revision + * + */ +u32 dp_catalog_hw_revision(const struct dp_catalog *dp_catalog) +{ + const struct dp_catalog_private *catalog = container_of(dp_catalog, + struct dp_catalog_private, dp_catalog); + + return dp_read_ahb(catalog, REG_DP_HW_VERSION); +} + +/** + * dp_catalog_ctrl_reset() - reset DP controller + * + * @dp_catalog: DP catalog structure + * + * return: void + * + * This function reset the DP controller + * + * NOTE: reset DP controller will also clear any pending HPD related interrupts + * + */ +void dp_catalog_ctrl_reset(struct dp_catalog *dp_catalog) +{ + u32 sw_reset; + struct dp_catalog_private *catalog = container_of(dp_catalog, + struct dp_catalog_private, dp_catalog); + + sw_reset = dp_read_ahb(catalog, REG_DP_SW_RESET); + + sw_reset |= DP_SW_RESET; + dp_write_ahb(catalog, REG_DP_SW_RESET, sw_reset); + usleep_range(1000, 1100); /* h/w recommended delay */ + + sw_reset &= ~DP_SW_RESET; + dp_write_ahb(catalog, REG_DP_SW_RESET, sw_reset); +} + +bool dp_catalog_ctrl_mainlink_ready(struct dp_catalog *dp_catalog) +{ + u32 data; + int ret; + struct dp_catalog_private *catalog = container_of(dp_catalog, + struct dp_catalog_private, dp_catalog); + + /* Poll for mainlink ready status */ + ret = readl_poll_timeout(catalog->io->dp_controller.link.base + + REG_DP_MAINLINK_READY, + data, data & DP_MAINLINK_READY_FOR_VIDEO, + POLLING_SLEEP_US, POLLING_TIMEOUT_US); + if (ret < 0) { + DRM_ERROR("mainlink not ready\n"); + return false; + } + + return true; +} + +void dp_catalog_ctrl_enable_irq(struct dp_catalog *dp_catalog, + bool enable) +{ + struct dp_catalog_private *catalog = container_of(dp_catalog, + struct dp_catalog_private, dp_catalog); + + if (enable) { + dp_write_ahb(catalog, REG_DP_INTR_STATUS, + DP_INTERRUPT_STATUS1_MASK); + dp_write_ahb(catalog, REG_DP_INTR_STATUS2, + DP_INTERRUPT_STATUS2_MASK); + } else { + 
dp_write_ahb(catalog, REG_DP_INTR_STATUS, 0x00); + dp_write_ahb(catalog, REG_DP_INTR_STATUS2, 0x00); + } +} + +void dp_catalog_hpd_config_intr(struct dp_catalog *dp_catalog, + u32 intr_mask, bool en) +{ + struct dp_catalog_private *catalog = container_of(dp_catalog, + struct dp_catalog_private, dp_catalog); + + u32 config = dp_read_aux(catalog, REG_DP_DP_HPD_INT_MASK); + + config = (en ? config | intr_mask : config & ~intr_mask); + + drm_dbg_dp(catalog->drm_dev, "intr_mask=%#x config=%#x\n", + intr_mask, config); + dp_write_aux(catalog, REG_DP_DP_HPD_INT_MASK, + config & DP_DP_HPD_INT_MASK); +} + +void dp_catalog_ctrl_hpd_config(struct dp_catalog *dp_catalog) +{ + struct dp_catalog_private *catalog = container_of(dp_catalog, + struct dp_catalog_private, dp_catalog); + + u32 reftimer = dp_read_aux(catalog, REG_DP_DP_HPD_REFTIMER); + + /* Configure REFTIMER and enable it */ + reftimer |= DP_DP_HPD_REFTIMER_ENABLE; + dp_write_aux(catalog, REG_DP_DP_HPD_REFTIMER, reftimer); + + /* Enable HPD */ + dp_write_aux(catalog, REG_DP_DP_HPD_CTRL, DP_DP_HPD_CTRL_HPD_EN); +} + +u32 dp_catalog_link_is_connected(struct dp_catalog *dp_catalog) +{ + struct dp_catalog_private *catalog = container_of(dp_catalog, + struct dp_catalog_private, dp_catalog); + u32 status; + + status = dp_read_aux(catalog, REG_DP_DP_HPD_INT_STATUS); + drm_dbg_dp(catalog->drm_dev, "aux status: %#x\n", status); + status >>= DP_DP_HPD_STATE_STATUS_BITS_SHIFT; + status &= DP_DP_HPD_STATE_STATUS_BITS_MASK; + + return status; +} + +u32 dp_catalog_hpd_get_intr_status(struct dp_catalog *dp_catalog) +{ + struct dp_catalog_private *catalog = container_of(dp_catalog, + struct dp_catalog_private, dp_catalog); + int isr, mask; + + isr = dp_read_aux(catalog, REG_DP_DP_HPD_INT_STATUS); + dp_write_aux(catalog, REG_DP_DP_HPD_INT_ACK, + (isr & DP_DP_HPD_INT_MASK)); + mask = dp_read_aux(catalog, REG_DP_DP_HPD_INT_MASK); + + /* + * We only want to return interrupts that are unmasked to the caller. + * However, the interrupt status field also contains other + * informational bits about the HPD state status, so we only mask + * out the part of the register that tells us about which interrupts + * are pending. 
+ */ + return isr & (mask | ~DP_DP_HPD_INT_MASK); +} + +int dp_catalog_ctrl_get_interrupt(struct dp_catalog *dp_catalog) +{ + struct dp_catalog_private *catalog = container_of(dp_catalog, + struct dp_catalog_private, dp_catalog); + u32 intr, intr_ack; + + intr = dp_read_ahb(catalog, REG_DP_INTR_STATUS2); + intr &= ~DP_INTERRUPT_STATUS2_MASK; + intr_ack = (intr & DP_INTERRUPT_STATUS2) + << DP_INTERRUPT_STATUS_ACK_SHIFT; + dp_write_ahb(catalog, REG_DP_INTR_STATUS2, + intr_ack | DP_INTERRUPT_STATUS2_MASK); + + return intr; +} + +void dp_catalog_ctrl_phy_reset(struct dp_catalog *dp_catalog) +{ + struct dp_catalog_private *catalog = container_of(dp_catalog, + struct dp_catalog_private, dp_catalog); + + dp_write_ahb(catalog, REG_DP_PHY_CTRL, + DP_PHY_CTRL_SW_RESET | DP_PHY_CTRL_SW_RESET_PLL); + usleep_range(1000, 1100); /* h/w recommended delay */ + dp_write_ahb(catalog, REG_DP_PHY_CTRL, 0x0); +} + +int dp_catalog_ctrl_update_vx_px(struct dp_catalog *dp_catalog, + u8 v_level, u8 p_level) +{ + struct dp_catalog_private *catalog = container_of(dp_catalog, + struct dp_catalog_private, dp_catalog); + struct dp_io *dp_io = catalog->io; + struct phy *phy = dp_io->phy; + struct phy_configure_opts_dp *opts_dp = &dp_io->phy_opts.dp; + + /* TODO: Update for all lanes instead of just first one */ + opts_dp->voltage[0] = v_level; + opts_dp->pre[0] = p_level; + opts_dp->set_voltages = 1; + phy_configure(phy, &dp_io->phy_opts); + opts_dp->set_voltages = 0; + + return 0; +} + +void dp_catalog_ctrl_send_phy_pattern(struct dp_catalog *dp_catalog, + u32 pattern) +{ + struct dp_catalog_private *catalog = container_of(dp_catalog, + struct dp_catalog_private, dp_catalog); + u32 value = 0x0; + + /* Make sure to clear the current pattern before starting a new one */ + dp_write_link(catalog, REG_DP_STATE_CTRL, 0x0); + + drm_dbg_dp(catalog->drm_dev, "pattern: %#x\n", pattern); + switch (pattern) { + case DP_PHY_TEST_PATTERN_D10_2: + dp_write_link(catalog, REG_DP_STATE_CTRL, + DP_STATE_CTRL_LINK_TRAINING_PATTERN1); + break; + case DP_PHY_TEST_PATTERN_ERROR_COUNT: + value &= ~(1 << 16); + dp_write_link(catalog, REG_DP_HBR2_COMPLIANCE_SCRAMBLER_RESET, + value); + value |= SCRAMBLER_RESET_COUNT_VALUE; + dp_write_link(catalog, REG_DP_HBR2_COMPLIANCE_SCRAMBLER_RESET, + value); + dp_write_link(catalog, REG_DP_MAINLINK_LEVELS, + DP_MAINLINK_SAFE_TO_EXIT_LEVEL_2); + dp_write_link(catalog, REG_DP_STATE_CTRL, + DP_STATE_CTRL_LINK_SYMBOL_ERR_MEASURE); + break; + case DP_PHY_TEST_PATTERN_PRBS7: + dp_write_link(catalog, REG_DP_STATE_CTRL, + DP_STATE_CTRL_LINK_PRBS7); + break; + case DP_PHY_TEST_PATTERN_80BIT_CUSTOM: + dp_write_link(catalog, REG_DP_STATE_CTRL, + DP_STATE_CTRL_LINK_TEST_CUSTOM_PATTERN); + /* 00111110000011111000001111100000 */ + dp_write_link(catalog, REG_DP_TEST_80BIT_CUSTOM_PATTERN_REG0, + 0x3E0F83E0); + /* 00001111100000111110000011111000 */ + dp_write_link(catalog, REG_DP_TEST_80BIT_CUSTOM_PATTERN_REG1, + 0x0F83E0F8); + /* 1111100000111110 */ + dp_write_link(catalog, REG_DP_TEST_80BIT_CUSTOM_PATTERN_REG2, + 0x0000F83E); + break; + case DP_PHY_TEST_PATTERN_CP2520: + value = dp_read_link(catalog, REG_DP_MAINLINK_CTRL); + value &= ~DP_MAINLINK_CTRL_SW_BYPASS_SCRAMBLER; + dp_write_link(catalog, REG_DP_MAINLINK_CTRL, value); + + value = DP_HBR2_ERM_PATTERN; + dp_write_link(catalog, REG_DP_HBR2_COMPLIANCE_SCRAMBLER_RESET, + value); + value |= SCRAMBLER_RESET_COUNT_VALUE; + dp_write_link(catalog, REG_DP_HBR2_COMPLIANCE_SCRAMBLER_RESET, + value); + dp_write_link(catalog, REG_DP_MAINLINK_LEVELS, + 
DP_MAINLINK_SAFE_TO_EXIT_LEVEL_2); + dp_write_link(catalog, REG_DP_STATE_CTRL, + DP_STATE_CTRL_LINK_SYMBOL_ERR_MEASURE); + value = dp_read_link(catalog, REG_DP_MAINLINK_CTRL); + value |= DP_MAINLINK_CTRL_ENABLE; + dp_write_link(catalog, REG_DP_MAINLINK_CTRL, value); + break; + case DP_PHY_TEST_PATTERN_SEL_MASK: + dp_write_link(catalog, REG_DP_MAINLINK_CTRL, + DP_MAINLINK_CTRL_ENABLE); + dp_write_link(catalog, REG_DP_STATE_CTRL, + DP_STATE_CTRL_LINK_TRAINING_PATTERN4); + break; + default: + drm_dbg_dp(catalog->drm_dev, + "No valid test pattern requested: %#x\n", pattern); + break; + } +} + +u32 dp_catalog_ctrl_read_phy_pattern(struct dp_catalog *dp_catalog) +{ + struct dp_catalog_private *catalog = container_of(dp_catalog, + struct dp_catalog_private, dp_catalog); + + return dp_read_link(catalog, REG_DP_MAINLINK_READY); +} + +/* panel related catalog functions */ +int dp_catalog_panel_timing_cfg(struct dp_catalog *dp_catalog) +{ + struct dp_catalog_private *catalog = container_of(dp_catalog, + struct dp_catalog_private, dp_catalog); + u32 reg; + + dp_write_link(catalog, REG_DP_TOTAL_HOR_VER, + dp_catalog->total); + dp_write_link(catalog, REG_DP_START_HOR_VER_FROM_SYNC, + dp_catalog->sync_start); + dp_write_link(catalog, REG_DP_HSYNC_VSYNC_WIDTH_POLARITY, + dp_catalog->width_blanking); + dp_write_link(catalog, REG_DP_ACTIVE_HOR_VER, dp_catalog->dp_active); + + reg = dp_read_p0(catalog, MMSS_DP_INTF_CONFIG); + + if (dp_catalog->wide_bus_en) + reg |= DP_INTF_CONFIG_DATABUS_WIDEN; + else + reg &= ~DP_INTF_CONFIG_DATABUS_WIDEN; + + + DRM_DEBUG_DP("wide_bus_en=%d reg=%#x\n", dp_catalog->wide_bus_en, reg); + + dp_write_p0(catalog, MMSS_DP_INTF_CONFIG, reg); + return 0; +} + +void dp_catalog_panel_tpg_enable(struct dp_catalog *dp_catalog, + struct drm_display_mode *drm_mode) +{ + struct dp_catalog_private *catalog = container_of(dp_catalog, + struct dp_catalog_private, dp_catalog); + u32 hsync_period, vsync_period; + u32 display_v_start, display_v_end; + u32 hsync_start_x, hsync_end_x; + u32 v_sync_width; + u32 hsync_ctl; + u32 display_hctl; + + /* TPG config parameters*/ + hsync_period = drm_mode->htotal; + vsync_period = drm_mode->vtotal; + + display_v_start = ((drm_mode->vtotal - drm_mode->vsync_start) * + hsync_period); + display_v_end = ((vsync_period - (drm_mode->vsync_start - + drm_mode->vdisplay)) + * hsync_period) - 1; + + display_v_start += drm_mode->htotal - drm_mode->hsync_start; + display_v_end -= (drm_mode->hsync_start - drm_mode->hdisplay); + + hsync_start_x = drm_mode->htotal - drm_mode->hsync_start; + hsync_end_x = hsync_period - (drm_mode->hsync_start - + drm_mode->hdisplay) - 1; + + v_sync_width = drm_mode->vsync_end - drm_mode->vsync_start; + + hsync_ctl = (hsync_period << 16) | + (drm_mode->hsync_end - drm_mode->hsync_start); + display_hctl = (hsync_end_x << 16) | hsync_start_x; + + + dp_write_p0(catalog, MMSS_DP_INTF_CONFIG, 0x0); + dp_write_p0(catalog, MMSS_DP_INTF_HSYNC_CTL, hsync_ctl); + dp_write_p0(catalog, MMSS_DP_INTF_VSYNC_PERIOD_F0, vsync_period * + hsync_period); + dp_write_p0(catalog, MMSS_DP_INTF_VSYNC_PULSE_WIDTH_F0, v_sync_width * + hsync_period); + dp_write_p0(catalog, MMSS_DP_INTF_VSYNC_PERIOD_F1, 0); + dp_write_p0(catalog, MMSS_DP_INTF_VSYNC_PULSE_WIDTH_F1, 0); + dp_write_p0(catalog, MMSS_DP_INTF_DISPLAY_HCTL, display_hctl); + dp_write_p0(catalog, MMSS_DP_INTF_ACTIVE_HCTL, 0); + dp_write_p0(catalog, MMSS_INTF_DISPLAY_V_START_F0, display_v_start); + dp_write_p0(catalog, MMSS_DP_INTF_DISPLAY_V_END_F0, display_v_end); + dp_write_p0(catalog, 
MMSS_INTF_DISPLAY_V_START_F1, 0); + dp_write_p0(catalog, MMSS_DP_INTF_DISPLAY_V_END_F1, 0); + dp_write_p0(catalog, MMSS_DP_INTF_ACTIVE_V_START_F0, 0); + dp_write_p0(catalog, MMSS_DP_INTF_ACTIVE_V_END_F0, 0); + dp_write_p0(catalog, MMSS_DP_INTF_ACTIVE_V_START_F1, 0); + dp_write_p0(catalog, MMSS_DP_INTF_ACTIVE_V_END_F1, 0); + dp_write_p0(catalog, MMSS_DP_INTF_POLARITY_CTL, 0); + + dp_write_p0(catalog, MMSS_DP_TPG_MAIN_CONTROL, + DP_TPG_CHECKERED_RECT_PATTERN); + dp_write_p0(catalog, MMSS_DP_TPG_VIDEO_CONFIG, + DP_TPG_VIDEO_CONFIG_BPP_8BIT | + DP_TPG_VIDEO_CONFIG_RGB); + dp_write_p0(catalog, MMSS_DP_BIST_ENABLE, + DP_BIST_ENABLE_DPBIST_EN); + dp_write_p0(catalog, MMSS_DP_TIMING_ENGINE_EN, + DP_TIMING_ENGINE_EN_EN); + drm_dbg_dp(catalog->drm_dev, "%s: enabled tpg\n", __func__); +} + +void dp_catalog_panel_tpg_disable(struct dp_catalog *dp_catalog) +{ + struct dp_catalog_private *catalog = container_of(dp_catalog, + struct dp_catalog_private, dp_catalog); + + dp_write_p0(catalog, MMSS_DP_TPG_MAIN_CONTROL, 0x0); + dp_write_p0(catalog, MMSS_DP_BIST_ENABLE, 0x0); + dp_write_p0(catalog, MMSS_DP_TIMING_ENGINE_EN, 0x0); +} + +struct dp_catalog *dp_catalog_get(struct device *dev, struct dp_io *io) +{ + struct dp_catalog_private *catalog; + + if (!io) { + DRM_ERROR("invalid input\n"); + return ERR_PTR(-EINVAL); + } + + catalog = devm_kzalloc(dev, sizeof(*catalog), GFP_KERNEL); + if (!catalog) + return ERR_PTR(-ENOMEM); + + catalog->dev = dev; + catalog->io = io; + + return &catalog->dp_catalog; +} + +void dp_catalog_audio_get_header(struct dp_catalog *dp_catalog) +{ + struct dp_catalog_private *catalog; + u32 (*sdp_map)[DP_AUDIO_SDP_HEADER_MAX]; + enum dp_catalog_audio_sdp_type sdp; + enum dp_catalog_audio_header_type header; + + if (!dp_catalog) + return; + + catalog = container_of(dp_catalog, + struct dp_catalog_private, dp_catalog); + + sdp_map = catalog->audio_map; + sdp = dp_catalog->sdp_type; + header = dp_catalog->sdp_header; + + dp_catalog->audio_data = dp_read_link(catalog, + sdp_map[sdp][header]); +} + +void dp_catalog_audio_set_header(struct dp_catalog *dp_catalog) +{ + struct dp_catalog_private *catalog; + u32 (*sdp_map)[DP_AUDIO_SDP_HEADER_MAX]; + enum dp_catalog_audio_sdp_type sdp; + enum dp_catalog_audio_header_type header; + u32 data; + + if (!dp_catalog) + return; + + catalog = container_of(dp_catalog, + struct dp_catalog_private, dp_catalog); + + sdp_map = catalog->audio_map; + sdp = dp_catalog->sdp_type; + header = dp_catalog->sdp_header; + data = dp_catalog->audio_data; + + dp_write_link(catalog, sdp_map[sdp][header], data); +} + +void dp_catalog_audio_config_acr(struct dp_catalog *dp_catalog) +{ + struct dp_catalog_private *catalog; + u32 acr_ctrl, select; + + if (!dp_catalog) + return; + + catalog = container_of(dp_catalog, + struct dp_catalog_private, dp_catalog); + + select = dp_catalog->audio_data; + acr_ctrl = select << 4 | BIT(31) | BIT(8) | BIT(14); + + drm_dbg_dp(catalog->drm_dev, "select: %#x, acr_ctrl: %#x\n", + select, acr_ctrl); + + dp_write_link(catalog, MMSS_DP_AUDIO_ACR_CTRL, acr_ctrl); +} + +void dp_catalog_audio_enable(struct dp_catalog *dp_catalog) +{ + struct dp_catalog_private *catalog; + bool enable; + u32 audio_ctrl; + + if (!dp_catalog) + return; + + catalog = container_of(dp_catalog, + struct dp_catalog_private, dp_catalog); + + enable = !!dp_catalog->audio_data; + audio_ctrl = dp_read_link(catalog, MMSS_DP_AUDIO_CFG); + + if (enable) + audio_ctrl |= BIT(0); + else + audio_ctrl &= ~BIT(0); + + drm_dbg_dp(catalog->drm_dev, "dp_audio_cfg = 0x%x\n", 
audio_ctrl); + + dp_write_link(catalog, MMSS_DP_AUDIO_CFG, audio_ctrl); + /* make sure audio engine is disabled */ + wmb(); +} + +void dp_catalog_audio_config_sdp(struct dp_catalog *dp_catalog) +{ + struct dp_catalog_private *catalog; + u32 sdp_cfg = 0; + u32 sdp_cfg2 = 0; + + if (!dp_catalog) + return; + + catalog = container_of(dp_catalog, + struct dp_catalog_private, dp_catalog); + + sdp_cfg = dp_read_link(catalog, MMSS_DP_SDP_CFG); + /* AUDIO_TIMESTAMP_SDP_EN */ + sdp_cfg |= BIT(1); + /* AUDIO_STREAM_SDP_EN */ + sdp_cfg |= BIT(2); + /* AUDIO_COPY_MANAGEMENT_SDP_EN */ + sdp_cfg |= BIT(5); + /* AUDIO_ISRC_SDP_EN */ + sdp_cfg |= BIT(6); + /* AUDIO_INFOFRAME_SDP_EN */ + sdp_cfg |= BIT(20); + + drm_dbg_dp(catalog->drm_dev, "sdp_cfg = 0x%x\n", sdp_cfg); + + dp_write_link(catalog, MMSS_DP_SDP_CFG, sdp_cfg); + + sdp_cfg2 = dp_read_link(catalog, MMSS_DP_SDP_CFG2); + /* IFRM_REGSRC -> Do not use reg values */ + sdp_cfg2 &= ~BIT(0); + /* AUDIO_STREAM_HB3_REGSRC-> Do not use reg values */ + sdp_cfg2 &= ~BIT(1); + + drm_dbg_dp(catalog->drm_dev, "sdp_cfg2 = 0x%x\n", sdp_cfg2); + + dp_write_link(catalog, MMSS_DP_SDP_CFG2, sdp_cfg2); +} + +void dp_catalog_audio_init(struct dp_catalog *dp_catalog) +{ + struct dp_catalog_private *catalog; + + static u32 sdp_map[][DP_AUDIO_SDP_HEADER_MAX] = { + { + MMSS_DP_AUDIO_STREAM_0, + MMSS_DP_AUDIO_STREAM_1, + MMSS_DP_AUDIO_STREAM_1, + }, + { + MMSS_DP_AUDIO_TIMESTAMP_0, + MMSS_DP_AUDIO_TIMESTAMP_1, + MMSS_DP_AUDIO_TIMESTAMP_1, + }, + { + MMSS_DP_AUDIO_INFOFRAME_0, + MMSS_DP_AUDIO_INFOFRAME_1, + MMSS_DP_AUDIO_INFOFRAME_1, + }, + { + MMSS_DP_AUDIO_COPYMANAGEMENT_0, + MMSS_DP_AUDIO_COPYMANAGEMENT_1, + MMSS_DP_AUDIO_COPYMANAGEMENT_1, + }, + { + MMSS_DP_AUDIO_ISRC_0, + MMSS_DP_AUDIO_ISRC_1, + MMSS_DP_AUDIO_ISRC_1, + }, + }; + + if (!dp_catalog) + return; + + catalog = container_of(dp_catalog, + struct dp_catalog_private, dp_catalog); + + catalog->audio_map = sdp_map; +} + +void dp_catalog_audio_sfe_level(struct dp_catalog *dp_catalog) +{ + struct dp_catalog_private *catalog; + u32 mainlink_levels, safe_to_exit_level; + + if (!dp_catalog) + return; + + catalog = container_of(dp_catalog, + struct dp_catalog_private, dp_catalog); + + safe_to_exit_level = dp_catalog->audio_data; + mainlink_levels = dp_read_link(catalog, REG_DP_MAINLINK_LEVELS); + mainlink_levels &= 0xFE0; + mainlink_levels |= safe_to_exit_level; + + drm_dbg_dp(catalog->drm_dev, + "mainlink_level = 0x%x, safe_to_exit_level = 0x%x\n", + mainlink_levels, safe_to_exit_level); + + dp_write_link(catalog, REG_DP_MAINLINK_LEVELS, mainlink_levels); +} diff --git a/drivers/gpu/drm/msm/dp/dp_catalog.h b/drivers/gpu/drm/msm/dp/dp_catalog.h new file mode 100644 index 000000000..f36b7b372 --- /dev/null +++ b/drivers/gpu/drm/msm/dp/dp_catalog.h @@ -0,0 +1,138 @@ +/* SPDX-License-Identifier: GPL-2.0-only */ +/* + * Copyright (c) 2017-2020, The Linux Foundation. All rights reserved. 
+ */ + +#ifndef _DP_CATALOG_H_ +#define _DP_CATALOG_H_ + +#include <drm/drm_modes.h> + +#include "dp_parser.h" +#include "disp/msm_disp_snapshot.h" + +/* interrupts */ +#define DP_INTR_HPD BIT(0) +#define DP_INTR_AUX_XFER_DONE BIT(3) +#define DP_INTR_WRONG_ADDR BIT(6) +#define DP_INTR_TIMEOUT BIT(9) +#define DP_INTR_NACK_DEFER BIT(12) +#define DP_INTR_WRONG_DATA_CNT BIT(15) +#define DP_INTR_I2C_NACK BIT(18) +#define DP_INTR_I2C_DEFER BIT(21) +#define DP_INTR_PLL_UNLOCKED BIT(24) +#define DP_INTR_AUX_ERROR BIT(27) + +#define DP_INTR_READY_FOR_VIDEO BIT(0) +#define DP_INTR_IDLE_PATTERN_SENT BIT(3) +#define DP_INTR_FRAME_END BIT(6) +#define DP_INTR_CRC_UPDATED BIT(9) + +#define DP_AUX_CFG_MAX_VALUE_CNT 3 + +/* PHY AUX config registers */ +enum dp_phy_aux_config_type { + PHY_AUX_CFG0, + PHY_AUX_CFG1, + PHY_AUX_CFG2, + PHY_AUX_CFG3, + PHY_AUX_CFG4, + PHY_AUX_CFG5, + PHY_AUX_CFG6, + PHY_AUX_CFG7, + PHY_AUX_CFG8, + PHY_AUX_CFG9, + PHY_AUX_CFG_MAX, +}; + +enum dp_catalog_audio_sdp_type { + DP_AUDIO_SDP_STREAM, + DP_AUDIO_SDP_TIMESTAMP, + DP_AUDIO_SDP_INFOFRAME, + DP_AUDIO_SDP_COPYMANAGEMENT, + DP_AUDIO_SDP_ISRC, + DP_AUDIO_SDP_MAX, +}; + +enum dp_catalog_audio_header_type { + DP_AUDIO_SDP_HEADER_1, + DP_AUDIO_SDP_HEADER_2, + DP_AUDIO_SDP_HEADER_3, + DP_AUDIO_SDP_HEADER_MAX, +}; + +struct dp_catalog { + u32 aux_data; + u32 total; + u32 sync_start; + u32 width_blanking; + u32 dp_active; + enum dp_catalog_audio_sdp_type sdp_type; + enum dp_catalog_audio_header_type sdp_header; + u32 audio_data; + bool wide_bus_en; +}; + +/* Debug module */ +void dp_catalog_snapshot(struct dp_catalog *dp_catalog, struct msm_disp_state *disp_state); + +/* AUX APIs */ +u32 dp_catalog_aux_read_data(struct dp_catalog *dp_catalog); +int dp_catalog_aux_write_data(struct dp_catalog *dp_catalog); +int dp_catalog_aux_write_trans(struct dp_catalog *dp_catalog); +int dp_catalog_aux_clear_trans(struct dp_catalog *dp_catalog, bool read); +int dp_catalog_aux_clear_hw_interrupts(struct dp_catalog *dp_catalog); +void dp_catalog_aux_reset(struct dp_catalog *dp_catalog); +void dp_catalog_aux_enable(struct dp_catalog *dp_catalog, bool enable); +void dp_catalog_aux_update_cfg(struct dp_catalog *dp_catalog); +int dp_catalog_aux_wait_for_hpd_connect_state(struct dp_catalog *dp_catalog); +u32 dp_catalog_aux_get_irq(struct dp_catalog *dp_catalog); + +/* DP Controller APIs */ +void dp_catalog_ctrl_state_ctrl(struct dp_catalog *dp_catalog, u32 state); +void dp_catalog_ctrl_config_ctrl(struct dp_catalog *dp_catalog, u32 config); +void dp_catalog_ctrl_lane_mapping(struct dp_catalog *dp_catalog); +void dp_catalog_ctrl_mainlink_ctrl(struct dp_catalog *dp_catalog, bool enable); +void dp_catalog_ctrl_config_misc(struct dp_catalog *dp_catalog, u32 cc, u32 tb); +void dp_catalog_ctrl_config_msa(struct dp_catalog *dp_catalog, u32 rate, + u32 stream_rate_khz, bool fixed_nvid); +int dp_catalog_ctrl_set_pattern_state_bit(struct dp_catalog *dp_catalog, u32 pattern); +u32 dp_catalog_hw_revision(const struct dp_catalog *dp_catalog); +void dp_catalog_ctrl_reset(struct dp_catalog *dp_catalog); +bool dp_catalog_ctrl_mainlink_ready(struct dp_catalog *dp_catalog); +void dp_catalog_ctrl_enable_irq(struct dp_catalog *dp_catalog, bool enable); +void dp_catalog_hpd_config_intr(struct dp_catalog *dp_catalog, + u32 intr_mask, bool en); +void dp_catalog_ctrl_hpd_config(struct dp_catalog *dp_catalog); +u32 dp_catalog_link_is_connected(struct dp_catalog *dp_catalog); +u32 dp_catalog_hpd_get_intr_status(struct dp_catalog *dp_catalog); +void dp_catalog_ctrl_phy_reset(struct 
dp_catalog *dp_catalog); +int dp_catalog_ctrl_update_vx_px(struct dp_catalog *dp_catalog, u8 v_level, + u8 p_level); +int dp_catalog_ctrl_get_interrupt(struct dp_catalog *dp_catalog); +void dp_catalog_ctrl_update_transfer_unit(struct dp_catalog *dp_catalog, + u32 dp_tu, u32 valid_boundary, + u32 valid_boundary2); +void dp_catalog_ctrl_send_phy_pattern(struct dp_catalog *dp_catalog, + u32 pattern); +u32 dp_catalog_ctrl_read_phy_pattern(struct dp_catalog *dp_catalog); + +/* DP Panel APIs */ +int dp_catalog_panel_timing_cfg(struct dp_catalog *dp_catalog); +void dp_catalog_dump_regs(struct dp_catalog *dp_catalog); +void dp_catalog_panel_tpg_enable(struct dp_catalog *dp_catalog, + struct drm_display_mode *drm_mode); +void dp_catalog_panel_tpg_disable(struct dp_catalog *dp_catalog); + +struct dp_catalog *dp_catalog_get(struct device *dev, struct dp_io *io); + +/* DP Audio APIs */ +void dp_catalog_audio_get_header(struct dp_catalog *catalog); +void dp_catalog_audio_set_header(struct dp_catalog *catalog); +void dp_catalog_audio_config_acr(struct dp_catalog *catalog); +void dp_catalog_audio_enable(struct dp_catalog *catalog); +void dp_catalog_audio_config_sdp(struct dp_catalog *catalog); +void dp_catalog_audio_init(struct dp_catalog *catalog); +void dp_catalog_audio_sfe_level(struct dp_catalog *catalog); + +#endif /* _DP_CATALOG_H_ */ diff --git a/drivers/gpu/drm/msm/dp/dp_ctrl.c b/drivers/gpu/drm/msm/dp/dp_ctrl.c new file mode 100644 index 000000000..103eef9f0 --- /dev/null +++ b/drivers/gpu/drm/msm/dp/dp_ctrl.c @@ -0,0 +1,2049 @@ +// SPDX-License-Identifier: GPL-2.0-only +/* + * Copyright (c) 2012-2020, The Linux Foundation. All rights reserved. + */ + +#define pr_fmt(fmt) "[drm-dp] %s: " fmt, __func__ + +#include <linux/types.h> +#include <linux/completion.h> +#include <linux/delay.h> +#include <linux/phy/phy.h> +#include <linux/phy/phy-dp.h> +#include <linux/pm_opp.h> + +#include <drm/display/drm_dp_helper.h> +#include <drm/drm_fixed.h> +#include <drm/drm_print.h> + +#include "dp_reg.h" +#include "dp_ctrl.h" +#include "dp_link.h" + +#define DP_KHZ_TO_HZ 1000 +#define IDLE_PATTERN_COMPLETION_TIMEOUT_JIFFIES (30 * HZ / 1000) /* 30 ms */ +#define WAIT_FOR_VIDEO_READY_TIMEOUT_JIFFIES (HZ / 2) + +#define DP_CTRL_INTR_READY_FOR_VIDEO BIT(0) +#define DP_CTRL_INTR_IDLE_PATTERN_SENT BIT(3) + +#define MR_LINK_TRAINING1 0x8 +#define MR_LINK_SYMBOL_ERM 0x80 +#define MR_LINK_PRBS7 0x100 +#define MR_LINK_CUSTOM80 0x200 +#define MR_LINK_TRAINING4 0x40 + +enum { + DP_TRAINING_NONE, + DP_TRAINING_1, + DP_TRAINING_2, +}; + +struct dp_tu_calc_input { + u64 lclk; /* 162, 270, 540 and 810 */ + u64 pclk_khz; /* in KHz */ + u64 hactive; /* active h-width */ + u64 hporch; /* bp + fp + pulse */ + int nlanes; /* no.of.lanes */ + int bpp; /* bits */ + int pixel_enc; /* 444, 420, 422 */ + int dsc_en; /* dsc on/off */ + int async_en; /* async mode */ + int fec_en; /* fec */ + int compress_ratio; /* 2:1 = 200, 3:1 = 300, 3.75:1 = 375 */ + int num_of_dsc_slices; /* number of slices per line */ +}; + +struct dp_vc_tu_mapping_table { + u32 vic; + u8 lanes; + u8 lrate; /* DP_LINK_RATE -> 162(6), 270(10), 540(20), 810 (30) */ + u8 bpp; + u8 valid_boundary_link; + u16 delay_start_link; + bool boundary_moderation_en; + u8 valid_lower_boundary_link; + u8 upper_boundary_count; + u8 lower_boundary_count; + u8 tu_size_minus1; +}; + +struct dp_ctrl_private { + struct dp_ctrl dp_ctrl; + struct drm_device *drm_dev; + struct device *dev; + struct drm_dp_aux *aux; + struct dp_panel *panel; + struct dp_link *link; + struct dp_power *power; 
+ struct dp_parser *parser; + struct dp_catalog *catalog; + + struct completion idle_comp; + struct completion video_comp; +}; + +static int dp_aux_link_configure(struct drm_dp_aux *aux, + struct dp_link_info *link) +{ + u8 values[2]; + int err; + + values[0] = drm_dp_link_rate_to_bw_code(link->rate); + values[1] = link->num_lanes; + + if (link->capabilities & DP_LINK_CAP_ENHANCED_FRAMING) + values[1] |= DP_LANE_COUNT_ENHANCED_FRAME_EN; + + err = drm_dp_dpcd_write(aux, DP_LINK_BW_SET, values, sizeof(values)); + if (err < 0) + return err; + + return 0; +} + +void dp_ctrl_push_idle(struct dp_ctrl *dp_ctrl) +{ + struct dp_ctrl_private *ctrl; + + ctrl = container_of(dp_ctrl, struct dp_ctrl_private, dp_ctrl); + + reinit_completion(&ctrl->idle_comp); + dp_catalog_ctrl_state_ctrl(ctrl->catalog, DP_STATE_CTRL_PUSH_IDLE); + + if (!wait_for_completion_timeout(&ctrl->idle_comp, + IDLE_PATTERN_COMPLETION_TIMEOUT_JIFFIES)) + pr_warn("PUSH_IDLE pattern timedout\n"); + + drm_dbg_dp(ctrl->drm_dev, "mainlink off\n"); +} + +static void dp_ctrl_config_ctrl(struct dp_ctrl_private *ctrl) +{ + u32 config = 0, tbd; + const u8 *dpcd = ctrl->panel->dpcd; + + /* Default-> LSCLK DIV: 1/4 LCLK */ + config |= (2 << DP_CONFIGURATION_CTRL_LSCLK_DIV_SHIFT); + + /* Scrambler reset enable */ + if (drm_dp_alternate_scrambler_reset_cap(dpcd)) + config |= DP_CONFIGURATION_CTRL_ASSR; + + tbd = dp_link_get_test_bits_depth(ctrl->link, + ctrl->panel->dp_mode.bpp); + + if (tbd == DP_TEST_BIT_DEPTH_UNKNOWN) { + pr_debug("BIT_DEPTH not set. Configure default\n"); + tbd = DP_TEST_BIT_DEPTH_8; + } + + config |= tbd << DP_CONFIGURATION_CTRL_BPC_SHIFT; + + /* Num of Lanes */ + config |= ((ctrl->link->link_params.num_lanes - 1) + << DP_CONFIGURATION_CTRL_NUM_OF_LANES_SHIFT); + + if (drm_dp_enhanced_frame_cap(dpcd)) + config |= DP_CONFIGURATION_CTRL_ENHANCED_FRAMING; + + config |= DP_CONFIGURATION_CTRL_P_INTERLACED; /* progressive video */ + + /* sync clock & static Mvid */ + config |= DP_CONFIGURATION_CTRL_STATIC_DYNAMIC_CN; + config |= DP_CONFIGURATION_CTRL_SYNC_ASYNC_CLK; + + dp_catalog_ctrl_config_ctrl(ctrl->catalog, config); +} + +static void dp_ctrl_configure_source_params(struct dp_ctrl_private *ctrl) +{ + u32 cc, tb; + + dp_catalog_ctrl_lane_mapping(ctrl->catalog); + dp_catalog_ctrl_mainlink_ctrl(ctrl->catalog, true); + + dp_ctrl_config_ctrl(ctrl); + + tb = dp_link_get_test_bits_depth(ctrl->link, + ctrl->panel->dp_mode.bpp); + cc = dp_link_get_colorimetry_config(ctrl->link); + dp_catalog_ctrl_config_misc(ctrl->catalog, cc, tb); + dp_panel_timing_cfg(ctrl->panel); +} + +/* + * The structure and few functions present below are IP/Hardware + * specific implementation. 
Most of the implementation will not + * have coding comments + */ +struct tu_algo_data { + s64 lclk_fp; + s64 pclk_fp; + s64 lwidth; + s64 lwidth_fp; + s64 hbp_relative_to_pclk; + s64 hbp_relative_to_pclk_fp; + int nlanes; + int bpp; + int pixelEnc; + int dsc_en; + int async_en; + int bpc; + + uint delay_start_link_extra_pixclk; + int extra_buffer_margin; + s64 ratio_fp; + s64 original_ratio_fp; + + s64 err_fp; + s64 n_err_fp; + s64 n_n_err_fp; + int tu_size; + int tu_size_desired; + int tu_size_minus1; + + int valid_boundary_link; + s64 resulting_valid_fp; + s64 total_valid_fp; + s64 effective_valid_fp; + s64 effective_valid_recorded_fp; + int n_tus; + int n_tus_per_lane; + int paired_tus; + int remainder_tus; + int remainder_tus_upper; + int remainder_tus_lower; + int extra_bytes; + int filler_size; + int delay_start_link; + + int extra_pclk_cycles; + int extra_pclk_cycles_in_link_clk; + s64 ratio_by_tu_fp; + s64 average_valid2_fp; + int new_valid_boundary_link; + int remainder_symbols_exist; + int n_symbols; + s64 n_remainder_symbols_per_lane_fp; + s64 last_partial_tu_fp; + s64 TU_ratio_err_fp; + + int n_tus_incl_last_incomplete_tu; + int extra_pclk_cycles_tmp; + int extra_pclk_cycles_in_link_clk_tmp; + int extra_required_bytes_new_tmp; + int filler_size_tmp; + int lower_filler_size_tmp; + int delay_start_link_tmp; + + bool boundary_moderation_en; + int boundary_mod_lower_err; + int upper_boundary_count; + int lower_boundary_count; + int i_upper_boundary_count; + int i_lower_boundary_count; + int valid_lower_boundary_link; + int even_distribution_BF; + int even_distribution_legacy; + int even_distribution; + int min_hblank_violated; + s64 delay_start_time_fp; + s64 hbp_time_fp; + s64 hactive_time_fp; + s64 diff_abs_fp; + + s64 ratio; +}; + +static int _tu_param_compare(s64 a, s64 b) +{ + u32 a_sign; + u32 b_sign; + s64 a_temp, b_temp, minus_1; + + if (a == b) + return 0; + + minus_1 = drm_fixp_from_fraction(-1, 1); + + a_sign = (a >> 32) & 0x80000000 ? 1 : 0; + + b_sign = (b >> 32) & 0x80000000 ? 
1 : 0; + + if (a_sign > b_sign) + return 2; + else if (b_sign > a_sign) + return 1; + + if (!a_sign && !b_sign) { /* positive */ + if (a > b) + return 1; + else + return 2; + } else { /* negative */ + a_temp = drm_fixp_mul(a, minus_1); + b_temp = drm_fixp_mul(b, minus_1); + + if (a_temp > b_temp) + return 2; + else + return 1; + } +} + +static void dp_panel_update_tu_timings(struct dp_tu_calc_input *in, + struct tu_algo_data *tu) +{ + int nlanes = in->nlanes; + int dsc_num_slices = in->num_of_dsc_slices; + int dsc_num_bytes = 0; + int numerator; + s64 pclk_dsc_fp; + s64 dwidth_dsc_fp; + s64 hbp_dsc_fp; + + int tot_num_eoc_symbols = 0; + int tot_num_hor_bytes = 0; + int tot_num_dummy_bytes = 0; + int dwidth_dsc_bytes = 0; + int eoc_bytes = 0; + + s64 temp1_fp, temp2_fp, temp3_fp; + + tu->lclk_fp = drm_fixp_from_fraction(in->lclk, 1); + tu->pclk_fp = drm_fixp_from_fraction(in->pclk_khz, 1000); + tu->lwidth = in->hactive; + tu->hbp_relative_to_pclk = in->hporch; + tu->nlanes = in->nlanes; + tu->bpp = in->bpp; + tu->pixelEnc = in->pixel_enc; + tu->dsc_en = in->dsc_en; + tu->async_en = in->async_en; + tu->lwidth_fp = drm_fixp_from_fraction(in->hactive, 1); + tu->hbp_relative_to_pclk_fp = drm_fixp_from_fraction(in->hporch, 1); + + if (tu->pixelEnc == 420) { + temp1_fp = drm_fixp_from_fraction(2, 1); + tu->pclk_fp = drm_fixp_div(tu->pclk_fp, temp1_fp); + tu->lwidth_fp = drm_fixp_div(tu->lwidth_fp, temp1_fp); + tu->hbp_relative_to_pclk_fp = + drm_fixp_div(tu->hbp_relative_to_pclk_fp, 2); + } + + if (tu->pixelEnc == 422) { + switch (tu->bpp) { + case 24: + tu->bpp = 16; + tu->bpc = 8; + break; + case 30: + tu->bpp = 20; + tu->bpc = 10; + break; + default: + tu->bpp = 16; + tu->bpc = 8; + break; + } + } else { + tu->bpc = tu->bpp/3; + } + + if (!in->dsc_en) + goto fec_check; + + temp1_fp = drm_fixp_from_fraction(in->compress_ratio, 100); + temp2_fp = drm_fixp_from_fraction(in->bpp, 1); + temp3_fp = drm_fixp_div(temp2_fp, temp1_fp); + temp2_fp = drm_fixp_mul(tu->lwidth_fp, temp3_fp); + + temp1_fp = drm_fixp_from_fraction(8, 1); + temp3_fp = drm_fixp_div(temp2_fp, temp1_fp); + + numerator = drm_fixp2int(temp3_fp); + + dsc_num_bytes = numerator / dsc_num_slices; + eoc_bytes = dsc_num_bytes % nlanes; + tot_num_eoc_symbols = nlanes * dsc_num_slices; + tot_num_hor_bytes = dsc_num_bytes * dsc_num_slices; + tot_num_dummy_bytes = (nlanes - eoc_bytes) * dsc_num_slices; + + if (dsc_num_bytes == 0) + pr_info("incorrect no of bytes per slice=%d\n", dsc_num_bytes); + + dwidth_dsc_bytes = (tot_num_hor_bytes + + tot_num_eoc_symbols + + (eoc_bytes == 0 ? 
0 : tot_num_dummy_bytes)); + + dwidth_dsc_fp = drm_fixp_from_fraction(dwidth_dsc_bytes, 3); + + temp2_fp = drm_fixp_mul(tu->pclk_fp, dwidth_dsc_fp); + temp1_fp = drm_fixp_div(temp2_fp, tu->lwidth_fp); + pclk_dsc_fp = temp1_fp; + + temp1_fp = drm_fixp_div(pclk_dsc_fp, tu->pclk_fp); + temp2_fp = drm_fixp_mul(tu->hbp_relative_to_pclk_fp, temp1_fp); + hbp_dsc_fp = temp2_fp; + + /* output */ + tu->pclk_fp = pclk_dsc_fp; + tu->lwidth_fp = dwidth_dsc_fp; + tu->hbp_relative_to_pclk_fp = hbp_dsc_fp; + +fec_check: + if (in->fec_en) { + temp1_fp = drm_fixp_from_fraction(976, 1000); /* 0.976 */ + tu->lclk_fp = drm_fixp_mul(tu->lclk_fp, temp1_fp); + } +} + +static void _tu_valid_boundary_calc(struct tu_algo_data *tu) +{ + s64 temp1_fp, temp2_fp, temp, temp1, temp2; + int compare_result_1, compare_result_2, compare_result_3; + + temp1_fp = drm_fixp_from_fraction(tu->tu_size, 1); + temp2_fp = drm_fixp_mul(tu->ratio_fp, temp1_fp); + + tu->new_valid_boundary_link = drm_fixp2int_ceil(temp2_fp); + + temp = (tu->i_upper_boundary_count * + tu->new_valid_boundary_link + + tu->i_lower_boundary_count * + (tu->new_valid_boundary_link-1)); + tu->average_valid2_fp = drm_fixp_from_fraction(temp, + (tu->i_upper_boundary_count + + tu->i_lower_boundary_count)); + + temp1_fp = drm_fixp_from_fraction(tu->bpp, 8); + temp2_fp = tu->lwidth_fp; + temp1_fp = drm_fixp_mul(temp2_fp, temp1_fp); + temp2_fp = drm_fixp_div(temp1_fp, tu->average_valid2_fp); + tu->n_tus = drm_fixp2int(temp2_fp); + if ((temp2_fp & 0xFFFFFFFF) > 0xFFFFF000) + tu->n_tus += 1; + + temp1_fp = drm_fixp_from_fraction(tu->n_tus, 1); + temp2_fp = drm_fixp_mul(temp1_fp, tu->average_valid2_fp); + temp1_fp = drm_fixp_from_fraction(tu->n_symbols, 1); + temp2_fp = temp1_fp - temp2_fp; + temp1_fp = drm_fixp_from_fraction(tu->nlanes, 1); + temp2_fp = drm_fixp_div(temp2_fp, temp1_fp); + tu->n_remainder_symbols_per_lane_fp = temp2_fp; + + temp1_fp = drm_fixp_from_fraction(tu->tu_size, 1); + tu->last_partial_tu_fp = + drm_fixp_div(tu->n_remainder_symbols_per_lane_fp, + temp1_fp); + + if (tu->n_remainder_symbols_per_lane_fp != 0) + tu->remainder_symbols_exist = 1; + else + tu->remainder_symbols_exist = 0; + + temp1_fp = drm_fixp_from_fraction(tu->n_tus, tu->nlanes); + tu->n_tus_per_lane = drm_fixp2int(temp1_fp); + + tu->paired_tus = (int)((tu->n_tus_per_lane) / + (tu->i_upper_boundary_count + + tu->i_lower_boundary_count)); + + tu->remainder_tus = tu->n_tus_per_lane - tu->paired_tus * + (tu->i_upper_boundary_count + + tu->i_lower_boundary_count); + + if ((tu->remainder_tus - tu->i_upper_boundary_count) > 0) { + tu->remainder_tus_upper = tu->i_upper_boundary_count; + tu->remainder_tus_lower = tu->remainder_tus - + tu->i_upper_boundary_count; + } else { + tu->remainder_tus_upper = tu->remainder_tus; + tu->remainder_tus_lower = 0; + } + + temp = tu->paired_tus * (tu->i_upper_boundary_count * + tu->new_valid_boundary_link + + tu->i_lower_boundary_count * + (tu->new_valid_boundary_link - 1)) + + (tu->remainder_tus_upper * + tu->new_valid_boundary_link) + + (tu->remainder_tus_lower * + (tu->new_valid_boundary_link - 1)); + tu->total_valid_fp = drm_fixp_from_fraction(temp, 1); + + if (tu->remainder_symbols_exist) { + temp1_fp = tu->total_valid_fp + + tu->n_remainder_symbols_per_lane_fp; + temp2_fp = drm_fixp_from_fraction(tu->n_tus_per_lane, 1); + temp2_fp = temp2_fp + tu->last_partial_tu_fp; + temp1_fp = drm_fixp_div(temp1_fp, temp2_fp); + } else { + temp2_fp = drm_fixp_from_fraction(tu->n_tus_per_lane, 1); + temp1_fp = drm_fixp_div(tu->total_valid_fp, temp2_fp); + } + 
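	/*
	 * At this point temp1_fp is the average number of valid (payload)
	 * symbols actually carried per TU on one lane, including the trailing
	 * partial TU whenever remainder symbols exist.
	 */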
tu->effective_valid_fp = temp1_fp; + + temp1_fp = drm_fixp_from_fraction(tu->tu_size, 1); + temp2_fp = drm_fixp_mul(tu->ratio_fp, temp1_fp); + tu->n_n_err_fp = tu->effective_valid_fp - temp2_fp; + + temp1_fp = drm_fixp_from_fraction(tu->tu_size, 1); + temp2_fp = drm_fixp_mul(tu->ratio_fp, temp1_fp); + tu->n_err_fp = tu->average_valid2_fp - temp2_fp; + + tu->even_distribution = tu->n_tus % tu->nlanes == 0 ? 1 : 0; + + temp1_fp = drm_fixp_from_fraction(tu->bpp, 8); + temp2_fp = tu->lwidth_fp; + temp1_fp = drm_fixp_mul(temp2_fp, temp1_fp); + temp2_fp = drm_fixp_div(temp1_fp, tu->average_valid2_fp); + + if (temp2_fp) + tu->n_tus_incl_last_incomplete_tu = drm_fixp2int_ceil(temp2_fp); + else + tu->n_tus_incl_last_incomplete_tu = 0; + + temp1 = 0; + temp1_fp = drm_fixp_from_fraction(tu->tu_size, 1); + temp2_fp = drm_fixp_mul(tu->original_ratio_fp, temp1_fp); + temp1_fp = tu->average_valid2_fp - temp2_fp; + temp2_fp = drm_fixp_from_fraction(tu->n_tus_incl_last_incomplete_tu, 1); + temp1_fp = drm_fixp_mul(temp2_fp, temp1_fp); + + if (temp1_fp) + temp1 = drm_fixp2int_ceil(temp1_fp); + + temp = tu->i_upper_boundary_count * tu->nlanes; + temp1_fp = drm_fixp_from_fraction(tu->tu_size, 1); + temp2_fp = drm_fixp_mul(tu->original_ratio_fp, temp1_fp); + temp1_fp = drm_fixp_from_fraction(tu->new_valid_boundary_link, 1); + temp2_fp = temp1_fp - temp2_fp; + temp1_fp = drm_fixp_from_fraction(temp, 1); + temp2_fp = drm_fixp_mul(temp1_fp, temp2_fp); + + if (temp2_fp) + temp2 = drm_fixp2int_ceil(temp2_fp); + else + temp2 = 0; + tu->extra_required_bytes_new_tmp = (int)(temp1 + temp2); + + temp1_fp = drm_fixp_from_fraction(8, tu->bpp); + temp2_fp = drm_fixp_from_fraction( + tu->extra_required_bytes_new_tmp, 1); + temp1_fp = drm_fixp_mul(temp2_fp, temp1_fp); + + if (temp1_fp) + tu->extra_pclk_cycles_tmp = drm_fixp2int_ceil(temp1_fp); + else + tu->extra_pclk_cycles_tmp = 0; + + temp1_fp = drm_fixp_from_fraction(tu->extra_pclk_cycles_tmp, 1); + temp2_fp = drm_fixp_div(tu->lclk_fp, tu->pclk_fp); + temp1_fp = drm_fixp_mul(temp1_fp, temp2_fp); + + if (temp1_fp) + tu->extra_pclk_cycles_in_link_clk_tmp = + drm_fixp2int_ceil(temp1_fp); + else + tu->extra_pclk_cycles_in_link_clk_tmp = 0; + + tu->filler_size_tmp = tu->tu_size - tu->new_valid_boundary_link; + + tu->lower_filler_size_tmp = tu->filler_size_tmp + 1; + + tu->delay_start_link_tmp = tu->extra_pclk_cycles_in_link_clk_tmp + + tu->lower_filler_size_tmp + + tu->extra_buffer_margin; + + temp1_fp = drm_fixp_from_fraction(tu->delay_start_link_tmp, 1); + tu->delay_start_time_fp = drm_fixp_div(temp1_fp, tu->lclk_fp); + + compare_result_1 = _tu_param_compare(tu->n_n_err_fp, tu->diff_abs_fp); + if (compare_result_1 == 2) + compare_result_1 = 1; + else + compare_result_1 = 0; + + compare_result_2 = _tu_param_compare(tu->n_n_err_fp, tu->err_fp); + if (compare_result_2 == 2) + compare_result_2 = 1; + else + compare_result_2 = 0; + + compare_result_3 = _tu_param_compare(tu->hbp_time_fp, + tu->delay_start_time_fp); + if (compare_result_3 == 2) + compare_result_3 = 0; + else + compare_result_3 = 1; + + if (((tu->even_distribution == 1) || + ((tu->even_distribution_BF == 0) && + (tu->even_distribution_legacy == 0))) && + tu->n_err_fp >= 0 && tu->n_n_err_fp >= 0 && + compare_result_2 && + (compare_result_1 || (tu->min_hblank_violated == 1)) && + (tu->new_valid_boundary_link - 1) > 0 && + compare_result_3 && + (tu->delay_start_link_tmp <= 1023)) { + tu->upper_boundary_count = tu->i_upper_boundary_count; + tu->lower_boundary_count = tu->i_lower_boundary_count; + tu->err_fp = 
tu->n_n_err_fp; + tu->boundary_moderation_en = true; + tu->tu_size_desired = tu->tu_size; + tu->valid_boundary_link = tu->new_valid_boundary_link; + tu->effective_valid_recorded_fp = tu->effective_valid_fp; + tu->even_distribution_BF = 1; + tu->delay_start_link = tu->delay_start_link_tmp; + } else if (tu->boundary_mod_lower_err == 0) { + compare_result_1 = _tu_param_compare(tu->n_n_err_fp, + tu->diff_abs_fp); + if (compare_result_1 == 2) + tu->boundary_mod_lower_err = 1; + } +} + +static void _dp_ctrl_calc_tu(struct dp_ctrl_private *ctrl, + struct dp_tu_calc_input *in, + struct dp_vc_tu_mapping_table *tu_table) +{ + struct tu_algo_data *tu; + int compare_result_1, compare_result_2; + u64 temp = 0; + s64 temp_fp = 0, temp1_fp = 0, temp2_fp = 0; + + s64 LCLK_FAST_SKEW_fp = drm_fixp_from_fraction(6, 10000); /* 0.0006 */ + s64 const_p49_fp = drm_fixp_from_fraction(49, 100); /* 0.49 */ + s64 const_p56_fp = drm_fixp_from_fraction(56, 100); /* 0.56 */ + s64 RATIO_SCALE_fp = drm_fixp_from_fraction(1001, 1000); + + u8 DP_BRUTE_FORCE = 1; + s64 BRUTE_FORCE_THRESHOLD_fp = drm_fixp_from_fraction(1, 10); /* 0.1 */ + uint EXTRA_PIXCLK_CYCLE_DELAY = 4; + uint HBLANK_MARGIN = 4; + + tu = kzalloc(sizeof(*tu), GFP_KERNEL); + if (!tu) + return; + + dp_panel_update_tu_timings(in, tu); + + tu->err_fp = drm_fixp_from_fraction(1000, 1); /* 1000 */ + + temp1_fp = drm_fixp_from_fraction(4, 1); + temp2_fp = drm_fixp_mul(temp1_fp, tu->lclk_fp); + temp_fp = drm_fixp_div(temp2_fp, tu->pclk_fp); + tu->extra_buffer_margin = drm_fixp2int_ceil(temp_fp); + + temp1_fp = drm_fixp_from_fraction(tu->bpp, 8); + temp2_fp = drm_fixp_mul(tu->pclk_fp, temp1_fp); + temp1_fp = drm_fixp_from_fraction(tu->nlanes, 1); + temp2_fp = drm_fixp_div(temp2_fp, temp1_fp); + tu->ratio_fp = drm_fixp_div(temp2_fp, tu->lclk_fp); + + tu->original_ratio_fp = tu->ratio_fp; + tu->boundary_moderation_en = false; + tu->upper_boundary_count = 0; + tu->lower_boundary_count = 0; + tu->i_upper_boundary_count = 0; + tu->i_lower_boundary_count = 0; + tu->valid_lower_boundary_link = 0; + tu->even_distribution_BF = 0; + tu->even_distribution_legacy = 0; + tu->even_distribution = 0; + tu->delay_start_time_fp = 0; + + tu->err_fp = drm_fixp_from_fraction(1000, 1); + tu->n_err_fp = 0; + tu->n_n_err_fp = 0; + + tu->ratio = drm_fixp2int(tu->ratio_fp); + temp1_fp = drm_fixp_from_fraction(tu->nlanes, 1); + div64_u64_rem(tu->lwidth_fp, temp1_fp, &temp2_fp); + if (temp2_fp != 0 && + !tu->ratio && tu->dsc_en == 0) { + tu->ratio_fp = drm_fixp_mul(tu->ratio_fp, RATIO_SCALE_fp); + tu->ratio = drm_fixp2int(tu->ratio_fp); + if (tu->ratio) + tu->ratio_fp = drm_fixp_from_fraction(1, 1); + } + + if (tu->ratio > 1) + tu->ratio = 1; + + if (tu->ratio == 1) + goto tu_size_calc; + + compare_result_1 = _tu_param_compare(tu->ratio_fp, const_p49_fp); + if (!compare_result_1 || compare_result_1 == 1) + compare_result_1 = 1; + else + compare_result_1 = 0; + + compare_result_2 = _tu_param_compare(tu->ratio_fp, const_p56_fp); + if (!compare_result_2 || compare_result_2 == 2) + compare_result_2 = 1; + else + compare_result_2 = 0; + + if (tu->dsc_en && compare_result_1 && compare_result_2) { + HBLANK_MARGIN += 4; + drm_dbg_dp(ctrl->drm_dev, + "increase HBLANK_MARGIN to %d\n", HBLANK_MARGIN); + } + +tu_size_calc: + for (tu->tu_size = 32; tu->tu_size <= 64; tu->tu_size++) { + temp1_fp = drm_fixp_from_fraction(tu->tu_size, 1); + temp2_fp = drm_fixp_mul(tu->ratio_fp, temp1_fp); + temp = drm_fixp2int_ceil(temp2_fp); + temp1_fp = drm_fixp_from_fraction(temp, 1); + tu->n_err_fp = temp1_fp - temp2_fp; 
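		/*
		 * n_err is the round-up error for this candidate TU size; the
		 * check below keeps the size (32..64) that minimizes it. With
		 * assumed numbers (not from this file): 1080p60 at 24 bpp over
		 * four HBR lanes gives ratio = 148.5 * 3 / (4 * 270) ~= 0.4125,
		 * so a 64-symbol TU ideally carries ~26.4 payload symbols and
		 * n_err is the 0.6 needed to round up to 27.
		 */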
+ + if (tu->n_err_fp < tu->err_fp) { + tu->err_fp = tu->n_err_fp; + tu->tu_size_desired = tu->tu_size; + } + } + + tu->tu_size_minus1 = tu->tu_size_desired - 1; + + temp1_fp = drm_fixp_from_fraction(tu->tu_size_desired, 1); + temp2_fp = drm_fixp_mul(tu->ratio_fp, temp1_fp); + tu->valid_boundary_link = drm_fixp2int_ceil(temp2_fp); + + temp1_fp = drm_fixp_from_fraction(tu->bpp, 8); + temp2_fp = tu->lwidth_fp; + temp2_fp = drm_fixp_mul(temp2_fp, temp1_fp); + + temp1_fp = drm_fixp_from_fraction(tu->valid_boundary_link, 1); + temp2_fp = drm_fixp_div(temp2_fp, temp1_fp); + tu->n_tus = drm_fixp2int(temp2_fp); + if ((temp2_fp & 0xFFFFFFFF) > 0xFFFFF000) + tu->n_tus += 1; + + tu->even_distribution_legacy = tu->n_tus % tu->nlanes == 0 ? 1 : 0; + + drm_dbg_dp(ctrl->drm_dev, + "n_sym = %d, num_of_tus = %d\n", + tu->valid_boundary_link, tu->n_tus); + + temp1_fp = drm_fixp_from_fraction(tu->tu_size_desired, 1); + temp2_fp = drm_fixp_mul(tu->original_ratio_fp, temp1_fp); + temp1_fp = drm_fixp_from_fraction(tu->valid_boundary_link, 1); + temp2_fp = temp1_fp - temp2_fp; + temp1_fp = drm_fixp_from_fraction(tu->n_tus + 1, 1); + temp2_fp = drm_fixp_mul(temp1_fp, temp2_fp); + + temp = drm_fixp2int(temp2_fp); + if (temp && temp2_fp) + tu->extra_bytes = drm_fixp2int_ceil(temp2_fp); + else + tu->extra_bytes = 0; + + temp1_fp = drm_fixp_from_fraction(tu->extra_bytes, 1); + temp2_fp = drm_fixp_from_fraction(8, tu->bpp); + temp1_fp = drm_fixp_mul(temp1_fp, temp2_fp); + + if (temp && temp1_fp) + tu->extra_pclk_cycles = drm_fixp2int_ceil(temp1_fp); + else + tu->extra_pclk_cycles = drm_fixp2int(temp1_fp); + + temp1_fp = drm_fixp_div(tu->lclk_fp, tu->pclk_fp); + temp2_fp = drm_fixp_from_fraction(tu->extra_pclk_cycles, 1); + temp1_fp = drm_fixp_mul(temp2_fp, temp1_fp); + + if (temp1_fp) + tu->extra_pclk_cycles_in_link_clk = drm_fixp2int_ceil(temp1_fp); + else + tu->extra_pclk_cycles_in_link_clk = drm_fixp2int(temp1_fp); + + tu->filler_size = tu->tu_size_desired - tu->valid_boundary_link; + + temp1_fp = drm_fixp_from_fraction(tu->tu_size_desired, 1); + tu->ratio_by_tu_fp = drm_fixp_mul(tu->ratio_fp, temp1_fp); + + tu->delay_start_link = tu->extra_pclk_cycles_in_link_clk + + tu->filler_size + tu->extra_buffer_margin; + + tu->resulting_valid_fp = + drm_fixp_from_fraction(tu->valid_boundary_link, 1); + + temp1_fp = drm_fixp_from_fraction(tu->tu_size_desired, 1); + temp2_fp = drm_fixp_div(tu->resulting_valid_fp, temp1_fp); + tu->TU_ratio_err_fp = temp2_fp - tu->original_ratio_fp; + + temp1_fp = drm_fixp_from_fraction(HBLANK_MARGIN, 1); + temp1_fp = tu->hbp_relative_to_pclk_fp - temp1_fp; + tu->hbp_time_fp = drm_fixp_div(temp1_fp, tu->pclk_fp); + + temp1_fp = drm_fixp_from_fraction(tu->delay_start_link, 1); + tu->delay_start_time_fp = drm_fixp_div(temp1_fp, tu->lclk_fp); + + compare_result_1 = _tu_param_compare(tu->hbp_time_fp, + tu->delay_start_time_fp); + if (compare_result_1 == 2) /* if (hbp_time_fp < delay_start_time_fp) */ + tu->min_hblank_violated = 1; + + tu->hactive_time_fp = drm_fixp_div(tu->lwidth_fp, tu->pclk_fp); + + compare_result_2 = _tu_param_compare(tu->hactive_time_fp, + tu->delay_start_time_fp); + if (compare_result_2 == 2) + tu->min_hblank_violated = 1; + + tu->delay_start_time_fp = 0; + + /* brute force */ + + tu->delay_start_link_extra_pixclk = EXTRA_PIXCLK_CYCLE_DELAY; + tu->diff_abs_fp = tu->resulting_valid_fp - tu->ratio_by_tu_fp; + + temp = drm_fixp2int(tu->diff_abs_fp); + if (!temp && tu->diff_abs_fp <= 0xffff) + tu->diff_abs_fp = 0; + + /* if(diff_abs < 0) diff_abs *= -1 */ + if (tu->diff_abs_fp < 0) 
+ tu->diff_abs_fp = drm_fixp_mul(tu->diff_abs_fp, -1); + + tu->boundary_mod_lower_err = 0; + if ((tu->diff_abs_fp != 0 && + ((tu->diff_abs_fp > BRUTE_FORCE_THRESHOLD_fp) || + (tu->even_distribution_legacy == 0) || + (DP_BRUTE_FORCE == 1))) || + (tu->min_hblank_violated == 1)) { + do { + tu->err_fp = drm_fixp_from_fraction(1000, 1); + + temp1_fp = drm_fixp_div(tu->lclk_fp, tu->pclk_fp); + temp2_fp = drm_fixp_from_fraction( + tu->delay_start_link_extra_pixclk, 1); + temp1_fp = drm_fixp_mul(temp2_fp, temp1_fp); + + if (temp1_fp) + tu->extra_buffer_margin = + drm_fixp2int_ceil(temp1_fp); + else + tu->extra_buffer_margin = 0; + + temp1_fp = drm_fixp_from_fraction(tu->bpp, 8); + temp1_fp = drm_fixp_mul(tu->lwidth_fp, temp1_fp); + + if (temp1_fp) + tu->n_symbols = drm_fixp2int_ceil(temp1_fp); + else + tu->n_symbols = 0; + + for (tu->tu_size = 32; tu->tu_size <= 64; tu->tu_size++) { + for (tu->i_upper_boundary_count = 1; + tu->i_upper_boundary_count <= 15; + tu->i_upper_boundary_count++) { + for (tu->i_lower_boundary_count = 1; + tu->i_lower_boundary_count <= 15; + tu->i_lower_boundary_count++) { + _tu_valid_boundary_calc(tu); + } + } + } + tu->delay_start_link_extra_pixclk--; + } while (tu->boundary_moderation_en != true && + tu->boundary_mod_lower_err == 1 && + tu->delay_start_link_extra_pixclk != 0); + + if (tu->boundary_moderation_en == true) { + temp1_fp = drm_fixp_from_fraction( + (tu->upper_boundary_count * + tu->valid_boundary_link + + tu->lower_boundary_count * + (tu->valid_boundary_link - 1)), 1); + temp2_fp = drm_fixp_from_fraction( + (tu->upper_boundary_count + + tu->lower_boundary_count), 1); + tu->resulting_valid_fp = + drm_fixp_div(temp1_fp, temp2_fp); + + temp1_fp = drm_fixp_from_fraction( + tu->tu_size_desired, 1); + tu->ratio_by_tu_fp = + drm_fixp_mul(tu->original_ratio_fp, temp1_fp); + + tu->valid_lower_boundary_link = + tu->valid_boundary_link - 1; + + temp1_fp = drm_fixp_from_fraction(tu->bpp, 8); + temp1_fp = drm_fixp_mul(tu->lwidth_fp, temp1_fp); + temp2_fp = drm_fixp_div(temp1_fp, + tu->resulting_valid_fp); + tu->n_tus = drm_fixp2int(temp2_fp); + + tu->tu_size_minus1 = tu->tu_size_desired - 1; + tu->even_distribution_BF = 1; + + temp1_fp = + drm_fixp_from_fraction(tu->tu_size_desired, 1); + temp2_fp = + drm_fixp_div(tu->resulting_valid_fp, temp1_fp); + tu->TU_ratio_err_fp = temp2_fp - tu->original_ratio_fp; + } + } + + temp2_fp = drm_fixp_mul(LCLK_FAST_SKEW_fp, tu->lwidth_fp); + + if (temp2_fp) + temp = drm_fixp2int_ceil(temp2_fp); + else + temp = 0; + + temp1_fp = drm_fixp_from_fraction(tu->nlanes, 1); + temp2_fp = drm_fixp_mul(tu->original_ratio_fp, temp1_fp); + temp1_fp = drm_fixp_from_fraction(tu->bpp, 8); + temp2_fp = drm_fixp_div(temp1_fp, temp2_fp); + temp1_fp = drm_fixp_from_fraction(temp, 1); + temp2_fp = drm_fixp_mul(temp1_fp, temp2_fp); + temp = drm_fixp2int(temp2_fp); + + if (tu->async_en) + tu->delay_start_link += (int)temp; + + temp1_fp = drm_fixp_from_fraction(tu->delay_start_link, 1); + tu->delay_start_time_fp = drm_fixp_div(temp1_fp, tu->lclk_fp); + + /* OUTPUTS */ + tu_table->valid_boundary_link = tu->valid_boundary_link; + tu_table->delay_start_link = tu->delay_start_link; + tu_table->boundary_moderation_en = tu->boundary_moderation_en; + tu_table->valid_lower_boundary_link = tu->valid_lower_boundary_link; + tu_table->upper_boundary_count = tu->upper_boundary_count; + tu_table->lower_boundary_count = tu->lower_boundary_count; + tu_table->tu_size_minus1 = tu->tu_size_minus1; + + drm_dbg_dp(ctrl->drm_dev, "TU: valid_boundary_link: %d\n", + 
tu_table->valid_boundary_link); + drm_dbg_dp(ctrl->drm_dev, "TU: delay_start_link: %d\n", + tu_table->delay_start_link); + drm_dbg_dp(ctrl->drm_dev, "TU: boundary_moderation_en: %d\n", + tu_table->boundary_moderation_en); + drm_dbg_dp(ctrl->drm_dev, "TU: valid_lower_boundary_link: %d\n", + tu_table->valid_lower_boundary_link); + drm_dbg_dp(ctrl->drm_dev, "TU: upper_boundary_count: %d\n", + tu_table->upper_boundary_count); + drm_dbg_dp(ctrl->drm_dev, "TU: lower_boundary_count: %d\n", + tu_table->lower_boundary_count); + drm_dbg_dp(ctrl->drm_dev, "TU: tu_size_minus1: %d\n", + tu_table->tu_size_minus1); + + kfree(tu); +} + +static void dp_ctrl_calc_tu_parameters(struct dp_ctrl_private *ctrl, + struct dp_vc_tu_mapping_table *tu_table) +{ + struct dp_tu_calc_input in; + struct drm_display_mode *drm_mode; + + drm_mode = &ctrl->panel->dp_mode.drm_mode; + + in.lclk = ctrl->link->link_params.rate / 1000; + in.pclk_khz = drm_mode->clock; + in.hactive = drm_mode->hdisplay; + in.hporch = drm_mode->htotal - drm_mode->hdisplay; + in.nlanes = ctrl->link->link_params.num_lanes; + in.bpp = ctrl->panel->dp_mode.bpp; + in.pixel_enc = 444; + in.dsc_en = 0; + in.async_en = 0; + in.fec_en = 0; + in.num_of_dsc_slices = 0; + in.compress_ratio = 100; + + _dp_ctrl_calc_tu(ctrl, &in, tu_table); +} + +static void dp_ctrl_setup_tr_unit(struct dp_ctrl_private *ctrl) +{ + u32 dp_tu = 0x0; + u32 valid_boundary = 0x0; + u32 valid_boundary2 = 0x0; + struct dp_vc_tu_mapping_table tu_calc_table; + + dp_ctrl_calc_tu_parameters(ctrl, &tu_calc_table); + + dp_tu |= tu_calc_table.tu_size_minus1; + valid_boundary |= tu_calc_table.valid_boundary_link; + valid_boundary |= (tu_calc_table.delay_start_link << 16); + + valid_boundary2 |= (tu_calc_table.valid_lower_boundary_link << 1); + valid_boundary2 |= (tu_calc_table.upper_boundary_count << 16); + valid_boundary2 |= (tu_calc_table.lower_boundary_count << 20); + + if (tu_calc_table.boundary_moderation_en) + valid_boundary2 |= BIT(0); + + pr_debug("dp_tu=0x%x, valid_boundary=0x%x, valid_boundary2=0x%x\n", + dp_tu, valid_boundary, valid_boundary2); + + dp_catalog_ctrl_update_transfer_unit(ctrl->catalog, + dp_tu, valid_boundary, valid_boundary2); +} + +static int dp_ctrl_wait4video_ready(struct dp_ctrl_private *ctrl) +{ + int ret = 0; + + if (!wait_for_completion_timeout(&ctrl->video_comp, + WAIT_FOR_VIDEO_READY_TIMEOUT_JIFFIES)) { + DRM_ERROR("wait4video timedout\n"); + ret = -ETIMEDOUT; + } + return ret; +} + +static int dp_ctrl_update_vx_px(struct dp_ctrl_private *ctrl) +{ + struct dp_link *link = ctrl->link; + int ret = 0, lane, lane_cnt; + u8 buf[4]; + u32 max_level_reached = 0; + u32 voltage_swing_level = link->phy_params.v_level; + u32 pre_emphasis_level = link->phy_params.p_level; + + drm_dbg_dp(ctrl->drm_dev, + "voltage level: %d emphasis level: %d\n", + voltage_swing_level, pre_emphasis_level); + ret = dp_catalog_ctrl_update_vx_px(ctrl->catalog, + voltage_swing_level, pre_emphasis_level); + + if (ret) + return ret; + + if (voltage_swing_level >= DP_TRAIN_VOLTAGE_SWING_MAX) { + drm_dbg_dp(ctrl->drm_dev, + "max. voltage swing level reached %d\n", + voltage_swing_level); + max_level_reached |= DP_TRAIN_MAX_SWING_REACHED; + } + + if (pre_emphasis_level >= DP_TRAIN_PRE_EMPHASIS_MAX) { + drm_dbg_dp(ctrl->drm_dev, + "max. 
pre-emphasis level reached %d\n", + pre_emphasis_level); + max_level_reached |= DP_TRAIN_MAX_PRE_EMPHASIS_REACHED; + } + + pre_emphasis_level <<= DP_TRAIN_PRE_EMPHASIS_SHIFT; + + lane_cnt = ctrl->link->link_params.num_lanes; + for (lane = 0; lane < lane_cnt; lane++) + buf[lane] = voltage_swing_level | pre_emphasis_level + | max_level_reached; + + drm_dbg_dp(ctrl->drm_dev, "sink: p|v=0x%x\n", + voltage_swing_level | pre_emphasis_level); + ret = drm_dp_dpcd_write(ctrl->aux, DP_TRAINING_LANE0_SET, + buf, lane_cnt); + if (ret == lane_cnt) + ret = 0; + + return ret; +} + +static bool dp_ctrl_train_pattern_set(struct dp_ctrl_private *ctrl, + u8 pattern) +{ + u8 buf; + int ret = 0; + + drm_dbg_dp(ctrl->drm_dev, "sink: pattern=%x\n", pattern); + + buf = pattern; + + if (pattern && pattern != DP_TRAINING_PATTERN_4) + buf |= DP_LINK_SCRAMBLING_DISABLE; + + ret = drm_dp_dpcd_writeb(ctrl->aux, DP_TRAINING_PATTERN_SET, buf); + return ret == 1; +} + +static int dp_ctrl_read_link_status(struct dp_ctrl_private *ctrl, + u8 *link_status) +{ + int ret = 0, len; + + len = drm_dp_dpcd_read_link_status(ctrl->aux, link_status); + if (len != DP_LINK_STATUS_SIZE) { + DRM_ERROR("DP link status read failed, err: %d\n", len); + ret = -EINVAL; + } + + return ret; +} + +static int dp_ctrl_link_train_1(struct dp_ctrl_private *ctrl, + int *training_step) +{ + int tries, old_v_level, ret = 0; + u8 link_status[DP_LINK_STATUS_SIZE]; + int const maximum_retries = 4; + + dp_catalog_ctrl_state_ctrl(ctrl->catalog, 0); + + *training_step = DP_TRAINING_1; + + ret = dp_catalog_ctrl_set_pattern_state_bit(ctrl->catalog, 1); + if (ret) + return ret; + dp_ctrl_train_pattern_set(ctrl, DP_TRAINING_PATTERN_1 | + DP_LINK_SCRAMBLING_DISABLE); + + ret = dp_ctrl_update_vx_px(ctrl); + if (ret) + return ret; + + tries = 0; + old_v_level = ctrl->link->phy_params.v_level; + for (tries = 0; tries < maximum_retries; tries++) { + drm_dp_link_train_clock_recovery_delay(ctrl->aux, ctrl->panel->dpcd); + + ret = dp_ctrl_read_link_status(ctrl, link_status); + if (ret) + return ret; + + if (drm_dp_clock_recovery_ok(link_status, + ctrl->link->link_params.num_lanes)) { + return 0; + } + + if (ctrl->link->phy_params.v_level >= + DP_TRAIN_VOLTAGE_SWING_MAX) { + DRM_ERROR_RATELIMITED("max v_level reached\n"); + return -EAGAIN; + } + + if (old_v_level != ctrl->link->phy_params.v_level) { + tries = 0; + old_v_level = ctrl->link->phy_params.v_level; + } + + dp_link_adjust_levels(ctrl->link, link_status); + ret = dp_ctrl_update_vx_px(ctrl); + if (ret) + return ret; + } + + DRM_ERROR("max tries reached\n"); + return -ETIMEDOUT; +} + +static int dp_ctrl_link_rate_down_shift(struct dp_ctrl_private *ctrl) +{ + int ret = 0; + + switch (ctrl->link->link_params.rate) { + case 810000: + ctrl->link->link_params.rate = 540000; + break; + case 540000: + ctrl->link->link_params.rate = 270000; + break; + case 270000: + ctrl->link->link_params.rate = 162000; + break; + case 162000: + default: + ret = -EINVAL; + break; + } + + if (!ret) { + drm_dbg_dp(ctrl->drm_dev, "new rate=0x%x\n", + ctrl->link->link_params.rate); + } + + return ret; +} + +static int dp_ctrl_link_lane_down_shift(struct dp_ctrl_private *ctrl) +{ + + if (ctrl->link->link_params.num_lanes == 1) + return -1; + + ctrl->link->link_params.num_lanes /= 2; + ctrl->link->link_params.rate = ctrl->panel->link_info.rate; + + ctrl->link->phy_params.p_level = 0; + ctrl->link->phy_params.v_level = 0; + + return 0; +} + +static void dp_ctrl_clear_training_pattern(struct dp_ctrl_private *ctrl) +{ + 
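	/*
	 * Clear TRAINING_PATTERN_SET in the sink's DPCD and then wait the
	 * channel-equalization delay so the link settles before continuing.
	 */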
dp_ctrl_train_pattern_set(ctrl, DP_TRAINING_PATTERN_DISABLE); + drm_dp_link_train_channel_eq_delay(ctrl->aux, ctrl->panel->dpcd); +} + +static int dp_ctrl_link_train_2(struct dp_ctrl_private *ctrl, + int *training_step) +{ + int tries = 0, ret = 0; + u8 pattern; + u32 state_ctrl_bit; + int const maximum_retries = 5; + u8 link_status[DP_LINK_STATUS_SIZE]; + + dp_catalog_ctrl_state_ctrl(ctrl->catalog, 0); + + *training_step = DP_TRAINING_2; + + if (drm_dp_tps4_supported(ctrl->panel->dpcd)) { + pattern = DP_TRAINING_PATTERN_4; + state_ctrl_bit = 4; + } else if (drm_dp_tps3_supported(ctrl->panel->dpcd)) { + pattern = DP_TRAINING_PATTERN_3; + state_ctrl_bit = 3; + } else { + pattern = DP_TRAINING_PATTERN_2; + state_ctrl_bit = 2; + } + + ret = dp_catalog_ctrl_set_pattern_state_bit(ctrl->catalog, state_ctrl_bit); + if (ret) + return ret; + + dp_ctrl_train_pattern_set(ctrl, pattern); + + for (tries = 0; tries <= maximum_retries; tries++) { + drm_dp_link_train_channel_eq_delay(ctrl->aux, ctrl->panel->dpcd); + + ret = dp_ctrl_read_link_status(ctrl, link_status); + if (ret) + return ret; + + if (drm_dp_channel_eq_ok(link_status, + ctrl->link->link_params.num_lanes)) { + return 0; + } + + dp_link_adjust_levels(ctrl->link, link_status); + ret = dp_ctrl_update_vx_px(ctrl); + if (ret) + return ret; + + } + + return -ETIMEDOUT; +} + +static int dp_ctrl_link_train(struct dp_ctrl_private *ctrl, + int *training_step) +{ + int ret = 0; + const u8 *dpcd = ctrl->panel->dpcd; + u8 encoding[] = { 0, DP_SET_ANSI_8B10B }; + u8 assr; + struct dp_link_info link_info = {0}; + + dp_ctrl_config_ctrl(ctrl); + + link_info.num_lanes = ctrl->link->link_params.num_lanes; + link_info.rate = ctrl->link->link_params.rate; + link_info.capabilities = DP_LINK_CAP_ENHANCED_FRAMING; + + dp_aux_link_configure(ctrl->aux, &link_info); + + if (drm_dp_max_downspread(dpcd)) + encoding[0] |= DP_SPREAD_AMP_0_5; + + /* config DOWNSPREAD_CTRL and MAIN_LINK_CHANNEL_CODING_SET */ + drm_dp_dpcd_write(ctrl->aux, DP_DOWNSPREAD_CTRL, encoding, 2); + + if (drm_dp_alternate_scrambler_reset_cap(dpcd)) { + assr = DP_ALTERNATE_SCRAMBLER_RESET_ENABLE; + drm_dp_dpcd_write(ctrl->aux, DP_EDP_CONFIGURATION_SET, + &assr, 1); + } + + ret = dp_ctrl_link_train_1(ctrl, training_step); + if (ret) { + DRM_ERROR("link training #1 failed. ret=%d\n", ret); + goto end; + } + + /* print success info as this is a result of user initiated action */ + drm_dbg_dp(ctrl->drm_dev, "link training #1 successful\n"); + + ret = dp_ctrl_link_train_2(ctrl, training_step); + if (ret) { + DRM_ERROR("link training #2 failed. ret=%d\n", ret); + goto end; + } + + /* print success info as this is a result of user initiated action */ + drm_dbg_dp(ctrl->drm_dev, "link training #2 successful\n"); + +end: + dp_catalog_ctrl_state_ctrl(ctrl->catalog, 0); + + return ret; +} + +static int dp_ctrl_setup_main_link(struct dp_ctrl_private *ctrl, + int *training_step) +{ + int ret = 0; + + dp_catalog_ctrl_mainlink_ctrl(ctrl->catalog, true); + + if (ctrl->link->sink_request & DP_TEST_LINK_PHY_TEST_PATTERN) + return ret; + + /* + * As part of previous calls, DP controller state might have + * transitioned to PUSH_IDLE. In order to start transmitting + * a link training pattern, we have to first do soft reset. 
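 *
 * Training itself then runs in two phases: clock recovery on TPS1
 * (dp_ctrl_link_train_1) followed by channel equalization on the
 * highest pattern the sink supports - TPS4, TPS3 or TPS2
 * (dp_ctrl_link_train_2).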
+ */ + + ret = dp_ctrl_link_train(ctrl, training_step); + + return ret; +} + +static void dp_ctrl_set_clock_rate(struct dp_ctrl_private *ctrl, + enum dp_pm_type module, char *name, unsigned long rate) +{ + u32 num = ctrl->parser->mp[module].num_clk; + struct clk_bulk_data *cfg = ctrl->parser->mp[module].clocks; + + while (num && strcmp(cfg->id, name)) { + num--; + cfg++; + } + + drm_dbg_dp(ctrl->drm_dev, "setting rate=%lu on clk=%s\n", + rate, name); + + if (num) + clk_set_rate(cfg->clk, rate); + else + DRM_ERROR("%s clock doesn't exit to set rate %lu\n", + name, rate); +} + +static int dp_ctrl_enable_mainlink_clocks(struct dp_ctrl_private *ctrl) +{ + int ret = 0; + struct dp_io *dp_io = &ctrl->parser->io; + struct phy *phy = dp_io->phy; + struct phy_configure_opts_dp *opts_dp = &dp_io->phy_opts.dp; + const u8 *dpcd = ctrl->panel->dpcd; + + opts_dp->lanes = ctrl->link->link_params.num_lanes; + opts_dp->link_rate = ctrl->link->link_params.rate / 100; + opts_dp->ssc = drm_dp_max_downspread(dpcd); + + phy_configure(phy, &dp_io->phy_opts); + phy_power_on(phy); + + dev_pm_opp_set_rate(ctrl->dev, ctrl->link->link_params.rate * 1000); + ret = dp_power_clk_enable(ctrl->power, DP_CTRL_PM, true); + if (ret) + DRM_ERROR("Unable to start link clocks. ret=%d\n", ret); + + drm_dbg_dp(ctrl->drm_dev, "link rate=%d\n", ctrl->link->link_params.rate); + + return ret; +} + +void dp_ctrl_reset_irq_ctrl(struct dp_ctrl *dp_ctrl, bool enable) +{ + struct dp_ctrl_private *ctrl; + + ctrl = container_of(dp_ctrl, struct dp_ctrl_private, dp_ctrl); + + dp_catalog_ctrl_reset(ctrl->catalog); + + /* + * all dp controller programmable registers will not + * be reset to default value after DP_SW_RESET + * therefore interrupt mask bits have to be updated + * to enable/disable interrupts + */ + dp_catalog_ctrl_enable_irq(ctrl->catalog, enable); +} + +void dp_ctrl_phy_init(struct dp_ctrl *dp_ctrl) +{ + struct dp_ctrl_private *ctrl; + struct dp_io *dp_io; + struct phy *phy; + + ctrl = container_of(dp_ctrl, struct dp_ctrl_private, dp_ctrl); + dp_io = &ctrl->parser->io; + phy = dp_io->phy; + + dp_catalog_ctrl_phy_reset(ctrl->catalog); + phy_init(phy); + + drm_dbg_dp(ctrl->drm_dev, "phy=%p init=%d power_on=%d\n", + phy, phy->init_count, phy->power_count); +} + +void dp_ctrl_phy_exit(struct dp_ctrl *dp_ctrl) +{ + struct dp_ctrl_private *ctrl; + struct dp_io *dp_io; + struct phy *phy; + + ctrl = container_of(dp_ctrl, struct dp_ctrl_private, dp_ctrl); + dp_io = &ctrl->parser->io; + phy = dp_io->phy; + + dp_catalog_ctrl_phy_reset(ctrl->catalog); + phy_exit(phy); + drm_dbg_dp(ctrl->drm_dev, "phy=%p init=%d power_on=%d\n", + phy, phy->init_count, phy->power_count); +} + +static bool dp_ctrl_use_fixed_nvid(struct dp_ctrl_private *ctrl) +{ + const u8 *dpcd = ctrl->panel->dpcd; + + /* + * For better interop experience, used a fixed NVID=0x8000 + * whenever connected to a VGA dongle downstream. + */ + if (drm_dp_is_branch(dpcd)) + return (drm_dp_has_quirk(&ctrl->panel->desc, + DP_DPCD_QUIRK_CONSTANT_N)); + + return false; +} + +static int dp_ctrl_reinitialize_mainlink(struct dp_ctrl_private *ctrl) +{ + int ret = 0; + struct dp_io *dp_io = &ctrl->parser->io; + struct phy *phy = dp_io->phy; + struct phy_configure_opts_dp *opts_dp = &dp_io->phy_opts.dp; + + dp_catalog_ctrl_mainlink_ctrl(ctrl->catalog, false); + opts_dp->lanes = ctrl->link->link_params.num_lanes; + phy_configure(phy, &dp_io->phy_opts); + /* + * Disable and re-enable the mainlink clock since the + * link clock might have been adjusted as part of the + * link maintenance. 
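 *
 * The sequence below drops the OPP bandwidth vote, gates the link
 * clocks, powers the PHY off, waits the recommended ~20 ms and then
 * re-enables everything at the (possibly down-shifted) link rate.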
+ */ + dev_pm_opp_set_rate(ctrl->dev, 0); + ret = dp_power_clk_enable(ctrl->power, DP_CTRL_PM, false); + if (ret) { + DRM_ERROR("Failed to disable clocks. ret=%d\n", ret); + return ret; + } + phy_power_off(phy); + /* hw recommended delay before re-enabling clocks */ + msleep(20); + + ret = dp_ctrl_enable_mainlink_clocks(ctrl); + if (ret) { + DRM_ERROR("Failed to enable mainlink clks. ret=%d\n", ret); + return ret; + } + + return ret; +} + +static int dp_ctrl_deinitialize_mainlink(struct dp_ctrl_private *ctrl) +{ + struct dp_io *dp_io; + struct phy *phy; + int ret; + + dp_io = &ctrl->parser->io; + phy = dp_io->phy; + + dp_catalog_ctrl_mainlink_ctrl(ctrl->catalog, false); + + dp_catalog_ctrl_reset(ctrl->catalog); + + dev_pm_opp_set_rate(ctrl->dev, 0); + ret = dp_power_clk_enable(ctrl->power, DP_CTRL_PM, false); + if (ret) { + DRM_ERROR("Failed to disable link clocks. ret=%d\n", ret); + } + + phy_power_off(phy); + + /* aux channel down, reinit phy */ + phy_exit(phy); + phy_init(phy); + + drm_dbg_dp(ctrl->drm_dev, "phy=%p init=%d power_on=%d\n", + phy, phy->init_count, phy->power_count); + return 0; +} + +static int dp_ctrl_link_maintenance(struct dp_ctrl_private *ctrl) +{ + int ret = 0; + int training_step = DP_TRAINING_NONE; + + dp_ctrl_push_idle(&ctrl->dp_ctrl); + + ctrl->link->phy_params.p_level = 0; + ctrl->link->phy_params.v_level = 0; + + ret = dp_ctrl_setup_main_link(ctrl, &training_step); + if (ret) + goto end; + + dp_ctrl_clear_training_pattern(ctrl); + + dp_catalog_ctrl_state_ctrl(ctrl->catalog, DP_STATE_CTRL_SEND_VIDEO); + + ret = dp_ctrl_wait4video_ready(ctrl); +end: + return ret; +} + +static bool dp_ctrl_send_phy_test_pattern(struct dp_ctrl_private *ctrl) +{ + bool success = false; + u32 pattern_sent = 0x0; + u32 pattern_requested = ctrl->link->phy_params.phy_test_pattern_sel; + + drm_dbg_dp(ctrl->drm_dev, "request: 0x%x\n", pattern_requested); + + if (dp_catalog_ctrl_update_vx_px(ctrl->catalog, + ctrl->link->phy_params.v_level, + ctrl->link->phy_params.p_level)) { + DRM_ERROR("Failed to set v/p levels\n"); + return false; + } + dp_catalog_ctrl_send_phy_pattern(ctrl->catalog, pattern_requested); + dp_ctrl_update_vx_px(ctrl); + dp_link_send_test_response(ctrl->link); + + pattern_sent = dp_catalog_ctrl_read_phy_pattern(ctrl->catalog); + + switch (pattern_sent) { + case MR_LINK_TRAINING1: + success = (pattern_requested == + DP_PHY_TEST_PATTERN_D10_2); + break; + case MR_LINK_SYMBOL_ERM: + success = ((pattern_requested == + DP_PHY_TEST_PATTERN_ERROR_COUNT) || + (pattern_requested == + DP_PHY_TEST_PATTERN_CP2520)); + break; + case MR_LINK_PRBS7: + success = (pattern_requested == + DP_PHY_TEST_PATTERN_PRBS7); + break; + case MR_LINK_CUSTOM80: + success = (pattern_requested == + DP_PHY_TEST_PATTERN_80BIT_CUSTOM); + break; + case MR_LINK_TRAINING4: + success = (pattern_requested == + DP_PHY_TEST_PATTERN_SEL_MASK); + break; + default: + success = false; + } + + drm_dbg_dp(ctrl->drm_dev, "%s: test->0x%x\n", + success ? "success" : "failed", pattern_requested); + return success; +} + +static int dp_ctrl_process_phy_test_request(struct dp_ctrl_private *ctrl) +{ + int ret; + unsigned long pixel_rate; + + if (!ctrl->link->phy_params.phy_test_pattern_sel) { + drm_dbg_dp(ctrl->drm_dev, + "no test pattern selected by sink\n"); + return 0; + } + + /* + * The global reset will need DP link related clocks to be + * running. Add the global reset just before disabling the + * link clocks and core clocks. 
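 *
 * Concretely, dp_ctrl_off() and dp_ctrl_on_link() below tear the link
 * down and bring it back up with the PHY-test parameters, after which
 * the stream pixel clock is started and the requested pattern is sent.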
+ */ + ret = dp_ctrl_off(&ctrl->dp_ctrl); + if (ret) { + DRM_ERROR("failed to disable DP controller\n"); + return ret; + } + + ret = dp_ctrl_on_link(&ctrl->dp_ctrl); + if (ret) { + DRM_ERROR("failed to enable DP link controller\n"); + return ret; + } + + pixel_rate = ctrl->panel->dp_mode.drm_mode.clock; + dp_ctrl_set_clock_rate(ctrl, DP_STREAM_PM, "stream_pixel", pixel_rate * 1000); + + ret = dp_power_clk_enable(ctrl->power, DP_STREAM_PM, true); + if (ret) { + DRM_ERROR("Failed to start pixel clocks. ret=%d\n", ret); + return ret; + } + + dp_ctrl_send_phy_test_pattern(ctrl); + + return 0; +} + +void dp_ctrl_handle_sink_request(struct dp_ctrl *dp_ctrl) +{ + struct dp_ctrl_private *ctrl; + u32 sink_request = 0x0; + + if (!dp_ctrl) { + DRM_ERROR("invalid input\n"); + return; + } + + ctrl = container_of(dp_ctrl, struct dp_ctrl_private, dp_ctrl); + sink_request = ctrl->link->sink_request; + + if (sink_request & DP_TEST_LINK_PHY_TEST_PATTERN) { + drm_dbg_dp(ctrl->drm_dev, "PHY_TEST_PATTERN request\n"); + if (dp_ctrl_process_phy_test_request(ctrl)) { + DRM_ERROR("process phy_test_req failed\n"); + return; + } + } + + if (sink_request & DP_LINK_STATUS_UPDATED) { + if (dp_ctrl_link_maintenance(ctrl)) { + DRM_ERROR("LM failed: TEST_LINK_TRAINING\n"); + return; + } + } + + if (sink_request & DP_TEST_LINK_TRAINING) { + dp_link_send_test_response(ctrl->link); + if (dp_ctrl_link_maintenance(ctrl)) { + DRM_ERROR("LM failed: TEST_LINK_TRAINING\n"); + return; + } + } +} + +static bool dp_ctrl_clock_recovery_any_ok( + const u8 link_status[DP_LINK_STATUS_SIZE], + int lane_count) +{ + int reduced_cnt; + + if (lane_count <= 1) + return false; + + /* + * only interested in the lane number after reduced + * lane_count = 4, then only interested in 2 lanes + * lane_count = 2, then only interested in 1 lane + */ + reduced_cnt = lane_count >> 1; + + return drm_dp_clock_recovery_ok(link_status, reduced_cnt); +} + +static bool dp_ctrl_channel_eq_ok(struct dp_ctrl_private *ctrl) +{ + u8 link_status[DP_LINK_STATUS_SIZE]; + int num_lanes = ctrl->link->link_params.num_lanes; + + dp_ctrl_read_link_status(ctrl, link_status); + + return drm_dp_channel_eq_ok(link_status, num_lanes); +} + +int dp_ctrl_on_link(struct dp_ctrl *dp_ctrl) +{ + int rc = 0; + struct dp_ctrl_private *ctrl; + u32 rate; + int link_train_max_retries = 5; + u32 const phy_cts_pixel_clk_khz = 148500; + u8 link_status[DP_LINK_STATUS_SIZE]; + unsigned int training_step; + unsigned long pixel_rate; + + if (!dp_ctrl) + return -EINVAL; + + ctrl = container_of(dp_ctrl, struct dp_ctrl_private, dp_ctrl); + + rate = ctrl->panel->link_info.rate; + pixel_rate = ctrl->panel->dp_mode.drm_mode.clock; + + dp_power_clk_enable(ctrl->power, DP_CORE_PM, true); + + if (ctrl->link->sink_request & DP_TEST_LINK_PHY_TEST_PATTERN) { + drm_dbg_dp(ctrl->drm_dev, + "using phy test link parameters\n"); + if (!pixel_rate) + pixel_rate = phy_cts_pixel_clk_khz; + } else { + ctrl->link->link_params.rate = rate; + ctrl->link->link_params.num_lanes = + ctrl->panel->link_info.num_lanes; + } + + drm_dbg_dp(ctrl->drm_dev, "rate=%d, num_lanes=%d, pixel_rate=%lu\n", + ctrl->link->link_params.rate, ctrl->link->link_params.num_lanes, + pixel_rate); + + rc = dp_ctrl_enable_mainlink_clocks(ctrl); + if (rc) + return rc; + + while (--link_train_max_retries) { + training_step = DP_TRAINING_NONE; + rc = dp_ctrl_setup_main_link(ctrl, &training_step); + if (rc == 0) { + /* training completed successfully */ + break; + } else if (training_step == DP_TRAINING_1) { + /* link train_1 failed */ + if 
(!dp_catalog_link_is_connected(ctrl->catalog)) + break; + + dp_ctrl_read_link_status(ctrl, link_status); + + rc = dp_ctrl_link_rate_down_shift(ctrl); + if (rc < 0) { /* already in RBR = 1.6G */ + if (dp_ctrl_clock_recovery_any_ok(link_status, + ctrl->link->link_params.num_lanes)) { + /* + * some lanes are ready, + * reduce lane number + */ + rc = dp_ctrl_link_lane_down_shift(ctrl); + if (rc < 0) { /* lane == 1 already */ + /* end with failure */ + break; + } + } else { + /* end with failure */ + break; /* lane == 1 already */ + } + } + } else if (training_step == DP_TRAINING_2) { + /* link train_2 failed */ + if (!dp_catalog_link_is_connected(ctrl->catalog)) + break; + + dp_ctrl_read_link_status(ctrl, link_status); + + if (!drm_dp_clock_recovery_ok(link_status, + ctrl->link->link_params.num_lanes)) + rc = dp_ctrl_link_rate_down_shift(ctrl); + else + rc = dp_ctrl_link_lane_down_shift(ctrl); + + if (rc < 0) { + /* end with failure */ + break; /* lane == 1 already */ + } + + /* stop link training before start re training */ + dp_ctrl_clear_training_pattern(ctrl); + } + + rc = dp_ctrl_reinitialize_mainlink(ctrl); + if (rc) { + DRM_ERROR("Failed to reinitialize mainlink. rc=%d\n", rc); + break; + } + } + + if (ctrl->link->sink_request & DP_TEST_LINK_PHY_TEST_PATTERN) + return rc; + + if (rc == 0) { /* link train successfully */ + /* + * do not stop train pattern here + * stop link training at on_stream + * to pass compliance test + */ + } else { + /* + * link training failed + * end txing train pattern here + */ + dp_ctrl_clear_training_pattern(ctrl); + + dp_ctrl_deinitialize_mainlink(ctrl); + rc = -ECONNRESET; + } + + return rc; +} + +static int dp_ctrl_link_retrain(struct dp_ctrl_private *ctrl) +{ + int training_step = DP_TRAINING_NONE; + + return dp_ctrl_setup_main_link(ctrl, &training_step); +} + +int dp_ctrl_on_stream(struct dp_ctrl *dp_ctrl, bool force_link_train) +{ + int ret = 0; + bool mainlink_ready = false; + struct dp_ctrl_private *ctrl; + unsigned long pixel_rate; + unsigned long pixel_rate_orig; + + if (!dp_ctrl) + return -EINVAL; + + ctrl = container_of(dp_ctrl, struct dp_ctrl_private, dp_ctrl); + + pixel_rate = pixel_rate_orig = ctrl->panel->dp_mode.drm_mode.clock; + + if (dp_ctrl->wide_bus_en) + pixel_rate >>= 1; + + drm_dbg_dp(ctrl->drm_dev, "rate=%d, num_lanes=%d, pixel_rate=%lu\n", + ctrl->link->link_params.rate, + ctrl->link->link_params.num_lanes, pixel_rate); + + if (!dp_power_clk_status(ctrl->power, DP_CTRL_PM)) { /* link clk is off */ + ret = dp_ctrl_enable_mainlink_clocks(ctrl); + if (ret) { + DRM_ERROR("Failed to start link clocks. ret=%d\n", ret); + goto end; + } + } + + dp_ctrl_set_clock_rate(ctrl, DP_STREAM_PM, "stream_pixel", pixel_rate * 1000); + + ret = dp_power_clk_enable(ctrl->power, DP_STREAM_PM, true); + if (ret) { + DRM_ERROR("Unable to start pixel clocks. ret=%d\n", ret); + goto end; + } + + if (force_link_train || !dp_ctrl_channel_eq_ok(ctrl)) + dp_ctrl_link_retrain(ctrl); + + /* stop txing train pattern to end link training */ + dp_ctrl_clear_training_pattern(ctrl); + + /* + * Set up transfer unit values and set controller state to send + * video. 
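+ * This configures the source/stream attributes and the MSA values
+ * (using the original, pre-wide-bus pixel rate and a fixed NVID when
+ * dp_ctrl_use_fixed_nvid() applies), programs the transfer unit,
+ * then issues DP_STATE_CTRL_SEND_VIDEO and waits for the video-ready
+ * interrupt.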
+ */ + reinit_completion(&ctrl->video_comp); + + dp_ctrl_configure_source_params(ctrl); + + dp_catalog_ctrl_config_msa(ctrl->catalog, + ctrl->link->link_params.rate, + pixel_rate_orig, dp_ctrl_use_fixed_nvid(ctrl)); + + dp_ctrl_setup_tr_unit(ctrl); + + dp_catalog_ctrl_state_ctrl(ctrl->catalog, DP_STATE_CTRL_SEND_VIDEO); + + ret = dp_ctrl_wait4video_ready(ctrl); + if (ret) + return ret; + + mainlink_ready = dp_catalog_ctrl_mainlink_ready(ctrl->catalog); + drm_dbg_dp(ctrl->drm_dev, + "mainlink %s\n", mainlink_ready ? "READY" : "NOT READY"); + +end: + return ret; +} + +int dp_ctrl_off_link_stream(struct dp_ctrl *dp_ctrl) +{ + struct dp_ctrl_private *ctrl; + struct dp_io *dp_io; + struct phy *phy; + int ret; + + ctrl = container_of(dp_ctrl, struct dp_ctrl_private, dp_ctrl); + dp_io = &ctrl->parser->io; + phy = dp_io->phy; + + /* set dongle to D3 (power off) mode */ + dp_link_psm_config(ctrl->link, &ctrl->panel->link_info, true); + + dp_catalog_ctrl_mainlink_ctrl(ctrl->catalog, false); + + if (dp_power_clk_status(ctrl->power, DP_STREAM_PM)) { + ret = dp_power_clk_enable(ctrl->power, DP_STREAM_PM, false); + if (ret) { + DRM_ERROR("Failed to disable pclk. ret=%d\n", ret); + return ret; + } + } + + dev_pm_opp_set_rate(ctrl->dev, 0); + ret = dp_power_clk_enable(ctrl->power, DP_CTRL_PM, false); + if (ret) { + DRM_ERROR("Failed to disable link clocks. ret=%d\n", ret); + return ret; + } + + phy_power_off(phy); + + /* aux channel down, reinit phy */ + phy_exit(phy); + phy_init(phy); + + drm_dbg_dp(ctrl->drm_dev, "phy=%p init=%d power_on=%d\n", + phy, phy->init_count, phy->power_count); + return ret; +} + +int dp_ctrl_off_link(struct dp_ctrl *dp_ctrl) +{ + struct dp_ctrl_private *ctrl; + struct dp_io *dp_io; + struct phy *phy; + int ret; + + ctrl = container_of(dp_ctrl, struct dp_ctrl_private, dp_ctrl); + dp_io = &ctrl->parser->io; + phy = dp_io->phy; + + dp_catalog_ctrl_mainlink_ctrl(ctrl->catalog, false); + + ret = dp_power_clk_enable(ctrl->power, DP_CTRL_PM, false); + if (ret) { + DRM_ERROR("Failed to disable link clocks. ret=%d\n", ret); + } + + DRM_DEBUG_DP("Before, phy=%p init_count=%d power_on=%d\n", + phy, phy->init_count, phy->power_count); + + phy_power_off(phy); + + DRM_DEBUG_DP("After, phy=%p init_count=%d power_on=%d\n", + phy, phy->init_count, phy->power_count); + + return ret; +} + +int dp_ctrl_off(struct dp_ctrl *dp_ctrl) +{ + struct dp_ctrl_private *ctrl; + struct dp_io *dp_io; + struct phy *phy; + int ret = 0; + + if (!dp_ctrl) + return -EINVAL; + + ctrl = container_of(dp_ctrl, struct dp_ctrl_private, dp_ctrl); + dp_io = &ctrl->parser->io; + phy = dp_io->phy; + + dp_catalog_ctrl_mainlink_ctrl(ctrl->catalog, false); + + dp_catalog_ctrl_reset(ctrl->catalog); + + ret = dp_power_clk_enable(ctrl->power, DP_STREAM_PM, false); + if (ret) + DRM_ERROR("Failed to disable pixel clocks. ret=%d\n", ret); + + dev_pm_opp_set_rate(ctrl->dev, 0); + ret = dp_power_clk_enable(ctrl->power, DP_CTRL_PM, false); + if (ret) { + DRM_ERROR("Failed to disable link clocks. 
ret=%d\n", ret); + } + + phy_power_off(phy); + drm_dbg_dp(ctrl->drm_dev, "phy=%p init=%d power_on=%d\n", + phy, phy->init_count, phy->power_count); + + return ret; +} + +void dp_ctrl_isr(struct dp_ctrl *dp_ctrl) +{ + struct dp_ctrl_private *ctrl; + u32 isr; + + if (!dp_ctrl) + return; + + ctrl = container_of(dp_ctrl, struct dp_ctrl_private, dp_ctrl); + + isr = dp_catalog_ctrl_get_interrupt(ctrl->catalog); + + if (isr & DP_CTRL_INTR_READY_FOR_VIDEO) { + drm_dbg_dp(ctrl->drm_dev, "dp_video_ready\n"); + complete(&ctrl->video_comp); + } + + if (isr & DP_CTRL_INTR_IDLE_PATTERN_SENT) { + drm_dbg_dp(ctrl->drm_dev, "idle_patterns_sent\n"); + complete(&ctrl->idle_comp); + } +} + +struct dp_ctrl *dp_ctrl_get(struct device *dev, struct dp_link *link, + struct dp_panel *panel, struct drm_dp_aux *aux, + struct dp_power *power, struct dp_catalog *catalog, + struct dp_parser *parser) +{ + struct dp_ctrl_private *ctrl; + int ret; + + if (!dev || !panel || !aux || + !link || !catalog) { + DRM_ERROR("invalid input\n"); + return ERR_PTR(-EINVAL); + } + + ctrl = devm_kzalloc(dev, sizeof(*ctrl), GFP_KERNEL); + if (!ctrl) { + DRM_ERROR("Mem allocation failure\n"); + return ERR_PTR(-ENOMEM); + } + + ret = devm_pm_opp_set_clkname(dev, "ctrl_link"); + if (ret) { + dev_err(dev, "invalid DP OPP table in device tree\n"); + /* caller do PTR_ERR(opp_table) */ + return (struct dp_ctrl *)ERR_PTR(ret); + } + + /* OPP table is optional */ + ret = devm_pm_opp_of_add_table(dev); + if (ret) + dev_err(dev, "failed to add DP OPP table\n"); + + init_completion(&ctrl->idle_comp); + init_completion(&ctrl->video_comp); + + /* in parameters */ + ctrl->parser = parser; + ctrl->panel = panel; + ctrl->power = power; + ctrl->aux = aux; + ctrl->link = link; + ctrl->catalog = catalog; + ctrl->dev = dev; + + return &ctrl->dp_ctrl; +} diff --git a/drivers/gpu/drm/msm/dp/dp_ctrl.h b/drivers/gpu/drm/msm/dp/dp_ctrl.h new file mode 100644 index 000000000..9f29734af --- /dev/null +++ b/drivers/gpu/drm/msm/dp/dp_ctrl.h @@ -0,0 +1,40 @@ +/* SPDX-License-Identifier: GPL-2.0-only */ +/* + * Copyright (c) 2012-2020, The Linux Foundation. All rights reserved. 
+ */ + +#ifndef _DP_CTRL_H_ +#define _DP_CTRL_H_ + +#include "dp_aux.h" +#include "dp_panel.h" +#include "dp_link.h" +#include "dp_parser.h" +#include "dp_power.h" +#include "dp_catalog.h" + +struct dp_ctrl { + bool orientation; + atomic_t aborted; + bool wide_bus_en; +}; + +int dp_ctrl_on_link(struct dp_ctrl *dp_ctrl); +int dp_ctrl_on_stream(struct dp_ctrl *dp_ctrl, bool force_link_train); +int dp_ctrl_off_link_stream(struct dp_ctrl *dp_ctrl); +int dp_ctrl_off_link(struct dp_ctrl *dp_ctrl); +int dp_ctrl_off(struct dp_ctrl *dp_ctrl); +void dp_ctrl_push_idle(struct dp_ctrl *dp_ctrl); +void dp_ctrl_isr(struct dp_ctrl *dp_ctrl); +void dp_ctrl_handle_sink_request(struct dp_ctrl *dp_ctrl); +struct dp_ctrl *dp_ctrl_get(struct device *dev, struct dp_link *link, + struct dp_panel *panel, struct drm_dp_aux *aux, + struct dp_power *power, struct dp_catalog *catalog, + struct dp_parser *parser); + +void dp_ctrl_reset_irq_ctrl(struct dp_ctrl *dp_ctrl, bool enable); +void dp_ctrl_phy_init(struct dp_ctrl *dp_ctrl); +void dp_ctrl_phy_exit(struct dp_ctrl *dp_ctrl); +void dp_ctrl_irq_phy_exit(struct dp_ctrl *dp_ctrl); + +#endif /* _DP_CTRL_H_ */ diff --git a/drivers/gpu/drm/msm/dp/dp_debug.c b/drivers/gpu/drm/msm/dp/dp_debug.c new file mode 100644 index 000000000..5e35033ba --- /dev/null +++ b/drivers/gpu/drm/msm/dp/dp_debug.c @@ -0,0 +1,300 @@ +// SPDX-License-Identifier: GPL-2.0-only +/* + * Copyright (c) 2017-2020, The Linux Foundation. All rights reserved. + */ + +#define pr_fmt(fmt)"[drm-dp] %s: " fmt, __func__ + +#include <linux/debugfs.h> +#include <drm/drm_connector.h> +#include <drm/drm_file.h> + +#include "dp_parser.h" +#include "dp_catalog.h" +#include "dp_aux.h" +#include "dp_ctrl.h" +#include "dp_debug.h" +#include "dp_display.h" + +#define DEBUG_NAME "msm_dp" + +struct dp_debug_private { + struct dentry *root; + + struct dp_usbpd *usbpd; + struct dp_link *link; + struct dp_panel *panel; + struct drm_connector *connector; + struct device *dev; + struct drm_device *drm_dev; + + struct dp_debug dp_debug; +}; + +static int dp_debug_show(struct seq_file *seq, void *p) +{ + struct dp_debug_private *debug = seq->private; + u64 lclk = 0; + u32 link_params_rate; + const struct drm_display_mode *drm_mode; + + if (!debug) + return -ENODEV; + + drm_mode = &debug->panel->dp_mode.drm_mode; + + seq_printf(seq, "\tname = %s\n", DEBUG_NAME); + seq_printf(seq, "\tdrm_dp_link\n\t\trate = %u\n", + debug->panel->link_info.rate); + seq_printf(seq, "\t\tnum_lanes = %u\n", + debug->panel->link_info.num_lanes); + seq_printf(seq, "\t\tcapabilities = %lu\n", + debug->panel->link_info.capabilities); + seq_printf(seq, "\tdp_panel_info:\n\t\tactive = %dx%d\n", + drm_mode->hdisplay, + drm_mode->vdisplay); + seq_printf(seq, "\t\tback_porch = %dx%d\n", + drm_mode->htotal - drm_mode->hsync_end, + drm_mode->vtotal - drm_mode->vsync_end); + seq_printf(seq, "\t\tfront_porch = %dx%d\n", + drm_mode->hsync_start - drm_mode->hdisplay, + drm_mode->vsync_start - drm_mode->vdisplay); + seq_printf(seq, "\t\tsync_width = %dx%d\n", + drm_mode->hsync_end - drm_mode->hsync_start, + drm_mode->vsync_end - drm_mode->vsync_start); + seq_printf(seq, "\t\tactive_low = %dx%d\n", + debug->panel->dp_mode.h_active_low, + debug->panel->dp_mode.v_active_low); + seq_printf(seq, "\t\th_skew = %d\n", + drm_mode->hskew); + seq_printf(seq, "\t\trefresh rate = %d\n", + drm_mode_vrefresh(drm_mode)); + seq_printf(seq, "\t\tpixel clock khz = %d\n", + drm_mode->clock); + seq_printf(seq, "\t\tbpp = %d\n", + debug->panel->dp_mode.bpp); + + /* Link Information */ + 
seq_printf(seq, "\tdp_link:\n\t\ttest_requested = %d\n", + debug->link->sink_request); + seq_printf(seq, "\t\tnum_lanes = %d\n", + debug->link->link_params.num_lanes); + link_params_rate = debug->link->link_params.rate; + seq_printf(seq, "\t\tbw_code = %d\n", + drm_dp_link_rate_to_bw_code(link_params_rate)); + lclk = debug->link->link_params.rate * 1000; + seq_printf(seq, "\t\tlclk = %lld\n", lclk); + seq_printf(seq, "\t\tv_level = %d\n", + debug->link->phy_params.v_level); + seq_printf(seq, "\t\tp_level = %d\n", + debug->link->phy_params.p_level); + + return 0; +} +DEFINE_SHOW_ATTRIBUTE(dp_debug); + +static int dp_test_data_show(struct seq_file *m, void *data) +{ + const struct dp_debug_private *debug = m->private; + const struct drm_connector *connector = debug->connector; + u32 bpc; + + if (connector->status == connector_status_connected) { + bpc = debug->link->test_video.test_bit_depth; + seq_printf(m, "hdisplay: %d\n", + debug->link->test_video.test_h_width); + seq_printf(m, "vdisplay: %d\n", + debug->link->test_video.test_v_height); + seq_printf(m, "bpc: %u\n", + dp_link_bit_depth_to_bpc(bpc)); + } else { + seq_puts(m, "0"); + } + + return 0; +} +DEFINE_SHOW_ATTRIBUTE(dp_test_data); + +static int dp_test_type_show(struct seq_file *m, void *data) +{ + const struct dp_debug_private *debug = m->private; + const struct drm_connector *connector = debug->connector; + + if (connector->status == connector_status_connected) + seq_printf(m, "%02x", DP_TEST_LINK_VIDEO_PATTERN); + else + seq_puts(m, "0"); + + return 0; +} +DEFINE_SHOW_ATTRIBUTE(dp_test_type); + +static ssize_t dp_test_active_write(struct file *file, + const char __user *ubuf, + size_t len, loff_t *offp) +{ + char *input_buffer; + int status = 0; + const struct dp_debug_private *debug; + const struct drm_connector *connector; + int val = 0; + + debug = ((struct seq_file *)file->private_data)->private; + connector = debug->connector; + + if (len == 0) + return 0; + + input_buffer = memdup_user_nul(ubuf, len); + if (IS_ERR(input_buffer)) + return PTR_ERR(input_buffer); + + DRM_DEBUG_DRIVER("Copied %d bytes from user\n", (unsigned int)len); + + if (connector->status == connector_status_connected) { + status = kstrtoint(input_buffer, 10, &val); + if (status < 0) { + kfree(input_buffer); + return status; + } + DRM_DEBUG_DRIVER("Got %d for test active\n", val); + /* To prevent erroneous activation of the compliance + * testing code, only accept an actual value of 1 here + */ + if (val == 1) + debug->panel->video_test = true; + else + debug->panel->video_test = false; + } + kfree(input_buffer); + + *offp += len; + return len; +} + +static int dp_test_active_show(struct seq_file *m, void *data) +{ + struct dp_debug_private *debug = m->private; + struct drm_connector *connector = debug->connector; + + if (connector->status == connector_status_connected) { + if (debug->panel->video_test) + seq_puts(m, "1"); + else + seq_puts(m, "0"); + } else { + seq_puts(m, "0"); + } + + return 0; +} + +static int dp_test_active_open(struct inode *inode, + struct file *file) +{ + return single_open(file, dp_test_active_show, + inode->i_private); +} + +static const struct file_operations test_active_fops = { + .owner = THIS_MODULE, + .open = dp_test_active_open, + .read = seq_read, + .llseek = seq_lseek, + .release = single_release, + .write = dp_test_active_write +}; + +static void dp_debug_init(struct dp_debug *dp_debug, struct drm_minor *minor) +{ + char path[64]; + struct dp_debug_private *debug = container_of(dp_debug, + struct dp_debug_private, 
dp_debug); + + snprintf(path, sizeof(path), "msm_dp-%s", debug->connector->name); + + debug->root = debugfs_create_dir(path, minor->debugfs_root); + + debugfs_create_file("dp_debug", 0444, debug->root, + debug, &dp_debug_fops); + + debugfs_create_file("msm_dp_test_active", 0444, + debug->root, + debug, &test_active_fops); + + debugfs_create_file("msm_dp_test_data", 0444, + debug->root, + debug, &dp_test_data_fops); + + debugfs_create_file("msm_dp_test_type", 0444, + debug->root, + debug, &dp_test_type_fops); +} + +struct dp_debug *dp_debug_get(struct device *dev, struct dp_panel *panel, + struct dp_usbpd *usbpd, struct dp_link *link, + struct drm_connector *connector, struct drm_minor *minor) +{ + struct dp_debug_private *debug; + struct dp_debug *dp_debug; + int rc; + + if (!dev || !panel || !usbpd || !link) { + DRM_ERROR("invalid input\n"); + rc = -EINVAL; + goto error; + } + + debug = devm_kzalloc(dev, sizeof(*debug), GFP_KERNEL); + if (!debug) { + rc = -ENOMEM; + goto error; + } + + debug->dp_debug.debug_en = false; + debug->usbpd = usbpd; + debug->link = link; + debug->panel = panel; + debug->dev = dev; + debug->drm_dev = minor->dev; + debug->connector = connector; + + dp_debug = &debug->dp_debug; + dp_debug->vdisplay = 0; + dp_debug->hdisplay = 0; + dp_debug->vrefresh = 0; + + dp_debug_init(dp_debug, minor); + + return dp_debug; + error: + return ERR_PTR(rc); +} + +static int dp_debug_deinit(struct dp_debug *dp_debug) +{ + struct dp_debug_private *debug; + + if (!dp_debug) + return -EINVAL; + + debug = container_of(dp_debug, struct dp_debug_private, dp_debug); + + debugfs_remove_recursive(debug->root); + + return 0; +} + +void dp_debug_put(struct dp_debug *dp_debug) +{ + struct dp_debug_private *debug; + + if (!dp_debug) + return; + + debug = container_of(dp_debug, struct dp_debug_private, dp_debug); + + dp_debug_deinit(dp_debug); + + devm_kfree(debug->dev, debug); +} diff --git a/drivers/gpu/drm/msm/dp/dp_debug.h b/drivers/gpu/drm/msm/dp/dp_debug.h new file mode 100644 index 000000000..8c0d0b517 --- /dev/null +++ b/drivers/gpu/drm/msm/dp/dp_debug.h @@ -0,0 +1,74 @@ +/* SPDX-License-Identifier: GPL-2.0-only */ +/* + * Copyright (c) 2017-2020, The Linux Foundation. All rights reserved. 
+ */
+
+#ifndef _DP_DEBUG_H_
+#define _DP_DEBUG_H_
+
+#include "dp_panel.h"
+#include "dp_link.h"
+
+/**
+ * struct dp_debug
+ * @debug_en: specifies whether debug mode enabled
+ * @vdisplay: used to filter out vdisplay value
+ * @hdisplay: used to filter out hdisplay value
+ * @vrefresh: used to filter out vrefresh value
+ * @aspect_ratio: used to filter out aspect_ratio value
+ */
+struct dp_debug {
+ bool debug_en;
+ int aspect_ratio;
+ int vdisplay;
+ int hdisplay;
+ int vrefresh;
+};
+
+#if defined(CONFIG_DEBUG_FS)
+
+/**
+ * dp_debug_get() - configure and get the DisplayPort debug module data
+ *
+ * @dev: device instance of the caller
+ * @panel: instance of panel module
+ * @usbpd: instance of usbpd module
+ * @link: instance of link module
+ * @connector: pointer to display connector
+ * @minor: pointer to drm minor number after device registration
+ * return: pointer to allocated debug module data
+ *
+ * This function sets up the debug module and provides a way
+ * for debugfs input to be communicated with existing modules
+ */
+struct dp_debug *dp_debug_get(struct device *dev, struct dp_panel *panel,
+ struct dp_usbpd *usbpd, struct dp_link *link,
+ struct drm_connector *connector,
+ struct drm_minor *minor);
+
+/**
+ * dp_debug_put()
+ *
+ * Cleans up dp_debug instance
+ *
+ * @dp_debug: instance of dp_debug
+ */
+void dp_debug_put(struct dp_debug *dp_debug);
+
+#else
+
+static inline
+struct dp_debug *dp_debug_get(struct device *dev, struct dp_panel *panel,
+ struct dp_usbpd *usbpd, struct dp_link *link,
+ struct drm_connector *connector, struct drm_minor *minor)
+{
+ return ERR_PTR(-EINVAL);
+}
+
+static inline void dp_debug_put(struct dp_debug *dp_debug)
+{
+}
+
+#endif /* defined(CONFIG_DEBUG_FS) */
+
+#endif /* _DP_DEBUG_H_ */ diff --git a/drivers/gpu/drm/msm/dp/dp_display.c b/drivers/gpu/drm/msm/dp/dp_display.c new file mode 100644 index 000000000..d16c12351 --- /dev/null +++ b/drivers/gpu/drm/msm/dp/dp_display.c @@ -0,0 +1,1784 @@ +// SPDX-License-Identifier: GPL-2.0-only +/*
+ * Copyright (c) 2017-2020, The Linux Foundation. All rights reserved.
+ */ + +#include <linux/module.h> +#include <linux/slab.h> +#include <linux/uaccess.h> +#include <linux/debugfs.h> +#include <linux/component.h> +#include <linux/of_irq.h> +#include <linux/delay.h> +#include <drm/display/drm_dp_aux_bus.h> + +#include "msm_drv.h" +#include "msm_kms.h" +#include "dp_hpd.h" +#include "dp_parser.h" +#include "dp_power.h" +#include "dp_catalog.h" +#include "dp_aux.h" +#include "dp_reg.h" +#include "dp_link.h" +#include "dp_panel.h" +#include "dp_ctrl.h" +#include "dp_display.h" +#include "dp_drm.h" +#include "dp_audio.h" +#include "dp_debug.h" + +#define HPD_STRING_SIZE 30 + +enum { + ISR_DISCONNECTED, + ISR_CONNECT_PENDING, + ISR_CONNECTED, + ISR_HPD_REPLUG_COUNT, + ISR_IRQ_HPD_PULSE_COUNT, + ISR_HPD_LO_GLITH_COUNT, +}; + +/* event thread connection state */ +enum { + ST_DISCONNECTED, + ST_MAINLINK_READY, + ST_CONNECTED, + ST_DISCONNECT_PENDING, + ST_DISPLAY_OFF, + ST_SUSPENDED, +}; + +enum { + EV_NO_EVENT, + /* hpd events */ + EV_HPD_INIT_SETUP, + EV_HPD_PLUG_INT, + EV_IRQ_HPD_INT, + EV_HPD_UNPLUG_INT, + EV_USER_NOTIFICATION, +}; + +#define EVENT_TIMEOUT (HZ/10) /* 100ms */ +#define DP_EVENT_Q_MAX 8 + +#define DP_TIMEOUT_NONE 0 + +#define WAIT_FOR_RESUME_TIMEOUT_JIFFIES (HZ / 2) + +struct dp_event { + u32 event_id; + u32 data; + u32 delay; +}; + +struct dp_display_private { + char *name; + int irq; + + unsigned int id; + + /* state variables */ + bool core_initialized; + bool phy_initialized; + bool hpd_irq_on; + bool audio_supported; + + struct drm_device *drm_dev; + struct platform_device *pdev; + struct dentry *root; + + struct dp_usbpd *usbpd; + struct dp_parser *parser; + struct dp_power *power; + struct dp_catalog *catalog; + struct drm_dp_aux *aux; + struct dp_link *link; + struct dp_panel *panel; + struct dp_ctrl *ctrl; + struct dp_debug *debug; + + struct dp_usbpd_cb usbpd_cb; + struct dp_display_mode dp_mode; + struct msm_dp dp_display; + + /* wait for audio signaling */ + struct completion audio_comp; + + /* event related only access by event thread */ + struct mutex event_mutex; + wait_queue_head_t event_q; + u32 hpd_state; + u32 event_pndx; + u32 event_gndx; + struct task_struct *ev_tsk; + struct dp_event event_list[DP_EVENT_Q_MAX]; + spinlock_t event_lock; + + bool wide_bus_en; + + struct dp_audio *audio; +}; + +struct msm_dp_desc { + phys_addr_t io_start; + unsigned int connector_type; + bool wide_bus_en; +}; + +struct msm_dp_config { + const struct msm_dp_desc *descs; + size_t num_descs; +}; + +static const struct msm_dp_desc sc7180_dp_descs[] = { + [MSM_DP_CONTROLLER_0] = { .io_start = 0x0ae90000, .connector_type = DRM_MODE_CONNECTOR_DisplayPort }, +}; + +static const struct msm_dp_config sc7180_dp_cfg = { + .descs = sc7180_dp_descs, + .num_descs = ARRAY_SIZE(sc7180_dp_descs), +}; + +static const struct msm_dp_desc sc7280_dp_descs[] = { + [MSM_DP_CONTROLLER_0] = { .io_start = 0x0ae90000, .connector_type = DRM_MODE_CONNECTOR_DisplayPort, .wide_bus_en = true }, + [MSM_DP_CONTROLLER_1] = { .io_start = 0x0aea0000, .connector_type = DRM_MODE_CONNECTOR_eDP, .wide_bus_en = true }, +}; + +static const struct msm_dp_config sc7280_dp_cfg = { + .descs = sc7280_dp_descs, + .num_descs = ARRAY_SIZE(sc7280_dp_descs), +}; + +static const struct msm_dp_desc sc8180x_dp_descs[] = { + [MSM_DP_CONTROLLER_0] = { .io_start = 0x0ae90000, .connector_type = DRM_MODE_CONNECTOR_DisplayPort }, + [MSM_DP_CONTROLLER_1] = { .io_start = 0x0ae98000, .connector_type = DRM_MODE_CONNECTOR_DisplayPort }, + [MSM_DP_CONTROLLER_2] = { .io_start = 0x0ae9a000, .connector_type = 
DRM_MODE_CONNECTOR_eDP }, +}; + +static const struct msm_dp_config sc8180x_dp_cfg = { + .descs = sc8180x_dp_descs, + .num_descs = ARRAY_SIZE(sc8180x_dp_descs), +}; + +static const struct msm_dp_desc sm8350_dp_descs[] = { + [MSM_DP_CONTROLLER_0] = { .io_start = 0x0ae90000, .connector_type = DRM_MODE_CONNECTOR_DisplayPort }, +}; + +static const struct msm_dp_config sm8350_dp_cfg = { + .descs = sm8350_dp_descs, + .num_descs = ARRAY_SIZE(sm8350_dp_descs), +}; + +static const struct of_device_id dp_dt_match[] = { + { .compatible = "qcom,sc7180-dp", .data = &sc7180_dp_cfg }, + { .compatible = "qcom,sc7280-dp", .data = &sc7280_dp_cfg }, + { .compatible = "qcom,sc7280-edp", .data = &sc7280_dp_cfg }, + { .compatible = "qcom,sc8180x-dp", .data = &sc8180x_dp_cfg }, + { .compatible = "qcom,sc8180x-edp", .data = &sc8180x_dp_cfg }, + { .compatible = "qcom,sm8350-dp", .data = &sm8350_dp_cfg }, + {} +}; + +static struct dp_display_private *dev_get_dp_display_private(struct device *dev) +{ + struct msm_dp *dp = dev_get_drvdata(dev); + + return container_of(dp, struct dp_display_private, dp_display); +} + +static int dp_add_event(struct dp_display_private *dp_priv, u32 event, + u32 data, u32 delay) +{ + unsigned long flag; + struct dp_event *todo; + int pndx; + + spin_lock_irqsave(&dp_priv->event_lock, flag); + pndx = dp_priv->event_pndx + 1; + pndx %= DP_EVENT_Q_MAX; + if (pndx == dp_priv->event_gndx) { + pr_err("event_q is full: pndx=%d gndx=%d\n", + dp_priv->event_pndx, dp_priv->event_gndx); + spin_unlock_irqrestore(&dp_priv->event_lock, flag); + return -EPERM; + } + todo = &dp_priv->event_list[dp_priv->event_pndx++]; + dp_priv->event_pndx %= DP_EVENT_Q_MAX; + todo->event_id = event; + todo->data = data; + todo->delay = delay; + wake_up(&dp_priv->event_q); + spin_unlock_irqrestore(&dp_priv->event_lock, flag); + + return 0; +} + +static int dp_del_event(struct dp_display_private *dp_priv, u32 event) +{ + unsigned long flag; + struct dp_event *todo; + u32 gndx; + + spin_lock_irqsave(&dp_priv->event_lock, flag); + if (dp_priv->event_pndx == dp_priv->event_gndx) { + spin_unlock_irqrestore(&dp_priv->event_lock, flag); + return -ENOENT; + } + + gndx = dp_priv->event_gndx; + while (dp_priv->event_pndx != gndx) { + todo = &dp_priv->event_list[gndx]; + if (todo->event_id == event) { + todo->event_id = EV_NO_EVENT; /* deleted */ + todo->delay = 0; + } + gndx++; + gndx %= DP_EVENT_Q_MAX; + } + spin_unlock_irqrestore(&dp_priv->event_lock, flag); + + return 0; +} + +void dp_display_signal_audio_start(struct msm_dp *dp_display) +{ + struct dp_display_private *dp; + + dp = container_of(dp_display, struct dp_display_private, dp_display); + + reinit_completion(&dp->audio_comp); +} + +void dp_display_signal_audio_complete(struct msm_dp *dp_display) +{ + struct dp_display_private *dp; + + dp = container_of(dp_display, struct dp_display_private, dp_display); + + complete_all(&dp->audio_comp); +} + +static int dp_hpd_event_thread_start(struct dp_display_private *dp_priv); + +static int dp_display_bind(struct device *dev, struct device *master, + void *data) +{ + int rc = 0; + struct dp_display_private *dp = dev_get_dp_display_private(dev); + struct msm_drm_private *priv = dev_get_drvdata(master); + struct drm_device *drm = priv->dev; + + dp->dp_display.drm_dev = drm; + priv->dp[dp->id] = &dp->dp_display; + + rc = dp->parser->parse(dp->parser); + if (rc) { + DRM_ERROR("device tree parsing failed\n"); + goto end; + } + + + dp->drm_dev = drm; + dp->aux->drm_dev = drm; + rc = dp_aux_register(dp->aux); + if (rc) { + 
DRM_ERROR("DRM DP AUX register failed\n"); + goto end; + } + + rc = dp_power_client_init(dp->power); + if (rc) { + DRM_ERROR("Power client create failed\n"); + goto end; + } + + rc = dp_register_audio_driver(dev, dp->audio); + if (rc) { + DRM_ERROR("Audio registration Dp failed\n"); + goto end; + } + + rc = dp_hpd_event_thread_start(dp); + if (rc) { + DRM_ERROR("Event thread create failed\n"); + goto end; + } + + return 0; +end: + return rc; +} + +static void dp_display_unbind(struct device *dev, struct device *master, + void *data) +{ + struct dp_display_private *dp = dev_get_dp_display_private(dev); + struct msm_drm_private *priv = dev_get_drvdata(master); + + /* disable all HPD interrupts */ + if (dp->core_initialized) + dp_catalog_hpd_config_intr(dp->catalog, DP_DP_HPD_INT_MASK, false); + + kthread_stop(dp->ev_tsk); + + dp_power_client_deinit(dp->power); + dp_unregister_audio_driver(dev, dp->audio); + dp_aux_unregister(dp->aux); + dp->drm_dev = NULL; + dp->aux->drm_dev = NULL; + priv->dp[dp->id] = NULL; +} + +static const struct component_ops dp_display_comp_ops = { + .bind = dp_display_bind, + .unbind = dp_display_unbind, +}; + +static bool dp_display_is_ds_bridge(struct dp_panel *panel) +{ + return (panel->dpcd[DP_DOWNSTREAMPORT_PRESENT] & + DP_DWN_STRM_PORT_PRESENT); +} + +static bool dp_display_is_sink_count_zero(struct dp_display_private *dp) +{ + drm_dbg_dp(dp->drm_dev, "present=%#x sink_count=%d\n", + dp->panel->dpcd[DP_DOWNSTREAMPORT_PRESENT], + dp->link->sink_count); + return dp_display_is_ds_bridge(dp->panel) && + (dp->link->sink_count == 0); +} + +static void dp_display_send_hpd_event(struct msm_dp *dp_display) +{ + struct dp_display_private *dp; + struct drm_connector *connector; + + dp = container_of(dp_display, struct dp_display_private, dp_display); + + connector = dp->dp_display.connector; + drm_helper_hpd_irq_event(connector->dev); +} + + +static int dp_display_send_hpd_notification(struct dp_display_private *dp, + bool hpd) +{ + if ((hpd && dp->dp_display.is_connected) || + (!hpd && !dp->dp_display.is_connected)) { + drm_dbg_dp(dp->drm_dev, "HPD already %s\n", + (hpd ? 
"on" : "off")); + return 0; + } + + /* reset video pattern flag on disconnect */ + if (!hpd) + dp->panel->video_test = false; + + dp->dp_display.is_connected = hpd; + + drm_dbg_dp(dp->drm_dev, "type=%d hpd=%d\n", + dp->dp_display.connector_type, hpd); + dp_display_send_hpd_event(&dp->dp_display); + + return 0; +} + +static int dp_display_process_hpd_high(struct dp_display_private *dp) +{ + int rc = 0; + struct edid *edid; + + dp->panel->max_dp_lanes = dp->parser->max_dp_lanes; + + rc = dp_panel_read_sink_caps(dp->panel, dp->dp_display.connector); + if (rc) + goto end; + + dp_link_process_request(dp->link); + + edid = dp->panel->edid; + + dp->audio_supported = drm_detect_monitor_audio(edid); + dp_panel_handle_sink_request(dp->panel); + + dp->dp_display.max_dp_lanes = dp->parser->max_dp_lanes; + + /* + * set sink to normal operation mode -- D0 + * before dpcd read + */ + dp_link_psm_config(dp->link, &dp->panel->link_info, false); + + dp_link_reset_phy_params_vx_px(dp->link); + rc = dp_ctrl_on_link(dp->ctrl); + if (rc) { + DRM_ERROR("failed to complete DP link training\n"); + goto end; + } + + dp_add_event(dp, EV_USER_NOTIFICATION, true, 0); + +end: + return rc; +} + +static void dp_display_host_phy_init(struct dp_display_private *dp) +{ + drm_dbg_dp(dp->drm_dev, "type=%d core_init=%d phy_init=%d\n", + dp->dp_display.connector_type, dp->core_initialized, + dp->phy_initialized); + + if (!dp->phy_initialized) { + dp_ctrl_phy_init(dp->ctrl); + dp->phy_initialized = true; + } +} + +static void dp_display_host_phy_exit(struct dp_display_private *dp) +{ + drm_dbg_dp(dp->drm_dev, "type=%d core_init=%d phy_init=%d\n", + dp->dp_display.connector_type, dp->core_initialized, + dp->phy_initialized); + + if (dp->phy_initialized) { + dp_ctrl_phy_exit(dp->ctrl); + dp->phy_initialized = false; + } +} + +static void dp_display_host_init(struct dp_display_private *dp) +{ + drm_dbg_dp(dp->drm_dev, "type=%d core_init=%d phy_init=%d\n", + dp->dp_display.connector_type, dp->core_initialized, + dp->phy_initialized); + + dp_power_init(dp->power, false); + dp_ctrl_reset_irq_ctrl(dp->ctrl, true); + dp_aux_init(dp->aux); + dp->core_initialized = true; +} + +static void dp_display_host_deinit(struct dp_display_private *dp) +{ + drm_dbg_dp(dp->drm_dev, "type=%d core_init=%d phy_init=%d\n", + dp->dp_display.connector_type, dp->core_initialized, + dp->phy_initialized); + + dp_ctrl_reset_irq_ctrl(dp->ctrl, false); + dp_aux_deinit(dp->aux); + dp_power_deinit(dp->power); + dp->core_initialized = false; +} + +static int dp_display_usbpd_configure_cb(struct device *dev) +{ + struct dp_display_private *dp = dev_get_dp_display_private(dev); + + dp_display_host_phy_init(dp); + + return dp_display_process_hpd_high(dp); +} + +static int dp_display_usbpd_disconnect_cb(struct device *dev) +{ + return 0; +} + +static int dp_display_notify_disconnect(struct device *dev) +{ + struct dp_display_private *dp = dev_get_dp_display_private(dev); + + dp_add_event(dp, EV_USER_NOTIFICATION, false, 0); + + return 0; +} + +static void dp_display_handle_video_request(struct dp_display_private *dp) +{ + if (dp->link->sink_request & DP_TEST_LINK_VIDEO_PATTERN) { + dp->panel->video_test = true; + dp_link_send_test_response(dp->link); + } +} + +static int dp_display_handle_port_ststus_changed(struct dp_display_private *dp) +{ + int rc = 0; + + if (dp_display_is_sink_count_zero(dp)) { + drm_dbg_dp(dp->drm_dev, "sink count is zero, nothing to do\n"); + if (dp->hpd_state != ST_DISCONNECTED) { + dp->hpd_state = ST_DISCONNECT_PENDING; + dp_add_event(dp, 
EV_USER_NOTIFICATION, false, 0); + } + } else { + if (dp->hpd_state == ST_DISCONNECTED) { + dp->hpd_state = ST_MAINLINK_READY; + rc = dp_display_process_hpd_high(dp); + if (rc) + dp->hpd_state = ST_DISCONNECTED; + } + } + + return rc; +} + +static int dp_display_handle_irq_hpd(struct dp_display_private *dp) +{ + u32 sink_request = dp->link->sink_request; + + drm_dbg_dp(dp->drm_dev, "%d\n", sink_request); + if (dp->hpd_state == ST_DISCONNECTED) { + if (sink_request & DP_LINK_STATUS_UPDATED) { + drm_dbg_dp(dp->drm_dev, "Disconnected sink_request: %d\n", + sink_request); + DRM_ERROR("Disconnected, no DP_LINK_STATUS_UPDATED\n"); + return -EINVAL; + } + } + + dp_ctrl_handle_sink_request(dp->ctrl); + + if (sink_request & DP_TEST_LINK_VIDEO_PATTERN) + dp_display_handle_video_request(dp); + + return 0; +} + +static int dp_display_usbpd_attention_cb(struct device *dev) +{ + int rc = 0; + u32 sink_request; + struct dp_display_private *dp = dev_get_dp_display_private(dev); + + /* check for any test request issued by sink */ + rc = dp_link_process_request(dp->link); + if (!rc) { + sink_request = dp->link->sink_request; + drm_dbg_dp(dp->drm_dev, "hpd_state=%d sink_request=%d\n", + dp->hpd_state, sink_request); + if (sink_request & DS_PORT_STATUS_CHANGED) + rc = dp_display_handle_port_ststus_changed(dp); + else + rc = dp_display_handle_irq_hpd(dp); + } + + return rc; +} + +static int dp_hpd_plug_handle(struct dp_display_private *dp, u32 data) +{ + struct dp_usbpd *hpd = dp->usbpd; + u32 state; + int ret; + + if (!hpd) + return 0; + + mutex_lock(&dp->event_mutex); + + state = dp->hpd_state; + drm_dbg_dp(dp->drm_dev, "Before, type=%d hpd_state=%d\n", + dp->dp_display.connector_type, state); + + if (state == ST_DISPLAY_OFF || state == ST_SUSPENDED) { + mutex_unlock(&dp->event_mutex); + return 0; + } + + if (state == ST_MAINLINK_READY || state == ST_CONNECTED) { + mutex_unlock(&dp->event_mutex); + return 0; + } + + if (state == ST_DISCONNECT_PENDING) { + /* wait until ST_DISCONNECTED */ + dp_add_event(dp, EV_HPD_PLUG_INT, 0, 1); /* delay = 1 */ + mutex_unlock(&dp->event_mutex); + return 0; + } + + ret = dp_display_usbpd_configure_cb(&dp->pdev->dev); + if (ret) { /* link train failed */ + dp->hpd_state = ST_DISCONNECTED; + } else { + dp->hpd_state = ST_MAINLINK_READY; + } + + /* enable HDP irq_hpd/replug interrupt */ + dp_catalog_hpd_config_intr(dp->catalog, + DP_DP_IRQ_HPD_INT_MASK | DP_DP_HPD_REPLUG_INT_MASK, true); + + drm_dbg_dp(dp->drm_dev, "After, type=%d hpd_state=%d\n", + dp->dp_display.connector_type, state); + mutex_unlock(&dp->event_mutex); + + /* uevent will complete connection part */ + return 0; +}; + +static void dp_display_handle_plugged_change(struct msm_dp *dp_display, + bool plugged) +{ + struct dp_display_private *dp; + + dp = container_of(dp_display, + struct dp_display_private, dp_display); + + /* notify audio subsystem only if sink supports audio */ + if (dp_display->plugged_cb && dp_display->codec_dev && + dp->audio_supported) + dp_display->plugged_cb(dp_display->codec_dev, plugged); +} + +static int dp_hpd_unplug_handle(struct dp_display_private *dp, u32 data) +{ + struct dp_usbpd *hpd = dp->usbpd; + u32 state; + + if (!hpd) + return 0; + + mutex_lock(&dp->event_mutex); + + state = dp->hpd_state; + + drm_dbg_dp(dp->drm_dev, "Before, type=%d hpd_state=%d\n", + dp->dp_display.connector_type, state); + + /* disable irq_hpd/replug interrupts */ + dp_catalog_hpd_config_intr(dp->catalog, + DP_DP_IRQ_HPD_INT_MASK | DP_DP_HPD_REPLUG_INT_MASK, false); + + /* unplugged, no more irq_hpd handle 
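+ * so any EV_IRQ_HPD_INT events still queued are dropped below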
*/ + dp_del_event(dp, EV_IRQ_HPD_INT); + + if (state == ST_DISCONNECTED) { + /* triggered by irq_hdp with sink_count = 0 */ + if (dp->link->sink_count == 0) { + dp_display_host_phy_exit(dp); + } + dp_display_notify_disconnect(&dp->pdev->dev); + mutex_unlock(&dp->event_mutex); + return 0; + } else if (state == ST_DISCONNECT_PENDING) { + mutex_unlock(&dp->event_mutex); + return 0; + } else if (state == ST_MAINLINK_READY) { + dp_ctrl_off_link(dp->ctrl); + dp_display_host_phy_exit(dp); + dp->hpd_state = ST_DISCONNECTED; + dp_display_notify_disconnect(&dp->pdev->dev); + mutex_unlock(&dp->event_mutex); + return 0; + } + + /* disable HPD plug interrupts */ + dp_catalog_hpd_config_intr(dp->catalog, DP_DP_HPD_PLUG_INT_MASK, false); + + /* + * We don't need separate work for disconnect as + * connect/attention interrupts are disabled + */ + dp_display_notify_disconnect(&dp->pdev->dev); + + if (state == ST_DISPLAY_OFF) { + dp->hpd_state = ST_DISCONNECTED; + } else { + dp->hpd_state = ST_DISCONNECT_PENDING; + } + + /* signal the disconnect event early to ensure proper teardown */ + dp_display_handle_plugged_change(&dp->dp_display, false); + + /* enable HDP plug interrupt to prepare for next plugin */ + if (!dp->dp_display.is_edp) + dp_catalog_hpd_config_intr(dp->catalog, DP_DP_HPD_PLUG_INT_MASK, true); + + drm_dbg_dp(dp->drm_dev, "After, type=%d hpd_state=%d\n", + dp->dp_display.connector_type, state); + + /* uevent will complete disconnection part */ + mutex_unlock(&dp->event_mutex); + return 0; +} + +static int dp_irq_hpd_handle(struct dp_display_private *dp, u32 data) +{ + u32 state; + + mutex_lock(&dp->event_mutex); + + /* irq_hpd can happen at either connected or disconnected state */ + state = dp->hpd_state; + drm_dbg_dp(dp->drm_dev, "Before, type=%d hpd_state=%d\n", + dp->dp_display.connector_type, state); + + if (state == ST_DISPLAY_OFF || state == ST_SUSPENDED) { + mutex_unlock(&dp->event_mutex); + return 0; + } + + if (state == ST_MAINLINK_READY || state == ST_DISCONNECT_PENDING) { + /* wait until ST_CONNECTED */ + dp_add_event(dp, EV_IRQ_HPD_INT, 0, 1); /* delay = 1 */ + mutex_unlock(&dp->event_mutex); + return 0; + } + + dp_display_usbpd_attention_cb(&dp->pdev->dev); + + drm_dbg_dp(dp->drm_dev, "After, type=%d hpd_state=%d\n", + dp->dp_display.connector_type, state); + + mutex_unlock(&dp->event_mutex); + + return 0; +} + +static void dp_display_deinit_sub_modules(struct dp_display_private *dp) +{ + dp_debug_put(dp->debug); + dp_audio_put(dp->audio); + dp_panel_put(dp->panel); + dp_aux_put(dp->aux); +} + +static int dp_init_sub_modules(struct dp_display_private *dp) +{ + int rc = 0; + struct device *dev = &dp->pdev->dev; + struct dp_usbpd_cb *cb = &dp->usbpd_cb; + struct dp_panel_in panel_in = { + .dev = dev, + }; + + /* Callback APIs used for cable status change event */ + cb->configure = dp_display_usbpd_configure_cb; + cb->disconnect = dp_display_usbpd_disconnect_cb; + cb->attention = dp_display_usbpd_attention_cb; + + dp->usbpd = dp_hpd_get(dev, cb); + if (IS_ERR(dp->usbpd)) { + rc = PTR_ERR(dp->usbpd); + DRM_ERROR("failed to initialize hpd, rc = %d\n", rc); + dp->usbpd = NULL; + goto error; + } + + dp->parser = dp_parser_get(dp->pdev); + if (IS_ERR(dp->parser)) { + rc = PTR_ERR(dp->parser); + DRM_ERROR("failed to initialize parser, rc = %d\n", rc); + dp->parser = NULL; + goto error; + } + + dp->catalog = dp_catalog_get(dev, &dp->parser->io); + if (IS_ERR(dp->catalog)) { + rc = PTR_ERR(dp->catalog); + DRM_ERROR("failed to initialize catalog, rc = %d\n", rc); + dp->catalog = NULL; + goto 
error; + } + + dp->power = dp_power_get(dev, dp->parser); + if (IS_ERR(dp->power)) { + rc = PTR_ERR(dp->power); + DRM_ERROR("failed to initialize power, rc = %d\n", rc); + dp->power = NULL; + goto error; + } + + dp->aux = dp_aux_get(dev, dp->catalog, dp->dp_display.is_edp); + if (IS_ERR(dp->aux)) { + rc = PTR_ERR(dp->aux); + DRM_ERROR("failed to initialize aux, rc = %d\n", rc); + dp->aux = NULL; + goto error; + } + + dp->link = dp_link_get(dev, dp->aux); + if (IS_ERR(dp->link)) { + rc = PTR_ERR(dp->link); + DRM_ERROR("failed to initialize link, rc = %d\n", rc); + dp->link = NULL; + goto error_link; + } + + panel_in.aux = dp->aux; + panel_in.catalog = dp->catalog; + panel_in.link = dp->link; + + dp->panel = dp_panel_get(&panel_in); + if (IS_ERR(dp->panel)) { + rc = PTR_ERR(dp->panel); + DRM_ERROR("failed to initialize panel, rc = %d\n", rc); + dp->panel = NULL; + goto error_link; + } + + dp->ctrl = dp_ctrl_get(dev, dp->link, dp->panel, dp->aux, + dp->power, dp->catalog, dp->parser); + if (IS_ERR(dp->ctrl)) { + rc = PTR_ERR(dp->ctrl); + DRM_ERROR("failed to initialize ctrl, rc = %d\n", rc); + dp->ctrl = NULL; + goto error_ctrl; + } + + dp->audio = dp_audio_get(dp->pdev, dp->panel, dp->catalog); + if (IS_ERR(dp->audio)) { + rc = PTR_ERR(dp->audio); + pr_err("failed to initialize audio, rc = %d\n", rc); + dp->audio = NULL; + goto error_ctrl; + } + + /* populate wide_bus_en to differernt layers */ + dp->ctrl->wide_bus_en = dp->wide_bus_en; + dp->catalog->wide_bus_en = dp->wide_bus_en; + + return rc; + +error_ctrl: + dp_panel_put(dp->panel); +error_link: + dp_aux_put(dp->aux); +error: + return rc; +} + +static int dp_display_set_mode(struct msm_dp *dp_display, + struct dp_display_mode *mode) +{ + struct dp_display_private *dp; + + dp = container_of(dp_display, struct dp_display_private, dp_display); + + drm_mode_copy(&dp->panel->dp_mode.drm_mode, &mode->drm_mode); + dp->panel->dp_mode.bpp = mode->bpp; + dp->panel->dp_mode.capabilities = mode->capabilities; + dp_panel_init_panel_info(dp->panel); + return 0; +} + +static int dp_display_enable(struct dp_display_private *dp, bool force_link_train) +{ + int rc = 0; + struct msm_dp *dp_display = &dp->dp_display; + + drm_dbg_dp(dp->drm_dev, "sink_count=%d\n", dp->link->sink_count); + if (dp_display->power_on) { + drm_dbg_dp(dp->drm_dev, "Link already setup, return\n"); + return 0; + } + + rc = dp_ctrl_on_stream(dp->ctrl, force_link_train); + if (!rc) + dp_display->power_on = true; + + return rc; +} + +static int dp_display_post_enable(struct msm_dp *dp_display) +{ + struct dp_display_private *dp; + u32 rate; + + dp = container_of(dp_display, struct dp_display_private, dp_display); + + rate = dp->link->link_params.rate; + + if (dp->audio_supported) { + dp->audio->bw_code = drm_dp_link_rate_to_bw_code(rate); + dp->audio->lane_count = dp->link->link_params.num_lanes; + } + + /* signal the connect event late to synchronize video and display */ + dp_display_handle_plugged_change(dp_display, true); + return 0; +} + +static int dp_display_disable(struct dp_display_private *dp) +{ + struct msm_dp *dp_display = &dp->dp_display; + + if (!dp_display->power_on) + return 0; + + /* wait only if audio was enabled */ + if (dp_display->audio_enabled) { + /* signal the disconnect event */ + dp_display_handle_plugged_change(dp_display, false); + if (!wait_for_completion_timeout(&dp->audio_comp, + HZ * 5)) + DRM_ERROR("audio comp timeout\n"); + } + + dp_display->audio_enabled = false; + + if (dp->link->sink_count == 0) { + /* + * irq_hpd with sink_count = 0 + * hdmi 
unplugged out of dongle + */ + dp_ctrl_off_link_stream(dp->ctrl); + } else { + /* + * unplugged interrupt + * dongle unplugged out of DUT + */ + dp_ctrl_off(dp->ctrl); + dp_display_host_phy_exit(dp); + } + + dp_display->power_on = false; + + drm_dbg_dp(dp->drm_dev, "sink count: %d\n", dp->link->sink_count); + return 0; +} + +int dp_display_set_plugged_cb(struct msm_dp *dp_display, + hdmi_codec_plugged_cb fn, struct device *codec_dev) +{ + bool plugged; + + dp_display->plugged_cb = fn; + dp_display->codec_dev = codec_dev; + plugged = dp_display->is_connected; + dp_display_handle_plugged_change(dp_display, plugged); + + return 0; +} + +/** + * dp_bridge_mode_valid - callback to determine if specified mode is valid + * @bridge: Pointer to drm bridge structure + * @info: display info + * @mode: Pointer to drm mode structure + * Returns: Validity status for specified mode + */ +enum drm_mode_status dp_bridge_mode_valid(struct drm_bridge *bridge, + const struct drm_display_info *info, + const struct drm_display_mode *mode) +{ + const u32 num_components = 3, default_bpp = 24; + struct dp_display_private *dp_display; + struct dp_link_info *link_info; + u32 mode_rate_khz = 0, supported_rate_khz = 0, mode_bpp = 0; + struct msm_dp *dp; + int mode_pclk_khz = mode->clock; + + dp = to_dp_bridge(bridge)->dp_display; + + if (!dp || !mode_pclk_khz || !dp->connector) { + DRM_ERROR("invalid params\n"); + return -EINVAL; + } + + /* + * The eDP controller currently does not have a reliable way of + * enabling panel power to read sink capabilities. So, we rely + * on the panel driver to populate only supported modes for now. + */ + if (dp->is_edp) + return MODE_OK; + + if (mode->clock > DP_MAX_PIXEL_CLK_KHZ) + return MODE_CLOCK_HIGH; + + dp_display = container_of(dp, struct dp_display_private, dp_display); + link_info = &dp_display->panel->link_info; + + mode_bpp = dp->connector->display_info.bpc * num_components; + if (!mode_bpp) + mode_bpp = default_bpp; + + mode_bpp = dp_panel_get_mode_bpp(dp_display->panel, + mode_bpp, mode_pclk_khz); + + mode_rate_khz = mode_pclk_khz * mode_bpp; + supported_rate_khz = link_info->num_lanes * link_info->rate * 8; + + if (mode_rate_khz > supported_rate_khz) + return MODE_BAD; + + return MODE_OK; +} + +int dp_display_get_modes(struct msm_dp *dp) +{ + struct dp_display_private *dp_display; + + if (!dp) { + DRM_ERROR("invalid params\n"); + return 0; + } + + dp_display = container_of(dp, struct dp_display_private, dp_display); + + return dp_panel_get_modes(dp_display->panel, + dp->connector); +} + +bool dp_display_check_video_test(struct msm_dp *dp) +{ + struct dp_display_private *dp_display; + + dp_display = container_of(dp, struct dp_display_private, dp_display); + + return dp_display->panel->video_test; +} + +int dp_display_get_test_bpp(struct msm_dp *dp) +{ + struct dp_display_private *dp_display; + + if (!dp) { + DRM_ERROR("invalid params\n"); + return 0; + } + + dp_display = container_of(dp, struct dp_display_private, dp_display); + + return dp_link_bit_depth_to_bpp( + dp_display->link->test_video.test_bit_depth); +} + +void msm_dp_snapshot(struct msm_disp_state *disp_state, struct msm_dp *dp) +{ + struct dp_display_private *dp_display; + + dp_display = container_of(dp, struct dp_display_private, dp_display); + + /* + * if we are reading registers we need the link clocks to be on + * however till DP cable is connected this will not happen as we + * do not know the resolution to power up with. 
Hence check the + * power_on status before dumping DP registers to avoid crash due + * to unclocked access + */ + mutex_lock(&dp_display->event_mutex); + + if (!dp->power_on) { + mutex_unlock(&dp_display->event_mutex); + return; + } + + dp_catalog_snapshot(dp_display->catalog, disp_state); + + mutex_unlock(&dp_display->event_mutex); +} + +static void dp_display_config_hpd(struct dp_display_private *dp) +{ + + dp_display_host_init(dp); + dp_catalog_ctrl_hpd_config(dp->catalog); + + /* Enable plug and unplug interrupts only for external DisplayPort */ + if (!dp->dp_display.is_edp) + dp_catalog_hpd_config_intr(dp->catalog, + DP_DP_HPD_PLUG_INT_MASK | + DP_DP_HPD_UNPLUG_INT_MASK, + true); + + /* Enable interrupt first time + * we are leaving dp clocks on during disconnect + * and never disable interrupt + */ + enable_irq(dp->irq); +} + +static int hpd_event_thread(void *data) +{ + struct dp_display_private *dp_priv; + unsigned long flag; + struct dp_event *todo; + int timeout_mode = 0; + + dp_priv = (struct dp_display_private *)data; + + while (1) { + if (timeout_mode) { + wait_event_timeout(dp_priv->event_q, + (dp_priv->event_pndx == dp_priv->event_gndx) || + kthread_should_stop(), EVENT_TIMEOUT); + } else { + wait_event_interruptible(dp_priv->event_q, + (dp_priv->event_pndx != dp_priv->event_gndx) || + kthread_should_stop()); + } + + if (kthread_should_stop()) + break; + + spin_lock_irqsave(&dp_priv->event_lock, flag); + todo = &dp_priv->event_list[dp_priv->event_gndx]; + if (todo->delay) { + struct dp_event *todo_next; + + dp_priv->event_gndx++; + dp_priv->event_gndx %= DP_EVENT_Q_MAX; + + /* re enter delay event into q */ + todo_next = &dp_priv->event_list[dp_priv->event_pndx++]; + dp_priv->event_pndx %= DP_EVENT_Q_MAX; + todo_next->event_id = todo->event_id; + todo_next->data = todo->data; + todo_next->delay = todo->delay - 1; + + /* clean up older event */ + todo->event_id = EV_NO_EVENT; + todo->delay = 0; + + /* switch to timeout mode */ + timeout_mode = 1; + spin_unlock_irqrestore(&dp_priv->event_lock, flag); + continue; + } + + /* timeout with no events in q */ + if (dp_priv->event_pndx == dp_priv->event_gndx) { + spin_unlock_irqrestore(&dp_priv->event_lock, flag); + continue; + } + + dp_priv->event_gndx++; + dp_priv->event_gndx %= DP_EVENT_Q_MAX; + timeout_mode = 0; + spin_unlock_irqrestore(&dp_priv->event_lock, flag); + + switch (todo->event_id) { + case EV_HPD_INIT_SETUP: + dp_display_config_hpd(dp_priv); + break; + case EV_HPD_PLUG_INT: + dp_hpd_plug_handle(dp_priv, todo->data); + break; + case EV_HPD_UNPLUG_INT: + dp_hpd_unplug_handle(dp_priv, todo->data); + break; + case EV_IRQ_HPD_INT: + dp_irq_hpd_handle(dp_priv, todo->data); + break; + case EV_USER_NOTIFICATION: + dp_display_send_hpd_notification(dp_priv, + todo->data); + break; + default: + break; + } + } + + return 0; +} + +static int dp_hpd_event_thread_start(struct dp_display_private *dp_priv) +{ + /* set event q to empty */ + dp_priv->event_gndx = 0; + dp_priv->event_pndx = 0; + + dp_priv->ev_tsk = kthread_run(hpd_event_thread, dp_priv, "dp_hpd_handler"); + if (IS_ERR(dp_priv->ev_tsk)) + return PTR_ERR(dp_priv->ev_tsk); + + return 0; +} + +static irqreturn_t dp_display_irq_handler(int irq, void *dev_id) +{ + struct dp_display_private *dp = dev_id; + irqreturn_t ret = IRQ_HANDLED; + u32 hpd_isr_status; + + if (!dp) { + DRM_ERROR("invalid data\n"); + return IRQ_NONE; + } + + hpd_isr_status = dp_catalog_hpd_get_intr_status(dp->catalog); + + if (hpd_isr_status & 0x0F) { + drm_dbg_dp(dp->drm_dev, "type=%d isr=0x%x\n", + 
dp->dp_display.connector_type, hpd_isr_status); + /* hpd related interrupts */ + if (hpd_isr_status & DP_DP_HPD_PLUG_INT_MASK) + dp_add_event(dp, EV_HPD_PLUG_INT, 0, 0); + + if (hpd_isr_status & DP_DP_IRQ_HPD_INT_MASK) { + dp_add_event(dp, EV_IRQ_HPD_INT, 0, 0); + } + + if (hpd_isr_status & DP_DP_HPD_REPLUG_INT_MASK) { + dp_add_event(dp, EV_HPD_UNPLUG_INT, 0, 0); + dp_add_event(dp, EV_HPD_PLUG_INT, 0, 3); + } + + if (hpd_isr_status & DP_DP_HPD_UNPLUG_INT_MASK) + dp_add_event(dp, EV_HPD_UNPLUG_INT, 0, 0); + } + + /* DP controller isr */ + dp_ctrl_isr(dp->ctrl); + + /* DP aux isr */ + dp_aux_isr(dp->aux); + + return ret; +} + +int dp_display_request_irq(struct msm_dp *dp_display) +{ + int rc = 0; + struct dp_display_private *dp; + + if (!dp_display) { + DRM_ERROR("invalid input\n"); + return -EINVAL; + } + + dp = container_of(dp_display, struct dp_display_private, dp_display); + + dp->irq = irq_of_parse_and_map(dp->pdev->dev.of_node, 0); + if (!dp->irq) { + DRM_ERROR("failed to get irq\n"); + return -EINVAL; + } + + rc = devm_request_irq(dp_display->drm_dev->dev, dp->irq, + dp_display_irq_handler, + IRQF_TRIGGER_HIGH, "dp_display_isr", dp); + if (rc < 0) { + DRM_ERROR("failed to request IRQ%u: %d\n", + dp->irq, rc); + return rc; + } + disable_irq(dp->irq); + + return 0; +} + +static const struct msm_dp_desc *dp_display_get_desc(struct platform_device *pdev, + unsigned int *id) +{ + const struct msm_dp_config *cfg = of_device_get_match_data(&pdev->dev); + struct resource *res; + int i; + + res = platform_get_resource(pdev, IORESOURCE_MEM, 0); + if (!res) + return NULL; + + for (i = 0; i < cfg->num_descs; i++) { + if (cfg->descs[i].io_start == res->start) { + *id = i; + return &cfg->descs[i]; + } + } + + dev_err(&pdev->dev, "unknown displayport instance\n"); + return NULL; +} + +static int dp_display_probe(struct platform_device *pdev) +{ + int rc = 0; + struct dp_display_private *dp; + const struct msm_dp_desc *desc; + + if (!pdev || !pdev->dev.of_node) { + DRM_ERROR("pdev not found\n"); + return -ENODEV; + } + + dp = devm_kzalloc(&pdev->dev, sizeof(*dp), GFP_KERNEL); + if (!dp) + return -ENOMEM; + + desc = dp_display_get_desc(pdev, &dp->id); + if (!desc) + return -EINVAL; + + dp->pdev = pdev; + dp->name = "drm_dp"; + dp->dp_display.connector_type = desc->connector_type; + dp->wide_bus_en = desc->wide_bus_en; + dp->dp_display.is_edp = + (dp->dp_display.connector_type == DRM_MODE_CONNECTOR_eDP); + + rc = dp_init_sub_modules(dp); + if (rc) { + DRM_ERROR("init sub module failed\n"); + return -EPROBE_DEFER; + } + + /* setup event q */ + mutex_init(&dp->event_mutex); + init_waitqueue_head(&dp->event_q); + spin_lock_init(&dp->event_lock); + + /* Store DP audio handle inside DP display */ + dp->dp_display.dp_audio = dp->audio; + + init_completion(&dp->audio_comp); + + platform_set_drvdata(pdev, &dp->dp_display); + + rc = component_add(&pdev->dev, &dp_display_comp_ops); + if (rc) { + DRM_ERROR("component add failed, rc=%d\n", rc); + dp_display_deinit_sub_modules(dp); + } + + return rc; +} + +static int dp_display_remove(struct platform_device *pdev) +{ + struct dp_display_private *dp = dev_get_dp_display_private(&pdev->dev); + + component_del(&pdev->dev, &dp_display_comp_ops); + dp_display_deinit_sub_modules(dp); + + platform_set_drvdata(pdev, NULL); + + return 0; +} + +static int dp_pm_resume(struct device *dev) +{ + struct platform_device *pdev = to_platform_device(dev); + struct msm_dp *dp_display = platform_get_drvdata(pdev); + struct dp_display_private *dp; + int sink_count = 0; + + dp = 
container_of(dp_display, struct dp_display_private, dp_display); + + mutex_lock(&dp->event_mutex); + + drm_dbg_dp(dp->drm_dev, + "Before, type=%d core_inited=%d phy_inited=%d power_on=%d\n", + dp->dp_display.connector_type, dp->core_initialized, + dp->phy_initialized, dp_display->power_on); + + /* start from disconnected state */ + dp->hpd_state = ST_DISCONNECTED; + + /* turn on dp ctrl/phy */ + dp_display_host_init(dp); + + dp_catalog_ctrl_hpd_config(dp->catalog); + + + if (!dp->dp_display.is_edp) + dp_catalog_hpd_config_intr(dp->catalog, + DP_DP_HPD_PLUG_INT_MASK | + DP_DP_HPD_UNPLUG_INT_MASK, + true); + + if (dp_catalog_link_is_connected(dp->catalog)) { + /* + * set sink to normal operation mode -- D0 + * before dpcd read + */ + dp_display_host_phy_init(dp); + dp_link_psm_config(dp->link, &dp->panel->link_info, false); + sink_count = drm_dp_read_sink_count(dp->aux); + if (sink_count < 0) + sink_count = 0; + + dp_display_host_phy_exit(dp); + } + + dp->link->sink_count = sink_count; + /* + * can not declared display is connected unless + * HDMI cable is plugged in and sink_count of + * dongle become 1 + * also only signal audio when disconnected + */ + if (dp->link->sink_count) { + dp->dp_display.is_connected = true; + } else { + dp->dp_display.is_connected = false; + dp_display_handle_plugged_change(dp_display, false); + } + + drm_dbg_dp(dp->drm_dev, + "After, type=%d sink=%d conn=%d core_init=%d phy_init=%d power=%d\n", + dp->dp_display.connector_type, dp->link->sink_count, + dp->dp_display.is_connected, dp->core_initialized, + dp->phy_initialized, dp_display->power_on); + + mutex_unlock(&dp->event_mutex); + + return 0; +} + +static int dp_pm_suspend(struct device *dev) +{ + struct platform_device *pdev = to_platform_device(dev); + struct msm_dp *dp_display = platform_get_drvdata(pdev); + struct dp_display_private *dp; + + dp = container_of(dp_display, struct dp_display_private, dp_display); + + mutex_lock(&dp->event_mutex); + + drm_dbg_dp(dp->drm_dev, + "Before, type=%d core_inited=%d phy_inited=%d power_on=%d\n", + dp->dp_display.connector_type, dp->core_initialized, + dp->phy_initialized, dp_display->power_on); + + /* mainlink enabled */ + if (dp_power_clk_status(dp->power, DP_CTRL_PM)) + dp_ctrl_off_link_stream(dp->ctrl); + + dp_display_host_phy_exit(dp); + + /* host_init will be called at pm_resume */ + dp_display_host_deinit(dp); + + dp->hpd_state = ST_SUSPENDED; + + drm_dbg_dp(dp->drm_dev, + "After, type=%d core_inited=%d phy_inited=%d power_on=%d\n", + dp->dp_display.connector_type, dp->core_initialized, + dp->phy_initialized, dp_display->power_on); + + mutex_unlock(&dp->event_mutex); + + return 0; +} + +static const struct dev_pm_ops dp_pm_ops = { + .suspend = dp_pm_suspend, + .resume = dp_pm_resume, +}; + +static struct platform_driver dp_display_driver = { + .probe = dp_display_probe, + .remove = dp_display_remove, + .driver = { + .name = "msm-dp-display", + .of_match_table = dp_dt_match, + .suppress_bind_attrs = true, + .pm = &dp_pm_ops, + }, +}; + +int __init msm_dp_register(void) +{ + int ret; + + ret = platform_driver_register(&dp_display_driver); + if (ret) + DRM_ERROR("Dp display driver register failed"); + + return ret; +} + +void __exit msm_dp_unregister(void) +{ + platform_driver_unregister(&dp_display_driver); +} + +void msm_dp_irq_postinstall(struct msm_dp *dp_display) +{ + struct dp_display_private *dp; + + if (!dp_display) + return; + + dp = container_of(dp_display, struct dp_display_private, dp_display); + + if (!dp_display->is_edp) + dp_add_event(dp, 
EV_HPD_INIT_SETUP, 0, 100); +} + +bool msm_dp_wide_bus_available(const struct msm_dp *dp_display) +{ + struct dp_display_private *dp; + + dp = container_of(dp_display, struct dp_display_private, dp_display); + + return dp->wide_bus_en; +} + +void msm_dp_debugfs_init(struct msm_dp *dp_display, struct drm_minor *minor) +{ + struct dp_display_private *dp; + struct device *dev; + int rc; + + dp = container_of(dp_display, struct dp_display_private, dp_display); + dev = &dp->pdev->dev; + + dp->debug = dp_debug_get(dev, dp->panel, dp->usbpd, + dp->link, dp->dp_display.connector, + minor); + if (IS_ERR(dp->debug)) { + rc = PTR_ERR(dp->debug); + DRM_ERROR("failed to initialize debug, rc = %d\n", rc); + dp->debug = NULL; + } +} + +static void of_dp_aux_depopulate_bus_void(void *data) +{ + of_dp_aux_depopulate_bus(data); +} + +static int dp_display_get_next_bridge(struct msm_dp *dp) +{ + int rc; + struct dp_display_private *dp_priv; + struct device_node *aux_bus; + struct device *dev; + + dp_priv = container_of(dp, struct dp_display_private, dp_display); + dev = &dp_priv->pdev->dev; + aux_bus = of_get_child_by_name(dev->of_node, "aux-bus"); + + if (aux_bus && dp->is_edp) { + dp_display_host_init(dp_priv); + dp_catalog_ctrl_hpd_config(dp_priv->catalog); + dp_display_host_phy_init(dp_priv); + enable_irq(dp_priv->irq); + + /* + * The code below assumes that the panel will finish probing + * by the time devm_of_dp_aux_populate_ep_devices() returns. + * This isn't a great assumption since it will fail if the + * panel driver is probed asynchronously but is the best we + * can do without a bigger driver reorganization. + */ + rc = of_dp_aux_populate_bus(dp_priv->aux, NULL); + of_node_put(aux_bus); + if (rc) + goto error; + + rc = devm_add_action_or_reset(dp->drm_dev->dev, + of_dp_aux_depopulate_bus_void, + dp_priv->aux); + if (rc) + goto error; + } else if (dp->is_edp) { + DRM_ERROR("eDP aux_bus not found\n"); + return -ENODEV; + } + + /* + * External bridges are mandatory for eDP interfaces: one has to + * provide at least an eDP panel (which gets wrapped into panel-bridge). + * + * For DisplayPort interfaces external bridges are optional, so + * silently ignore an error if one is not present (-ENODEV). 
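+ * Any other error code, including -ENODEV on an eDP interface, is
+ * propagated to the caller through the error path below.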
+ */ + rc = devm_dp_parser_find_next_bridge(dp->drm_dev->dev, dp_priv->parser); + if (!dp->is_edp && rc == -ENODEV) + return 0; + + if (!rc) { + dp->next_bridge = dp_priv->parser->next_bridge; + return 0; + } + +error: + if (dp->is_edp) { + disable_irq(dp_priv->irq); + dp_display_host_phy_exit(dp_priv); + dp_display_host_deinit(dp_priv); + } + return rc; +} + +int msm_dp_modeset_init(struct msm_dp *dp_display, struct drm_device *dev, + struct drm_encoder *encoder) +{ + struct msm_drm_private *priv; + struct dp_display_private *dp_priv; + int ret; + + if (WARN_ON(!encoder) || WARN_ON(!dp_display) || WARN_ON(!dev)) + return -EINVAL; + + priv = dev->dev_private; + + if (priv->num_bridges == ARRAY_SIZE(priv->bridges)) { + DRM_DEV_ERROR(dev->dev, "too many bridges\n"); + return -ENOSPC; + } + + dp_display->drm_dev = dev; + + dp_priv = container_of(dp_display, struct dp_display_private, dp_display); + + ret = dp_display_request_irq(dp_display); + if (ret) { + DRM_ERROR("request_irq failed, ret=%d\n", ret); + return ret; + } + + ret = dp_display_get_next_bridge(dp_display); + if (ret) + return ret; + + dp_display->bridge = dp_bridge_init(dp_display, dev, encoder); + if (IS_ERR(dp_display->bridge)) { + ret = PTR_ERR(dp_display->bridge); + DRM_DEV_ERROR(dev->dev, + "failed to create dp bridge: %d\n", ret); + dp_display->bridge = NULL; + return ret; + } + + priv->bridges[priv->num_bridges++] = dp_display->bridge; + + dp_display->connector = dp_drm_connector_init(dp_display, encoder); + if (IS_ERR(dp_display->connector)) { + ret = PTR_ERR(dp_display->connector); + DRM_DEV_ERROR(dev->dev, + "failed to create dp connector: %d\n", ret); + dp_display->connector = NULL; + return ret; + } + + dp_priv->panel->connector = dp_display->connector; + + return 0; +} + +void dp_bridge_enable(struct drm_bridge *drm_bridge) +{ + struct msm_dp_bridge *dp_bridge = to_dp_bridge(drm_bridge); + struct msm_dp *dp = dp_bridge->dp_display; + int rc = 0; + struct dp_display_private *dp_display; + u32 state; + bool force_link_train = false; + + dp_display = container_of(dp, struct dp_display_private, dp_display); + if (!dp_display->dp_mode.drm_mode.clock) { + DRM_ERROR("invalid params\n"); + return; + } + + if (dp->is_edp) + dp_hpd_plug_handle(dp_display, 0); + + mutex_lock(&dp_display->event_mutex); + + state = dp_display->hpd_state; + if (state != ST_DISPLAY_OFF && state != ST_MAINLINK_READY) { + mutex_unlock(&dp_display->event_mutex); + return; + } + + rc = dp_display_set_mode(dp, &dp_display->dp_mode); + if (rc) { + DRM_ERROR("Failed to perform a mode set, rc=%d\n", rc); + mutex_unlock(&dp_display->event_mutex); + return; + } + + state = dp_display->hpd_state; + + if (state == ST_DISPLAY_OFF) { + dp_display_host_phy_init(dp_display); + force_link_train = true; + } + + dp_display_enable(dp_display, force_link_train); + + rc = dp_display_post_enable(dp); + if (rc) { + DRM_ERROR("DP display post enable failed, rc=%d\n", rc); + dp_display_disable(dp_display); + } + + /* completed connection */ + dp_display->hpd_state = ST_CONNECTED; + + drm_dbg_dp(dp->drm_dev, "type=%d Done\n", dp->connector_type); + mutex_unlock(&dp_display->event_mutex); +} + +void dp_bridge_disable(struct drm_bridge *drm_bridge) +{ + struct msm_dp_bridge *dp_bridge = to_dp_bridge(drm_bridge); + struct msm_dp *dp = dp_bridge->dp_display; + struct dp_display_private *dp_display; + + dp_display = container_of(dp, struct dp_display_private, dp_display); + + dp_ctrl_push_idle(dp_display->ctrl); +} + +void dp_bridge_post_disable(struct drm_bridge *drm_bridge) +{ 
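+ /*
+ * Power down the link and update the HPD state machine: a pending
+ * disconnect completes to ST_DISCONNECTED, otherwise the display is
+ * left in ST_DISPLAY_OFF.
+ */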
+ struct msm_dp_bridge *dp_bridge = to_dp_bridge(drm_bridge); + struct msm_dp *dp = dp_bridge->dp_display; + u32 state; + struct dp_display_private *dp_display; + + dp_display = container_of(dp, struct dp_display_private, dp_display); + + if (dp->is_edp) + dp_hpd_unplug_handle(dp_display, 0); + + mutex_lock(&dp_display->event_mutex); + + state = dp_display->hpd_state; + if (state != ST_DISCONNECT_PENDING && state != ST_CONNECTED) { + mutex_unlock(&dp_display->event_mutex); + return; + } + + dp_display_disable(dp_display); + + state = dp_display->hpd_state; + if (state == ST_DISCONNECT_PENDING) { + /* completed disconnection */ + dp_display->hpd_state = ST_DISCONNECTED; + } else { + dp_display->hpd_state = ST_DISPLAY_OFF; + } + + drm_dbg_dp(dp->drm_dev, "type=%d Done\n", dp->connector_type); + mutex_unlock(&dp_display->event_mutex); +} + +void dp_bridge_mode_set(struct drm_bridge *drm_bridge, + const struct drm_display_mode *mode, + const struct drm_display_mode *adjusted_mode) +{ + struct msm_dp_bridge *dp_bridge = to_dp_bridge(drm_bridge); + struct msm_dp *dp = dp_bridge->dp_display; + struct dp_display_private *dp_display; + + dp_display = container_of(dp, struct dp_display_private, dp_display); + + memset(&dp_display->dp_mode, 0x0, sizeof(struct dp_display_mode)); + + if (dp_display_check_video_test(dp)) + dp_display->dp_mode.bpp = dp_display_get_test_bpp(dp); + else /* Default num_components per pixel = 3 */ + dp_display->dp_mode.bpp = dp->connector->display_info.bpc * 3; + + if (!dp_display->dp_mode.bpp) + dp_display->dp_mode.bpp = 24; /* Default bpp */ + + drm_mode_copy(&dp_display->dp_mode.drm_mode, adjusted_mode); + + dp_display->dp_mode.v_active_low = + !!(dp_display->dp_mode.drm_mode.flags & DRM_MODE_FLAG_NVSYNC); + + dp_display->dp_mode.h_active_low = + !!(dp_display->dp_mode.drm_mode.flags & DRM_MODE_FLAG_NHSYNC); +} diff --git a/drivers/gpu/drm/msm/dp/dp_display.h b/drivers/gpu/drm/msm/dp/dp_display.h new file mode 100644 index 000000000..dcedf021f --- /dev/null +++ b/drivers/gpu/drm/msm/dp/dp_display.h @@ -0,0 +1,42 @@ +/* SPDX-License-Identifier: GPL-2.0-only */ +/* + * Copyright (c) 2017-2020, The Linux Foundation. All rights reserved. + */ + +#ifndef _DP_DISPLAY_H_ +#define _DP_DISPLAY_H_ + +#include "dp_panel.h" +#include <sound/hdmi-codec.h> +#include "disp/msm_disp_snapshot.h" + +struct msm_dp { + struct drm_device *drm_dev; + struct device *codec_dev; + struct drm_bridge *bridge; + struct drm_connector *connector; + struct drm_bridge *next_bridge; + bool is_connected; + bool audio_enabled; + bool power_on; + unsigned int connector_type; + bool is_edp; + + hdmi_codec_plugged_cb plugged_cb; + + bool wide_bus_en; + + u32 max_dp_lanes; + struct dp_audio *dp_audio; +}; + +int dp_display_set_plugged_cb(struct msm_dp *dp_display, + hdmi_codec_plugged_cb fn, struct device *codec_dev); +int dp_display_get_modes(struct msm_dp *dp_display); +int dp_display_request_irq(struct msm_dp *dp_display); +bool dp_display_check_video_test(struct msm_dp *dp_display); +int dp_display_get_test_bpp(struct msm_dp *dp_display); +void dp_display_signal_audio_start(struct msm_dp *dp_display); +void dp_display_signal_audio_complete(struct msm_dp *dp_display); + +#endif /* _DP_DISPLAY_H_ */ diff --git a/drivers/gpu/drm/msm/dp/dp_drm.c b/drivers/gpu/drm/msm/dp/dp_drm.c new file mode 100644 index 000000000..6db82f9b0 --- /dev/null +++ b/drivers/gpu/drm/msm/dp/dp_drm.c @@ -0,0 +1,178 @@ +// SPDX-License-Identifier: GPL-2.0-only +/* + * Copyright (c) 2017-2020, The Linux Foundation. All rights reserved. 
+ */ + +#include <drm/drm_atomic_helper.h> +#include <drm/drm_atomic.h> +#include <drm/drm_bridge.h> +#include <drm/drm_bridge_connector.h> +#include <drm/drm_crtc.h> + +#include "msm_drv.h" +#include "msm_kms.h" +#include "dp_drm.h" + +/** + * dp_bridge_detect - callback to determine if connector is connected + * @bridge: Pointer to drm bridge structure + * Returns: Bridge's 'is connected' status + */ +static enum drm_connector_status dp_bridge_detect(struct drm_bridge *bridge) +{ + struct msm_dp *dp; + + dp = to_dp_bridge(bridge)->dp_display; + + drm_dbg_dp(dp->drm_dev, "is_connected = %s\n", + (dp->is_connected) ? "true" : "false"); + + return (dp->is_connected) ? connector_status_connected : + connector_status_disconnected; +} + +static int dp_bridge_atomic_check(struct drm_bridge *bridge, + struct drm_bridge_state *bridge_state, + struct drm_crtc_state *crtc_state, + struct drm_connector_state *conn_state) +{ + struct msm_dp *dp; + + dp = to_dp_bridge(bridge)->dp_display; + + drm_dbg_dp(dp->drm_dev, "is_connected = %s\n", + (dp->is_connected) ? "true" : "false"); + + /* + * There is no protection in the DRM framework to check if the display + * pipeline has been already disabled before trying to disable it again. + * Hence if the sink is unplugged, the pipeline gets disabled, but the + * crtc->active is still true. Any attempt to set the mode or manually + * disable this encoder will result in the crash. + * + * TODO: add support for telling the DRM subsystem that the pipeline is + * disabled by the hardware and thus all access to it should be forbidden. + * After that this piece of code can be removed. + */ + if (bridge->ops & DRM_BRIDGE_OP_HPD) + return (dp->is_connected) ? 0 : -ENOTCONN; + + return 0; +} + + +/** + * dp_bridge_get_modes - callback to add drm modes via drm_mode_probed_add() + * @bridge: Poiner to drm bridge + * @connector: Pointer to drm connector structure + * Returns: Number of modes added + */ +static int dp_bridge_get_modes(struct drm_bridge *bridge, struct drm_connector *connector) +{ + int rc = 0; + struct msm_dp *dp; + + if (!connector) + return 0; + + dp = to_dp_bridge(bridge)->dp_display; + + /* pluggable case assumes EDID is read when HPD */ + if (dp->is_connected) { + rc = dp_display_get_modes(dp); + if (rc <= 0) { + DRM_ERROR("failed to get DP sink modes, rc=%d\n", rc); + return rc; + } + } else { + drm_dbg_dp(connector->dev, "No sink connected\n"); + } + return rc; +} + +static const struct drm_bridge_funcs dp_bridge_ops = { + .atomic_duplicate_state = drm_atomic_helper_bridge_duplicate_state, + .atomic_destroy_state = drm_atomic_helper_bridge_destroy_state, + .atomic_reset = drm_atomic_helper_bridge_reset, + .enable = dp_bridge_enable, + .disable = dp_bridge_disable, + .post_disable = dp_bridge_post_disable, + .mode_set = dp_bridge_mode_set, + .mode_valid = dp_bridge_mode_valid, + .get_modes = dp_bridge_get_modes, + .detect = dp_bridge_detect, + .atomic_check = dp_bridge_atomic_check, +}; + +struct drm_bridge *dp_bridge_init(struct msm_dp *dp_display, struct drm_device *dev, + struct drm_encoder *encoder) +{ + int rc; + struct msm_dp_bridge *dp_bridge; + struct drm_bridge *bridge; + + dp_bridge = devm_kzalloc(dev->dev, sizeof(*dp_bridge), GFP_KERNEL); + if (!dp_bridge) + return ERR_PTR(-ENOMEM); + + dp_bridge->dp_display = dp_display; + + bridge = &dp_bridge->bridge; + bridge->funcs = &dp_bridge_ops; + bridge->type = dp_display->connector_type; + + /* + * Many ops only make sense for DP. Why? 
+ * - Detect/HPD are used by DRM to know if a display is _physically_ + * there, not whether the display is powered on / finished initting. + * On eDP we assume the display is always there because you can't + * know until power is applied. If we don't implement the ops DRM will + * assume our display is always there. + * - Currently eDP mode reading is driven by the panel driver. This + * allows the panel driver to properly power itself on to read the + * modes. + */ + if (!dp_display->is_edp) { + bridge->ops = + DRM_BRIDGE_OP_DETECT | + DRM_BRIDGE_OP_HPD | + DRM_BRIDGE_OP_MODES; + } + + drm_bridge_add(bridge); + + rc = drm_bridge_attach(encoder, bridge, NULL, DRM_BRIDGE_ATTACH_NO_CONNECTOR); + if (rc) { + DRM_ERROR("failed to attach bridge, rc=%d\n", rc); + drm_bridge_remove(bridge); + + return ERR_PTR(rc); + } + + if (dp_display->next_bridge) { + rc = drm_bridge_attach(encoder, + dp_display->next_bridge, bridge, + DRM_BRIDGE_ATTACH_NO_CONNECTOR); + if (rc < 0) { + DRM_ERROR("failed to attach panel bridge: %d\n", rc); + drm_bridge_remove(bridge); + return ERR_PTR(rc); + } + } + + return bridge; +} + +/* connector initialization */ +struct drm_connector *dp_drm_connector_init(struct msm_dp *dp_display, struct drm_encoder *encoder) +{ + struct drm_connector *connector = NULL; + + connector = drm_bridge_connector_init(dp_display->drm_dev, encoder); + if (IS_ERR(connector)) + return connector; + + drm_connector_attach_encoder(connector, encoder); + + return connector; +} diff --git a/drivers/gpu/drm/msm/dp/dp_drm.h b/drivers/gpu/drm/msm/dp/dp_drm.h new file mode 100644 index 000000000..82035dbb0 --- /dev/null +++ b/drivers/gpu/drm/msm/dp/dp_drm.h @@ -0,0 +1,36 @@ +/* SPDX-License-Identifier: GPL-2.0-only */ +/* + * Copyright (c) 2017-2020, The Linux Foundation. All rights reserved. + */ + +#ifndef _DP_DRM_H_ +#define _DP_DRM_H_ + +#include <linux/types.h> +#include <drm/drm_bridge.h> + +#include "msm_drv.h" +#include "dp_display.h" + +struct msm_dp_bridge { + struct drm_bridge bridge; + struct msm_dp *dp_display; +}; + +#define to_dp_bridge(x) container_of((x), struct msm_dp_bridge, bridge) + +struct drm_connector *dp_drm_connector_init(struct msm_dp *dp_display, struct drm_encoder *encoder); +struct drm_bridge *dp_bridge_init(struct msm_dp *dp_display, struct drm_device *dev, + struct drm_encoder *encoder); + +void dp_bridge_enable(struct drm_bridge *drm_bridge); +void dp_bridge_disable(struct drm_bridge *drm_bridge); +void dp_bridge_post_disable(struct drm_bridge *drm_bridge); +enum drm_mode_status dp_bridge_mode_valid(struct drm_bridge *bridge, + const struct drm_display_info *info, + const struct drm_display_mode *mode); +void dp_bridge_mode_set(struct drm_bridge *drm_bridge, + const struct drm_display_mode *mode, + const struct drm_display_mode *adjusted_mode); + +#endif /* _DP_DRM_H_ */ diff --git a/drivers/gpu/drm/msm/dp/dp_hpd.c b/drivers/gpu/drm/msm/dp/dp_hpd.c new file mode 100644 index 000000000..db98a1d43 --- /dev/null +++ b/drivers/gpu/drm/msm/dp/dp_hpd.c @@ -0,0 +1,67 @@ +// SPDX-License-Identifier: GPL-2.0-only +/* + * Copyright (c) 2012-2020, The Linux Foundation. All rights reserved. 
+ */ + +#define pr_fmt(fmt) "[drm-dp] %s: " fmt, __func__ + +#include <linux/slab.h> +#include <linux/device.h> + +#include "dp_hpd.h" + +/* DP specific VDM commands */ +#define DP_USBPD_VDM_STATUS 0x10 +#define DP_USBPD_VDM_CONFIGURE 0x11 + +/* USBPD-TypeC specific Macros */ +#define VDM_VERSION 0x0 +#define USB_C_DP_SID 0xFF01 + +struct dp_hpd_private { + struct device *dev; + struct dp_usbpd_cb *dp_cb; + struct dp_usbpd dp_usbpd; +}; + +int dp_hpd_connect(struct dp_usbpd *dp_usbpd, bool hpd) +{ + int rc = 0; + struct dp_hpd_private *hpd_priv; + + hpd_priv = container_of(dp_usbpd, struct dp_hpd_private, + dp_usbpd); + + if (!hpd_priv->dp_cb || !hpd_priv->dp_cb->configure + || !hpd_priv->dp_cb->disconnect) { + pr_err("hpd dp_cb not initialized\n"); + return -EINVAL; + } + if (hpd) + hpd_priv->dp_cb->configure(hpd_priv->dev); + else + hpd_priv->dp_cb->disconnect(hpd_priv->dev); + + return rc; +} + +struct dp_usbpd *dp_hpd_get(struct device *dev, struct dp_usbpd_cb *cb) +{ + struct dp_hpd_private *dp_hpd; + + if (!cb) { + pr_err("invalid cb data\n"); + return ERR_PTR(-EINVAL); + } + + dp_hpd = devm_kzalloc(dev, sizeof(*dp_hpd), GFP_KERNEL); + if (!dp_hpd) + return ERR_PTR(-ENOMEM); + + dp_hpd->dev = dev; + dp_hpd->dp_cb = cb; + + dp_hpd->dp_usbpd.connect = dp_hpd_connect; + + return &dp_hpd->dp_usbpd; +} diff --git a/drivers/gpu/drm/msm/dp/dp_hpd.h b/drivers/gpu/drm/msm/dp/dp_hpd.h new file mode 100644 index 000000000..8feec5aa5 --- /dev/null +++ b/drivers/gpu/drm/msm/dp/dp_hpd.h @@ -0,0 +1,78 @@ +/* SPDX-License-Identifier: GPL-2.0-only */ +/* + * Copyright (c) 2012-2020, The Linux Foundation. All rights reserved. + */ + +#ifndef _DP_HPD_H_ +#define _DP_HPD_H_ + +//#include <linux/usb/usbpd.h> + +#include <linux/types.h> +#include <linux/device.h> + +enum plug_orientation { + ORIENTATION_NONE, + ORIENTATION_CC1, + ORIENTATION_CC2, +}; + +/** + * struct dp_usbpd - DisplayPort status + * + * @orientation: plug orientation configuration + * @low_pow_st: low power state + * @adaptor_dp_en: adaptor functionality enabled + * @multi_func: multi-function preferred + * @usb_config_req: request to switch to usb + * @exit_dp_mode: request exit from displayport mode + * @hpd_irq: Change in the status since last message + * @alt_mode_cfg_done: bool to specify alt mode status + * @debug_en: bool to specify debug mode + * @connect: simulate disconnect or connect for debug mode + */ +struct dp_usbpd { + enum plug_orientation orientation; + bool low_pow_st; + bool adaptor_dp_en; + bool multi_func; + bool usb_config_req; + bool exit_dp_mode; + bool hpd_irq; + bool alt_mode_cfg_done; + bool debug_en; + + int (*connect)(struct dp_usbpd *dp_usbpd, bool hpd); +}; + +/** + * struct dp_usbpd_cb - callback functions provided by the client + * + * @configure: called by usbpd module when PD communication has + * been completed and the usb peripheral has been configured on + * dp mode. + * @disconnect: notify the cable disconnect issued by usb. + * @attention: notify any attention message issued by usb. + */ +struct dp_usbpd_cb { + int (*configure)(struct device *dev); + int (*disconnect)(struct device *dev); + int (*attention)(struct device *dev); +}; + +/** + * dp_hpd_get() - setup hpd module + * + * @dev: device instance of the caller + * @cb: struct containing callback function pointers. + * + * This function allows the client to initialize the usbpd + * module. The module will communicate with HPD module. 
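+ *
+ * Return: pointer to the new &struct dp_usbpd on success, or an ERR_PTR()
+ * encoded error (-EINVAL for a missing callback, -ENOMEM on allocation
+ * failure).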
+ */ +struct dp_usbpd *dp_hpd_get(struct device *dev, struct dp_usbpd_cb *cb); + +int dp_hpd_register(struct dp_usbpd *dp_usbpd); +void dp_hpd_unregister(struct dp_usbpd *dp_usbpd); +int dp_hpd_connect(struct dp_usbpd *dp_usbpd, bool hpd); + +#endif /* _DP_HPD_H_ */ diff --git a/drivers/gpu/drm/msm/dp/dp_link.c b/drivers/gpu/drm/msm/dp/dp_link.c new file mode 100644 index 000000000..cb66d1126 --- /dev/null +++ b/drivers/gpu/drm/msm/dp/dp_link.c @@ -0,0 +1,1223 @@ +// SPDX-License-Identifier: GPL-2.0-only +/* + * Copyright (c) 2012-2020, The Linux Foundation. All rights reserved. + */ + +#define pr_fmt(fmt) "[drm-dp] %s: " fmt, __func__ + +#include <drm/drm_print.h> + +#include "dp_link.h" +#include "dp_panel.h" + +#define DP_TEST_REQUEST_MASK 0x7F + +enum audio_sample_rate { + AUDIO_SAMPLE_RATE_32_KHZ = 0x00, + AUDIO_SAMPLE_RATE_44_1_KHZ = 0x01, + AUDIO_SAMPLE_RATE_48_KHZ = 0x02, + AUDIO_SAMPLE_RATE_88_2_KHZ = 0x03, + AUDIO_SAMPLE_RATE_96_KHZ = 0x04, + AUDIO_SAMPLE_RATE_176_4_KHZ = 0x05, + AUDIO_SAMPLE_RATE_192_KHZ = 0x06, +}; + +enum audio_pattern_type { + AUDIO_TEST_PATTERN_OPERATOR_DEFINED = 0x00, + AUDIO_TEST_PATTERN_SAWTOOTH = 0x01, +}; + +struct dp_link_request { + u32 test_requested; + u32 test_link_rate; + u32 test_lane_count; +}; + +struct dp_link_private { + u32 prev_sink_count; + struct device *dev; + struct drm_device *drm_dev; + struct drm_dp_aux *aux; + struct dp_link dp_link; + + struct dp_link_request request; + struct mutex psm_mutex; + u8 link_status[DP_LINK_STATUS_SIZE]; +}; + +static int dp_aux_link_power_up(struct drm_dp_aux *aux, + struct dp_link_info *link) +{ + u8 value; + int err; + + if (link->revision < 0x11) + return 0; + + err = drm_dp_dpcd_readb(aux, DP_SET_POWER, &value); + if (err < 0) + return err; + + value &= ~DP_SET_POWER_MASK; + value |= DP_SET_POWER_D0; + + err = drm_dp_dpcd_writeb(aux, DP_SET_POWER, value); + if (err < 0) + return err; + + usleep_range(1000, 2000); + + return 0; +} + +static int dp_aux_link_power_down(struct drm_dp_aux *aux, + struct dp_link_info *link) +{ + u8 value; + int err; + + if (link->revision < 0x11) + return 0; + + err = drm_dp_dpcd_readb(aux, DP_SET_POWER, &value); + if (err < 0) + return err; + + value &= ~DP_SET_POWER_MASK; + value |= DP_SET_POWER_D3; + + err = drm_dp_dpcd_writeb(aux, DP_SET_POWER, value); + if (err < 0) + return err; + + return 0; +} + +static int dp_link_get_period(struct dp_link_private *link, int const addr) +{ + int ret = 0; + u8 data; + u32 const max_audio_period = 0xA; + + /* TEST_AUDIO_PERIOD_CH_XX */ + if (drm_dp_dpcd_readb(link->aux, addr, &data) < 0) { + DRM_ERROR("failed to read test_audio_period (0x%x)\n", addr); + ret = -EINVAL; + goto exit; + } + + /* Period - Bits 3:0 */ + data = data & 0xF; + if ((int)data > max_audio_period) { + DRM_ERROR("invalid test_audio_period_ch_1 = 0x%x\n", data); + ret = -EINVAL; + goto exit; + } + + ret = data; +exit: + return ret; +} + +static int dp_link_parse_audio_channel_period(struct dp_link_private *link) +{ + int ret = 0; + struct dp_link_test_audio *req = &link->dp_link.test_audio; + + ret = dp_link_get_period(link, DP_TEST_AUDIO_PERIOD_CH1); + if (ret == -EINVAL) + goto exit; + + req->test_audio_period_ch_1 = ret; + drm_dbg_dp(link->drm_dev, "test_audio_period_ch_1 = 0x%x\n", ret); + + ret = dp_link_get_period(link, DP_TEST_AUDIO_PERIOD_CH2); + if (ret == -EINVAL) + goto exit; + + req->test_audio_period_ch_2 = ret; + drm_dbg_dp(link->drm_dev, "test_audio_period_ch_2 = 0x%x\n", ret); + + /* TEST_AUDIO_PERIOD_CH_3 (Byte 0x275) */ + ret = 
dp_link_get_period(link, DP_TEST_AUDIO_PERIOD_CH3); + if (ret == -EINVAL) + goto exit; + + req->test_audio_period_ch_3 = ret; + drm_dbg_dp(link->drm_dev, "test_audio_period_ch_3 = 0x%x\n", ret); + + ret = dp_link_get_period(link, DP_TEST_AUDIO_PERIOD_CH4); + if (ret == -EINVAL) + goto exit; + + req->test_audio_period_ch_4 = ret; + drm_dbg_dp(link->drm_dev, "test_audio_period_ch_4 = 0x%x\n", ret); + + ret = dp_link_get_period(link, DP_TEST_AUDIO_PERIOD_CH5); + if (ret == -EINVAL) + goto exit; + + req->test_audio_period_ch_5 = ret; + drm_dbg_dp(link->drm_dev, "test_audio_period_ch_5 = 0x%x\n", ret); + + ret = dp_link_get_period(link, DP_TEST_AUDIO_PERIOD_CH6); + if (ret == -EINVAL) + goto exit; + + req->test_audio_period_ch_6 = ret; + drm_dbg_dp(link->drm_dev, "test_audio_period_ch_6 = 0x%x\n", ret); + + ret = dp_link_get_period(link, DP_TEST_AUDIO_PERIOD_CH7); + if (ret == -EINVAL) + goto exit; + + req->test_audio_period_ch_7 = ret; + drm_dbg_dp(link->drm_dev, "test_audio_period_ch_7 = 0x%x\n", ret); + + ret = dp_link_get_period(link, DP_TEST_AUDIO_PERIOD_CH8); + if (ret == -EINVAL) + goto exit; + + req->test_audio_period_ch_8 = ret; + drm_dbg_dp(link->drm_dev, "test_audio_period_ch_8 = 0x%x\n", ret); +exit: + return ret; +} + +static int dp_link_parse_audio_pattern_type(struct dp_link_private *link) +{ + int ret = 0; + u8 data; + ssize_t rlen; + int const max_audio_pattern_type = 0x1; + + rlen = drm_dp_dpcd_readb(link->aux, + DP_TEST_AUDIO_PATTERN_TYPE, &data); + if (rlen < 0) { + DRM_ERROR("failed to read link audio mode. rlen=%zd\n", rlen); + return rlen; + } + + /* Audio Pattern Type - Bits 7:0 */ + if ((int)data > max_audio_pattern_type) { + DRM_ERROR("invalid audio pattern type = 0x%x\n", data); + ret = -EINVAL; + goto exit; + } + + link->dp_link.test_audio.test_audio_pattern_type = data; + drm_dbg_dp(link->drm_dev, "audio pattern type = 0x%x\n", data); +exit: + return ret; +} + +static int dp_link_parse_audio_mode(struct dp_link_private *link) +{ + int ret = 0; + u8 data; + ssize_t rlen; + int const max_audio_sampling_rate = 0x6; + int const max_audio_channel_count = 0x8; + int sampling_rate = 0x0; + int channel_count = 0x0; + + rlen = drm_dp_dpcd_readb(link->aux, DP_TEST_AUDIO_MODE, &data); + if (rlen < 0) { + DRM_ERROR("failed to read link audio mode. 
rlen=%zd\n", rlen); + return rlen; + } + + /* Sampling Rate - Bits 3:0 */ + sampling_rate = data & 0xF; + if (sampling_rate > max_audio_sampling_rate) { + DRM_ERROR("sampling rate (0x%x) greater than max (0x%x)\n", + sampling_rate, max_audio_sampling_rate); + ret = -EINVAL; + goto exit; + } + + /* Channel Count - Bits 7:4 */ + channel_count = ((data & 0xF0) >> 4) + 1; + if (channel_count > max_audio_channel_count) { + DRM_ERROR("channel_count (0x%x) greater than max (0x%x)\n", + channel_count, max_audio_channel_count); + ret = -EINVAL; + goto exit; + } + + link->dp_link.test_audio.test_audio_sampling_rate = sampling_rate; + link->dp_link.test_audio.test_audio_channel_count = channel_count; + drm_dbg_dp(link->drm_dev, + "sampling_rate = 0x%x, channel_count = 0x%x\n", + sampling_rate, channel_count); +exit: + return ret; +} + +static int dp_link_parse_audio_pattern_params(struct dp_link_private *link) +{ + int ret = 0; + + ret = dp_link_parse_audio_mode(link); + if (ret) + goto exit; + + ret = dp_link_parse_audio_pattern_type(link); + if (ret) + goto exit; + + ret = dp_link_parse_audio_channel_period(link); + +exit: + return ret; +} + +static bool dp_link_is_video_pattern_valid(u32 pattern) +{ + switch (pattern) { + case DP_NO_TEST_PATTERN: + case DP_COLOR_RAMP: + case DP_BLACK_AND_WHITE_VERTICAL_LINES: + case DP_COLOR_SQUARE: + return true; + default: + return false; + } +} + +/** + * dp_link_is_bit_depth_valid() - validates the bit depth requested + * @tbd: bit depth requested by the sink + * + * Returns true if the requested bit depth is supported. + */ +static bool dp_link_is_bit_depth_valid(u32 tbd) +{ + /* DP_TEST_VIDEO_PATTERN_NONE is treated as invalid */ + switch (tbd) { + case DP_TEST_BIT_DEPTH_6: + case DP_TEST_BIT_DEPTH_8: + case DP_TEST_BIT_DEPTH_10: + return true; + default: + return false; + } +} + +static int dp_link_parse_timing_params1(struct dp_link_private *link, + int addr, int len, u32 *val) +{ + u8 bp[2]; + int rlen; + + if (len != 2) + return -EINVAL; + + /* Read the requested video link pattern (Byte 0x221). */ + rlen = drm_dp_dpcd_read(link->aux, addr, bp, len); + if (rlen < len) { + DRM_ERROR("failed to read 0x%x\n", addr); + return -EINVAL; + } + + *val = bp[1] | (bp[0] << 8); + + return 0; +} + +static int dp_link_parse_timing_params2(struct dp_link_private *link, + int addr, int len, + u32 *val1, u32 *val2) +{ + u8 bp[2]; + int rlen; + + if (len != 2) + return -EINVAL; + + /* Read the requested video link pattern (Byte 0x221). */ + rlen = drm_dp_dpcd_read(link->aux, addr, bp, len); + if (rlen < len) { + DRM_ERROR("failed to read 0x%x\n", addr); + return -EINVAL; + } + + *val1 = (bp[0] & BIT(7)) >> 7; + *val2 = bp[1] | ((bp[0] & 0x7F) << 8); + + return 0; +} + +static int dp_link_parse_timing_params3(struct dp_link_private *link, + int addr, u32 *val) +{ + u8 bp; + u32 len = 1; + int rlen; + + rlen = drm_dp_dpcd_read(link->aux, addr, &bp, len); + if (rlen < 1) { + DRM_ERROR("failed to read 0x%x\n", addr); + return -EINVAL; + } + *val = bp; + + return 0; +} + +/** + * dp_link_parse_video_pattern_params() - parses video pattern parameters from DPCD + * @link: Display Port Driver data + * + * Returns 0 if it successfully parses the video link pattern and the link + * bit depth requested by the sink and, and if the values parsed are valid. 
+ */ +static int dp_link_parse_video_pattern_params(struct dp_link_private *link) +{ + int ret = 0; + ssize_t rlen; + u8 bp; + + rlen = drm_dp_dpcd_readb(link->aux, DP_TEST_PATTERN, &bp); + if (rlen < 0) { + DRM_ERROR("failed to read link video pattern. rlen=%zd\n", + rlen); + return rlen; + } + + if (!dp_link_is_video_pattern_valid(bp)) { + DRM_ERROR("invalid link video pattern = 0x%x\n", bp); + ret = -EINVAL; + return ret; + } + + link->dp_link.test_video.test_video_pattern = bp; + + /* Read the requested color bit depth and dynamic range (Byte 0x232) */ + rlen = drm_dp_dpcd_readb(link->aux, DP_TEST_MISC0, &bp); + if (rlen < 0) { + DRM_ERROR("failed to read link bit depth. rlen=%zd\n", rlen); + return rlen; + } + + /* Dynamic Range */ + link->dp_link.test_video.test_dyn_range = + (bp & DP_TEST_DYNAMIC_RANGE_CEA); + + /* Color bit depth */ + bp &= DP_TEST_BIT_DEPTH_MASK; + if (!dp_link_is_bit_depth_valid(bp)) { + DRM_ERROR("invalid link bit depth = 0x%x\n", bp); + ret = -EINVAL; + return ret; + } + + link->dp_link.test_video.test_bit_depth = bp; + + /* resolution timing params */ + ret = dp_link_parse_timing_params1(link, DP_TEST_H_TOTAL_HI, 2, + &link->dp_link.test_video.test_h_total); + if (ret) { + DRM_ERROR("failed to parse test_htotal(DP_TEST_H_TOTAL_HI)\n"); + return ret; + } + + ret = dp_link_parse_timing_params1(link, DP_TEST_V_TOTAL_HI, 2, + &link->dp_link.test_video.test_v_total); + if (ret) { + DRM_ERROR("failed to parse test_v_total(DP_TEST_V_TOTAL_HI)\n"); + return ret; + } + + ret = dp_link_parse_timing_params1(link, DP_TEST_H_START_HI, 2, + &link->dp_link.test_video.test_h_start); + if (ret) { + DRM_ERROR("failed to parse test_h_start(DP_TEST_H_START_HI)\n"); + return ret; + } + + ret = dp_link_parse_timing_params1(link, DP_TEST_V_START_HI, 2, + &link->dp_link.test_video.test_v_start); + if (ret) { + DRM_ERROR("failed to parse test_v_start(DP_TEST_V_START_HI)\n"); + return ret; + } + + ret = dp_link_parse_timing_params2(link, DP_TEST_HSYNC_HI, 2, + &link->dp_link.test_video.test_hsync_pol, + &link->dp_link.test_video.test_hsync_width); + if (ret) { + DRM_ERROR("failed to parse (DP_TEST_HSYNC_HI)\n"); + return ret; + } + + ret = dp_link_parse_timing_params2(link, DP_TEST_VSYNC_HI, 2, + &link->dp_link.test_video.test_vsync_pol, + &link->dp_link.test_video.test_vsync_width); + if (ret) { + DRM_ERROR("failed to parse (DP_TEST_VSYNC_HI)\n"); + return ret; + } + + ret = dp_link_parse_timing_params1(link, DP_TEST_H_WIDTH_HI, 2, + &link->dp_link.test_video.test_h_width); + if (ret) { + DRM_ERROR("failed to parse test_h_width(DP_TEST_H_WIDTH_HI)\n"); + return ret; + } + + ret = dp_link_parse_timing_params1(link, DP_TEST_V_HEIGHT_HI, 2, + &link->dp_link.test_video.test_v_height); + if (ret) { + DRM_ERROR("failed to parse test_v_height\n"); + return ret; + } + + ret = dp_link_parse_timing_params3(link, DP_TEST_MISC1, + &link->dp_link.test_video.test_rr_d); + link->dp_link.test_video.test_rr_d &= DP_TEST_REFRESH_DENOMINATOR; + if (ret) { + DRM_ERROR("failed to parse test_rr_d (DP_TEST_MISC1)\n"); + return ret; + } + + ret = dp_link_parse_timing_params3(link, DP_TEST_REFRESH_RATE_NUMERATOR, + &link->dp_link.test_video.test_rr_n); + if (ret) { + DRM_ERROR("failed to parse test_rr_n\n"); + return ret; + } + + drm_dbg_dp(link->drm_dev, + "link video pattern = 0x%x\n" + "link dynamic range = 0x%x\n" + "link bit depth = 0x%x\n" + "TEST_H_TOTAL = %d, TEST_V_TOTAL = %d\n" + "TEST_H_START = %d, TEST_V_START = %d\n" + "TEST_HSYNC_POL = %d\n" + "TEST_HSYNC_WIDTH = %d\n" + "TEST_VSYNC_POL = %d\n" 
+ "TEST_VSYNC_WIDTH = %d\n" + "TEST_H_WIDTH = %d\n" + "TEST_V_HEIGHT = %d\n" + "TEST_REFRESH_DENOMINATOR = %d\n" + "TEST_REFRESH_NUMERATOR = %d\n", + link->dp_link.test_video.test_video_pattern, + link->dp_link.test_video.test_dyn_range, + link->dp_link.test_video.test_bit_depth, + link->dp_link.test_video.test_h_total, + link->dp_link.test_video.test_v_total, + link->dp_link.test_video.test_h_start, + link->dp_link.test_video.test_v_start, + link->dp_link.test_video.test_hsync_pol, + link->dp_link.test_video.test_hsync_width, + link->dp_link.test_video.test_vsync_pol, + link->dp_link.test_video.test_vsync_width, + link->dp_link.test_video.test_h_width, + link->dp_link.test_video.test_v_height, + link->dp_link.test_video.test_rr_d, + link->dp_link.test_video.test_rr_n); + + return ret; +} + +/** + * dp_link_parse_link_training_params() - parses link training parameters from + * DPCD + * @link: Display Port Driver data + * + * Returns 0 if it successfully parses the link rate (Byte 0x219) and lane + * count (Byte 0x220), and if these values parse are valid. + */ +static int dp_link_parse_link_training_params(struct dp_link_private *link) +{ + u8 bp; + ssize_t rlen; + + rlen = drm_dp_dpcd_readb(link->aux, DP_TEST_LINK_RATE, &bp); + if (rlen < 0) { + DRM_ERROR("failed to read link rate. rlen=%zd\n", rlen); + return rlen; + } + + if (!is_link_rate_valid(bp)) { + DRM_ERROR("invalid link rate = 0x%x\n", bp); + return -EINVAL; + } + + link->request.test_link_rate = bp; + drm_dbg_dp(link->drm_dev, "link rate = 0x%x\n", + link->request.test_link_rate); + + rlen = drm_dp_dpcd_readb(link->aux, DP_TEST_LANE_COUNT, &bp); + if (rlen < 0) { + DRM_ERROR("failed to read lane count. rlen=%zd\n", rlen); + return rlen; + } + bp &= DP_MAX_LANE_COUNT_MASK; + + if (!is_lane_count_valid(bp)) { + DRM_ERROR("invalid lane count = 0x%x\n", bp); + return -EINVAL; + } + + link->request.test_lane_count = bp; + drm_dbg_dp(link->drm_dev, "lane count = 0x%x\n", + link->request.test_lane_count); + return 0; +} + +/** + * dp_link_parse_phy_test_params() - parses the phy link parameters + * @link: Display Port Driver data + * + * Parses the DPCD (Byte 0x248) for the DP PHY link pattern that is being + * requested. + */ +static int dp_link_parse_phy_test_params(struct dp_link_private *link) +{ + u8 data; + ssize_t rlen; + + rlen = drm_dp_dpcd_readb(link->aux, DP_PHY_TEST_PATTERN, + &data); + if (rlen < 0) { + DRM_ERROR("failed to read phy link pattern. rlen=%zd\n", rlen); + return rlen; + } + + link->dp_link.phy_params.phy_test_pattern_sel = data & 0x07; + + drm_dbg_dp(link->drm_dev, "phy_test_pattern_sel = 0x%x\n", data); + + switch (data) { + case DP_PHY_TEST_PATTERN_SEL_MASK: + case DP_PHY_TEST_PATTERN_NONE: + case DP_PHY_TEST_PATTERN_D10_2: + case DP_PHY_TEST_PATTERN_ERROR_COUNT: + case DP_PHY_TEST_PATTERN_PRBS7: + case DP_PHY_TEST_PATTERN_80BIT_CUSTOM: + case DP_PHY_TEST_PATTERN_CP2520: + return 0; + default: + return -EINVAL; + } +} + +/** + * dp_link_is_video_audio_test_requested() - checks for audio/video link request + * @link: link requested by the sink + * + * Returns true if the requested link is a permitted audio/video link. 
+ */ +static bool dp_link_is_video_audio_test_requested(u32 link) +{ + u8 video_audio_test = (DP_TEST_LINK_VIDEO_PATTERN | + DP_TEST_LINK_AUDIO_PATTERN | + DP_TEST_LINK_AUDIO_DISABLED_VIDEO); + + return ((link & video_audio_test) && + !(link & ~video_audio_test)); +} + +/** + * dp_link_parse_request() - parses link request parameters from sink + * @link: Display Port Driver data + * + * Parses the DPCD to check if an automated link is requested (Byte 0x201), + * and what type of link automation is being requested (Byte 0x218). + */ +static int dp_link_parse_request(struct dp_link_private *link) +{ + int ret = 0; + u8 data; + ssize_t rlen; + + /** + * Read the device service IRQ vector (Byte 0x201) to determine + * whether an automated link has been requested by the sink. + */ + rlen = drm_dp_dpcd_readb(link->aux, + DP_DEVICE_SERVICE_IRQ_VECTOR, &data); + if (rlen < 0) { + DRM_ERROR("aux read failed. rlen=%zd\n", rlen); + return rlen; + } + + drm_dbg_dp(link->drm_dev, "device service irq vector = 0x%x\n", data); + + if (!(data & DP_AUTOMATED_TEST_REQUEST)) { + drm_dbg_dp(link->drm_dev, "no test requested\n"); + return 0; + } + + /** + * Read the link request byte (Byte 0x218) to determine what type + * of automated link has been requested by the sink. + */ + rlen = drm_dp_dpcd_readb(link->aux, DP_TEST_REQUEST, &data); + if (rlen < 0) { + DRM_ERROR("aux read failed. rlen=%zd\n", rlen); + return rlen; + } + + if (!data || (data == DP_TEST_LINK_FAUX_PATTERN)) { + drm_dbg_dp(link->drm_dev, "link 0x%x not supported\n", data); + goto end; + } + + drm_dbg_dp(link->drm_dev, "Test:(0x%x) requested\n", data); + link->request.test_requested = data; + if (link->request.test_requested == DP_TEST_LINK_PHY_TEST_PATTERN) { + ret = dp_link_parse_phy_test_params(link); + if (ret) + goto end; + ret = dp_link_parse_link_training_params(link); + if (ret) + goto end; + } + + if (link->request.test_requested == DP_TEST_LINK_TRAINING) { + ret = dp_link_parse_link_training_params(link); + if (ret) + goto end; + } + + if (dp_link_is_video_audio_test_requested( + link->request.test_requested)) { + ret = dp_link_parse_video_pattern_params(link); + if (ret) + goto end; + + ret = dp_link_parse_audio_pattern_params(link); + } +end: + /* + * Send a DP_TEST_ACK if all link parameters are valid, otherwise send + * a DP_TEST_NAK. + */ + if (ret) { + link->dp_link.test_response = DP_TEST_NAK; + } else { + if (link->request.test_requested != DP_TEST_LINK_EDID_READ) + link->dp_link.test_response = DP_TEST_ACK; + else + link->dp_link.test_response = + DP_TEST_EDID_CHECKSUM_WRITE; + } + + return ret; +} + +/** + * dp_link_parse_sink_count() - parses the sink count + * @dp_link: pointer to link module data + * + * Parses the DPCD to check if there is an update to the sink count + * (Byte 0x200), and whether all the sink devices connected have Content + * Protection enabled. + */ +static int dp_link_parse_sink_count(struct dp_link *dp_link) +{ + ssize_t rlen; + bool cp_ready; + + struct dp_link_private *link = container_of(dp_link, + struct dp_link_private, dp_link); + + rlen = drm_dp_dpcd_readb(link->aux, DP_SINK_COUNT, + &link->dp_link.sink_count); + if (rlen < 0) { + DRM_ERROR("sink count read failed. 
rlen=%zd\n", rlen); + return rlen; + } + + cp_ready = link->dp_link.sink_count & DP_SINK_CP_READY; + + link->dp_link.sink_count = + DP_GET_SINK_COUNT(link->dp_link.sink_count); + + drm_dbg_dp(link->drm_dev, "sink_count = 0x%x, cp_ready = 0x%x\n", + link->dp_link.sink_count, cp_ready); + return 0; +} + +static int dp_link_parse_sink_status_field(struct dp_link_private *link) +{ + int len = 0; + + link->prev_sink_count = link->dp_link.sink_count; + len = dp_link_parse_sink_count(&link->dp_link); + if (len < 0) { + DRM_ERROR("DP parse sink count failed\n"); + return len; + } + + len = drm_dp_dpcd_read_link_status(link->aux, + link->link_status); + if (len < DP_LINK_STATUS_SIZE) { + DRM_ERROR("DP link status read failed\n"); + return len; + } + + return dp_link_parse_request(link); +} + +/** + * dp_link_process_link_training_request() - processes new training requests + * @link: Display Port link data + * + * This function will handle new link training requests that are initiated by + * the sink. In particular, it will update the requested lane count and link + * rate, and then trigger the link retraining procedure. + * + * The function will return 0 if a link training request has been processed, + * otherwise it will return -EINVAL. + */ +static int dp_link_process_link_training_request(struct dp_link_private *link) +{ + if (link->request.test_requested != DP_TEST_LINK_TRAINING) + return -EINVAL; + + drm_dbg_dp(link->drm_dev, + "Test:0x%x link rate = 0x%x, lane count = 0x%x\n", + DP_TEST_LINK_TRAINING, + link->request.test_link_rate, + link->request.test_lane_count); + + link->dp_link.link_params.num_lanes = link->request.test_lane_count; + link->dp_link.link_params.rate = + drm_dp_bw_code_to_link_rate(link->request.test_link_rate); + + return 0; +} + +bool dp_link_send_test_response(struct dp_link *dp_link) +{ + struct dp_link_private *link = NULL; + int ret = 0; + + if (!dp_link) { + DRM_ERROR("invalid input\n"); + return false; + } + + link = container_of(dp_link, struct dp_link_private, dp_link); + + ret = drm_dp_dpcd_writeb(link->aux, DP_TEST_RESPONSE, + dp_link->test_response); + + return ret == 1; +} + +int dp_link_psm_config(struct dp_link *dp_link, + struct dp_link_info *link_info, bool enable) +{ + struct dp_link_private *link = NULL; + int ret = 0; + + if (!dp_link) { + DRM_ERROR("invalid params\n"); + return -EINVAL; + } + + link = container_of(dp_link, struct dp_link_private, dp_link); + + mutex_lock(&link->psm_mutex); + if (enable) + ret = dp_aux_link_power_down(link->aux, link_info); + else + ret = dp_aux_link_power_up(link->aux, link_info); + + if (ret) + DRM_ERROR("Failed to %s low power mode\n", enable ? 
+ "enter" : "exit"); + else + dp_link->psm_enabled = enable; + + mutex_unlock(&link->psm_mutex); + return ret; +} + +bool dp_link_send_edid_checksum(struct dp_link *dp_link, u8 checksum) +{ + struct dp_link_private *link = NULL; + int ret = 0; + + if (!dp_link) { + DRM_ERROR("invalid input\n"); + return false; + } + + link = container_of(dp_link, struct dp_link_private, dp_link); + + ret = drm_dp_dpcd_writeb(link->aux, DP_TEST_EDID_CHECKSUM, + checksum); + return ret == 1; +} + +static void dp_link_parse_vx_px(struct dp_link_private *link) +{ + drm_dbg_dp(link->drm_dev, "vx: 0=%d, 1=%d, 2=%d, 3=%d\n", + drm_dp_get_adjust_request_voltage(link->link_status, 0), + drm_dp_get_adjust_request_voltage(link->link_status, 1), + drm_dp_get_adjust_request_voltage(link->link_status, 2), + drm_dp_get_adjust_request_voltage(link->link_status, 3)); + + drm_dbg_dp(link->drm_dev, "px: 0=%d, 1=%d, 2=%d, 3=%d\n", + drm_dp_get_adjust_request_pre_emphasis(link->link_status, 0), + drm_dp_get_adjust_request_pre_emphasis(link->link_status, 1), + drm_dp_get_adjust_request_pre_emphasis(link->link_status, 2), + drm_dp_get_adjust_request_pre_emphasis(link->link_status, 3)); + + /** + * Update the voltage and pre-emphasis levels as per DPCD request + * vector. + */ + drm_dbg_dp(link->drm_dev, + "Current: v_level = 0x%x, p_level = 0x%x\n", + link->dp_link.phy_params.v_level, + link->dp_link.phy_params.p_level); + link->dp_link.phy_params.v_level = + drm_dp_get_adjust_request_voltage(link->link_status, 0); + link->dp_link.phy_params.p_level = + drm_dp_get_adjust_request_pre_emphasis(link->link_status, 0); + + link->dp_link.phy_params.p_level >>= DP_TRAIN_PRE_EMPHASIS_SHIFT; + + drm_dbg_dp(link->drm_dev, + "Requested: v_level = 0x%x, p_level = 0x%x\n", + link->dp_link.phy_params.v_level, + link->dp_link.phy_params.p_level); +} + +/** + * dp_link_process_phy_test_pattern_request() - process new phy link requests + * @link: Display Port Driver data + * + * This function will handle new phy link pattern requests that are initiated + * by the sink. The function will return 0 if a phy link pattern has been + * processed, otherwise it will return -EINVAL. + */ +static int dp_link_process_phy_test_pattern_request( + struct dp_link_private *link) +{ + if (!(link->request.test_requested & DP_TEST_LINK_PHY_TEST_PATTERN)) { + drm_dbg_dp(link->drm_dev, "no phy test\n"); + return -EINVAL; + } + + if (!is_link_rate_valid(link->request.test_link_rate) || + !is_lane_count_valid(link->request.test_lane_count)) { + DRM_ERROR("Invalid: link rate = 0x%x,lane count = 0x%x\n", + link->request.test_link_rate, + link->request.test_lane_count); + return -EINVAL; + } + + drm_dbg_dp(link->drm_dev, + "Current: rate = 0x%x, lane count = 0x%x\n", + link->dp_link.link_params.rate, + link->dp_link.link_params.num_lanes); + + drm_dbg_dp(link->drm_dev, + "Requested: rate = 0x%x, lane count = 0x%x\n", + link->request.test_link_rate, + link->request.test_lane_count); + + link->dp_link.link_params.num_lanes = link->request.test_lane_count; + link->dp_link.link_params.rate = + drm_dp_bw_code_to_link_rate(link->request.test_link_rate); + + dp_link_parse_vx_px(link); + + return 0; +} + +static u8 get_link_status(const u8 link_status[DP_LINK_STATUS_SIZE], int r) +{ + return link_status[r - DP_LANE0_1_STATUS]; +} + +/** + * dp_link_process_link_status_update() - processes link status updates + * @link: Display Port link module data + * + * This function will check for changes in the link status, e.g. 
clock + * recovery done on all lanes, and trigger link training if there is a + * failure/error on the link. + * + * The function will return 0 if the a link status update has been processed, + * otherwise it will return -EINVAL. + */ +static int dp_link_process_link_status_update(struct dp_link_private *link) +{ + bool channel_eq_done = drm_dp_channel_eq_ok(link->link_status, + link->dp_link.link_params.num_lanes); + + bool clock_recovery_done = drm_dp_clock_recovery_ok(link->link_status, + link->dp_link.link_params.num_lanes); + + drm_dbg_dp(link->drm_dev, + "channel_eq_done = %d, clock_recovery_done = %d\n", + channel_eq_done, clock_recovery_done); + + if (channel_eq_done && clock_recovery_done) + return -EINVAL; + + return 0; +} + +/** + * dp_link_process_ds_port_status_change() - process port status changes + * @link: Display Port Driver data + * + * This function will handle downstream port updates that are initiated by + * the sink. If the downstream port status has changed, the EDID is read via + * AUX. + * + * The function will return 0 if a downstream port update has been + * processed, otherwise it will return -EINVAL. + */ +static int dp_link_process_ds_port_status_change(struct dp_link_private *link) +{ + if (get_link_status(link->link_status, DP_LANE_ALIGN_STATUS_UPDATED) & + DP_DOWNSTREAM_PORT_STATUS_CHANGED) + goto reset; + + if (link->prev_sink_count == link->dp_link.sink_count) + return -EINVAL; + +reset: + /* reset prev_sink_count */ + link->prev_sink_count = link->dp_link.sink_count; + + return 0; +} + +static bool dp_link_is_video_pattern_requested(struct dp_link_private *link) +{ + return (link->request.test_requested & DP_TEST_LINK_VIDEO_PATTERN) + && !(link->request.test_requested & + DP_TEST_LINK_AUDIO_DISABLED_VIDEO); +} + +static bool dp_link_is_audio_pattern_requested(struct dp_link_private *link) +{ + return (link->request.test_requested & DP_TEST_LINK_AUDIO_PATTERN); +} + +static void dp_link_reset_data(struct dp_link_private *link) +{ + link->request = (const struct dp_link_request){ 0 }; + link->dp_link.test_video = (const struct dp_link_test_video){ 0 }; + link->dp_link.test_video.test_bit_depth = DP_TEST_BIT_DEPTH_UNKNOWN; + link->dp_link.test_audio = (const struct dp_link_test_audio){ 0 }; + link->dp_link.phy_params.phy_test_pattern_sel = 0; + link->dp_link.sink_request = 0; + link->dp_link.test_response = 0; +} + +/** + * dp_link_process_request() - handle HPD IRQ transition to HIGH + * @dp_link: pointer to link module data + * + * This function will handle the HPD IRQ state transitions from LOW to HIGH + * (including cases when there are back to back HPD IRQ HIGH) indicating + * the start of a new link training request or sink status update. 
+ */ +int dp_link_process_request(struct dp_link *dp_link) +{ + int ret = 0; + struct dp_link_private *link; + + if (!dp_link) { + DRM_ERROR("invalid input\n"); + return -EINVAL; + } + + link = container_of(dp_link, struct dp_link_private, dp_link); + + dp_link_reset_data(link); + + ret = dp_link_parse_sink_status_field(link); + if (ret) + return ret; + + if (link->request.test_requested == DP_TEST_LINK_EDID_READ) { + dp_link->sink_request |= DP_TEST_LINK_EDID_READ; + } else if (!dp_link_process_ds_port_status_change(link)) { + dp_link->sink_request |= DS_PORT_STATUS_CHANGED; + } else if (!dp_link_process_link_training_request(link)) { + dp_link->sink_request |= DP_TEST_LINK_TRAINING; + } else if (!dp_link_process_phy_test_pattern_request(link)) { + dp_link->sink_request |= DP_TEST_LINK_PHY_TEST_PATTERN; + } else { + ret = dp_link_process_link_status_update(link); + if (!ret) { + dp_link->sink_request |= DP_LINK_STATUS_UPDATED; + } else { + if (dp_link_is_video_pattern_requested(link)) { + ret = 0; + dp_link->sink_request |= DP_TEST_LINK_VIDEO_PATTERN; + } + if (dp_link_is_audio_pattern_requested(link)) { + dp_link->sink_request |= DP_TEST_LINK_AUDIO_PATTERN; + ret = -EINVAL; + } + } + } + + drm_dbg_dp(link->drm_dev, "sink request=%#x\n", + dp_link->sink_request); + return ret; +} + +int dp_link_get_colorimetry_config(struct dp_link *dp_link) +{ + u32 cc; + struct dp_link_private *link; + + if (!dp_link) { + DRM_ERROR("invalid input\n"); + return -EINVAL; + } + + link = container_of(dp_link, struct dp_link_private, dp_link); + + /* + * Unless a video pattern CTS test is ongoing, use RGB_VESA + * Only RGB_VESA and RGB_CEA supported for now + */ + if (dp_link_is_video_pattern_requested(link)) + cc = link->dp_link.test_video.test_dyn_range; + else + cc = DP_TEST_DYNAMIC_RANGE_VESA; + + return cc; +} + +int dp_link_adjust_levels(struct dp_link *dp_link, u8 *link_status) +{ + int i; + int v_max = 0, p_max = 0; + struct dp_link_private *link; + + if (!dp_link) { + DRM_ERROR("invalid input\n"); + return -EINVAL; + } + + link = container_of(dp_link, struct dp_link_private, dp_link); + + /* use the max level across lanes */ + for (i = 0; i < dp_link->link_params.num_lanes; i++) { + u8 data_v = drm_dp_get_adjust_request_voltage(link_status, i); + u8 data_p = drm_dp_get_adjust_request_pre_emphasis(link_status, + i); + drm_dbg_dp(link->drm_dev, + "lane=%d req_vol_swing=%d req_pre_emphasis=%d\n", + i, data_v, data_p); + if (v_max < data_v) + v_max = data_v; + if (p_max < data_p) + p_max = data_p; + } + + dp_link->phy_params.v_level = v_max >> DP_TRAIN_VOLTAGE_SWING_SHIFT; + dp_link->phy_params.p_level = p_max >> DP_TRAIN_PRE_EMPHASIS_SHIFT; + + /** + * Adjust the voltage swing and pre-emphasis level combination to within + * the allowable range. 
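+ * For example, a requested pre-emphasis above level 1 is capped to level 1
+ * below when the voltage swing is already at level 2.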
+ */ + if (dp_link->phy_params.v_level > DP_TRAIN_VOLTAGE_SWING_MAX) { + drm_dbg_dp(link->drm_dev, + "Requested vSwingLevel=%d, change to %d\n", + dp_link->phy_params.v_level, + DP_TRAIN_VOLTAGE_SWING_MAX); + dp_link->phy_params.v_level = DP_TRAIN_VOLTAGE_SWING_MAX; + } + + if (dp_link->phy_params.p_level > DP_TRAIN_PRE_EMPHASIS_MAX) { + drm_dbg_dp(link->drm_dev, + "Requested preEmphasisLevel=%d, change to %d\n", + dp_link->phy_params.p_level, + DP_TRAIN_PRE_EMPHASIS_MAX); + dp_link->phy_params.p_level = DP_TRAIN_PRE_EMPHASIS_MAX; + } + + if ((dp_link->phy_params.p_level > DP_TRAIN_PRE_EMPHASIS_LVL_1) + && (dp_link->phy_params.v_level == + DP_TRAIN_VOLTAGE_SWING_LVL_2)) { + drm_dbg_dp(link->drm_dev, + "Requested preEmphasisLevel=%d, change to %d\n", + dp_link->phy_params.p_level, + DP_TRAIN_PRE_EMPHASIS_LVL_1); + dp_link->phy_params.p_level = DP_TRAIN_PRE_EMPHASIS_LVL_1; + } + + drm_dbg_dp(link->drm_dev, "adjusted: v_level=%d, p_level=%d\n", + dp_link->phy_params.v_level, dp_link->phy_params.p_level); + + return 0; +} + +void dp_link_reset_phy_params_vx_px(struct dp_link *dp_link) +{ + dp_link->phy_params.v_level = 0; + dp_link->phy_params.p_level = 0; +} + +u32 dp_link_get_test_bits_depth(struct dp_link *dp_link, u32 bpp) +{ + u32 tbd; + + /* + * Few simplistic rules and assumptions made here: + * 1. Test bit depth is bit depth per color component + * 2. Assume 3 color components + */ + switch (bpp) { + case 18: + tbd = DP_TEST_BIT_DEPTH_6; + break; + case 24: + tbd = DP_TEST_BIT_DEPTH_8; + break; + case 30: + tbd = DP_TEST_BIT_DEPTH_10; + break; + default: + tbd = DP_TEST_BIT_DEPTH_UNKNOWN; + break; + } + + if (tbd != DP_TEST_BIT_DEPTH_UNKNOWN) + tbd = (tbd >> DP_TEST_BIT_DEPTH_SHIFT); + + return tbd; +} + +struct dp_link *dp_link_get(struct device *dev, struct drm_dp_aux *aux) +{ + struct dp_link_private *link; + struct dp_link *dp_link; + + if (!dev || !aux) { + DRM_ERROR("invalid input\n"); + return ERR_PTR(-EINVAL); + } + + link = devm_kzalloc(dev, sizeof(*link), GFP_KERNEL); + if (!link) + return ERR_PTR(-ENOMEM); + + link->dev = dev; + link->aux = aux; + + mutex_init(&link->psm_mutex); + dp_link = &link->dp_link; + + return dp_link; +} diff --git a/drivers/gpu/drm/msm/dp/dp_link.h b/drivers/gpu/drm/msm/dp/dp_link.h new file mode 100644 index 000000000..9dd4dd926 --- /dev/null +++ b/drivers/gpu/drm/msm/dp/dp_link.h @@ -0,0 +1,156 @@ +/* SPDX-License-Identifier: GPL-2.0-only */ +/* + * Copyright (c) 2012-2020, The Linux Foundation. All rights reserved. 
+ */ + +#ifndef _DP_LINK_H_ +#define _DP_LINK_H_ + +#include "dp_aux.h" + +#define DS_PORT_STATUS_CHANGED 0x200 +#define DP_TEST_BIT_DEPTH_UNKNOWN 0xFFFFFFFF +#define DP_LINK_CAP_ENHANCED_FRAMING (1 << 0) + +struct dp_link_info { + unsigned char revision; + unsigned int rate; + unsigned int num_lanes; + unsigned long capabilities; +}; + +enum dp_link_voltage_level { + DP_TRAIN_VOLTAGE_SWING_LVL_0 = 0, + DP_TRAIN_VOLTAGE_SWING_LVL_1 = 1, + DP_TRAIN_VOLTAGE_SWING_LVL_2 = 2, + DP_TRAIN_VOLTAGE_SWING_MAX = DP_TRAIN_VOLTAGE_SWING_LVL_2, +}; + +enum dp_link_preemaphasis_level { + DP_TRAIN_PRE_EMPHASIS_LVL_0 = 0, + DP_TRAIN_PRE_EMPHASIS_LVL_1 = 1, + DP_TRAIN_PRE_EMPHASIS_LVL_2 = 2, + DP_TRAIN_PRE_EMPHASIS_MAX = DP_TRAIN_PRE_EMPHASIS_LVL_2, +}; + +struct dp_link_test_video { + u32 test_video_pattern; + u32 test_bit_depth; + u32 test_dyn_range; + u32 test_h_total; + u32 test_v_total; + u32 test_h_start; + u32 test_v_start; + u32 test_hsync_pol; + u32 test_hsync_width; + u32 test_vsync_pol; + u32 test_vsync_width; + u32 test_h_width; + u32 test_v_height; + u32 test_rr_d; + u32 test_rr_n; +}; + +struct dp_link_test_audio { + u32 test_audio_sampling_rate; + u32 test_audio_channel_count; + u32 test_audio_pattern_type; + u32 test_audio_period_ch_1; + u32 test_audio_period_ch_2; + u32 test_audio_period_ch_3; + u32 test_audio_period_ch_4; + u32 test_audio_period_ch_5; + u32 test_audio_period_ch_6; + u32 test_audio_period_ch_7; + u32 test_audio_period_ch_8; +}; + +struct dp_link_phy_params { + u32 phy_test_pattern_sel; + u8 v_level; + u8 p_level; +}; + +struct dp_link { + u32 sink_request; + u32 test_response; + bool psm_enabled; + + u8 sink_count; + struct dp_link_test_video test_video; + struct dp_link_test_audio test_audio; + struct dp_link_phy_params phy_params; + struct dp_link_info link_params; +}; + +/** + * mdss_dp_test_bit_depth_to_bpp() - convert test bit depth to bpp + * @tbd: test bit depth + * + * Returns the bits per pixel (bpp) to be used corresponding to the + * git bit depth value. This function assumes that bit depth has + * already been validated. + */ +static inline u32 dp_link_bit_depth_to_bpp(u32 tbd) +{ + /* + * Few simplistic rules and assumptions made here: + * 1. Bit depth is per color component + * 2. If bit depth is unknown return 0 + * 3. Assume 3 color components + */ + switch (tbd) { + case DP_TEST_BIT_DEPTH_6: + return 18; + case DP_TEST_BIT_DEPTH_8: + return 24; + case DP_TEST_BIT_DEPTH_10: + return 30; + case DP_TEST_BIT_DEPTH_UNKNOWN: + default: + return 0; + } +} + +/** + * dp_test_bit_depth_to_bpc() - convert test bit depth to bpc + * @tbd: test bit depth + * + * Returns the bits per comp (bpc) to be used corresponding to the + * bit depth value. This function assumes that bit depth has + * already been validated. 
+ */ +static inline u32 dp_link_bit_depth_to_bpc(u32 tbd) +{ + switch (tbd) { + case DP_TEST_BIT_DEPTH_6: + return 6; + case DP_TEST_BIT_DEPTH_8: + return 8; + case DP_TEST_BIT_DEPTH_10: + return 10; + case DP_TEST_BIT_DEPTH_UNKNOWN: + default: + return 0; + } +} + +void dp_link_reset_phy_params_vx_px(struct dp_link *dp_link); +u32 dp_link_get_test_bits_depth(struct dp_link *dp_link, u32 bpp); +int dp_link_process_request(struct dp_link *dp_link); +int dp_link_get_colorimetry_config(struct dp_link *dp_link); +int dp_link_adjust_levels(struct dp_link *dp_link, u8 *link_status); +bool dp_link_send_test_response(struct dp_link *dp_link); +int dp_link_psm_config(struct dp_link *dp_link, + struct dp_link_info *link_info, bool enable); +bool dp_link_send_edid_checksum(struct dp_link *dp_link, u8 checksum); + +/** + * dp_link_get() - get the functionalities of dp test module + * + * + * return: a pointer to dp_link struct + */ +struct dp_link *dp_link_get(struct device *dev, struct drm_dp_aux *aux); + +#endif /* _DP_LINK_H_ */ diff --git a/drivers/gpu/drm/msm/dp/dp_panel.c b/drivers/gpu/drm/msm/dp/dp_panel.c new file mode 100644 index 000000000..d38086650 --- /dev/null +++ b/drivers/gpu/drm/msm/dp/dp_panel.c @@ -0,0 +1,464 @@ +// SPDX-License-Identifier: GPL-2.0-only +/* + * Copyright (c) 2012-2020, The Linux Foundation. All rights reserved. + */ + +#include "dp_panel.h" + +#include <drm/drm_connector.h> +#include <drm/drm_edid.h> +#include <drm/drm_print.h> + +struct dp_panel_private { + struct device *dev; + struct drm_device *drm_dev; + struct dp_panel dp_panel; + struct drm_dp_aux *aux; + struct dp_link *link; + struct dp_catalog *catalog; + bool panel_on; + bool aux_cfg_update_done; +}; + +static int dp_panel_read_dpcd(struct dp_panel *dp_panel) +{ + int rc = 0; + size_t len; + ssize_t rlen; + struct dp_panel_private *panel; + struct dp_link_info *link_info; + u8 *dpcd, major = 0, minor = 0, temp; + u32 offset = DP_DPCD_REV; + + dpcd = dp_panel->dpcd; + + panel = container_of(dp_panel, struct dp_panel_private, dp_panel); + link_info = &dp_panel->link_info; + + rlen = drm_dp_dpcd_read(panel->aux, offset, + dpcd, (DP_RECEIVER_CAP_SIZE + 1)); + if (rlen < (DP_RECEIVER_CAP_SIZE + 1)) { + DRM_ERROR("dpcd read failed, rlen=%zd\n", rlen); + if (rlen == -ETIMEDOUT) + rc = rlen; + else + rc = -EINVAL; + + goto end; + } + + temp = dpcd[DP_TRAINING_AUX_RD_INTERVAL]; + + /* check for EXTENDED_RECEIVER_CAPABILITY_FIELD_PRESENT */ + if (temp & BIT(7)) { + drm_dbg_dp(panel->drm_dev, + "using EXTENDED_RECEIVER_CAPABILITY_FIELD\n"); + offset = DPRX_EXTENDED_DPCD_FIELD; + } + + rlen = drm_dp_dpcd_read(panel->aux, offset, + dpcd, (DP_RECEIVER_CAP_SIZE + 1)); + if (rlen < (DP_RECEIVER_CAP_SIZE + 1)) { + DRM_ERROR("dpcd read failed, rlen=%zd\n", rlen); + if (rlen == -ETIMEDOUT) + rc = rlen; + else + rc = -EINVAL; + + goto end; + } + + link_info->revision = dpcd[DP_DPCD_REV]; + major = (link_info->revision >> 4) & 0x0f; + minor = link_info->revision & 0x0f; + + link_info->rate = drm_dp_bw_code_to_link_rate(dpcd[DP_MAX_LINK_RATE]); + link_info->num_lanes = dpcd[DP_MAX_LANE_COUNT] & DP_MAX_LANE_COUNT_MASK; + + if (link_info->num_lanes > dp_panel->max_dp_lanes) + link_info->num_lanes = dp_panel->max_dp_lanes; + + /* Limit support upto HBR2 until HBR3 support is added */ + if (link_info->rate >= (drm_dp_bw_code_to_link_rate(DP_LINK_BW_5_4))) + link_info->rate = drm_dp_bw_code_to_link_rate(DP_LINK_BW_5_4); + + drm_dbg_dp(panel->drm_dev, "version: %d.%d\n", major, minor); + drm_dbg_dp(panel->drm_dev, "link_rate=%d\n", 
link_info->rate); + drm_dbg_dp(panel->drm_dev, "lane_count=%d\n", link_info->num_lanes); + + if (drm_dp_enhanced_frame_cap(dpcd)) + link_info->capabilities |= DP_LINK_CAP_ENHANCED_FRAMING; + + dp_panel->dfp_present = dpcd[DP_DOWNSTREAMPORT_PRESENT]; + dp_panel->dfp_present &= DP_DWN_STRM_PORT_PRESENT; + + if (dp_panel->dfp_present && (dpcd[DP_DPCD_REV] > 0x10)) { + dp_panel->ds_port_cnt = dpcd[DP_DOWN_STREAM_PORT_COUNT]; + dp_panel->ds_port_cnt &= DP_PORT_COUNT_MASK; + len = DP_DOWNSTREAM_PORTS * DP_DOWNSTREAM_CAP_SIZE; + + rlen = drm_dp_dpcd_read(panel->aux, + DP_DOWNSTREAM_PORT_0, dp_panel->ds_cap_info, len); + if (rlen < len) { + DRM_ERROR("ds port status failed, rlen=%zd\n", rlen); + rc = -EINVAL; + goto end; + } + } + +end: + return rc; +} + +static u32 dp_panel_get_supported_bpp(struct dp_panel *dp_panel, + u32 mode_edid_bpp, u32 mode_pclk_khz) +{ + struct dp_link_info *link_info; + const u32 max_supported_bpp = 30, min_supported_bpp = 18; + u32 bpp = 0, data_rate_khz = 0; + + bpp = min_t(u32, mode_edid_bpp, max_supported_bpp); + + link_info = &dp_panel->link_info; + data_rate_khz = link_info->num_lanes * link_info->rate * 8; + + while (bpp > min_supported_bpp) { + if (mode_pclk_khz * bpp <= data_rate_khz) + break; + bpp -= 6; + } + + return bpp; +} + +static int dp_panel_update_modes(struct drm_connector *connector, + struct edid *edid) +{ + int rc = 0; + + if (edid) { + rc = drm_connector_update_edid_property(connector, edid); + if (rc) { + DRM_ERROR("failed to update edid property %d\n", rc); + return rc; + } + rc = drm_add_edid_modes(connector, edid); + return rc; + } + + rc = drm_connector_update_edid_property(connector, NULL); + if (rc) + DRM_ERROR("failed to update edid property %d\n", rc); + + return rc; +} + +int dp_panel_read_sink_caps(struct dp_panel *dp_panel, + struct drm_connector *connector) +{ + int rc = 0, bw_code; + int rlen, count; + struct dp_panel_private *panel; + + if (!dp_panel || !connector) { + DRM_ERROR("invalid input\n"); + return -EINVAL; + } + + panel = container_of(dp_panel, struct dp_panel_private, dp_panel); + + rc = dp_panel_read_dpcd(dp_panel); + if (rc) { + DRM_ERROR("read dpcd failed %d\n", rc); + return rc; + } + + bw_code = drm_dp_link_rate_to_bw_code(dp_panel->link_info.rate); + if (!is_link_rate_valid(bw_code) || + !is_lane_count_valid(dp_panel->link_info.num_lanes) || + (bw_code > dp_panel->max_bw_code)) { + DRM_ERROR("Illegal link rate=%d lane=%d\n", dp_panel->link_info.rate, + dp_panel->link_info.num_lanes); + return -EINVAL; + } + + if (dp_panel->dfp_present) { + rlen = drm_dp_dpcd_read(panel->aux, DP_SINK_COUNT, + &count, 1); + if (rlen == 1) { + count = DP_GET_SINK_COUNT(count); + if (!count) { + DRM_ERROR("no downstream ports connected\n"); + panel->link->sink_count = 0; + rc = -ENOTCONN; + goto end; + } + } + } + + kfree(dp_panel->edid); + dp_panel->edid = NULL; + + dp_panel->edid = drm_get_edid(connector, + &panel->aux->ddc); + if (!dp_panel->edid) { + DRM_ERROR("panel edid read failed\n"); + /* check edid read fail is due to unplug */ + if (!dp_catalog_link_is_connected(panel->catalog)) { + rc = -ETIMEDOUT; + goto end; + } + } + + if (panel->aux_cfg_update_done) { + drm_dbg_dp(panel->drm_dev, + "read DPCD with updated AUX config\n"); + rc = dp_panel_read_dpcd(dp_panel); + bw_code = drm_dp_link_rate_to_bw_code(dp_panel->link_info.rate); + if (rc || !is_link_rate_valid(bw_code) || + !is_lane_count_valid(dp_panel->link_info.num_lanes) + || (bw_code > dp_panel->max_bw_code)) { + DRM_ERROR("read dpcd failed %d\n", rc); + return rc; + } + 
panel->aux_cfg_update_done = false; + } +end: + return rc; +} + +u32 dp_panel_get_mode_bpp(struct dp_panel *dp_panel, + u32 mode_edid_bpp, u32 mode_pclk_khz) +{ + struct dp_panel_private *panel; + u32 bpp; + + if (!dp_panel || !mode_edid_bpp || !mode_pclk_khz) { + DRM_ERROR("invalid input\n"); + return 0; + } + + panel = container_of(dp_panel, struct dp_panel_private, dp_panel); + + if (dp_panel->video_test) + bpp = dp_link_bit_depth_to_bpp( + panel->link->test_video.test_bit_depth); + else + bpp = dp_panel_get_supported_bpp(dp_panel, mode_edid_bpp, + mode_pclk_khz); + + return bpp; +} + +int dp_panel_get_modes(struct dp_panel *dp_panel, + struct drm_connector *connector) +{ + if (!dp_panel) { + DRM_ERROR("invalid input\n"); + return -EINVAL; + } + + if (dp_panel->edid) + return dp_panel_update_modes(connector, dp_panel->edid); + + return 0; +} + +static u8 dp_panel_get_edid_checksum(struct edid *edid) +{ + edid += edid->extensions; + + return edid->checksum; +} + +void dp_panel_handle_sink_request(struct dp_panel *dp_panel) +{ + struct dp_panel_private *panel; + + if (!dp_panel) { + DRM_ERROR("invalid input\n"); + return; + } + + panel = container_of(dp_panel, struct dp_panel_private, dp_panel); + + if (panel->link->sink_request & DP_TEST_LINK_EDID_READ) { + u8 checksum; + + if (dp_panel->edid) + checksum = dp_panel_get_edid_checksum(dp_panel->edid); + else + checksum = dp_panel->connector->real_edid_checksum; + + dp_link_send_edid_checksum(panel->link, checksum); + dp_link_send_test_response(panel->link); + } +} + +void dp_panel_tpg_config(struct dp_panel *dp_panel, bool enable) +{ + struct dp_catalog *catalog; + struct dp_panel_private *panel; + + if (!dp_panel) { + DRM_ERROR("invalid input\n"); + return; + } + + panel = container_of(dp_panel, struct dp_panel_private, dp_panel); + catalog = panel->catalog; + + if (!panel->panel_on) { + drm_dbg_dp(panel->drm_dev, + "DP panel not enabled, handle TPG on next on\n"); + return; + } + + if (!enable) { + dp_catalog_panel_tpg_disable(catalog); + return; + } + + drm_dbg_dp(panel->drm_dev, "calling catalog tpg_enable\n"); + dp_catalog_panel_tpg_enable(catalog, &panel->dp_panel.dp_mode.drm_mode); +} + +void dp_panel_dump_regs(struct dp_panel *dp_panel) +{ + struct dp_catalog *catalog; + struct dp_panel_private *panel; + + panel = container_of(dp_panel, struct dp_panel_private, dp_panel); + catalog = panel->catalog; + + dp_catalog_dump_regs(catalog); +} + +int dp_panel_timing_cfg(struct dp_panel *dp_panel) +{ + u32 data, total_ver, total_hor; + struct dp_catalog *catalog; + struct dp_panel_private *panel; + struct drm_display_mode *drm_mode; + + panel = container_of(dp_panel, struct dp_panel_private, dp_panel); + catalog = panel->catalog; + drm_mode = &panel->dp_panel.dp_mode.drm_mode; + + drm_dbg_dp(panel->drm_dev, "width=%d hporch= %d %d %d\n", + drm_mode->hdisplay, drm_mode->htotal - drm_mode->hsync_end, + drm_mode->hsync_start - drm_mode->hdisplay, + drm_mode->hsync_end - drm_mode->hsync_start); + + drm_dbg_dp(panel->drm_dev, "height=%d vporch= %d %d %d\n", + drm_mode->vdisplay, drm_mode->vtotal - drm_mode->vsync_end, + drm_mode->vsync_start - drm_mode->vdisplay, + drm_mode->vsync_end - drm_mode->vsync_start); + + total_hor = drm_mode->htotal; + + total_ver = drm_mode->vtotal; + + data = total_ver; + data <<= 16; + data |= total_hor; + + catalog->total = data; + + data = (drm_mode->vtotal - drm_mode->vsync_start); + data <<= 16; + data |= (drm_mode->htotal - drm_mode->hsync_start); + + catalog->sync_start = data; + + data = drm_mode->vsync_end 
- drm_mode->vsync_start; + data <<= 16; + data |= (panel->dp_panel.dp_mode.v_active_low << 31); + data |= drm_mode->hsync_end - drm_mode->hsync_start; + data |= (panel->dp_panel.dp_mode.h_active_low << 15); + + catalog->width_blanking = data; + + data = drm_mode->vdisplay; + data <<= 16; + data |= drm_mode->hdisplay; + + catalog->dp_active = data; + + dp_catalog_panel_timing_cfg(catalog); + panel->panel_on = true; + + return 0; +} + +int dp_panel_init_panel_info(struct dp_panel *dp_panel) +{ + struct drm_display_mode *drm_mode; + struct dp_panel_private *panel; + + drm_mode = &dp_panel->dp_mode.drm_mode; + + panel = container_of(dp_panel, struct dp_panel_private, dp_panel); + + /* + * print resolution info as this is a result + * of user initiated action of cable connection + */ + drm_dbg_dp(panel->drm_dev, "SET NEW RESOLUTION:\n"); + drm_dbg_dp(panel->drm_dev, "%dx%d@%dfps\n", + drm_mode->hdisplay, drm_mode->vdisplay, drm_mode_vrefresh(drm_mode)); + drm_dbg_dp(panel->drm_dev, + "h_porches(back|front|width) = (%d|%d|%d)\n", + drm_mode->htotal - drm_mode->hsync_end, + drm_mode->hsync_start - drm_mode->hdisplay, + drm_mode->hsync_end - drm_mode->hsync_start); + drm_dbg_dp(panel->drm_dev, + "v_porches(back|front|width) = (%d|%d|%d)\n", + drm_mode->vtotal - drm_mode->vsync_end, + drm_mode->vsync_start - drm_mode->vdisplay, + drm_mode->vsync_end - drm_mode->vsync_start); + drm_dbg_dp(panel->drm_dev, "pixel clock (KHz)=(%d)\n", + drm_mode->clock); + drm_dbg_dp(panel->drm_dev, "bpp = %d\n", dp_panel->dp_mode.bpp); + + dp_panel->dp_mode.bpp = max_t(u32, 18, + min_t(u32, dp_panel->dp_mode.bpp, 30)); + drm_dbg_dp(panel->drm_dev, "updated bpp = %d\n", + dp_panel->dp_mode.bpp); + + return 0; +} + +struct dp_panel *dp_panel_get(struct dp_panel_in *in) +{ + struct dp_panel_private *panel; + struct dp_panel *dp_panel; + + if (!in->dev || !in->catalog || !in->aux || !in->link) { + DRM_ERROR("invalid input\n"); + return ERR_PTR(-EINVAL); + } + + panel = devm_kzalloc(in->dev, sizeof(*panel), GFP_KERNEL); + if (!panel) + return ERR_PTR(-ENOMEM); + + panel->dev = in->dev; + panel->aux = in->aux; + panel->catalog = in->catalog; + panel->link = in->link; + + dp_panel = &panel->dp_panel; + dp_panel->max_bw_code = DP_LINK_BW_8_1; + panel->aux_cfg_update_done = false; + + return dp_panel; +} + +void dp_panel_put(struct dp_panel *dp_panel) +{ + if (!dp_panel) + return; + + kfree(dp_panel->edid); +} diff --git a/drivers/gpu/drm/msm/dp/dp_panel.h b/drivers/gpu/drm/msm/dp/dp_panel.h new file mode 100644 index 000000000..d861197ac --- /dev/null +++ b/drivers/gpu/drm/msm/dp/dp_panel.h @@ -0,0 +1,99 @@ +/* SPDX-License-Identifier: GPL-2.0-only */ +/* + * Copyright (c) 2012-2020, The Linux Foundation. All rights reserved. 
+ */ + +#ifndef _DP_PANEL_H_ +#define _DP_PANEL_H_ + +#include <drm/msm_drm.h> + +#include "dp_aux.h" +#include "dp_link.h" +#include "dp_hpd.h" + +struct edid; + +#define DPRX_EXTENDED_DPCD_FIELD 0x2200 + +#define DP_DOWNSTREAM_PORTS 4 +#define DP_DOWNSTREAM_CAP_SIZE 4 + +struct dp_display_mode { + struct drm_display_mode drm_mode; + u32 capabilities; + u32 bpp; + u32 h_active_low; + u32 v_active_low; +}; + +struct dp_panel_in { + struct device *dev; + struct drm_dp_aux *aux; + struct dp_link *link; + struct dp_catalog *catalog; +}; + +struct dp_panel { + /* dpcd raw data */ + u8 dpcd[DP_RECEIVER_CAP_SIZE + 1]; + u8 ds_cap_info[DP_DOWNSTREAM_PORTS * DP_DOWNSTREAM_CAP_SIZE]; + u32 ds_port_cnt; + u32 dfp_present; + + struct dp_link_info link_info; + struct drm_dp_desc desc; + struct edid *edid; + struct drm_connector *connector; + struct dp_display_mode dp_mode; + bool video_test; + + u32 vic; + u32 max_dp_lanes; + + u32 max_bw_code; +}; + +int dp_panel_init_panel_info(struct dp_panel *dp_panel); +int dp_panel_deinit(struct dp_panel *dp_panel); +int dp_panel_timing_cfg(struct dp_panel *dp_panel); +void dp_panel_dump_regs(struct dp_panel *dp_panel); +int dp_panel_read_sink_caps(struct dp_panel *dp_panel, + struct drm_connector *connector); +u32 dp_panel_get_mode_bpp(struct dp_panel *dp_panel, u32 mode_max_bpp, + u32 mode_pclk_khz); +int dp_panel_get_modes(struct dp_panel *dp_panel, + struct drm_connector *connector); +void dp_panel_handle_sink_request(struct dp_panel *dp_panel); +void dp_panel_tpg_config(struct dp_panel *dp_panel, bool enable); + +/** + * is_link_rate_valid() - validates the link rate + * @bw_code: link rate bandwidth code requested by the sink + * + * Returns true if the requested link rate is supported. + */ +static inline bool is_link_rate_valid(u32 bw_code) +{ + return (bw_code == DP_LINK_BW_1_62 || + bw_code == DP_LINK_BW_2_7 || + bw_code == DP_LINK_BW_5_4 || + bw_code == DP_LINK_BW_8_1); +} + +/** + * is_lane_count_valid() - validates the lane count + * @lane_count: lane count requested by the sink + * + * Returns true if the requested lane count is supported. + */ +static inline bool is_lane_count_valid(u32 lane_count) +{ + return (lane_count == 1 || + lane_count == 2 || + lane_count == 4); +} + +struct dp_panel *dp_panel_get(struct dp_panel_in *in); +void dp_panel_put(struct dp_panel *dp_panel); +#endif /* _DP_PANEL_H_ */
diff --git a/drivers/gpu/drm/msm/dp/dp_parser.c b/drivers/gpu/drm/msm/dp/dp_parser.c new file mode 100644 index 000000000..dcbe893d6 --- /dev/null +++ b/drivers/gpu/drm/msm/dp/dp_parser.c @@ -0,0 +1,293 @@ +// SPDX-License-Identifier: GPL-2.0-only +/* + * Copyright (c) 2012-2020, The Linux Foundation. All rights reserved. 
+ */ + +#include <linux/of_gpio.h> +#include <linux/phy/phy.h> + +#include <drm/drm_of.h> +#include <drm/drm_print.h> +#include <drm/drm_bridge.h> + +#include "dp_parser.h" +#include "dp_reg.h" + +#define DP_DEFAULT_AHB_OFFSET 0x0000 +#define DP_DEFAULT_AHB_SIZE 0x0200 +#define DP_DEFAULT_AUX_OFFSET 0x0200 +#define DP_DEFAULT_AUX_SIZE 0x0200 +#define DP_DEFAULT_LINK_OFFSET 0x0400 +#define DP_DEFAULT_LINK_SIZE 0x0C00 +#define DP_DEFAULT_P0_OFFSET 0x1000 +#define DP_DEFAULT_P0_SIZE 0x0400 + +static void __iomem *dp_ioremap(struct platform_device *pdev, int idx, size_t *len) +{ + struct resource *res; + void __iomem *base; + + base = devm_platform_get_and_ioremap_resource(pdev, idx, &res); + if (!IS_ERR(base)) + *len = resource_size(res); + + return base; +} + +static int dp_parser_ctrl_res(struct dp_parser *parser) +{ + struct platform_device *pdev = parser->pdev; + struct dp_io *io = &parser->io; + struct dss_io_data *dss = &io->dp_controller; + + dss->ahb.base = dp_ioremap(pdev, 0, &dss->ahb.len); + if (IS_ERR(dss->ahb.base)) + return PTR_ERR(dss->ahb.base); + + dss->aux.base = dp_ioremap(pdev, 1, &dss->aux.len); + if (IS_ERR(dss->aux.base)) { + /* + * The initial binding had a single reg, but in order to + * support variation in the sub-region sizes this was split. + * dp_ioremap() will fail with -EINVAL here if only a single + * reg is specified, so fill in the sub-region offsets and + * lengths based on this single region. + */ + if (PTR_ERR(dss->aux.base) == -EINVAL) { + if (dss->ahb.len < DP_DEFAULT_P0_OFFSET + DP_DEFAULT_P0_SIZE) { + DRM_ERROR("legacy memory region not large enough\n"); + return -EINVAL; + } + + dss->ahb.len = DP_DEFAULT_AHB_SIZE; + dss->aux.base = dss->ahb.base + DP_DEFAULT_AUX_OFFSET; + dss->aux.len = DP_DEFAULT_AUX_SIZE; + dss->link.base = dss->ahb.base + DP_DEFAULT_LINK_OFFSET; + dss->link.len = DP_DEFAULT_LINK_SIZE; + dss->p0.base = dss->ahb.base + DP_DEFAULT_P0_OFFSET; + dss->p0.len = DP_DEFAULT_P0_SIZE; + } else { + DRM_ERROR("unable to remap aux region: %pe\n", dss->aux.base); + return PTR_ERR(dss->aux.base); + } + } else { + dss->link.base = dp_ioremap(pdev, 2, &dss->link.len); + if (IS_ERR(dss->link.base)) { + DRM_ERROR("unable to remap link region: %pe\n", dss->link.base); + return PTR_ERR(dss->link.base); + } + + dss->p0.base = dp_ioremap(pdev, 3, &dss->p0.len); + if (IS_ERR(dss->p0.base)) { + DRM_ERROR("unable to remap p0 region: %pe\n", dss->p0.base); + return PTR_ERR(dss->p0.base); + } + } + + io->phy = devm_phy_get(&pdev->dev, "dp"); + if (IS_ERR(io->phy)) + return PTR_ERR(io->phy); + + return 0; +} + +static int dp_parser_misc(struct dp_parser *parser) +{ + struct device_node *of_node = parser->pdev->dev.of_node; + int len; + + len = drm_of_get_data_lanes_count(of_node, 1, DP_MAX_NUM_DP_LANES); + if (len < 0) { + DRM_WARN("Invalid property \"data-lanes\", default max DP lanes = %d\n", + DP_MAX_NUM_DP_LANES); + len = DP_MAX_NUM_DP_LANES; + } + + parser->max_dp_lanes = len; + return 0; +} + +static inline bool dp_parser_check_prefix(const char *clk_prefix, + const char *clk_name) +{ + return !strncmp(clk_prefix, clk_name, strlen(clk_prefix)); +} + +static int dp_parser_init_clk_data(struct dp_parser *parser) +{ + int num_clk, i, rc; + int core_clk_count = 0, ctrl_clk_count = 0, stream_clk_count = 0; + const char *clk_name; + struct device *dev = &parser->pdev->dev; + struct dss_module_power *core_power = &parser->mp[DP_CORE_PM]; + struct dss_module_power *ctrl_power = &parser->mp[DP_CTRL_PM]; + struct dss_module_power *stream_power = 
&parser->mp[DP_STREAM_PM]; + + num_clk = of_property_count_strings(dev->of_node, "clock-names"); + if (num_clk <= 0) { + DRM_ERROR("no clocks are defined\n"); + return -EINVAL; + } + + for (i = 0; i < num_clk; i++) { + rc = of_property_read_string_index(dev->of_node, + "clock-names", i, &clk_name); + if (rc < 0) + return rc; + + if (dp_parser_check_prefix("core", clk_name)) + core_clk_count++; + + if (dp_parser_check_prefix("ctrl", clk_name)) + ctrl_clk_count++; + + if (dp_parser_check_prefix("stream", clk_name)) + stream_clk_count++; + } + + /* Initialize the CORE power module */ + if (core_clk_count == 0) { + DRM_ERROR("no core clocks are defined\n"); + return -EINVAL; + } + + core_power->num_clk = core_clk_count; + core_power->clocks = devm_kcalloc(dev, + core_power->num_clk, sizeof(struct clk_bulk_data), + GFP_KERNEL); + if (!core_power->clocks) + return -ENOMEM; + + /* Initialize the CTRL power module */ + if (ctrl_clk_count == 0) { + DRM_ERROR("no ctrl clocks are defined\n"); + return -EINVAL; + } + + ctrl_power->num_clk = ctrl_clk_count; + ctrl_power->clocks = devm_kcalloc(dev, + ctrl_power->num_clk, sizeof(struct clk_bulk_data), + GFP_KERNEL); + if (!ctrl_power->clocks) { + ctrl_power->num_clk = 0; + return -ENOMEM; + } + + /* Initialize the STREAM power module */ + if (stream_clk_count == 0) { + DRM_ERROR("no stream (pixel) clocks are defined\n"); + return -EINVAL; + } + + stream_power->num_clk = stream_clk_count; + stream_power->clocks = devm_kcalloc(dev, + stream_power->num_clk, sizeof(struct clk_bulk_data), + GFP_KERNEL); + if (!stream_power->clocks) { + stream_power->num_clk = 0; + return -ENOMEM; + } + + return 0; +} + +static int dp_parser_clock(struct dp_parser *parser) +{ + int rc = 0, i = 0; + int num_clk = 0; + int core_clk_index = 0, ctrl_clk_index = 0, stream_clk_index = 0; + int core_clk_count = 0, ctrl_clk_count = 0, stream_clk_count = 0; + const char *clk_name; + struct device *dev = &parser->pdev->dev; + struct dss_module_power *core_power = &parser->mp[DP_CORE_PM]; + struct dss_module_power *ctrl_power = &parser->mp[DP_CTRL_PM]; + struct dss_module_power *stream_power = &parser->mp[DP_STREAM_PM]; + + rc = dp_parser_init_clk_data(parser); + if (rc) { + DRM_ERROR("failed to initialize power data %d\n", rc); + return -EINVAL; + } + + core_clk_count = core_power->num_clk; + ctrl_clk_count = ctrl_power->num_clk; + stream_clk_count = stream_power->num_clk; + + num_clk = core_clk_count + ctrl_clk_count + stream_clk_count; + + for (i = 0; i < num_clk; i++) { + rc = of_property_read_string_index(dev->of_node, "clock-names", + i, &clk_name); + if (rc) { + DRM_ERROR("error reading clock-names %d\n", rc); + return rc; + } + if (dp_parser_check_prefix("core", clk_name) && + core_clk_index < core_clk_count) { + core_power->clocks[core_clk_index].id = devm_kstrdup(dev, clk_name, GFP_KERNEL); + core_clk_index++; + } else if (dp_parser_check_prefix("stream", clk_name) && + stream_clk_index < stream_clk_count) { + stream_power->clocks[stream_clk_index].id = devm_kstrdup(dev, clk_name, GFP_KERNEL); + stream_clk_index++; + } else if (dp_parser_check_prefix("ctrl", clk_name) && + ctrl_clk_index < ctrl_clk_count) { + ctrl_power->clocks[ctrl_clk_index].id = devm_kstrdup(dev, clk_name, GFP_KERNEL); + ctrl_clk_index++; + } + } + + return 0; +} + +int devm_dp_parser_find_next_bridge(struct device *dev, struct dp_parser *parser) +{ + struct platform_device *pdev = parser->pdev; + struct drm_bridge *bridge; + + bridge = devm_drm_of_get_bridge(dev, pdev->dev.of_node, 1, 0); + if 
(IS_ERR(bridge)) + return PTR_ERR(bridge); + + parser->next_bridge = bridge; + + return 0; +} + +static int dp_parser_parse(struct dp_parser *parser) +{ + int rc = 0; + + if (!parser) { + DRM_ERROR("invalid input\n"); + return -EINVAL; + } + + rc = dp_parser_ctrl_res(parser); + if (rc) + return rc; + + rc = dp_parser_misc(parser); + if (rc) + return rc; + + rc = dp_parser_clock(parser); + if (rc) + return rc; + + return 0; +} + +struct dp_parser *dp_parser_get(struct platform_device *pdev) +{ + struct dp_parser *parser; + + parser = devm_kzalloc(&pdev->dev, sizeof(*parser), GFP_KERNEL); + if (!parser) + return ERR_PTR(-ENOMEM); + + parser->parse = dp_parser_parse; + parser->pdev = pdev; + + return parser; +} diff --git a/drivers/gpu/drm/msm/dp/dp_parser.h b/drivers/gpu/drm/msm/dp/dp_parser.h new file mode 100644 index 000000000..d30ab773d --- /dev/null +++ b/drivers/gpu/drm/msm/dp/dp_parser.h @@ -0,0 +1,153 @@ +/* SPDX-License-Identifier: GPL-2.0-only */ +/* + * Copyright (c) 2012-2020, The Linux Foundation. All rights reserved. + */ + +#ifndef _DP_PARSER_H_ +#define _DP_PARSER_H_ + +#include <linux/platform_device.h> +#include <linux/phy/phy.h> +#include <linux/phy/phy-dp.h> + +#include "msm_drv.h" + +#define DP_LABEL "MDSS DP DISPLAY" +#define DP_MAX_PIXEL_CLK_KHZ 675000 +#define DP_MAX_NUM_DP_LANES 4 + +enum dp_pm_type { + DP_CORE_PM, + DP_CTRL_PM, + DP_STREAM_PM, + DP_PHY_PM, + DP_MAX_PM +}; + +struct dss_io_region { + size_t len; + void __iomem *base; +}; + +struct dss_io_data { + struct dss_io_region ahb; + struct dss_io_region aux; + struct dss_io_region link; + struct dss_io_region p0; +}; + +static inline const char *dp_parser_pm_name(enum dp_pm_type module) +{ + switch (module) { + case DP_CORE_PM: return "DP_CORE_PM"; + case DP_CTRL_PM: return "DP_CTRL_PM"; + case DP_STREAM_PM: return "DP_STREAM_PM"; + case DP_PHY_PM: return "DP_PHY_PM"; + default: return "???"; + } +} + +/** + * struct dp_display_data - display related device tree data. 
+ * + * @ctrl_node: reference to controller device + * @phy_node: reference to phy device + * @is_active: is the controller currently active + * @name: name of the display + * @display_type: type of the display + */ +struct dp_display_data { + struct device_node *ctrl_node; + struct device_node *phy_node; + bool is_active; + const char *name; + const char *display_type; +}; + +/** + * struct dp_io - controller's IO related data + * + * @dp_controller: Display Port controller mapped memory address + * @phy: handle to the DP PHY + * @phy_opts: configuration options for the DP PHY + */ +struct dp_io { + struct dss_io_data dp_controller; + struct phy *phy; + union phy_configure_opts phy_opts; +}; + +/** + * struct dp_pinctrl - DP's pin control + * + * @pin: pin-controller's instance + * @state_active: active state pin control + * @state_hpd_active: hpd active state pin control + * @state_suspend: suspend state pin control + */ +struct dp_pinctrl { + struct pinctrl *pin; + struct pinctrl_state *state_active; + struct pinctrl_state *state_hpd_active; + struct pinctrl_state *state_suspend; +}; + +/* Regulators for DP devices */ +struct dp_reg_entry { + char name[32]; + int enable_load; + int disable_load; +}; + +struct dss_module_power { + unsigned int num_clk; + struct clk_bulk_data *clocks; +}; + +/** + * struct dp_parser - DP parser's data exposed to clients + * + * @pdev: platform data of the client + * @mp: clock related data for each power module + * @pinctrl: pin-control related data + * @disp_data: controller's display related data + * @parse: function to be called by client to parse device tree. + */ +struct dp_parser { + struct platform_device *pdev; + struct dss_module_power mp[DP_MAX_PM]; + struct dp_pinctrl pinctrl; + struct dp_io io; + struct dp_display_data disp_data; + u32 max_dp_lanes; + struct drm_bridge *next_bridge; + + int (*parse)(struct dp_parser *parser); +}; + +/** + * dp_parser_get() - get the DP's device tree parser module + * + * @pdev: platform data of the client + * return: pointer to dp_parser structure. + * + * This function provides client capability to parse the + * device tree and populate the data structures. The data + * related to clocks, regulators, pin-control and other + * resources can be parsed using this module. + */ +struct dp_parser *dp_parser_get(struct platform_device *pdev); + +/** + * devm_dp_parser_find_next_bridge() - find an additional bridge to DP + * + * @dev: device to tie bridge lifetime to + * @parser: dp_parser data from client + * + * This function is used to find any additional bridge attached to + * the DP controller. The eDP interface requires a panel bridge. + * + * Return: 0 if able to get the bridge, otherwise negative errno for failure. + */ +int devm_dp_parser_find_next_bridge(struct device *dev, struct dp_parser *parser); + +#endif
diff --git a/drivers/gpu/drm/msm/dp/dp_power.c b/drivers/gpu/drm/msm/dp/dp_power.c new file mode 100644 index 000000000..c0aaabb03 --- /dev/null +++ b/drivers/gpu/drm/msm/dp/dp_power.c @@ -0,0 +1,257 @@ +// SPDX-License-Identifier: GPL-2.0-only +/* + * Copyright (c) 2012-2020, The Linux Foundation. All rights reserved. 
+ */ + +#define pr_fmt(fmt) "[drm-dp] %s: " fmt, __func__ + +#include <linux/clk.h> +#include <linux/clk-provider.h> +#include <linux/regulator/consumer.h> +#include <linux/pm_opp.h> +#include "dp_power.h" +#include "msm_drv.h" + +struct dp_power_private { + struct dp_parser *parser; + struct platform_device *pdev; + struct device *dev; + struct drm_device *drm_dev; + struct clk *link_clk_src; + struct clk *pixel_provider; + struct clk *link_provider; + + struct dp_power dp_power; +}; + +static int dp_power_clk_init(struct dp_power_private *power) +{ + int rc = 0; + struct dss_module_power *core, *ctrl, *stream; + struct device *dev = &power->pdev->dev; + + core = &power->parser->mp[DP_CORE_PM]; + ctrl = &power->parser->mp[DP_CTRL_PM]; + stream = &power->parser->mp[DP_STREAM_PM]; + + rc = devm_clk_bulk_get(dev, core->num_clk, core->clocks); + if (rc) { + DRM_ERROR("failed to get %s clk. err=%d\n", + dp_parser_pm_name(DP_CORE_PM), rc); + return rc; + } + + rc = devm_clk_bulk_get(dev, ctrl->num_clk, ctrl->clocks); + if (rc) { + DRM_ERROR("failed to get %s clk. err=%d\n", + dp_parser_pm_name(DP_CTRL_PM), rc); + return -ENODEV; + } + + rc = devm_clk_bulk_get(dev, stream->num_clk, stream->clocks); + if (rc) { + DRM_ERROR("failed to get %s clk. err=%d\n", + dp_parser_pm_name(DP_CTRL_PM), rc); + return -ENODEV; + } + + return 0; +} + +int dp_power_clk_status(struct dp_power *dp_power, enum dp_pm_type pm_type) +{ + struct dp_power_private *power; + + power = container_of(dp_power, struct dp_power_private, dp_power); + + drm_dbg_dp(power->drm_dev, + "core_clk_on=%d link_clk_on=%d stream_clk_on=%d\n", + dp_power->core_clks_on, dp_power->link_clks_on, dp_power->stream_clks_on); + + if (pm_type == DP_CORE_PM) + return dp_power->core_clks_on; + + if (pm_type == DP_CTRL_PM) + return dp_power->link_clks_on; + + if (pm_type == DP_STREAM_PM) + return dp_power->stream_clks_on; + + return 0; +} + +int dp_power_clk_enable(struct dp_power *dp_power, + enum dp_pm_type pm_type, bool enable) +{ + int rc = 0; + struct dp_power_private *power; + struct dss_module_power *mp; + + power = container_of(dp_power, struct dp_power_private, dp_power); + + if (pm_type != DP_CORE_PM && pm_type != DP_CTRL_PM && + pm_type != DP_STREAM_PM) { + DRM_ERROR("unsupported power module: %s\n", + dp_parser_pm_name(pm_type)); + return -EINVAL; + } + + if (enable) { + if (pm_type == DP_CORE_PM && dp_power->core_clks_on) { + drm_dbg_dp(power->drm_dev, + "core clks already enabled\n"); + return 0; + } + + if (pm_type == DP_CTRL_PM && dp_power->link_clks_on) { + drm_dbg_dp(power->drm_dev, + "links clks already enabled\n"); + return 0; + } + + if (pm_type == DP_STREAM_PM && dp_power->stream_clks_on) { + drm_dbg_dp(power->drm_dev, + "pixel clks already enabled\n"); + return 0; + } + + if ((pm_type == DP_CTRL_PM) && (!dp_power->core_clks_on)) { + drm_dbg_dp(power->drm_dev, + "Enable core clks before link clks\n"); + mp = &power->parser->mp[DP_CORE_PM]; + + rc = clk_bulk_prepare_enable(mp->num_clk, mp->clocks); + if (rc) { + DRM_ERROR("fail to enable clks: %s. 
err=%d\n", + dp_parser_pm_name(DP_CORE_PM), rc); + return rc; + } + dp_power->core_clks_on = true; + } + } + + mp = &power->parser->mp[pm_type]; + if (enable) { + rc = clk_bulk_prepare_enable(mp->num_clk, mp->clocks); + if (rc) { + DRM_ERROR("failed to enable clks, err: %d\n", rc); + return rc; + } + } else { + clk_bulk_disable_unprepare(mp->num_clk, mp->clocks); + } + + if (pm_type == DP_CORE_PM) + dp_power->core_clks_on = enable; + else if (pm_type == DP_STREAM_PM) + dp_power->stream_clks_on = enable; + else + dp_power->link_clks_on = enable; + + drm_dbg_dp(power->drm_dev, "%s clocks for %s\n", + enable ? "enable" : "disable", + dp_parser_pm_name(pm_type)); + drm_dbg_dp(power->drm_dev, + "strem_clks:%s link_clks:%s core_clks:%s\n", + dp_power->stream_clks_on ? "on" : "off", + dp_power->link_clks_on ? "on" : "off", + dp_power->core_clks_on ? "on" : "off"); + + return 0; +} + +int dp_power_client_init(struct dp_power *dp_power) +{ + int rc = 0; + struct dp_power_private *power; + + if (!dp_power) { + DRM_ERROR("invalid power data\n"); + return -EINVAL; + } + + power = container_of(dp_power, struct dp_power_private, dp_power); + + pm_runtime_enable(&power->pdev->dev); + + rc = dp_power_clk_init(power); + if (rc) + DRM_ERROR("failed to init clocks %d\n", rc); + + return rc; +} + +void dp_power_client_deinit(struct dp_power *dp_power) +{ + struct dp_power_private *power; + + if (!dp_power) { + DRM_ERROR("invalid power data\n"); + return; + } + + power = container_of(dp_power, struct dp_power_private, dp_power); + + pm_runtime_disable(&power->pdev->dev); +} + +int dp_power_init(struct dp_power *dp_power, bool flip) +{ + int rc = 0; + struct dp_power_private *power = NULL; + + if (!dp_power) { + DRM_ERROR("invalid power data\n"); + return -EINVAL; + } + + power = container_of(dp_power, struct dp_power_private, dp_power); + + pm_runtime_get_sync(&power->pdev->dev); + + rc = dp_power_clk_enable(dp_power, DP_CORE_PM, true); + if (rc) { + DRM_ERROR("failed to enable DP core clocks, %d\n", rc); + goto exit; + } + + return 0; + +exit: + pm_runtime_put_sync(&power->pdev->dev); + return rc; +} + +int dp_power_deinit(struct dp_power *dp_power) +{ + struct dp_power_private *power; + + power = container_of(dp_power, struct dp_power_private, dp_power); + + dp_power_clk_enable(dp_power, DP_CORE_PM, false); + pm_runtime_put_sync(&power->pdev->dev); + return 0; +} + +struct dp_power *dp_power_get(struct device *dev, struct dp_parser *parser) +{ + struct dp_power_private *power; + struct dp_power *dp_power; + + if (!parser) { + DRM_ERROR("invalid input\n"); + return ERR_PTR(-EINVAL); + } + + power = devm_kzalloc(&parser->pdev->dev, sizeof(*power), GFP_KERNEL); + if (!power) + return ERR_PTR(-ENOMEM); + + power->parser = parser; + power->pdev = parser->pdev; + power->dev = dev; + + dp_power = &power->dp_power; + + return dp_power; +} diff --git a/drivers/gpu/drm/msm/dp/dp_power.h b/drivers/gpu/drm/msm/dp/dp_power.h new file mode 100644 index 000000000..e3f959ffa --- /dev/null +++ b/drivers/gpu/drm/msm/dp/dp_power.h @@ -0,0 +1,107 @@ +/* SPDX-License-Identifier: GPL-2.0-only */ +/* + * Copyright (c) 2012-2020, The Linux Foundation. All rights reserved. 
+ */ + +#ifndef _DP_POWER_H_ +#define _DP_POWER_H_ + +#include "dp_parser.h" + +/** + * struct dp_power - DisplayPort's power related data + * + * @core_clks_on: true while the core clocks are enabled + * @link_clks_on: true while the link (ctrl) clocks are enabled + * @stream_clks_on: true while the stream (pixel) clocks are enabled + */ +struct dp_power { + bool core_clks_on; + bool link_clks_on; + bool stream_clks_on; +}; + +/** + * dp_power_init() - enable power supplies for display controller + * + * @power: instance of power module + * @flip: bool for flipping gpio direction + * return: 0 if success or error if failure. + * + * This API will turn on the regulators and configure the gpios + * for aux/hpd. + */ +int dp_power_init(struct dp_power *power, bool flip); + +/** + * dp_power_deinit() - turn off regulators and gpios. + * + * @power: instance of power module + * return: 0 for success + * + * This API turns off power and regulators. + */ +int dp_power_deinit(struct dp_power *power); + +/** + * dp_power_clk_status() - display controller clocks status + * + * @power: instance of power module + * @pm_type: type of pm, core/ctrl/stream + * return: status of power clocks + * + * This API returns the status of the DP clocks + */ + +int dp_power_clk_status(struct dp_power *dp_power, enum dp_pm_type pm_type); + +/** + * dp_power_clk_enable() - enable display controller clocks + * + * @power: instance of power module + * @pm_type: type of pm, core/ctrl/stream + * @enable: enables or disables + * return: 0 for success, error for failure. + * + * This API enables or disables the requested DP clocks + */ + +int dp_power_clk_enable(struct dp_power *power, enum dp_pm_type pm_type, + bool enable); + +/** + * dp_power_client_init() - initialize clock and regulator modules + * + * @power: instance of power module + * return: 0 for success, error for failure. + * + * This API will configure the DisplayPort's clocks and regulator + * modules. + */ +int dp_power_client_init(struct dp_power *power); + +/** + * dp_power_client_deinit() - de-initialize clock and regulator modules + * + * @power: instance of power module + * return: 0 for success, error for failure. + * + * This API will de-initialize the DisplayPort's clocks and regulator + * modules. + */ +void dp_power_client_deinit(struct dp_power *power); + +/** + * dp_power_get() - configure and get the DisplayPort power module data + * + * @dev: device instance of the caller + * @parser: instance of parser module + * return: pointer to allocated power module data + * + * This API configures the DisplayPort's power module and provides + * methods to be called by the client to configure the power related + * modules. + */ +struct dp_power *dp_power_get(struct device *dev, struct dp_parser *parser); + +#endif /* _DP_POWER_H_ */
diff --git a/drivers/gpu/drm/msm/dp/dp_reg.h b/drivers/gpu/drm/msm/dp/dp_reg.h new file mode 100644 index 000000000..268602803 --- /dev/null +++ b/drivers/gpu/drm/msm/dp/dp_reg.h @@ -0,0 +1,308 @@ +/* SPDX-License-Identifier: GPL-2.0-only */ +/* + * Copyright (c) 2017-2020, The Linux Foundation. All rights reserved. 
+ */ + +#ifndef _DP_REG_H_ +#define _DP_REG_H_ + +/* DP_TX Registers */ +#define REG_DP_HW_VERSION (0x00000000) + +#define REG_DP_SW_RESET (0x00000010) +#define DP_SW_RESET (0x00000001) + +#define REG_DP_PHY_CTRL (0x00000014) +#define DP_PHY_CTRL_SW_RESET_PLL (0x00000001) +#define DP_PHY_CTRL_SW_RESET (0x00000004) + +#define REG_DP_CLK_CTRL (0x00000018) +#define REG_DP_CLK_ACTIVE (0x0000001C) +#define REG_DP_INTR_STATUS (0x00000020) +#define REG_DP_INTR_STATUS2 (0x00000024) +#define REG_DP_INTR_STATUS3 (0x00000028) + +#define REG_DP_DP_HPD_CTRL (0x00000000) +#define DP_DP_HPD_CTRL_HPD_EN (0x00000001) + +#define REG_DP_DP_HPD_INT_STATUS (0x00000004) + +#define REG_DP_DP_HPD_INT_ACK (0x00000008) +#define DP_DP_HPD_PLUG_INT_ACK (0x00000001) +#define DP_DP_IRQ_HPD_INT_ACK (0x00000002) +#define DP_DP_HPD_REPLUG_INT_ACK (0x00000004) +#define DP_DP_HPD_UNPLUG_INT_ACK (0x00000008) +#define DP_DP_HPD_STATE_STATUS_BITS_MASK (0x0000000F) +#define DP_DP_HPD_STATE_STATUS_BITS_SHIFT (0x1C) + +#define REG_DP_DP_HPD_INT_MASK (0x0000000C) +#define DP_DP_HPD_PLUG_INT_MASK (0x00000001) +#define DP_DP_IRQ_HPD_INT_MASK (0x00000002) +#define DP_DP_HPD_REPLUG_INT_MASK (0x00000004) +#define DP_DP_HPD_UNPLUG_INT_MASK (0x00000008) +#define DP_DP_HPD_INT_MASK (DP_DP_HPD_PLUG_INT_MASK | \ + DP_DP_IRQ_HPD_INT_MASK | \ + DP_DP_HPD_REPLUG_INT_MASK | \ + DP_DP_HPD_UNPLUG_INT_MASK) +#define DP_DP_HPD_STATE_STATUS_CONNECTED (0x40000000) +#define DP_DP_HPD_STATE_STATUS_PENDING (0x20000000) +#define DP_DP_HPD_STATE_STATUS_DISCONNECTED (0x00000000) +#define DP_DP_HPD_STATE_STATUS_MASK (0xE0000000) + +#define REG_DP_DP_HPD_REFTIMER (0x00000018) +#define DP_DP_HPD_REFTIMER_ENABLE (1 << 16) + +#define REG_DP_DP_HPD_EVENT_TIME_0 (0x0000001C) +#define REG_DP_DP_HPD_EVENT_TIME_1 (0x00000020) +#define DP_DP_HPD_EVENT_TIME_0_VAL (0x3E800FA) +#define DP_DP_HPD_EVENT_TIME_1_VAL (0x1F407D0) + +#define REG_DP_AUX_CTRL (0x00000030) +#define DP_AUX_CTRL_ENABLE (0x00000001) +#define DP_AUX_CTRL_RESET (0x00000002) + +#define REG_DP_AUX_DATA (0x00000034) +#define DP_AUX_DATA_READ (0x00000001) +#define DP_AUX_DATA_WRITE (0x00000000) +#define DP_AUX_DATA_OFFSET (0x00000008) +#define DP_AUX_DATA_INDEX_OFFSET (0x00000010) +#define DP_AUX_DATA_MASK (0x0000ff00) +#define DP_AUX_DATA_INDEX_WRITE (0x80000000) + +#define REG_DP_AUX_TRANS_CTRL (0x00000038) +#define DP_AUX_TRANS_CTRL_I2C (0x00000100) +#define DP_AUX_TRANS_CTRL_GO (0x00000200) +#define DP_AUX_TRANS_CTRL_NO_SEND_ADDR (0x00000400) +#define DP_AUX_TRANS_CTRL_NO_SEND_STOP (0x00000800) + +#define REG_DP_TIMEOUT_COUNT (0x0000003C) +#define REG_DP_AUX_LIMITS (0x00000040) +#define REG_DP_AUX_STATUS (0x00000044) + +#define DP_DPCD_CP_IRQ (0x201) +#define DP_DPCD_RXSTATUS (0x69493) + +#define DP_INTERRUPT_TRANS_NUM (0x000000A0) + +#define REG_DP_MAINLINK_CTRL (0x00000000) +#define DP_MAINLINK_CTRL_ENABLE (0x00000001) +#define DP_MAINLINK_CTRL_RESET (0x00000002) +#define DP_MAINLINK_CTRL_SW_BYPASS_SCRAMBLER (0x00000010) +#define DP_MAINLINK_FB_BOUNDARY_SEL (0x02000000) + +#define REG_DP_STATE_CTRL (0x00000004) +#define DP_STATE_CTRL_LINK_TRAINING_PATTERN1 (0x00000001) +#define DP_STATE_CTRL_LINK_TRAINING_PATTERN2 (0x00000002) +#define DP_STATE_CTRL_LINK_TRAINING_PATTERN3 (0x00000004) +#define DP_STATE_CTRL_LINK_TRAINING_PATTERN4 (0x00000008) +#define DP_STATE_CTRL_LINK_SYMBOL_ERR_MEASURE (0x00000010) +#define DP_STATE_CTRL_LINK_PRBS7 (0x00000020) +#define DP_STATE_CTRL_LINK_TEST_CUSTOM_PATTERN (0x00000040) +#define DP_STATE_CTRL_SEND_VIDEO (0x00000080) +#define DP_STATE_CTRL_PUSH_IDLE (0x00000100) 
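Editor's note: the DP_STATE_CTRL_* bits above select what the mainlink transmits during and after link training. As a minimal, illustrative sketch (not part of this patch), a caller with the link register sub-region mapped (as dp_parser_ctrl_res() above arranges in dss_io_data.link) might program them roughly like this; the helper name and the bare writel() usage are assumptions for illustration only, since the real driver routes such writes through its dp_catalog layer:

#include <linux/io.h>
#include <linux/types.h>

/* Illustrative only: pick a training pattern (1..4) or fall back to
 * normal video, then program REG_DP_STATE_CTRL in the mapped "link"
 * sub-region. */
static void example_select_training_pattern(void __iomem *link_base, u32 pattern)
{
	u32 state;

	switch (pattern) {
	case 1: state = DP_STATE_CTRL_LINK_TRAINING_PATTERN1; break;
	case 2: state = DP_STATE_CTRL_LINK_TRAINING_PATTERN2; break;
	case 3: state = DP_STATE_CTRL_LINK_TRAINING_PATTERN3; break;
	case 4: state = DP_STATE_CTRL_LINK_TRAINING_PATTERN4; break;
	default: state = DP_STATE_CTRL_SEND_VIDEO; break;
	}

	/* Clear the previous selection before programming the new one. */
	writel(0, link_base + REG_DP_STATE_CTRL);
	writel(state, link_base + REG_DP_STATE_CTRL);
}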
+ +#define REG_DP_CONFIGURATION_CTRL (0x00000008) +#define DP_CONFIGURATION_CTRL_SYNC_ASYNC_CLK (0x00000001) +#define DP_CONFIGURATION_CTRL_STATIC_DYNAMIC_CN (0x00000002) +#define DP_CONFIGURATION_CTRL_P_INTERLACED (0x00000004) +#define DP_CONFIGURATION_CTRL_INTERLACED_BTF (0x00000008) +#define DP_CONFIGURATION_CTRL_NUM_OF_LANES (0x00000010) +#define DP_CONFIGURATION_CTRL_ENHANCED_FRAMING (0x00000040) +#define DP_CONFIGURATION_CTRL_SEND_VSC (0x00000080) +#define DP_CONFIGURATION_CTRL_BPC (0x00000100) +#define DP_CONFIGURATION_CTRL_ASSR (0x00000400) +#define DP_CONFIGURATION_CTRL_RGB_YUV (0x00000800) +#define DP_CONFIGURATION_CTRL_LSCLK_DIV (0x00002000) +#define DP_CONFIGURATION_CTRL_NUM_OF_LANES_SHIFT (0x04) +#define DP_CONFIGURATION_CTRL_BPC_SHIFT (0x08) +#define DP_CONFIGURATION_CTRL_LSCLK_DIV_SHIFT (0x0D) + +#define REG_DP_SOFTWARE_MVID (0x00000010) +#define REG_DP_SOFTWARE_NVID (0x00000018) +#define REG_DP_TOTAL_HOR_VER (0x0000001C) +#define REG_DP_START_HOR_VER_FROM_SYNC (0x00000020) +#define REG_DP_HSYNC_VSYNC_WIDTH_POLARITY (0x00000024) +#define REG_DP_ACTIVE_HOR_VER (0x00000028) + +#define REG_DP_MISC1_MISC0 (0x0000002C) +#define DP_MISC0_SYNCHRONOUS_CLK (0x00000001) +#define DP_MISC0_COLORIMETRY_CFG_SHIFT (0x00000001) +#define DP_MISC0_TEST_BITS_DEPTH_SHIFT (0x00000005) + +#define REG_DP_VALID_BOUNDARY (0x00000030) +#define REG_DP_VALID_BOUNDARY_2 (0x00000034) + +#define REG_DP_LOGICAL2PHYSICAL_LANE_MAPPING (0x00000038) +#define LANE0_MAPPING_SHIFT (0x00000000) +#define LANE1_MAPPING_SHIFT (0x00000002) +#define LANE2_MAPPING_SHIFT (0x00000004) +#define LANE3_MAPPING_SHIFT (0x00000006) + +#define REG_DP_MAINLINK_READY (0x00000040) +#define DP_MAINLINK_READY_FOR_VIDEO (0x00000001) +#define DP_MAINLINK_READY_LINK_TRAINING_SHIFT (0x00000003) + +#define REG_DP_MAINLINK_LEVELS (0x00000044) +#define DP_MAINLINK_SAFE_TO_EXIT_LEVEL_2 (0x00000002) + + +#define REG_DP_TU (0x0000004C) + +#define REG_DP_HBR2_COMPLIANCE_SCRAMBLER_RESET (0x00000054) +#define DP_HBR2_ERM_PATTERN (0x00010000) + +#define REG_DP_TEST_80BIT_CUSTOM_PATTERN_REG0 (0x000000C0) +#define REG_DP_TEST_80BIT_CUSTOM_PATTERN_REG1 (0x000000C4) +#define REG_DP_TEST_80BIT_CUSTOM_PATTERN_REG2 (0x000000C8) + +#define MMSS_DP_MISC1_MISC0 (0x0000002C) +#define MMSS_DP_AUDIO_TIMING_GEN (0x00000080) +#define MMSS_DP_AUDIO_TIMING_RBR_32 (0x00000084) +#define MMSS_DP_AUDIO_TIMING_HBR_32 (0x00000088) +#define MMSS_DP_AUDIO_TIMING_RBR_44 (0x0000008C) +#define MMSS_DP_AUDIO_TIMING_HBR_44 (0x00000090) +#define MMSS_DP_AUDIO_TIMING_RBR_48 (0x00000094) +#define MMSS_DP_AUDIO_TIMING_HBR_48 (0x00000098) + +#define MMSS_DP_PSR_CRC_RG (0x00000154) +#define MMSS_DP_PSR_CRC_B (0x00000158) + +#define REG_DP_COMPRESSION_MODE_CTRL (0x00000180) + +#define MMSS_DP_AUDIO_CFG (0x00000200) +#define MMSS_DP_AUDIO_STATUS (0x00000204) +#define MMSS_DP_AUDIO_PKT_CTRL (0x00000208) +#define MMSS_DP_AUDIO_PKT_CTRL2 (0x0000020C) +#define MMSS_DP_AUDIO_ACR_CTRL (0x00000210) +#define MMSS_DP_AUDIO_CTRL_RESET (0x00000214) + +#define MMSS_DP_SDP_CFG (0x00000228) +#define MMSS_DP_SDP_CFG2 (0x0000022C) +#define MMSS_DP_AUDIO_TIMESTAMP_0 (0x00000230) +#define MMSS_DP_AUDIO_TIMESTAMP_1 (0x00000234) + +#define MMSS_DP_AUDIO_STREAM_0 (0x00000240) +#define MMSS_DP_AUDIO_STREAM_1 (0x00000244) + +#define MMSS_DP_EXTENSION_0 (0x00000250) +#define MMSS_DP_EXTENSION_1 (0x00000254) +#define MMSS_DP_EXTENSION_2 (0x00000258) +#define MMSS_DP_EXTENSION_3 (0x0000025C) +#define MMSS_DP_EXTENSION_4 (0x00000260) +#define MMSS_DP_EXTENSION_5 (0x00000264) +#define MMSS_DP_EXTENSION_6 
(0x00000268) +#define MMSS_DP_EXTENSION_7 (0x0000026C) +#define MMSS_DP_EXTENSION_8 (0x00000270) +#define MMSS_DP_EXTENSION_9 (0x00000274) +#define MMSS_DP_AUDIO_COPYMANAGEMENT_0 (0x00000278) +#define MMSS_DP_AUDIO_COPYMANAGEMENT_1 (0x0000027C) +#define MMSS_DP_AUDIO_COPYMANAGEMENT_2 (0x00000280) +#define MMSS_DP_AUDIO_COPYMANAGEMENT_3 (0x00000284) +#define MMSS_DP_AUDIO_COPYMANAGEMENT_4 (0x00000288) +#define MMSS_DP_AUDIO_COPYMANAGEMENT_5 (0x0000028C) +#define MMSS_DP_AUDIO_ISRC_0 (0x00000290) +#define MMSS_DP_AUDIO_ISRC_1 (0x00000294) +#define MMSS_DP_AUDIO_ISRC_2 (0x00000298) +#define MMSS_DP_AUDIO_ISRC_3 (0x0000029C) +#define MMSS_DP_AUDIO_ISRC_4 (0x000002A0) +#define MMSS_DP_AUDIO_ISRC_5 (0x000002A4) +#define MMSS_DP_AUDIO_INFOFRAME_0 (0x000002A8) +#define MMSS_DP_AUDIO_INFOFRAME_1 (0x000002AC) +#define MMSS_DP_AUDIO_INFOFRAME_2 (0x000002B0) + +#define MMSS_DP_GENERIC0_0 (0x00000300) +#define MMSS_DP_GENERIC0_1 (0x00000304) +#define MMSS_DP_GENERIC0_2 (0x00000308) +#define MMSS_DP_GENERIC0_3 (0x0000030C) +#define MMSS_DP_GENERIC0_4 (0x00000310) +#define MMSS_DP_GENERIC0_5 (0x00000314) +#define MMSS_DP_GENERIC0_6 (0x00000318) +#define MMSS_DP_GENERIC0_7 (0x0000031C) +#define MMSS_DP_GENERIC0_8 (0x00000320) +#define MMSS_DP_GENERIC0_9 (0x00000324) +#define MMSS_DP_GENERIC1_0 (0x00000328) +#define MMSS_DP_GENERIC1_1 (0x0000032C) +#define MMSS_DP_GENERIC1_2 (0x00000330) +#define MMSS_DP_GENERIC1_3 (0x00000334) +#define MMSS_DP_GENERIC1_4 (0x00000338) +#define MMSS_DP_GENERIC1_5 (0x0000033C) +#define MMSS_DP_GENERIC1_6 (0x00000340) +#define MMSS_DP_GENERIC1_7 (0x00000344) +#define MMSS_DP_GENERIC1_8 (0x00000348) +#define MMSS_DP_GENERIC1_9 (0x0000034C) + +#define MMSS_DP_VSCEXT_0 (0x000002D0) +#define MMSS_DP_VSCEXT_1 (0x000002D4) +#define MMSS_DP_VSCEXT_2 (0x000002D8) +#define MMSS_DP_VSCEXT_3 (0x000002DC) +#define MMSS_DP_VSCEXT_4 (0x000002E0) +#define MMSS_DP_VSCEXT_5 (0x000002E4) +#define MMSS_DP_VSCEXT_6 (0x000002E8) +#define MMSS_DP_VSCEXT_7 (0x000002EC) +#define MMSS_DP_VSCEXT_8 (0x000002F0) +#define MMSS_DP_VSCEXT_9 (0x000002F4) + +#define MMSS_DP_BIST_ENABLE (0x00000000) +#define DP_BIST_ENABLE_DPBIST_EN (0x00000001) + +#define MMSS_DP_TIMING_ENGINE_EN (0x00000010) +#define DP_TIMING_ENGINE_EN_EN (0x00000001) + +#define MMSS_DP_INTF_CONFIG (0x00000014) +#define MMSS_DP_INTF_HSYNC_CTL (0x00000018) +#define MMSS_DP_INTF_VSYNC_PERIOD_F0 (0x0000001C) +#define MMSS_DP_INTF_VSYNC_PERIOD_F1 (0x00000020) +#define MMSS_DP_INTF_VSYNC_PULSE_WIDTH_F0 (0x00000024) +#define MMSS_DP_INTF_VSYNC_PULSE_WIDTH_F1 (0x00000028) +#define MMSS_INTF_DISPLAY_V_START_F0 (0x0000002C) +#define MMSS_INTF_DISPLAY_V_START_F1 (0x00000030) +#define MMSS_DP_INTF_DISPLAY_V_END_F0 (0x00000034) +#define MMSS_DP_INTF_DISPLAY_V_END_F1 (0x00000038) +#define MMSS_DP_INTF_ACTIVE_V_START_F0 (0x0000003C) +#define MMSS_DP_INTF_ACTIVE_V_START_F1 (0x00000040) +#define MMSS_DP_INTF_ACTIVE_V_END_F0 (0x00000044) +#define MMSS_DP_INTF_ACTIVE_V_END_F1 (0x00000048) +#define MMSS_DP_INTF_DISPLAY_HCTL (0x0000004C) +#define MMSS_DP_INTF_ACTIVE_HCTL (0x00000050) +#define MMSS_DP_INTF_POLARITY_CTL (0x00000058) + +#define MMSS_DP_TPG_MAIN_CONTROL (0x00000060) +#define MMSS_DP_DSC_DTO (0x0000007C) +#define DP_TPG_CHECKERED_RECT_PATTERN (0x00000100) + +#define MMSS_DP_TPG_VIDEO_CONFIG (0x00000064) +#define DP_TPG_VIDEO_CONFIG_BPP_8BIT (0x00000001) +#define DP_TPG_VIDEO_CONFIG_RGB (0x00000004) + +#define MMSS_DP_ASYNC_FIFO_CONFIG (0x00000088) + +#define REG_DP_PHY_AUX_INTERRUPT_CLEAR (0x0000004C) +#define REG_DP_PHY_AUX_BIST_CFG (0x00000050) 
+#define REG_DP_PHY_AUX_INTERRUPT_STATUS (0x000000BC) + +/* DP HDCP 1.3 registers */ +#define DP_HDCP_CTRL (0x0A0) +#define DP_HDCP_STATUS (0x0A4) +#define DP_HDCP_SW_UPPER_AKSV (0x098) +#define DP_HDCP_SW_LOWER_AKSV (0x09C) +#define DP_HDCP_ENTROPY_CTRL0 (0x350) +#define DP_HDCP_ENTROPY_CTRL1 (0x35C) +#define DP_HDCP_SHA_STATUS (0x0C8) +#define DP_HDCP_RCVPORT_DATA2_0 (0x0B0) +#define DP_HDCP_RCVPORT_DATA3 (0x0A4) +#define DP_HDCP_RCVPORT_DATA4 (0x0A8) +#define DP_HDCP_RCVPORT_DATA5 (0x0C0) +#define DP_HDCP_RCVPORT_DATA6 (0x0C4) + +#define HDCP_SEC_DP_TZ_HV_HLOS_HDCP_SHA_CTRL (0x024) +#define HDCP_SEC_DP_TZ_HV_HLOS_HDCP_SHA_DATA (0x028) +#define HDCP_SEC_DP_TZ_HV_HLOS_HDCP_RCVPORT_DATA0 (0x004) +#define HDCP_SEC_DP_TZ_HV_HLOS_HDCP_RCVPORT_DATA1 (0x008) +#define HDCP_SEC_DP_TZ_HV_HLOS_HDCP_RCVPORT_DATA7 (0x00C) +#define HDCP_SEC_DP_TZ_HV_HLOS_HDCP_RCVPORT_DATA8 (0x010) +#define HDCP_SEC_DP_TZ_HV_HLOS_HDCP_RCVPORT_DATA9 (0x014) +#define HDCP_SEC_DP_TZ_HV_HLOS_HDCP_RCVPORT_DATA10 (0x018) +#define HDCP_SEC_DP_TZ_HV_HLOS_HDCP_RCVPORT_DATA11 (0x01C) +#define HDCP_SEC_DP_TZ_HV_HLOS_HDCP_RCVPORT_DATA12 (0x020) + +#endif /* _DP_REG_H_ */
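Editor's note: to make the split between the AHB and link register sub-regions concrete, here is a minimal, illustrative sketch (not part of this patch) that reads the controller version and the mainlink-ready status through the dss_io_data regions filled in by dp_parser_ctrl_res() earlier; the function name is hypothetical and the real driver performs these reads through its dp_catalog helpers:

#include <linux/io.h>
#include <linux/printk.h>

/* Illustrative only: "io" is a struct dp_io (see dp_parser.h above) whose
 * dp_controller sub-regions have already been ioremapped. */
static void example_dump_dp_status(struct dp_io *io)
{
	struct dss_io_data *dss = &io->dp_controller;
	u32 hw_version, mainlink_ready;

	/* REG_DP_HW_VERSION sits in the AHB sub-region ... */
	hw_version = readl(dss->ahb.base + REG_DP_HW_VERSION);

	/* ... while REG_DP_MAINLINK_READY belongs to the link sub-region. */
	mainlink_ready = readl(dss->link.base + REG_DP_MAINLINK_READY);

	pr_info("DP hw version 0x%x, mainlink %sready for video\n",
		hw_version,
		(mainlink_ready & DP_MAINLINK_READY_FOR_VIDEO) ? "" : "not ");
}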