/*
 * Copyright 2021 Advanced Micro Devices, Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: AMD
 *
 */

#include "amdgpu_dm_psr.h"
#include "dc_dmub_srv.h"
#include "dc.h"
#include "dm_helpers.h"
#include "amdgpu_dm.h"
#include "modules/power/power_helpers.h"

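/*
 * Decide whether PSR-SU (PSR with selective updates) can be used on this
 * link: both the ASIC (DMCUB firmware, DCN 3.1+) and the sink must advertise
 * the required capabilities.
 */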
static bool link_supports_psrsu(struct dc_link *link)
{
	struct dc *dc = link->ctx->dc;

	if (!dc->caps.dmcub_support)
		return false;

	if (dc->ctx->dce_version < DCN_VERSION_3_1)
		return false;

	if (!is_psr_su_specific_panel(link))
		return false;

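	/*
	 * The sink must support AUX-wake ALPM and require the Y coordinate in
	 * the VSC SDP, and, when SU granularity is required, report a valid
	 * Y granularity value.
	 */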
	if (!link->dpcd_caps.alpm_caps.bits.AUX_WAKE_ALPM_CAP ||
	    !link->dpcd_caps.psr_info.psr_dpcd_caps.bits.Y_COORDINATE_REQUIRED)
		return false;

	if (link->dpcd_caps.psr_info.psr_dpcd_caps.bits.SU_GRANULARITY_REQUIRED &&
	    !link->dpcd_caps.psr_info.psr2_su_y_granularity_cap)
		return false;

	if (amdgpu_dc_debug_mask & DC_DISABLE_PSR_SU)
		return false;

	/* Temporarily disable PSR-SU to avoid glitches */
	return false;
}

/*
 * amdgpu_dm_set_psr_caps() - set link psr capabilities
 * @link: link to query and populate PSR capabilities for
 */
void amdgpu_dm_set_psr_caps(struct dc_link *link)
{
	if (!(link->connector_signal & SIGNAL_TYPE_EDP)) {
		link->psr_settings.psr_feature_enabled = false;
		return;
	}

	if (link->type == dc_connection_none) {
		link->psr_settings.psr_feature_enabled = false;
		return;
	}

	if (link->dpcd_caps.psr_info.psr_version == 0) {
		link->psr_settings.psr_version = DC_PSR_VERSION_UNSUPPORTED;
		link->psr_settings.psr_feature_enabled = false;
	} else {
		if (link_supports_psrsu(link))
			link->psr_settings.psr_version = DC_PSR_VERSION_SU_1;
		else
			link->psr_settings.psr_version = DC_PSR_VERSION_1;

		link->psr_settings.psr_feature_enabled = true;
	}

	DRM_INFO("PSR support %d, DC PSR ver %d, sink PSR ver %d DPCD caps 0x%x su_y_granularity %d\n",
		 link->psr_settings.psr_feature_enabled,
		 link->psr_settings.psr_version,
		 link->dpcd_caps.psr_info.psr_version,
		 link->dpcd_caps.psr_info.psr_dpcd_caps.raw,
		 link->dpcd_caps.psr_info.psr2_su_y_granularity_cap);
}

/*
 * amdgpu_dm_link_setup_psr() - configure psr link
 * @stream: stream state
 *
 * Return: true on success
 */
bool amdgpu_dm_link_setup_psr(struct dc_stream_state *stream)
{
	struct dc_link *link = NULL;
	struct psr_config psr_config = {0};
	struct psr_context psr_context = {0};
	struct dc *dc = NULL;
	bool ret = false;

	if (stream == NULL)
		return false;

	link = stream->link;
	dc = link->ctx->dc;

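	/*
	 * Only build and commit a PSR configuration when
	 * amdgpu_dm_set_psr_caps() found a supported PSR version on the sink.
	 */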
	if (link->psr_settings.psr_version != DC_PSR_VERSION_UNSUPPORTED) {
		mod_power_calc_psr_configs(&psr_config, link, stream);

		/* linux DM specific updating for psr config fields */
		psr_config.allow_smu_optimizations =
			(amdgpu_dc_feature_mask & DC_PSR_ALLOW_SMU_OPT) &&
			mod_power_only_edp(dc->current_state, stream);
		psr_config.allow_multi_disp_optimizations =
			(amdgpu_dc_feature_mask & DC_PSR_ALLOW_MULTI_DISP_OPT);

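		/*
		 * Program the DSC slice height needed for PSR-SU; abort PSR
		 * setup if it cannot be configured.
		 */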
		if (!psr_su_set_dsc_slice_height(dc, link, stream, &psr_config))
			return false;

		ret = dc_link_setup_psr(link, stream, &psr_config, &psr_context);
	}

	DRM_DEBUG_DRIVER("PSR link: %d\n", link->psr_settings.psr_feature_enabled);

	return ret;
}

/*
 * amdgpu_dm_psr_enable() - enable psr f/w
 * @stream: stream state
 */
void amdgpu_dm_psr_enable(struct dc_stream_state *stream)
{
	struct dc_link *link = stream->link;
	unsigned int vsync_rate_hz = 0;
	struct dc_static_screen_params params = {0};
	/*
	 * Calculate the number of static frames before generating an
	 * interrupt to enter PSR. Initialize to a fail-safe of 2 static
	 * frames.
	 */
	unsigned int num_frames_static = 2;
	unsigned int power_opt = 0;
	bool psr_enable = true;

	DRM_DEBUG_DRIVER("Enabling psr...\n");

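	/*
	 * Derive the nominal refresh rate (Hz) from the pixel clock and the
	 * total horizontal and vertical timing.
	 */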
	vsync_rate_hz = div64_u64(div64_u64((
			stream->timing.pix_clk_100hz * (uint64_t)100),
			stream->timing.v_total),
			stream->timing.h_total);

	/*
	 * Round up: calculate the number of frames such that at least 30 ms
	 * of time has passed.
	 */
	if (vsync_rate_hz != 0) {
		unsigned int frame_time_microsec = 1000000 / vsync_rate_hz;

		num_frames_static = (30000 / frame_time_microsec) + 1;
	}

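	/*
	 * Treat cursor, overlay and surface updates as activity and require
	 * num_frames_static consecutive unchanged frames before the static
	 * screen event triggers PSR entry.
	 */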
	params.triggers.cursor_update = true;
	params.triggers.overlay_update = true;
	params.triggers.surface_update = true;
	params.num_frames = num_frames_static;

	dc_stream_set_static_screen_params(link->ctx->dc,
					   &stream, 1,
					   &params);

	/*
	 * Only enable static-screen optimizations for PSR1. For PSR-SU, this
	 * causes issues with the vstartup interrupt, which amdgpu_dm uses to
	 * send vblank events.
	 */
	if (link->psr_settings.psr_version < DC_PSR_VERSION_SU_1)
		power_opt |= psr_power_opt_z10_static_screen;

	dc_link_set_psr_allow_active(link, &psr_enable, false, false, &power_opt);

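	/*
	 * With PSR enabled, allow DC to enter idle optimizations (IPS) when
	 * the hardware supports them.
	 */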
	if (link->ctx->dc->caps.ips_support)
		dc_allow_idle_optimizations(link->ctx->dc, true);
}

/*
 * amdgpu_dm_psr_disable() - disable psr f/w
 * @stream: stream state
 * @wait: whether to wait for the PSR state transition to complete
 *
 * Return: true on success
 */
bool amdgpu_dm_psr_disable(struct dc_stream_state *stream, bool wait)
{
	bool psr_enable = false;

	DRM_DEBUG_DRIVER("Disabling psr...\n");

	return dc_link_set_psr_allow_active(stream->link, &psr_enable, wait, false, NULL);
}

/*
 * amdgpu_dm_psr_disable_all() - disable psr f/w for all streams
 * if psr is enabled on any stream
 * @dm: pointer to amdgpu_display_manager
 *
 * Return: true on success
 */
bool amdgpu_dm_psr_disable_all(struct amdgpu_display_manager *dm)
{
	DRM_DEBUG_DRIVER("Disabling psr if psr is enabled on any stream\n");
	return dc_set_psr_allow_active(dm->dc, false);
}

/*
 * amdgpu_dm_psr_is_active_allowed() - check if psr is allowed on any stream
 * @dm: pointer to amdgpu_display_manager
 *
 * Return: true if allowed
 */
bool amdgpu_dm_psr_is_active_allowed(struct amdgpu_display_manager *dm)
{
	unsigned int i;
	bool allow_active = false;

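	/*
	 * Scan every stream in the current DC state; PSR counts as allowed
	 * if any link has the feature enabled and allows it to be active.
	 */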
	for (i = 0; i < dm->dc->current_state->stream_count; i++) {
		struct dc_link *link;
		struct dc_stream_state *stream = dm->dc->current_state->streams[i];

		link = stream->link;
		if (!link)
			continue;
		if (link->psr_settings.psr_feature_enabled &&
		    link->psr_settings.psr_allow_active) {
			allow_active = true;
			break;
		}
	}

	return allow_active;
}

/**
 * amdgpu_dm_psr_wait_disable() - Wait for eDP panel to exit PSR
 * @stream: stream state attached to the eDP link
 *
 * Waits for a max of 500ms for the eDP panel to exit PSR.
 *
 * Return: true if panel exited PSR, false otherwise.
 */
bool amdgpu_dm_psr_wait_disable(struct dc_stream_state *stream)
{
	enum dc_psr_state psr_state = PSR_STATE0;
	struct dc_link *link = stream->link;
	int retry_count;

	if (link == NULL)
		return false;

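	/* Poll the PSR state every 500us, up to 1000 times (~500ms total). */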
	for (retry_count = 0; retry_count < 1000; retry_count++) {
		dc_link_get_psr_state(link, &psr_state);
		if (psr_state == PSR_STATE0)
			break;
		udelay(500);
	}

	if (retry_count == 1000)
		return false;

	return true;
}