/*
 * Copyright 2021 Advanced Micro Devices, Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: AMD
 *
 */

#include "amdgpu_dm_psr.h"
#include "dc_dmub_srv.h"
#include "dc.h"
#include "dm_helpers.h"
#include "amdgpu_dm.h"
#include "modules/power/power_helpers.h"

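/*
 * Check whether the sink and ASIC can support PSR-SU (PSR2 with selective
 * updates): DMCUB must be present, the ASIC must be DCN 3.1 or newer, the
 * panel must be one of the specific panels PSR-SU is enabled for, and the
 * required ALPM and Y-coordinate/granularity DPCD capabilities must be
 * advertised.
 */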
static bool link_supports_psrsu(struct dc_link *link)
{
	struct dc *dc = link->ctx->dc;

	if (!dc->caps.dmcub_support)
		return false;

	if (dc->ctx->dce_version < DCN_VERSION_3_1)
		return false;

	if (!is_psr_su_specific_panel(link))
		return false;

	if (!link->dpcd_caps.alpm_caps.bits.AUX_WAKE_ALPM_CAP ||
	    !link->dpcd_caps.psr_info.psr_dpcd_caps.bits.Y_COORDINATE_REQUIRED)
		return false;

	if (link->dpcd_caps.psr_info.psr_dpcd_caps.bits.SU_GRANULARITY_REQUIRED &&
	    !link->dpcd_caps.psr_info.psr2_su_y_granularity_cap)
		return false;

	if (amdgpu_dc_debug_mask & DC_DISABLE_PSR_SU)
		return false;

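	/* Finally, require a DMUB firmware version new enough for PSR-SU. */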
	return dc_dmub_check_min_version(dc->ctx->dmub_srv->dmub);
}

/*
 * amdgpu_dm_set_psr_caps() - set link PSR capabilities
 * @link: link to parse the sink's PSR DPCD capabilities for
 *
 * Determines whether the sink supports PSR1 or PSR-SU and records the result
 * in link->psr_settings.
 */
void amdgpu_dm_set_psr_caps(struct dc_link *link)
{
	if (!(link->connector_signal & SIGNAL_TYPE_EDP)) {
		link->psr_settings.psr_feature_enabled = false;
		return;
	}

	if (link->type == dc_connection_none) {
		link->psr_settings.psr_feature_enabled = false;
		return;
	}

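	/* A sink PSR DPCD version of 0 means the panel does not support PSR. */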
	if (link->dpcd_caps.psr_info.psr_version == 0) {
		link->psr_settings.psr_version = DC_PSR_VERSION_UNSUPPORTED;
		link->psr_settings.psr_feature_enabled = false;
	} else {
		if (link_supports_psrsu(link))
			link->psr_settings.psr_version = DC_PSR_VERSION_SU_1;
		else
			link->psr_settings.psr_version = DC_PSR_VERSION_1;

		link->psr_settings.psr_feature_enabled = true;
	}

	DRM_INFO("PSR support %d, DC PSR ver %d, sink PSR ver %d DPCD caps 0x%x su_y_granularity %d\n",
		 link->psr_settings.psr_feature_enabled,
		 link->psr_settings.psr_version,
		 link->dpcd_caps.psr_info.psr_version,
		 link->dpcd_caps.psr_info.psr_dpcd_caps.raw,
		 link->dpcd_caps.psr_info.psr2_su_y_granularity_cap);
}

/*
 * amdgpu_dm_link_setup_psr() - configure PSR on the stream's link
 * @stream: stream state
 *
 * Return: true if the PSR configuration was successfully programmed
 */
bool amdgpu_dm_link_setup_psr(struct dc_stream_state *stream)
{
	struct dc_link *link = NULL;
	struct psr_config psr_config = {0};
	struct psr_context psr_context = {0};
	struct dc *dc = NULL;
	bool ret = false;

	if (stream == NULL)
		return false;

	link = stream->link;
	dc = link->ctx->dc;

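	/* Only program PSR when capability parsing found a supported PSR version. */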
	if (link->psr_settings.psr_version != DC_PSR_VERSION_UNSUPPORTED) {
		mod_power_calc_psr_configs(&psr_config, link, stream);

		/* Linux DM specific updates to the PSR config fields */
		psr_config.allow_smu_optimizations =
			(amdgpu_dc_feature_mask & DC_PSR_ALLOW_SMU_OPT) &&
			mod_power_only_edp(dc->current_state, stream);
		psr_config.allow_multi_disp_optimizations =
			(amdgpu_dc_feature_mask & DC_PSR_ALLOW_MULTI_DISP_OPT);

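		/*
		 * For PSR-SU the selective-update Y granularity has to work
		 * with the DSC slice height; bail out if it cannot.
		 */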
		if (!psr_su_set_dsc_slice_height(dc, link, stream, &psr_config))
			return false;

		ret = dc_link_setup_psr(link, stream, &psr_config, &psr_context);
	}

	DRM_DEBUG_DRIVER("PSR link: %d\n", link->psr_settings.psr_feature_enabled);

	return ret;
}

/*
 * amdgpu_dm_psr_enable() - enable the PSR firmware
 * @stream: stream state
 */
void amdgpu_dm_psr_enable(struct dc_stream_state *stream)
{
	struct dc_link *link = stream->link;
	unsigned int vsync_rate_hz = 0;
	struct dc_static_screen_params params = {0};
	/*
	 * Number of static frames before generating an interrupt to enter
	 * PSR; start from a fail-safe default of 2 static frames.
	 */
	unsigned int num_frames_static = 2;
	unsigned int power_opt = 0;
	bool psr_enable = true;

	DRM_DEBUG_DRIVER("Enabling psr...\n");

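	/*
	 * vsync rate in Hz = pixel clock / (h_total * v_total);
	 * pix_clk_100hz is in units of 100 Hz, hence the extra * 100.
	 */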
	vsync_rate_hz = div64_u64(div64_u64((
			stream->timing.pix_clk_100hz * (uint64_t)100),
			stream->timing.v_total),
			stream->timing.h_total);

	/*
	 * Round up: calculate the number of frames such that at least 30 ms
	 * of time has passed.
	 */
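	/*
	 * For example, at 60 Hz the frame time is 16666 us, giving
	 * 30000 / 16666 + 1 = 2 frames; at 120 Hz (8333 us per frame) it is
	 * 30000 / 8333 + 1 = 4 frames (~33 ms of static time).
	 */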
	if (vsync_rate_hz != 0) {
		unsigned int frame_time_microsec = 1000000 / vsync_rate_hz;

		num_frames_static = (30000 / frame_time_microsec) + 1;
	}

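	/*
	 * Cursor, overlay and surface updates count as screen activity for
	 * the static-screen detection.
	 */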
	params.triggers.cursor_update = true;
	params.triggers.overlay_update = true;
	params.triggers.surface_update = true;
	params.num_frames = num_frames_static;

	dc_stream_set_static_screen_params(link->ctx->dc,
					   &stream, 1,
					   &params);

	/*
	 * Only enable static-screen optimizations for PSR1. For PSR-SU, this
	 * causes issues with the vstartup interrupt, which amdgpu_dm uses to
	 * send vblank events.
	 */
	if (link->psr_settings.psr_version < DC_PSR_VERSION_SU_1)
		power_opt |= psr_power_opt_z10_static_screen;

	dc_link_set_psr_allow_active(link, &psr_enable, false, false, &power_opt);

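	/* On ASICs with IPS support, allow idle optimizations once PSR is enabled. */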
	if (link->ctx->dc->caps.ips_support)
		dc_allow_idle_optimizations(link->ctx->dc, true);
}

/*
 * amdgpu_dm_psr_disable() - disable the PSR firmware
 * @stream: stream state
 *
 * Return: true if success
 */
bool amdgpu_dm_psr_disable(struct dc_stream_state *stream)
{
	unsigned int power_opt = 0;
	bool psr_enable = false;

	DRM_DEBUG_DRIVER("Disabling psr...\n");

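	/* Disable PSR and wait for the transition to complete before returning. */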
	return dc_link_set_psr_allow_active(stream->link, &psr_enable, true, false, &power_opt);
}

/*
 * amdgpu_dm_psr_disable_all() - disable the PSR firmware on all streams
 * @dm: pointer to amdgpu_display_manager
 *
 * Disables PSR if it is enabled on any stream.
 *
 * Return: true if success
 */
bool amdgpu_dm_psr_disable_all(struct amdgpu_display_manager *dm)
{
	DRM_DEBUG_DRIVER("Disabling psr if psr is enabled on any stream\n");
	return dc_set_psr_allow_active(dm->dc, false);
}

/*
 * amdgpu_dm_psr_is_active_allowed() - check if PSR is allowed to be active
 * on any stream
 * @dm: pointer to amdgpu_display_manager
 *
 * Return: true if allowed
 */
bool amdgpu_dm_psr_is_active_allowed(struct amdgpu_display_manager *dm)
{
	unsigned int i;
	bool allow_active = false;

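	/*
	 * Scan every committed stream: PSR counts as allowed if any link has
	 * the feature enabled and is currently allowed to activate it.
	 */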
	for (i = 0; i < dm->dc->current_state->stream_count; i++) {
		struct dc_link *link;
		struct dc_stream_state *stream = dm->dc->current_state->streams[i];

		link = stream->link;
		if (!link)
			continue;
		if (link->psr_settings.psr_feature_enabled &&
		    link->psr_settings.psr_allow_active) {
			allow_active = true;
			break;
		}
	}

	return allow_active;
}