/*
 * Copyright 2012-16 Advanced Micro Devices, Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: AMD
 *
 */

#include "core_types.h"
#include "clk_mgr_internal.h"

#include "dce/dce_11_0_d.h"
#include "dce/dce_11_0_sh_mask.h"
#include "dce110_clk_mgr.h"
#include "../clk_mgr/dce100/dce_clk_mgr.h"

/* set register offset */
#define SR(reg_name)\
	.reg_name = mm ## reg_name

/* set register offset with instance */
#define SRI(reg_name, block, id)\
	.reg_name = mm ## block ## id ## _ ## reg_name

static const struct clk_mgr_registers disp_clk_regs = {
		CLK_COMMON_REG_LIST_DCE_BASE()
};

static const struct clk_mgr_shift disp_clk_shift = {
		CLK_COMMON_MASK_SH_LIST_DCE_COMMON_BASE(__SHIFT)
};

static const struct clk_mgr_mask disp_clk_mask = {
		CLK_COMMON_MASK_SH_LIST_DCE_COMMON_BASE(_MASK)
};

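/*
 * Highest display and pixel clocks, in kHz, allowed in each clock state on
 * DCE 11.0 hardware; copied into clk_mgr->max_clks_by_state at construction
 * time and consulted when choosing the required clocks state.
 */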
static const struct state_dependent_clocks dce110_max_clks_by_state[] = {
/*ClocksStateInvalid - should not be used*/
{ .display_clk_khz = 0, .pixel_clk_khz = 0 },
/*ClocksStateUltraLow - currently not supposed to be used, per HW design team*/
{ .display_clk_khz = 352000, .pixel_clk_khz = 330000 },
/*ClocksStateLow*/
{ .display_clk_khz = 352000, .pixel_clk_khz = 330000 },
/*ClocksStateNominal*/
{ .display_clk_khz = 467000, .pixel_clk_khz = 400000 },
/*ClocksStatePerformance*/
{ .display_clk_khz = 643000, .pixel_clk_khz = 400000 } };

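/*
 * Return the lowest SCLK level from the ASIC's bounding box that satisfies
 * required_sclk (in kHz). If no levels were reported, pass required_sclk
 * through unchanged; if even the highest level is insufficient, return that
 * highest level (this case should have been rejected during validation).
 */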
static int determine_sclk_from_bounding_box(
		const struct dc *dc,
		int required_sclk)
{
	int i;

	/*
	 * Some ASICs do not give us sclk levels, so we just report the actual
	 * required sclk.
	 */
	if (dc->sclk_lvls.num_levels == 0)
		return required_sclk;

	for (i = 0; i < dc->sclk_lvls.num_levels; i++) {
		if (dc->sclk_lvls.clocks_in_khz[i] >= required_sclk)
			return dc->sclk_lvls.clocks_in_khz[i];
	}
	/*
	 * Even the maximum level could not satisfy the requirement; this is
	 * unexpected at this stage and should have been caught at validation
	 * time.
	 */
	ASSERT(0);
	return dc->sclk_lvls.clocks_in_khz[dc->sclk_lvls.num_levels - 1];
}

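/*
 * Return the shortest vertical blank time, in microseconds, across all
 * streams in the given state (UINT32_MAX when there are no streams). When a
 * variable v_total adjustment is active, the minimum v_total is used so the
 * reported window is the worst case. The result feeds
 * avail_mclk_switch_time_us in the PPLib display configuration.
 */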
uint32_t dce110_get_min_vblank_time_us(const struct dc_state *context)
{
	uint8_t j;
	uint32_t min_vertical_blank_time = -1;

	for (j = 0; j < context->stream_count; j++) {
		struct dc_stream_state *stream = context->streams[j];
		uint32_t vertical_blank_in_pixels = 0;
		uint32_t vertical_blank_time = 0;
		uint32_t vertical_total_min = stream->timing.v_total;
		struct dc_crtc_timing_adjust adjust = stream->adjust;

		if (adjust.v_total_max != adjust.v_total_min)
			vertical_total_min = adjust.v_total_min;

		vertical_blank_in_pixels = stream->timing.h_total *
			(vertical_total_min
			 - stream->timing.v_addressable);
		vertical_blank_time = vertical_blank_in_pixels
			* 10000 / stream->timing.pix_clk_100hz;

		if (min_vertical_blank_time > vertical_blank_time)
			min_vertical_blank_time = vertical_blank_time;
	}

	return min_vertical_blank_time;
}

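/*
 * Translate every active (not dpms_off) stream in the dc_state into a
 * dm_pp_single_disp_config entry - signal type, timing generator instance,
 * source size, link settings and rounded refresh rate - and store the
 * resulting display count in pp_display_cfg.
 */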
void dce110_fill_display_configs(
	const struct dc_state *context,
	struct dm_pp_display_configuration *pp_display_cfg)
{
	int j;
	int num_cfgs = 0;

	for (j = 0; j < context->stream_count; j++) {
		int k;

		const struct dc_stream_state *stream = context->streams[j];
		struct dm_pp_single_disp_config *cfg =
			&pp_display_cfg->disp_configs[num_cfgs];
		const struct pipe_ctx *pipe_ctx = NULL;

		for (k = 0; k < MAX_PIPES; k++)
			if (stream == context->res_ctx.pipe_ctx[k].stream) {
				pipe_ctx = &context->res_ctx.pipe_ctx[k];
				break;
			}

		ASSERT(pipe_ctx != NULL);

		/* only notify active streams */
		if (stream->dpms_off)
			continue;

		num_cfgs++;
		cfg->signal = pipe_ctx->stream->signal;
		cfg->pipe_idx = pipe_ctx->stream_res.tg->inst;
		cfg->src_height = stream->src.height;
		cfg->src_width = stream->src.width;
		cfg->ddi_channel_mapping =
			stream->link->ddi_channel_mapping.raw;
		cfg->transmitter =
			stream->link->link_enc->transmitter;
		cfg->link_settings.lane_count =
			stream->link->cur_link_settings.lane_count;
		cfg->link_settings.link_rate =
			stream->link->cur_link_settings.link_rate;
		cfg->link_settings.link_spread =
			stream->link->cur_link_settings.link_spread;
		cfg->sym_clock = stream->phy_pix_clk;
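		/*
		 * pix_clk_100hz is in units of 100 Hz, so multiplying by 100
		 * gives the pixel clock in Hz; dividing by h_total and
		 * v_total yields the refresh rate. Adding v_total / 2 before
		 * the final division rounds to the nearest integer.
		 */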
		/* Round v_refresh */
		cfg->v_refresh = stream->timing.pix_clk_100hz * 100;
		cfg->v_refresh /= stream->timing.h_total;
		cfg->v_refresh = (cfg->v_refresh + stream->timing.v_total / 2)
			/ stream->timing.v_total;
	}

	pp_display_cfg->display_count = num_cfgs;
}

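/*
 * Build the dm_pp_display_configuration for the given state from the DCE
 * bandwidth results (sync/p-state/cc6 flags, minimum memory, engine and DCF
 * clocks, mclk switch window and per-display configs) and pass it to PPLib
 * via dm_pp_apply_display_requirements(), but only when it differs from the
 * configuration that is currently applied.
 */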
void dce11_pplib_apply_display_requirements(
	struct dc *dc,
	struct dc_state *context)
{
	struct dm_pp_display_configuration *pp_display_cfg = &context->pp_display_cfg;
	int memory_type_multiplier = MEMORY_TYPE_MULTIPLIER_CZ;

	if (dc->bw_vbios && dc->bw_vbios->memory_type == bw_def_hbm)
		memory_type_multiplier = MEMORY_TYPE_HBM;

	pp_display_cfg->all_displays_in_sync =
		context->bw_ctx.bw.dce.all_displays_in_sync;
	pp_display_cfg->nb_pstate_switch_disable =
		context->bw_ctx.bw.dce.nbp_state_change_enable == false;
	pp_display_cfg->cpu_cc6_disable =
		context->bw_ctx.bw.dce.cpuc_state_change_enable == false;
	pp_display_cfg->cpu_pstate_disable =
		context->bw_ctx.bw.dce.cpup_state_change_enable == false;
	pp_display_cfg->cpu_pstate_separation_time =
		context->bw_ctx.bw.dce.blackout_recovery_time_us;

	/*
	 * TODO: determine whether the bandwidth has reached the memory's
	 * limitation, then change the minimum memory clock based on the
	 * real-time bandwidth limitation.
	 */
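	/*
	 * On Vega 20 (FAMILY_AI) with two or more streams, keep the minimum
	 * memory clock at least at the vBIOS high yclk level (divided by the
	 * memory type multiplier and converted to kHz) instead of relying
	 * solely on the bandwidth-derived yclk value.
	 */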
	if ((dc->ctx->asic_id.chip_family == FAMILY_AI) &&
	     ASICREV_IS_VEGA20_P(dc->ctx->asic_id.hw_internal_rev) && (context->stream_count >= 2)) {
		pp_display_cfg->min_memory_clock_khz = max(pp_display_cfg->min_memory_clock_khz,
							   (uint32_t) div64_s64(
								   div64_s64(dc->bw_vbios->high_yclk.value,
									     memory_type_multiplier), 10000));
	} else {
		pp_display_cfg->min_memory_clock_khz = context->bw_ctx.bw.dce.yclk_khz
			/ memory_type_multiplier;
	}

	pp_display_cfg->min_engine_clock_khz = determine_sclk_from_bounding_box(
			dc,
			context->bw_ctx.bw.dce.sclk_khz);

	/*
	 * As a workaround for >4x4K lightup, set dcfclk to the
	 * min_engine_clock value. This is not required for fewer than 5
	 * displays, so don't request dcfclk in dc in that case to avoid
	 * impacting power saving.
	 */
	pp_display_cfg->min_dcfclock_khz = (context->stream_count > 4) ?
			pp_display_cfg->min_engine_clock_khz : 0;

	pp_display_cfg->min_engine_clock_deep_sleep_khz
		= context->bw_ctx.bw.dce.sclk_deep_sleep_khz;

	pp_display_cfg->avail_mclk_switch_time_us =
		dce110_get_min_vblank_time_us(context);
	/* TODO: dce11.2 */
	pp_display_cfg->avail_mclk_switch_time_in_disp_active_us = 0;

	pp_display_cfg->disp_clk_khz = dc->clk_mgr->clks.dispclk_khz;

	dce110_fill_display_configs(context, pp_display_cfg);

	/* TODO: is this still applicable? */
	if (pp_display_cfg->display_count == 1) {
		const struct dc_crtc_timing *timing =
			&context->streams[0]->timing;

		pp_display_cfg->crtc_index =
			pp_display_cfg->disp_configs[0].pipe_idx;
		pp_display_cfg->line_time_in_us = timing->h_total * 10000 / timing->pix_clk_100hz;
	}

	if (memcmp(&dc->current_state->pp_display_cfg, pp_display_cfg, sizeof(*pp_display_cfg)) != 0)
		dm_pp_apply_display_requirements(dc->ctx, pp_display_cfg);
}

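/*
 * update_clocks hook for DCE 11.x: request the required power level from
 * PPLib, program the new display clock when it changes (applying a 15%
 * margin when DFS bypass is not active), and re-apply the PPLib display
 * requirements for the new state.
 */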
static void dce11_update_clocks(struct clk_mgr *clk_mgr_base,
		struct dc_state *context,
		bool safe_to_lower)
{
	struct clk_mgr_internal *clk_mgr_dce = TO_CLK_MGR_INTERNAL(clk_mgr_base);
	struct dm_pp_power_level_change_request level_change_req;
	int patched_disp_clk = context->bw_ctx.bw.dce.dispclk_khz;

	/* TODO: W/A for dal3 linux, investigate why this works */
	if (!clk_mgr_dce->dfs_bypass_active)
		patched_disp_clk = patched_disp_clk * 115 / 100;

	level_change_req.power_level = dce_get_required_clocks_state(clk_mgr_base, context);
	/* get max clock state from PPLIB */
	if ((level_change_req.power_level < clk_mgr_dce->cur_min_clks_state && safe_to_lower)
			|| level_change_req.power_level > clk_mgr_dce->cur_min_clks_state) {
		if (dm_pp_apply_power_level_change_request(clk_mgr_base->ctx, &level_change_req))
			clk_mgr_dce->cur_min_clks_state = level_change_req.power_level;
	}

	if (should_set_clock(safe_to_lower, patched_disp_clk, clk_mgr_base->clks.dispclk_khz)) {
		context->bw_ctx.bw.dce.dispclk_khz = dce_set_clock(clk_mgr_base, patched_disp_clk);
		clk_mgr_base->clks.dispclk_khz = patched_disp_clk;
	}
	dce11_pplib_apply_display_requirements(clk_mgr_base->ctx->dc, context);
}

static struct clk_mgr_funcs dce110_funcs = {
	.get_dp_ref_clk_frequency = dce_get_dp_ref_freq_khz,
	.update_clocks = dce11_update_clocks
};

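/*
 * Construct the DCE 11.0 clock manager: run the shared DCE construction,
 * then install the DCE 11.0 state-dependent clock table, register offsets,
 * shift/mask definitions and function table.
 */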
void dce110_clk_mgr_construct(
		struct dc_context *ctx,
		struct clk_mgr_internal *clk_mgr)
{
	dce_clk_mgr_construct(ctx, clk_mgr);

	memcpy(clk_mgr->max_clks_by_state,
		dce110_max_clks_by_state,
		sizeof(dce110_max_clks_by_state));

	clk_mgr->regs = &disp_clk_regs;
	clk_mgr->clk_mgr_shift = &disp_clk_shift;
	clk_mgr->clk_mgr_mask = &disp_clk_mask;
	clk_mgr->base.funcs = &dce110_funcs;
}
