1 // SPDX-License-Identifier: MIT
2 //
3 // Copyright 2024 Advanced Micro Devices, Inc.
4
5 #include "dml2_internal_types.h"
6 #include "dml_top.h"
7 #include "dml2_core_dcn4_calcs.h"
8 #include "dml2_internal_shared_types.h"
9 #include "dml21_utils.h"
10 #include "dml21_translation_helper.h"
11 #include "dml2_dc_resource_mgmt.h"
12
13 #define INVALID -1
14
dml21_allocate_memory(struct dml2_context ** dml_ctx)15 static bool dml21_allocate_memory(struct dml2_context **dml_ctx)
16 {
17 *dml_ctx = vzalloc(sizeof(struct dml2_context));
18 if (!(*dml_ctx))
19 return false;
20
21 (*dml_ctx)->v21.dml_init.dml2_instance = vzalloc(sizeof(struct dml2_instance));
22 if (!((*dml_ctx)->v21.dml_init.dml2_instance))
23 return false;
24
25 (*dml_ctx)->v21.mode_support.dml2_instance = (*dml_ctx)->v21.dml_init.dml2_instance;
26 (*dml_ctx)->v21.mode_programming.dml2_instance = (*dml_ctx)->v21.dml_init.dml2_instance;
27
28 (*dml_ctx)->v21.mode_support.display_config = &(*dml_ctx)->v21.display_config;
29 (*dml_ctx)->v21.mode_programming.display_config = (*dml_ctx)->v21.mode_support.display_config;
30
31 (*dml_ctx)->v21.mode_programming.programming = vzalloc(sizeof(struct dml2_display_cfg_programming));
32 if (!((*dml_ctx)->v21.mode_programming.programming))
33 return false;
34
35 return true;
36 }
37
dml21_populate_configuration_options(const struct dc * in_dc,struct dml2_context * dml_ctx,const struct dml2_configuration_options * config)38 static void dml21_populate_configuration_options(const struct dc *in_dc,
39 struct dml2_context *dml_ctx,
40 const struct dml2_configuration_options *config)
41 {
42 dml_ctx->config = *config;
43
44 /* UCLK P-State options */
45 if (in_dc->debug.dml21_force_pstate_method) {
46 dml_ctx->config.pmo.force_pstate_method_enable = true;
47 for (int i = 0; i < MAX_PIPES; i++)
48 dml_ctx->config.pmo.force_pstate_method_values[i] = in_dc->debug.dml21_force_pstate_method_values[i];
49 } else {
50 dml_ctx->config.pmo.force_pstate_method_enable = false;
51 }
52 }
53
/*
 * dml21_init() - initialize (or reinitialize) a DML21 context.
 * @in_dc: DC instance supplying debug options and init parameters.
 * @dml_ctx: context to initialize; must already be allocated.
 * @config: configuration options copied into the context.
 *
 * Tags the context as dml2_architecture_21, copies the configuration, then
 * populates the DML init parameters and initializes the DML2 instance inside
 * a kernel-FP region (DML init uses floating-point arithmetic).
 */
static void dml21_init(const struct dc *in_dc, struct dml2_context *dml_ctx, const struct dml2_configuration_options *config)
{

	dml_ctx->architecture = dml2_architecture_21;

	dml21_populate_configuration_options(in_dc, dml_ctx, config);

	/* FP context required for DML init computations */
	DC_FP_START();

	dml21_populate_dml_init_params(&dml_ctx->v21.dml_init, config, in_dc);

	dml2_initialize_instance(&dml_ctx->v21.dml_init);

	DC_FP_END();
}
69
dml21_create(const struct dc * in_dc,struct dml2_context ** dml_ctx,const struct dml2_configuration_options * config)70 bool dml21_create(const struct dc *in_dc, struct dml2_context **dml_ctx, const struct dml2_configuration_options *config)
71 {
72 /* Allocate memory for initializing DML21 instance */
73 if (!dml21_allocate_memory(dml_ctx))
74 return false;
75
76 dml21_init(in_dc, *dml_ctx, config);
77
78 return true;
79 }
80
dml21_destroy(struct dml2_context * dml2)81 void dml21_destroy(struct dml2_context *dml2)
82 {
83 vfree(dml2->v21.dml_init.dml2_instance);
84 vfree(dml2->v21.mode_programming.programming);
85 }
86
dml21_calculate_rq_and_dlg_params(const struct dc * dc,struct dc_state * context,struct resource_context * out_new_hw_state,struct dml2_context * in_ctx,unsigned int pipe_cnt)87 static void dml21_calculate_rq_and_dlg_params(const struct dc *dc, struct dc_state *context, struct resource_context *out_new_hw_state,
88 struct dml2_context *in_ctx, unsigned int pipe_cnt)
89 {
90 unsigned int dml_prog_idx = 0, dc_pipe_index = 0, num_dpps_required = 0;
91 struct dml2_per_plane_programming *pln_prog = NULL;
92 struct dml2_per_stream_programming *stream_prog = NULL;
93 struct pipe_ctx *dc_main_pipes[__DML2_WRAPPER_MAX_STREAMS_PLANES__];
94 struct pipe_ctx *dc_phantom_pipes[__DML2_WRAPPER_MAX_STREAMS_PLANES__] = {0};
95 int num_pipes;
96 unsigned int dml_phantom_prog_idx;
97
98 context->bw_ctx.bw.dcn.clk.dppclk_khz = 0;
99
100 /* copy global DCHUBBUB arbiter registers */
101 memcpy(&context->bw_ctx.bw.dcn.arb_regs, &in_ctx->v21.mode_programming.programming->global_regs.arb_regs, sizeof(struct dml2_display_arb_regs));
102
103 /* legacy only */
104 context->bw_ctx.bw.dcn.compbuf_size_kb = (int)in_ctx->v21.mode_programming.programming->global_regs.arb_regs.compbuf_size * 64;
105
106 context->bw_ctx.bw.dcn.mall_ss_size_bytes = 0;
107 context->bw_ctx.bw.dcn.mall_ss_psr_active_size_bytes = 0;
108 context->bw_ctx.bw.dcn.mall_subvp_size_bytes = 0;
109
110 /* phantom's start after main planes */
111 dml_phantom_prog_idx = in_ctx->v21.mode_programming.programming->display_config.num_planes;
112
113 for (dml_prog_idx = 0; dml_prog_idx < DML2_MAX_PLANES; dml_prog_idx++) {
114 pln_prog = &in_ctx->v21.mode_programming.programming->plane_programming[dml_prog_idx];
115
116 if (!pln_prog->plane_descriptor)
117 continue;
118
119 stream_prog = &in_ctx->v21.mode_programming.programming->stream_programming[pln_prog->plane_descriptor->stream_index];
120 num_dpps_required = pln_prog->num_dpps_required;
121
122 if (num_dpps_required == 0) {
123 continue;
124 }
125 num_pipes = dml21_find_dc_pipes_for_plane(dc, context, in_ctx, dc_main_pipes, dc_phantom_pipes, dml_prog_idx);
126
127 if (num_pipes <= 0)
128 continue;
129
130 /* program each pipe */
131 for (dc_pipe_index = 0; dc_pipe_index < num_pipes; dc_pipe_index++) {
132 dml21_program_dc_pipe(in_ctx, context, dc_main_pipes[dc_pipe_index], pln_prog, stream_prog);
133
134 if (pln_prog->phantom_plane.valid && dc_phantom_pipes[dc_pipe_index]) {
135 dml21_program_dc_pipe(in_ctx, context, dc_phantom_pipes[dc_pipe_index], pln_prog, stream_prog);
136 }
137 }
138
139 /* copy per plane mcache allocation */
140 memcpy(&context->bw_ctx.bw.dcn.mcache_allocations[dml_prog_idx], &pln_prog->mcache_allocation, sizeof(struct dml2_mcache_surface_allocation));
141 if (pln_prog->phantom_plane.valid) {
142 memcpy(&context->bw_ctx.bw.dcn.mcache_allocations[dml_phantom_prog_idx],
143 &pln_prog->phantom_plane.mcache_allocation,
144 sizeof(struct dml2_mcache_surface_allocation));
145
146 dml_phantom_prog_idx++;
147 }
148 }
149
150 /* assign global clocks */
151 context->bw_ctx.bw.dcn.clk.bw_dppclk_khz = context->bw_ctx.bw.dcn.clk.dppclk_khz;
152 context->bw_ctx.bw.dcn.clk.bw_dispclk_khz = context->bw_ctx.bw.dcn.clk.dispclk_khz;
153 if (in_ctx->v21.dml_init.soc_bb.clk_table.dispclk.num_clk_values > 1) {
154 context->bw_ctx.bw.dcn.clk.max_supported_dispclk_khz =
155 in_ctx->v21.dml_init.soc_bb.clk_table.dispclk.clk_values_khz[in_ctx->v21.dml_init.soc_bb.clk_table.dispclk.num_clk_values] * 1000;
156 } else {
157 context->bw_ctx.bw.dcn.clk.max_supported_dispclk_khz = in_ctx->v21.dml_init.soc_bb.clk_table.dispclk.clk_values_khz[0] * 1000;
158 }
159
160 if (in_ctx->v21.dml_init.soc_bb.clk_table.dppclk.num_clk_values > 1) {
161 context->bw_ctx.bw.dcn.clk.max_supported_dppclk_khz =
162 in_ctx->v21.dml_init.soc_bb.clk_table.dppclk.clk_values_khz[in_ctx->v21.dml_init.soc_bb.clk_table.dppclk.num_clk_values] * 1000;
163 } else {
164 context->bw_ctx.bw.dcn.clk.max_supported_dppclk_khz = in_ctx->v21.dml_init.soc_bb.clk_table.dppclk.clk_values_khz[0] * 1000;
165 }
166
167 /* get global mall allocation */
168 if (dc->res_pool->funcs->calculate_mall_ways_from_bytes) {
169 context->bw_ctx.bw.dcn.clk.num_ways = dc->res_pool->funcs->calculate_mall_ways_from_bytes(dc, context->bw_ctx.bw.dcn.mall_subvp_size_bytes);
170 } else {
171 context->bw_ctx.bw.dcn.clk.num_ways = 0;
172 }
173 }
174
dml21_prepare_mcache_params(struct dml2_context * dml_ctx,struct dc_state * context,struct dc_mcache_params * mcache_params)175 static void dml21_prepare_mcache_params(struct dml2_context *dml_ctx, struct dc_state *context, struct dc_mcache_params *mcache_params)
176 {
177 int dc_plane_idx = 0;
178 int dml_prog_idx, stream_idx, plane_idx;
179 struct dml2_per_plane_programming *pln_prog = NULL;
180
181 for (stream_idx = 0; stream_idx < context->stream_count; stream_idx++) {
182 for (plane_idx = 0; plane_idx < context->stream_status[stream_idx].plane_count; plane_idx++) {
183 dml_prog_idx = map_plane_to_dml21_display_cfg(dml_ctx, context->streams[stream_idx]->stream_id, context->stream_status[stream_idx].plane_states[plane_idx], context);
184 if (dml_prog_idx == INVALID) {
185 continue;
186 }
187 pln_prog = &dml_ctx->v21.mode_programming.programming->plane_programming[dml_prog_idx];
188 mcache_params[dc_plane_idx].valid = pln_prog->mcache_allocation.valid;
189 mcache_params[dc_plane_idx].num_mcaches_plane0 = pln_prog->mcache_allocation.num_mcaches_plane0;
190 mcache_params[dc_plane_idx].num_mcaches_plane1 = pln_prog->mcache_allocation.num_mcaches_plane1;
191 mcache_params[dc_plane_idx].requires_dedicated_mall_mcache = pln_prog->mcache_allocation.requires_dedicated_mall_mcache;
192 mcache_params[dc_plane_idx].last_slice_sharing.plane0_plane1 = pln_prog->mcache_allocation.last_slice_sharing.plane0_plane1;
193 memcpy(mcache_params[dc_plane_idx].mcache_x_offsets_plane0,
194 pln_prog->mcache_allocation.mcache_x_offsets_plane0,
195 sizeof(int) * (DML2_MAX_MCACHES + 1));
196 memcpy(mcache_params[dc_plane_idx].mcache_x_offsets_plane1,
197 pln_prog->mcache_allocation.mcache_x_offsets_plane1,
198 sizeof(int) * (DML2_MAX_MCACHES + 1));
199 dc_plane_idx++;
200 }
201 }
202 }
203
dml21_mode_check_and_programming(const struct dc * in_dc,struct dc_state * context,struct dml2_context * dml_ctx)204 static bool dml21_mode_check_and_programming(const struct dc *in_dc, struct dc_state *context, struct dml2_context *dml_ctx)
205 {
206 bool result = false;
207 struct dml2_build_mode_programming_in_out *mode_programming = &dml_ctx->v21.mode_programming;
208 struct dc_mcache_params mcache_params[MAX_PLANES] = {0};
209
210 memset(&dml_ctx->v21.display_config, 0, sizeof(struct dml2_display_cfg));
211 memset(&dml_ctx->v21.dml_to_dc_pipe_mapping, 0, sizeof(struct dml2_dml_to_dc_pipe_mapping));
212 memset(&dml_ctx->v21.mode_programming.dml2_instance->scratch.build_mode_programming_locals.mode_programming_params, 0, sizeof(struct dml2_core_mode_programming_in_out));
213
214 if (!context)
215 return true;
216
217 if (context->stream_count == 0) {
218 dml21_build_fams2_programming(in_dc, context, dml_ctx);
219 return true;
220 }
221
222 /* scrub phantom's from current dc_state */
223 dml_ctx->config.svp_pstate.callbacks.remove_phantom_streams_and_planes(in_dc, context);
224 dml_ctx->config.svp_pstate.callbacks.release_phantom_streams_and_planes(in_dc, context);
225
226 /* Populate stream, plane mappings and other fields in display config. */
227 result = dml21_map_dc_state_into_dml_display_cfg(in_dc, context, dml_ctx);
228 if (!result)
229 return false;
230
231 DC_FP_START();
232 result = dml2_build_mode_programming(mode_programming);
233 DC_FP_END();
234 if (!result)
235 return false;
236
237 /* Check and map HW resources */
238 if (result && !dml_ctx->config.skip_hw_state_mapping) {
239 dml21_map_hw_resources(dml_ctx);
240 dml2_map_dc_pipes(dml_ctx, context, NULL, &dml_ctx->v21.dml_to_dc_pipe_mapping, in_dc->current_state);
241 /* if subvp phantoms are present, expand them into dc context */
242 dml21_handle_phantom_streams_planes(in_dc, context, dml_ctx);
243
244 if (in_dc->res_pool->funcs->program_mcache_pipe_config) {
245 //Prepare mcache params for each plane based on mcache output from DML
246 dml21_prepare_mcache_params(dml_ctx, context, mcache_params);
247
248 //populate mcache regs to each pipe
249 dml_ctx->config.callbacks.allocate_mcache(context, mcache_params);
250 }
251 }
252
253 /* Copy DML CLK, WM and REG outputs to bandwidth context */
254 if (result && !dml_ctx->config.skip_hw_state_mapping) {
255 dml21_calculate_rq_and_dlg_params(in_dc, context, &context->res_ctx, dml_ctx, in_dc->res_pool->pipe_count);
256 dml21_copy_clocks_to_dc_state(dml_ctx, context);
257 dml21_extract_watermark_sets(in_dc, &context->bw_ctx.bw.dcn.watermarks, dml_ctx);
258 dml21_build_fams2_programming(in_dc, context, dml_ctx);
259 }
260
261 return true;
262 }
263
dml21_check_mode_support(const struct dc * in_dc,struct dc_state * context,struct dml2_context * dml_ctx)264 static bool dml21_check_mode_support(const struct dc *in_dc, struct dc_state *context, struct dml2_context *dml_ctx)
265 {
266 bool is_supported = false;
267 struct dml2_initialize_instance_in_out *dml_init = &dml_ctx->v21.dml_init;
268 struct dml2_check_mode_supported_in_out *mode_support = &dml_ctx->v21.mode_support;
269
270 memset(&dml_ctx->v21.display_config, 0, sizeof(struct dml2_display_cfg));
271 memset(&dml_ctx->v21.dml_to_dc_pipe_mapping, 0, sizeof(struct dml2_dml_to_dc_pipe_mapping));
272 memset(&dml_ctx->v21.mode_programming.dml2_instance->scratch.check_mode_supported_locals.mode_support_params, 0, sizeof(struct dml2_core_mode_support_in_out));
273
274 if (!context || context->stream_count == 0)
275 return true;
276
277 /* Scrub phantom's from current dc_state */
278 dml_ctx->config.svp_pstate.callbacks.remove_phantom_streams_and_planes(in_dc, context);
279 dml_ctx->config.svp_pstate.callbacks.release_phantom_streams_and_planes(in_dc, context);
280
281 mode_support->dml2_instance = dml_init->dml2_instance;
282 dml21_map_dc_state_into_dml_display_cfg(in_dc, context, dml_ctx);
283 dml_ctx->v21.mode_programming.dml2_instance->scratch.build_mode_programming_locals.mode_programming_params.programming = dml_ctx->v21.mode_programming.programming;
284 DC_FP_START();
285 is_supported = dml2_check_mode_supported(mode_support);
286 DC_FP_END();
287 if (!is_supported)
288 return false;
289
290 return true;
291 }
292
dml21_validate(const struct dc * in_dc,struct dc_state * context,struct dml2_context * dml_ctx,enum dc_validate_mode validate_mode)293 bool dml21_validate(const struct dc *in_dc, struct dc_state *context, struct dml2_context *dml_ctx,
294 enum dc_validate_mode validate_mode)
295 {
296 bool out = false;
297
298 /* Use dml21_check_mode_support for DC_VALIDATE_MODE_ONLY and DC_VALIDATE_MODE_AND_STATE_INDEX path */
299 if (validate_mode != DC_VALIDATE_MODE_AND_PROGRAMMING)
300 out = dml21_check_mode_support(in_dc, context, dml_ctx);
301 else
302 out = dml21_mode_check_and_programming(in_dc, context, dml_ctx);
303
304 return out;
305 }
306
/*
 * dml21_prepare_mcache_programming() - build and distribute per-pipe mcache
 * register programming for the current display configuration.
 * @in_dc: DC instance.
 * @context: DC state; no-op when it has zero streams.
 * @dml_ctx: DML21 context holding mode programming output and scratch space.
 *
 * Two passes over the DML plane list:
 *  1) build one mcache configuration descriptor per plane (and per valid
 *     phantom plane), collecting per-pipe viewport configs;
 *  2) after dml2_build_mcache_programming() generates the register values,
 *     copy them into each main/phantom pipe's mcache_regs.
 * Phantom entries are appended after the num_planes main entries, so the
 * phantom index sequence must advance identically in both passes.
 */
void dml21_prepare_mcache_programming(struct dc *in_dc, struct dc_state *context, struct dml2_context *dml_ctx)
{
	unsigned int dml_prog_idx, dml_phantom_prog_idx, dc_pipe_index;
	int num_pipes;
	struct pipe_ctx *dc_main_pipes[__DML2_WRAPPER_MAX_STREAMS_PLANES__];
	struct pipe_ctx *dc_phantom_pipes[__DML2_WRAPPER_MAX_STREAMS_PLANES__] = {0};

	struct dml2_per_plane_programming *pln_prog = NULL;
	struct dml2_plane_mcache_configuration_descriptor *mcache_config = NULL;
	struct prepare_mcache_programming_locals *l = &dml_ctx->v21.scratch.prepare_mcache_locals;

	if (context->stream_count == 0) {
		return;
	}

	memset(&l->build_mcache_programming_params, 0, sizeof(struct dml2_build_mcache_programming_in_out));
	l->build_mcache_programming_params.dml2_instance = dml_ctx->v21.dml_init.dml2_instance;

	/* phantom's start after main planes */
	dml_phantom_prog_idx = dml_ctx->v21.mode_programming.programming->display_config.num_planes;

	/* Build mcache programming parameters per plane per pipe */
	for (dml_prog_idx = 0; dml_prog_idx < dml_ctx->v21.mode_programming.programming->display_config.num_planes; dml_prog_idx++) {
		pln_prog = &dml_ctx->v21.mode_programming.programming->plane_programming[dml_prog_idx];

		/* descriptor is registered even when the plane is later skipped below */
		mcache_config = &l->build_mcache_programming_params.mcache_configurations[dml_prog_idx];
		memset(mcache_config, 0, sizeof(struct dml2_plane_mcache_configuration_descriptor));
		mcache_config->plane_descriptor = pln_prog->plane_descriptor;
		mcache_config->mcache_allocation = &context->bw_ctx.bw.dcn.mcache_allocations[dml_prog_idx];
		mcache_config->num_pipes = pln_prog->num_dpps_required;
		l->build_mcache_programming_params.num_configurations++;

		if (pln_prog->num_dpps_required == 0) {
			continue;
		}

		num_pipes = dml21_find_dc_pipes_for_plane(in_dc, context, dml_ctx, dc_main_pipes, dc_phantom_pipes, dml_prog_idx);
		if (num_pipes <= 0 || dc_main_pipes[0]->stream == NULL ||
				dc_main_pipes[0]->plane_state == NULL)
			continue;

		/* get config for each pipe */
		for (dc_pipe_index = 0; dc_pipe_index < num_pipes; dc_pipe_index++) {
			ASSERT(dc_main_pipes[dc_pipe_index]);
			dml21_get_pipe_mcache_config(context, dc_main_pipes[dc_pipe_index], pln_prog, &mcache_config->pipe_configurations[dc_pipe_index]);
		}

		/* get config for each phantom pipe */
		/* NOTE(review): this guard checks dc_main_pipes[0]->stream while the
		 * plane_state check is on dc_phantom_pipes[0] — possibly intended to be
		 * dc_phantom_pipes[0]->stream; verify against phantom pipe invariants. */
		if (pln_prog->phantom_plane.valid &&
				dc_phantom_pipes[0] &&
				dc_main_pipes[0]->stream &&
				dc_phantom_pipes[0]->plane_state) {
			mcache_config = &l->build_mcache_programming_params.mcache_configurations[dml_phantom_prog_idx];
			memset(mcache_config, 0, sizeof(struct dml2_plane_mcache_configuration_descriptor));
			mcache_config->plane_descriptor = pln_prog->plane_descriptor;
			mcache_config->mcache_allocation = &context->bw_ctx.bw.dcn.mcache_allocations[dml_phantom_prog_idx];
			mcache_config->num_pipes = pln_prog->num_dpps_required;
			l->build_mcache_programming_params.num_configurations++;

			for (dc_pipe_index = 0; dc_pipe_index < num_pipes; dc_pipe_index++) {
				ASSERT(dc_phantom_pipes[dc_pipe_index]);
				dml21_get_pipe_mcache_config(context, dc_phantom_pipes[dc_pipe_index], pln_prog, &mcache_config->pipe_configurations[dc_pipe_index]);
			}

			/* increment phantom index */
			dml_phantom_prog_idx++;
		}
	}

	/* Call to generate mcache programming per plane per pipe for the given display configuration */
	dml2_build_mcache_programming(&l->build_mcache_programming_params);

	/* get per plane per pipe mcache programming */
	for (dml_prog_idx = 0; dml_prog_idx < dml_ctx->v21.mode_programming.programming->display_config.num_planes; dml_prog_idx++) {
		pln_prog = &dml_ctx->v21.mode_programming.programming->plane_programming[dml_prog_idx];

		num_pipes = dml21_find_dc_pipes_for_plane(in_dc, context, dml_ctx, dc_main_pipes, dc_phantom_pipes, dml_prog_idx);
		if (num_pipes <= 0 || dc_main_pipes[0]->stream == NULL ||
				dc_main_pipes[0]->plane_state == NULL)
			continue;

		/* get config for each pipe */
		for (dc_pipe_index = 0; dc_pipe_index < num_pipes; dc_pipe_index++) {
			ASSERT(dc_main_pipes[dc_pipe_index]);
			if (l->build_mcache_programming_params.per_plane_pipe_mcache_regs[dml_prog_idx][dc_pipe_index]) {
				memcpy(&dc_main_pipes[dc_pipe_index]->mcache_regs,
						l->build_mcache_programming_params.per_plane_pipe_mcache_regs[dml_prog_idx][dc_pipe_index],
						sizeof(struct dml2_hubp_pipe_mcache_regs));
			}
		}

		/* get config for each phantom pipe */
		if (pln_prog->phantom_plane.valid &&
				dc_phantom_pipes[0] &&
				dc_main_pipes[0]->stream &&
				dc_phantom_pipes[0]->plane_state) {
			for (dc_pipe_index = 0; dc_pipe_index < num_pipes; dc_pipe_index++) {
				ASSERT(dc_phantom_pipes[dc_pipe_index]);
				if (l->build_mcache_programming_params.per_plane_pipe_mcache_regs[dml_phantom_prog_idx][dc_pipe_index]) {
					memcpy(&dc_phantom_pipes[dc_pipe_index]->mcache_regs,
							l->build_mcache_programming_params.per_plane_pipe_mcache_regs[dml_phantom_prog_idx][dc_pipe_index],
							sizeof(struct dml2_hubp_pipe_mcache_regs));
				}
			}
			/* increment phantom index */
			dml_phantom_prog_idx++;
		}
	}
}
416
/*
 * dml21_copy() - deep-copy one DML21 context into another.
 * @dst_dml_ctx: destination context; must already own its own internal
 *               allocations (dml2_instance and programming buffers).
 * @src_dml_ctx: source context to copy from.
 *
 * The whole-struct memcpy would overwrite the destination's pointers to its
 * own heap buffers with the source's, so those pointers are saved first,
 * the internals copied by value, and the pointers (and the self-referencing
 * display_config aliases) restored afterwards.
 */
void dml21_copy(struct dml2_context *dst_dml_ctx,
		struct dml2_context *src_dml_ctx)
{
	/* Preserve references to internals */
	struct dml2_instance *dst_dml2_instance = dst_dml_ctx->v21.dml_init.dml2_instance;
	struct dml2_display_cfg_programming *dst_dml2_programming = dst_dml_ctx->v21.mode_programming.programming;

	/* Copy context */
	memcpy(dst_dml_ctx, src_dml_ctx, sizeof(struct dml2_context));

	/* Copy Internals */
	memcpy(dst_dml2_instance, src_dml_ctx->v21.dml_init.dml2_instance, sizeof(struct dml2_instance));
	memcpy(dst_dml2_programming, src_dml_ctx->v21.mode_programming.programming, sizeof(struct dml2_display_cfg_programming));

	/* Restore references to internals */
	dst_dml_ctx->v21.dml_init.dml2_instance = dst_dml2_instance;

	dst_dml_ctx->v21.mode_support.dml2_instance = dst_dml2_instance;
	dst_dml_ctx->v21.mode_programming.dml2_instance = dst_dml2_instance;

	/* display_config must point into the destination context, not the source */
	dst_dml_ctx->v21.mode_support.display_config = &dst_dml_ctx->v21.display_config;
	dst_dml_ctx->v21.mode_programming.display_config = dst_dml_ctx->v21.mode_support.display_config;

	dst_dml_ctx->v21.mode_programming.programming = dst_dml2_programming;

	DC_FP_START();

	/* need to initialize copied instance for internal references to be correct */
	dml2_initialize_instance(&dst_dml_ctx->v21.dml_init);

	DC_FP_END();
}
449
dml21_create_copy(struct dml2_context ** dst_dml_ctx,struct dml2_context * src_dml_ctx)450 bool dml21_create_copy(struct dml2_context **dst_dml_ctx,
451 struct dml2_context *src_dml_ctx)
452 {
453 /* Allocate memory for initializing DML21 instance */
454 if (!dml21_allocate_memory(dst_dml_ctx))
455 return false;
456
457 dml21_copy(*dst_dml_ctx, src_dml_ctx);
458
459 return true;
460 }
461
/*
 * dml21_reinit() - reinitialize an existing DML21 context.
 * @in_dc: DC instance.
 * @dml_ctx: context to reinitialize (already allocated).
 * @config: configuration options to apply.
 *
 * Thin wrapper: delegates directly to dml21_init() with the same arguments.
 */
void dml21_reinit(const struct dc *in_dc, struct dml2_context *dml_ctx, const struct dml2_configuration_options *config)
{
	dml21_init(in_dc, dml_ctx, config);
}
466
467