// SPDX-License-Identifier: MIT
//
// Copyright 2024 Advanced Micro Devices, Inc.

#include <linux/vmalloc.h>

#include "dml2_internal_types.h"
#include "dml_top.h"
#include "dml2_core_dcn4_calcs.h"
#include "dml2_internal_shared_types.h"
#include "dml21_utils.h"
#include "dml21_translation_helper.h"
#include "dml2_dc_resource_mgmt.h"

static bool dml21_allocate_memory(struct dml2_context **dml_ctx)
{
	*dml_ctx = vzalloc(sizeof(struct dml2_context));
	if (!(*dml_ctx))
		return false;

	(*dml_ctx)->v21.dml_init.dml2_instance = vzalloc(sizeof(struct dml2_instance));
	if (!((*dml_ctx)->v21.dml_init.dml2_instance))
		goto fail_free_ctx;

	(*dml_ctx)->v21.mode_support.dml2_instance = (*dml_ctx)->v21.dml_init.dml2_instance;
	(*dml_ctx)->v21.mode_programming.dml2_instance = (*dml_ctx)->v21.dml_init.dml2_instance;

	(*dml_ctx)->v21.mode_support.display_config = &(*dml_ctx)->v21.display_config;
	(*dml_ctx)->v21.mode_programming.display_config = (*dml_ctx)->v21.mode_support.display_config;

	(*dml_ctx)->v21.mode_programming.programming = vzalloc(sizeof(struct dml2_display_cfg_programming));
	if (!((*dml_ctx)->v21.mode_programming.programming))
		goto fail_free_instance;

	return true;

	/* free partial allocations so a failed create does not leak */
fail_free_instance:
	vfree((*dml_ctx)->v21.dml_init.dml2_instance);
fail_free_ctx:
	vfree(*dml_ctx);
	*dml_ctx = NULL;
	return false;
}

static void dml21_apply_debug_options(const struct dc *in_dc, struct dml2_context *dml_ctx, const struct dml2_configuration_options *config)
{
	bool disable_fams2;
	struct dml2_pmo_options *pmo_options = &dml_ctx->v21.dml_init.options.pmo_options;

	/* ODM options */
	pmo_options->disable_dyn_odm = !config->minimize_dispclk_using_odm;
	pmo_options->disable_dyn_odm_for_multi_stream = true;
	pmo_options->disable_dyn_odm_for_stream_with_svp = true;

	/* UCLK P-State options */
	if (in_dc->debug.dml21_force_pstate_method) {
		dml_ctx->config.pmo.force_pstate_method_enable = true;
		for (int i = 0; i < MAX_PIPES; i++)
			dml_ctx->config.pmo.force_pstate_method_values[i] = in_dc->debug.dml21_force_pstate_method_values[i];
	} else {
		dml_ctx->config.pmo.force_pstate_method_enable = false;
	}

	pmo_options->disable_vblank = ((in_dc->debug.dml21_disable_pstate_method_mask >> 1) & 1);

	/* NOTE: DRR and SubVP require FAMS2 */
	disable_fams2 = !in_dc->debug.fams2_config.bits.enable;
	pmo_options->disable_svp = ((in_dc->debug.dml21_disable_pstate_method_mask >> 2) & 1) ||
			in_dc->debug.force_disable_subvp ||
			disable_fams2;
	pmo_options->disable_drr_clamped = ((in_dc->debug.dml21_disable_pstate_method_mask >> 3) & 1) ||
			disable_fams2;
	pmo_options->disable_drr_var = ((in_dc->debug.dml21_disable_pstate_method_mask >> 4) & 1) ||
			disable_fams2;
	pmo_options->disable_fams2 = disable_fams2;

	pmo_options->disable_drr_var_when_var_active = in_dc->debug.disable_fams_gaming == INGAME_FAMS_DISABLE ||
			in_dc->debug.disable_fams_gaming == INGAME_FAMS_MULTI_DISP_CLAMPED_ONLY;
	pmo_options->disable_drr_clamped_when_var_active = in_dc->debug.disable_fams_gaming == INGAME_FAMS_DISABLE;
}

static void dml21_init(const struct dc *in_dc, struct dml2_context **dml_ctx, const struct dml2_configuration_options *config)
{
	switch (in_dc->ctx->dce_version) {
	case DCN_VERSION_4_01:
		(*dml_ctx)->v21.dml_init.options.project_id = dml2_project_dcn4x_stage2_auto_drr_svp;
		break;
	default:
		(*dml_ctx)->v21.dml_init.options.project_id = dml2_project_invalid;
		break;
	}

	(*dml_ctx)->architecture = dml2_architecture_21;

	/* Store configuration options */
	(*dml_ctx)->config = *config;

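	/*
	 * DML21 calculations use floating point; the DC_FP_START()/DC_FP_END()
	 * pair below opens a kernel FPU section so those computations are safe.
	 */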
	DC_FP_START();

	/* Initialize SOC BB and DCN IP params */
	dml21_initialize_soc_bb_params(&(*dml_ctx)->v21.dml_init, config, in_dc);
	dml21_initialize_ip_params(&(*dml_ctx)->v21.dml_init, config, in_dc);
	dml21_apply_soc_bb_overrides(&(*dml_ctx)->v21.dml_init, config, in_dc);

	/* Apply debug overrides */
	dml21_apply_debug_options(in_dc, *dml_ctx, config);

	/* Initialize DML21 instance */
	dml2_initialize_instance(&(*dml_ctx)->v21.dml_init);

	DC_FP_END();
}

bool dml21_create(const struct dc *in_dc, struct dml2_context **dml_ctx, const struct dml2_configuration_options *config)
{
	/* Allocate memory for initializing DML21 instance */
	if (!dml21_allocate_memory(dml_ctx))
		return false;

	dml21_init(in_dc, dml_ctx, config);

	return true;
}

void dml21_destroy(struct dml2_context *dml2)
{
	vfree(dml2->v21.dml_init.dml2_instance);
	vfree(dml2->v21.mode_programming.programming);
}

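/*
 * Translate DML21 per-plane/per-stream programming into the DC bandwidth
 * context: global DCHUBBUB arbiter registers, per-pipe programming, per-plane
 * mcache allocations, and the max supported clocks from the SoC BB tables.
 */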
static void dml21_calculate_rq_and_dlg_params(const struct dc *dc, struct dc_state *context, struct resource_context *out_new_hw_state,
		struct dml2_context *in_ctx, unsigned int pipe_cnt)
{
	unsigned int dml_prog_idx = 0, dc_pipe_index = 0, num_dpps_required = 0;
	struct dml2_per_plane_programming *pln_prog = NULL;
	struct dml2_per_stream_programming *stream_prog = NULL;
	struct pipe_ctx *dc_main_pipes[__DML2_WRAPPER_MAX_STREAMS_PLANES__];
	struct pipe_ctx *dc_phantom_pipes[__DML2_WRAPPER_MAX_STREAMS_PLANES__] = {0};
	int num_pipes;
	unsigned int dml_phantom_prog_idx;

	context->bw_ctx.bw.dcn.clk.dppclk_khz = 0;

	/* copy global DCHUBBUB arbiter registers */
	memcpy(&context->bw_ctx.bw.dcn.arb_regs, &in_ctx->v21.mode_programming.programming->global_regs.arb_regs, sizeof(struct dml2_display_arb_regs));

	/* legacy only */
	context->bw_ctx.bw.dcn.compbuf_size_kb = (int)in_ctx->v21.mode_programming.programming->global_regs.arb_regs.compbuf_size * 64;

	context->bw_ctx.bw.dcn.mall_ss_size_bytes = 0;
	context->bw_ctx.bw.dcn.mall_ss_psr_active_size_bytes = 0;
	context->bw_ctx.bw.dcn.mall_subvp_size_bytes = 0;

	/* phantoms start after main planes */
	dml_phantom_prog_idx = in_ctx->v21.mode_programming.programming->display_config.num_planes;

	for (dml_prog_idx = 0; dml_prog_idx < DML2_MAX_PLANES; dml_prog_idx++) {
		pln_prog = &in_ctx->v21.mode_programming.programming->plane_programming[dml_prog_idx];

		if (!pln_prog->plane_descriptor)
			continue;

		stream_prog = &in_ctx->v21.mode_programming.programming->stream_programming[pln_prog->plane_descriptor->stream_index];
		num_dpps_required = pln_prog->num_dpps_required;

		if (num_dpps_required == 0)
			continue;

		num_pipes = dml21_find_dc_pipes_for_plane(dc, context, in_ctx, dc_main_pipes, dc_phantom_pipes, dml_prog_idx);
		if (num_pipes <= 0)
			continue;

		/* program each pipe */
		for (dc_pipe_index = 0; dc_pipe_index < num_pipes; dc_pipe_index++) {
			dml21_program_dc_pipe(in_ctx, context, dc_main_pipes[dc_pipe_index], pln_prog, stream_prog);

			if (pln_prog->phantom_plane.valid && dc_phantom_pipes[dc_pipe_index])
				dml21_program_dc_pipe(in_ctx, context, dc_phantom_pipes[dc_pipe_index], pln_prog, stream_prog);
		}

		/* copy per plane mcache allocation */
		memcpy(&context->bw_ctx.bw.dcn.mcache_allocations[dml_prog_idx], &pln_prog->mcache_allocation, sizeof(struct dml2_mcache_surface_allocation));
		if (pln_prog->phantom_plane.valid) {
			memcpy(&context->bw_ctx.bw.dcn.mcache_allocations[dml_phantom_prog_idx],
					&pln_prog->phantom_plane.mcache_allocation,
					sizeof(struct dml2_mcache_surface_allocation));

			dml_phantom_prog_idx++;
		}
	}

	/* assign global clocks */
	context->bw_ctx.bw.dcn.clk.bw_dppclk_khz = context->bw_ctx.bw.dcn.clk.dppclk_khz;
	context->bw_ctx.bw.dcn.clk.bw_dispclk_khz = context->bw_ctx.bw.dcn.clk.dispclk_khz;
	/* last table entry is the max; indexing with num_clk_values would read past the end */
	if (in_ctx->v21.dml_init.soc_bb.clk_table.dispclk.num_clk_values > 1) {
		context->bw_ctx.bw.dcn.clk.max_supported_dispclk_khz =
				in_ctx->v21.dml_init.soc_bb.clk_table.dispclk.clk_values_khz[in_ctx->v21.dml_init.soc_bb.clk_table.dispclk.num_clk_values - 1] * 1000;
	} else {
		context->bw_ctx.bw.dcn.clk.max_supported_dispclk_khz = in_ctx->v21.dml_init.soc_bb.clk_table.dispclk.clk_values_khz[0] * 1000;
	}

	if (in_ctx->v21.dml_init.soc_bb.clk_table.dppclk.num_clk_values > 1) {
		context->bw_ctx.bw.dcn.clk.max_supported_dppclk_khz =
				in_ctx->v21.dml_init.soc_bb.clk_table.dppclk.clk_values_khz[in_ctx->v21.dml_init.soc_bb.clk_table.dppclk.num_clk_values - 1] * 1000;
	} else {
		context->bw_ctx.bw.dcn.clk.max_supported_dppclk_khz = in_ctx->v21.dml_init.soc_bb.clk_table.dppclk.clk_values_khz[0] * 1000;
	}

	/* get global mall allocation */
	if (dc->res_pool->funcs->calculate_mall_ways_from_bytes)
		context->bw_ctx.bw.dcn.clk.num_ways = dc->res_pool->funcs->calculate_mall_ways_from_bytes(dc, context->bw_ctx.bw.dcn.mall_subvp_size_bytes);
	else
		context->bw_ctx.bw.dcn.clk.num_ways = 0;
}

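/*
 * Full validation path: map the dc_state into a DML display config, build
 * mode programming, then map the DML output back onto DC pipes and the
 * bandwidth context. A NULL or stream-less context is trivially supported.
 */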
static bool dml21_mode_check_and_programming(const struct dc *in_dc, struct dc_state *context, struct dml2_context *dml_ctx)
{
	bool result = false;
	struct dml2_build_mode_programming_in_out *mode_programming = &dml_ctx->v21.mode_programming;

	memset(&dml_ctx->v21.display_config, 0, sizeof(struct dml2_display_cfg));
	memset(&dml_ctx->v21.dml_to_dc_pipe_mapping, 0, sizeof(struct dml2_dml_to_dc_pipe_mapping));
	memset(&dml_ctx->v21.mode_programming.dml2_instance->scratch.build_mode_programming_locals.mode_programming_params, 0, sizeof(struct dml2_core_mode_programming_in_out));

	if (!context)
		return true;

	if (context->stream_count == 0) {
		dml21_build_fams2_programming(in_dc, context, dml_ctx);
		return true;
	}

	/* scrub phantoms from the current dc_state */
	dml_ctx->config.svp_pstate.callbacks.remove_phantom_streams_and_planes(in_dc, context);
	dml_ctx->config.svp_pstate.callbacks.release_phantom_streams_and_planes(in_dc, context);

	/* Populate stream, plane mappings and other fields in display config */
	result = dml21_map_dc_state_into_dml_display_cfg(in_dc, context, dml_ctx);
	if (!result)
		return false;

	result = dml2_build_mode_programming(mode_programming);
	if (!result)
		return false;

	/* Check and map HW resources */
	if (!dml_ctx->config.skip_hw_state_mapping) {
		dml21_map_hw_resources(dml_ctx);
		dml2_map_dc_pipes(dml_ctx, context, NULL, &dml_ctx->v21.dml_to_dc_pipe_mapping, in_dc->current_state);
		/* if subvp phantoms are present, expand them into dc context */
		dml21_handle_phantom_streams_planes(in_dc, context, dml_ctx);

		/* Copy DML CLK, WM and REG outputs to bandwidth context */
		dml21_calculate_rq_and_dlg_params(in_dc, context, &context->res_ctx, dml_ctx, in_dc->res_pool->pipe_count);
		dml21_copy_clocks_to_dc_state(dml_ctx, context);
		dml21_extract_watermark_sets(in_dc, &context->bw_ctx.bw.dcn.watermarks, dml_ctx);
		dml21_build_fams2_programming(in_dc, context, dml_ctx);
	}

	return true;
}

static bool dml21_check_mode_support(const struct dc *in_dc, struct dc_state *context, struct dml2_context *dml_ctx)
{
	bool is_supported = false;
	struct dml2_initialize_instance_in_out *dml_init = &dml_ctx->v21.dml_init;
	struct dml2_check_mode_supported_in_out *mode_support = &dml_ctx->v21.mode_support;

	memset(&dml_ctx->v21.display_config, 0, sizeof(struct dml2_display_cfg));
	memset(&dml_ctx->v21.dml_to_dc_pipe_mapping, 0, sizeof(struct dml2_dml_to_dc_pipe_mapping));
	memset(&dml_ctx->v21.mode_programming.dml2_instance->scratch.check_mode_supported_locals.mode_support_params, 0, sizeof(struct dml2_core_mode_support_in_out));

	if (!context || context->stream_count == 0)
		return true;

	/* scrub phantoms from the current dc_state */
	dml_ctx->config.svp_pstate.callbacks.remove_phantom_streams_and_planes(in_dc, context);
	dml_ctx->config.svp_pstate.callbacks.release_phantom_streams_and_planes(in_dc, context);

	mode_support->dml2_instance = dml_init->dml2_instance;
	if (!dml21_map_dc_state_into_dml_display_cfg(in_dc, context, dml_ctx))
		return false;

	dml_ctx->v21.mode_programming.dml2_instance->scratch.build_mode_programming_locals.mode_programming_params.programming = dml_ctx->v21.mode_programming.programming;
	is_supported = dml2_check_mode_supported(mode_support);
	if (!is_supported)
		return false;

	return true;
}

bool dml21_validate(const struct dc *in_dc, struct dc_state *context, struct dml2_context *dml_ctx, bool fast_validate)
{
	bool out = false;

	DC_FP_START();

	/* fast_validate only checks mode support; the full path also builds mode programming */
	if (fast_validate)
		out = dml21_check_mode_support(in_dc, context, dml_ctx);
	else
		out = dml21_mode_check_and_programming(in_dc, context, dml_ctx);

	DC_FP_END();

	return out;
}

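/*
 * Two passes over the DML planes: the first collects an mcache configuration
 * per plane (phantom planes appended after the main ones) and feeds them to
 * dml2_build_mcache_programming(); the second copies the generated register
 * values into the matching main and phantom DC pipes.
 */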
void dml21_prepare_mcache_programming(struct dc *in_dc, struct dc_state *context, struct dml2_context *dml_ctx)
{
	unsigned int dml_prog_idx, dml_phantom_prog_idx, dc_pipe_index;
	int num_pipes;
	struct pipe_ctx *dc_main_pipes[__DML2_WRAPPER_MAX_STREAMS_PLANES__];
	struct pipe_ctx *dc_phantom_pipes[__DML2_WRAPPER_MAX_STREAMS_PLANES__] = {0};

	struct dml2_per_plane_programming *pln_prog = NULL;
	struct dml2_plane_mcache_configuration_descriptor *mcache_config = NULL;
	struct prepare_mcache_programming_locals *l = &dml_ctx->v21.scratch.prepare_mcache_locals;

	if (context->stream_count == 0)
		return;

	memset(&l->build_mcache_programming_params, 0, sizeof(struct dml2_build_mcache_programming_in_out));
	l->build_mcache_programming_params.dml2_instance = dml_ctx->v21.dml_init.dml2_instance;

	/* phantoms start after main planes */
	dml_phantom_prog_idx = dml_ctx->v21.mode_programming.programming->display_config.num_planes;

	/* Build mcache programming parameters per plane per pipe */
	for (dml_prog_idx = 0; dml_prog_idx < dml_ctx->v21.mode_programming.programming->display_config.num_planes; dml_prog_idx++) {
		pln_prog = &dml_ctx->v21.mode_programming.programming->plane_programming[dml_prog_idx];

		mcache_config = &l->build_mcache_programming_params.mcache_configurations[dml_prog_idx];
		memset(mcache_config, 0, sizeof(struct dml2_plane_mcache_configuration_descriptor));
		mcache_config->plane_descriptor = pln_prog->plane_descriptor;
		mcache_config->mcache_allocation = &context->bw_ctx.bw.dcn.mcache_allocations[dml_prog_idx];
		mcache_config->num_pipes = pln_prog->num_dpps_required;
		l->build_mcache_programming_params.num_configurations++;

		if (pln_prog->num_dpps_required == 0)
			continue;

		num_pipes = dml21_find_dc_pipes_for_plane(in_dc, context, dml_ctx, dc_main_pipes, dc_phantom_pipes, dml_prog_idx);
		if (num_pipes <= 0 || dc_main_pipes[0]->stream == NULL ||
				dc_main_pipes[0]->plane_state == NULL)
			continue;

		/* get config for each pipe */
		for (dc_pipe_index = 0; dc_pipe_index < num_pipes; dc_pipe_index++) {
			ASSERT(dc_main_pipes[dc_pipe_index]);
			dml21_get_pipe_mcache_config(context, dc_main_pipes[dc_pipe_index], pln_prog, &mcache_config->pipe_configurations[dc_pipe_index]);
		}

		/* get config for each phantom pipe */
		if (pln_prog->phantom_plane.valid &&
				dc_phantom_pipes[0] &&
				dc_main_pipes[0]->stream &&
				dc_phantom_pipes[0]->plane_state) {
			mcache_config = &l->build_mcache_programming_params.mcache_configurations[dml_phantom_prog_idx];
			memset(mcache_config, 0, sizeof(struct dml2_plane_mcache_configuration_descriptor));
			mcache_config->plane_descriptor = pln_prog->plane_descriptor;
			mcache_config->mcache_allocation = &context->bw_ctx.bw.dcn.mcache_allocations[dml_phantom_prog_idx];
			mcache_config->num_pipes = pln_prog->num_dpps_required;
			l->build_mcache_programming_params.num_configurations++;

			for (dc_pipe_index = 0; dc_pipe_index < num_pipes; dc_pipe_index++) {
				ASSERT(dc_phantom_pipes[dc_pipe_index]);
				dml21_get_pipe_mcache_config(context, dc_phantom_pipes[dc_pipe_index], pln_prog, &mcache_config->pipe_configurations[dc_pipe_index]);
			}

			/* increment phantom index */
			dml_phantom_prog_idx++;
		}
	}

	/* Call to generate mcache programming per plane per pipe for the given display configuration */
	dml2_build_mcache_programming(&l->build_mcache_programming_params);

	/* reset phantom index for the second pass; phantoms start after main planes */
	dml_phantom_prog_idx = dml_ctx->v21.mode_programming.programming->display_config.num_planes;

	/* get per plane per pipe mcache programming */
	for (dml_prog_idx = 0; dml_prog_idx < dml_ctx->v21.mode_programming.programming->display_config.num_planes; dml_prog_idx++) {
		pln_prog = &dml_ctx->v21.mode_programming.programming->plane_programming[dml_prog_idx];

		num_pipes = dml21_find_dc_pipes_for_plane(in_dc, context, dml_ctx, dc_main_pipes, dc_phantom_pipes, dml_prog_idx);
		if (num_pipes <= 0 || dc_main_pipes[0]->stream == NULL ||
				dc_main_pipes[0]->plane_state == NULL)
			continue;

		/* get config for each pipe */
		for (dc_pipe_index = 0; dc_pipe_index < num_pipes; dc_pipe_index++) {
			ASSERT(dc_main_pipes[dc_pipe_index]);
			if (l->build_mcache_programming_params.per_plane_pipe_mcache_regs[dml_prog_idx][dc_pipe_index]) {
				memcpy(&dc_main_pipes[dc_pipe_index]->mcache_regs,
						l->build_mcache_programming_params.per_plane_pipe_mcache_regs[dml_prog_idx][dc_pipe_index],
						sizeof(struct dml2_hubp_pipe_mcache_regs));
			}
		}

		/* get config for each phantom pipe */
		if (pln_prog->phantom_plane.valid &&
				dc_phantom_pipes[0] &&
				dc_main_pipes[0]->stream &&
				dc_phantom_pipes[0]->plane_state) {
			for (dc_pipe_index = 0; dc_pipe_index < num_pipes; dc_pipe_index++) {
				ASSERT(dc_phantom_pipes[dc_pipe_index]);
				if (l->build_mcache_programming_params.per_plane_pipe_mcache_regs[dml_phantom_prog_idx][dc_pipe_index]) {
					memcpy(&dc_phantom_pipes[dc_pipe_index]->mcache_regs,
							l->build_mcache_programming_params.per_plane_pipe_mcache_regs[dml_phantom_prog_idx][dc_pipe_index],
							sizeof(struct dml2_hubp_pipe_mcache_regs));
				}
			}
			/* increment phantom index */
			dml_phantom_prog_idx++;
		}
	}
}

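/*
 * The destination context must have been created via dml21_allocate_memory():
 * its heap-allocated instance and programming buffers are preserved across
 * the shallow context copy and re-linked afterwards.
 */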
void dml21_copy(struct dml2_context *dst_dml_ctx,
		struct dml2_context *src_dml_ctx)
{
	/* Preserve references to internals */
	struct dml2_instance *dst_dml2_instance = dst_dml_ctx->v21.dml_init.dml2_instance;
	struct dml2_display_cfg_programming *dst_dml2_programming = dst_dml_ctx->v21.mode_programming.programming;

	/* Copy context */
	memcpy(dst_dml_ctx, src_dml_ctx, sizeof(struct dml2_context));

	/* Copy internals */
	memcpy(dst_dml2_instance, src_dml_ctx->v21.dml_init.dml2_instance, sizeof(struct dml2_instance));
	memcpy(dst_dml2_programming, src_dml_ctx->v21.mode_programming.programming, sizeof(struct dml2_display_cfg_programming));

	/* Restore references to internals */
	dst_dml_ctx->v21.dml_init.dml2_instance = dst_dml2_instance;

	dst_dml_ctx->v21.mode_support.dml2_instance = dst_dml2_instance;
	dst_dml_ctx->v21.mode_programming.dml2_instance = dst_dml2_instance;

	dst_dml_ctx->v21.mode_support.display_config = &dst_dml_ctx->v21.display_config;
	dst_dml_ctx->v21.mode_programming.display_config = dst_dml_ctx->v21.mode_support.display_config;

	dst_dml_ctx->v21.mode_programming.programming = dst_dml2_programming;

	DC_FP_START();

	/* need to initialize copied instance for internal references to be correct */
	dml2_initialize_instance(&dst_dml_ctx->v21.dml_init);

	DC_FP_END();
}

bool dml21_create_copy(struct dml2_context **dst_dml_ctx,
		struct dml2_context *src_dml_ctx)
{
	/* Allocate memory for initializing DML21 instance */
	if (!dml21_allocate_memory(dst_dml_ctx))
		return false;

	dml21_copy(*dst_dml_ctx, src_dml_ctx);

	return true;
}

void dml21_reinit(const struct dc *in_dc, struct dml2_context **dml_ctx, const struct dml2_configuration_options *config)
{
	dml21_init(in_dc, dml_ctx, config);
}