
Searched refs:scratch (Results 1 – 25 of 285) sorted by relevance


/linux/arch/sparc/include/asm/
winmacro.h
50 #define LOAD_PT_YREG(base_reg, scratch) \ argument
51 ld [%base_reg + STACKFRAME_SZ + PT_Y], %scratch; \
52 wr %scratch, 0x0, %y;
59 #define LOAD_PT_ALL(base_reg, pt_psr, pt_pc, pt_npc, scratch) \ argument
60 LOAD_PT_YREG(base_reg, scratch) \
77 #define STORE_PT_YREG(base_reg, scratch) \ argument
78 rd %y, %scratch; \
79 st %scratch, [%base_reg + STACKFRAME_SZ + PT_Y];
92 #define SAVE_BOLIXED_USER_STACK(cur_reg, scratch) \ argument
93 ld [%cur_reg + TI_W_SAVED], %scratch; \
[all...]
/linux/drivers/infiniband/hw/irdma/
uda.h
40 u32 op, u64 scratch);
43 u64 scratch);
51 struct irdma_ah_info *info, u64 scratch) in irdma_sc_create_ah() argument
54 scratch); in irdma_sc_create_ah()
58 struct irdma_ah_info *info, u64 scratch) in irdma_sc_destroy_ah() argument
61 scratch); in irdma_sc_destroy_ah()
66 u64 scratch) in irdma_sc_create_mcast_grp() argument
69 scratch); in irdma_sc_create_mcast_grp()
74 u64 scratch) in irdma_sc_modify_mcast_grp() argument
77 scratch); in irdma_sc_modify_mcast_grp()
82 irdma_sc_destroy_mcast_grp(struct irdma_sc_cqp * cqp,struct irdma_mcast_grp_info * info,u64 scratch) irdma_sc_destroy_mcast_grp() argument
[all...]
type.h
669 u64 scratch; member
1164 int irdma_sc_ccq_create(struct irdma_sc_cq *ccq, u64 scratch,
1166 int irdma_sc_ccq_destroy(struct irdma_sc_cq *ccq, u64 scratch, bool post_sq);
1172 int irdma_sc_cceq_create(struct irdma_sc_ceq *ceq, u64 scratch);
1175 int irdma_sc_ceq_destroy(struct irdma_sc_ceq *ceq, u64 scratch, bool post_sq);
1202 struct irdma_create_qp_info *info, u64 scratch,
1204 int irdma_sc_qp_destroy(struct irdma_sc_qp *qp, u64 scratch,
1207 struct irdma_qp_flush_info *info, u64 scratch,
1211 struct irdma_modify_qp_info *info, u64 scratch,
1221 int irdma_sc_cq_destroy(struct irdma_sc_cq *cq, u64 scratch, boo
1234 u64 scratch; global() member
1240 u64 scratch; global() member
1245 u64 scratch; global() member
1252 u64 scratch; global() member
1259 u64 scratch; global() member
1264 u64 scratch; global() member
1270 u64 scratch; global() member
1276 u64 scratch; global() member
1282 u64 scratch; global() member
1288 u64 scratch; global() member
1294 u64 scratch; global() member
1299 u64 scratch; global() member
1306 u64 scratch; global() member
1311 u64 scratch; global() member
1318 u64 scratch; global() member
1324 u64 scratch; global() member
1330 u64 scratch; global() member
1336 u64 scratch; global() member
1341 u64 scratch; global() member
1346 u64 scratch; global() member
1351 u64 scratch; global() member
1356 u64 scratch; global() member
1362 u64 scratch; global() member
1368 u64 scratch; global() member
1376 u64 scratch; global() member
1384 u64 scratch; global() member
1390 u64 scratch; global() member
1396 u64 scratch; global() member
1402 u64 scratch; global() member
1408 u64 scratch; global() member
1414 u64 scratch; global() member
1420 u64 scratch; global() member
1426 u64 scratch; global() member
1432 u64 scratch; global() member
1438 u64 scratch; global() member
1444 u64 scratch; global() member
1450 u64 scratch; global() member
1456 u64 scratch; global() member
1462 u64 scratch; global() member
1468 u64 scratch; global() member
1488 irdma_sc_cqp_get_next_send_wqe(struct irdma_sc_cqp * cqp,u64 scratch) irdma_sc_cqp_get_next_send_wqe() argument
[all...]
ctrl.c
179 * @scratch: u64 saved to be used during cqp completion
184 u64 scratch, bool post_sq) in irdma_sc_add_arp_cache_entry() argument
189 wqe = irdma_sc_cqp_get_next_send_wqe(cqp, scratch); in irdma_sc_add_arp_cache_entry()
215 * @scratch: u64 saved to be used during cqp completion
219 static int irdma_sc_del_arp_cache_entry(struct irdma_sc_cqp *cqp, u64 scratch, in irdma_sc_del_arp_cache_entry() argument
225 wqe = irdma_sc_cqp_get_next_send_wqe(cqp, scratch); in irdma_sc_del_arp_cache_entry()
249 * @scratch: u64 saved to be used during cqp completion
254 u64 scratch, bool post_sq) in irdma_sc_manage_apbvt_entry() argument
259 wqe = irdma_sc_cqp_get_next_send_wqe(cqp, scratch); in irdma_sc_manage_apbvt_entry()
284 * @scratch
302 irdma_sc_manage_qhash_table_entry(struct irdma_sc_cqp * cqp,struct irdma_qhash_table_info * info,u64 scratch,bool post_sq) irdma_sc_manage_qhash_table_entry() argument
449 irdma_sc_qp_create(struct irdma_sc_qp * qp,struct irdma_create_qp_info * info,u64 scratch,bool post_sq) irdma_sc_qp_create() argument
500 irdma_sc_qp_modify(struct irdma_sc_qp * qp,struct irdma_modify_qp_info * info,u64 scratch,bool post_sq) irdma_sc_qp_modify() argument
569 irdma_sc_qp_destroy(struct irdma_sc_qp * qp,u64 scratch,bool remove_hash_idx,bool ignore_mw_bnd,bool post_sq) irdma_sc_qp_destroy() argument
761 irdma_sc_alloc_local_mac_entry(struct irdma_sc_cqp * cqp,u64 scratch,bool post_sq) irdma_sc_alloc_local_mac_entry() argument
797 irdma_sc_add_local_mac_entry(struct irdma_sc_cqp * cqp,struct irdma_local_mac_entry_info * info,u64 scratch,bool post_sq) irdma_sc_add_local_mac_entry() argument
833 irdma_sc_del_local_mac_entry(struct irdma_sc_cqp * cqp,u64 scratch,u16 entry_idx,u8 ignore_ref_count,bool post_sq) irdma_sc_del_local_mac_entry() argument
1057 irdma_sc_alloc_stag(struct irdma_sc_dev * dev,struct irdma_allocate_stag_info * info,u64 scratch,bool post_sq) irdma_sc_alloc_stag() argument
1121 irdma_sc_mr_reg_non_shared(struct irdma_sc_dev * dev,struct irdma_reg_ns_stag_info * info,u64 scratch,bool post_sq) irdma_sc_mr_reg_non_shared() argument
1212 irdma_sc_dealloc_stag(struct irdma_sc_dev * dev,struct irdma_dealloc_stag_info * info,u64 scratch,bool post_sq) irdma_sc_dealloc_stag() argument
1251 irdma_sc_mw_alloc(struct irdma_sc_dev * dev,struct irdma_mw_alloc_info * info,u64 scratch,bool post_sq) irdma_sc_mw_alloc() argument
2049 irdma_sc_gather_stats(struct irdma_sc_cqp * cqp,struct irdma_stats_gather_info * info,u64 scratch) irdma_sc_gather_stats() argument
2096 irdma_sc_manage_stats_inst(struct irdma_sc_cqp * cqp,struct irdma_stats_inst_info * info,bool alloc,u64 scratch) irdma_sc_manage_stats_inst() argument
2132 irdma_sc_set_up_map(struct irdma_sc_cqp * cqp,struct irdma_up_info * info,u64 scratch) irdma_sc_set_up_map() argument
2175 irdma_sc_manage_ws_node(struct irdma_sc_cqp * cqp,struct irdma_ws_node_info * info,enum irdma_ws_node_op node_op,u64 scratch) irdma_sc_manage_ws_node() argument
2216 irdma_sc_qp_flush_wqes(struct irdma_sc_qp * qp,struct irdma_qp_flush_info * info,u64 scratch,bool post_sq) irdma_sc_qp_flush_wqes() argument
2290 irdma_sc_gen_ae(struct irdma_sc_qp * qp,struct irdma_gen_ae_info * info,u64 scratch,bool post_sq) irdma_sc_gen_ae() argument
2331 irdma_sc_qp_upload_context(struct irdma_sc_dev * dev,struct irdma_upload_context_info * info,u64 scratch,bool post_sq) irdma_sc_qp_upload_context() argument
2371 irdma_sc_manage_push_page(struct irdma_sc_cqp * cqp,struct irdma_cqp_manage_push_page_info * info,u64 scratch,bool post_sq) irdma_sc_manage_push_page() argument
2409 irdma_sc_suspend_qp(struct irdma_sc_cqp * cqp,struct irdma_sc_qp * qp,u64 scratch) irdma_sc_suspend_qp() argument
2439 irdma_sc_resume_qp(struct irdma_sc_cqp * cqp,struct irdma_sc_qp * qp,u64 scratch) irdma_sc_resume_qp() argument
2516 irdma_sc_cq_create(struct irdma_sc_cq * cq,u64 scratch,bool check_overflow,bool post_sq) irdma_sc_cq_create() argument
2589 irdma_sc_cq_destroy(struct irdma_sc_cq * cq,u64 scratch,bool post_sq) irdma_sc_cq_destroy() argument
2656 irdma_sc_cq_modify(struct irdma_sc_cq * cq,struct irdma_modify_cq_info * info,u64 scratch,bool post_sq) irdma_sc_cq_modify() argument
3260 irdma_sc_cqp_get_next_send_wqe_idx(struct irdma_sc_cqp * cqp,u64 scratch,u32 * wqe_idx) irdma_sc_cqp_get_next_send_wqe_idx() argument
3458 irdma_sc_manage_hmc_pm_func_table(struct irdma_sc_cqp * cqp,struct irdma_hmc_fcn_info * info,u64 scratch,bool post_sq) irdma_sc_manage_hmc_pm_func_table() argument
3513 irdma_sc_commit_fpm_val(struct irdma_sc_cqp * cqp,u64 scratch,u8 hmc_fn_id,struct irdma_dma_mem * commit_fpm_mem,bool post_sq,u8 wait_type) irdma_sc_commit_fpm_val() argument
3574 irdma_sc_query_fpm_val(struct irdma_sc_cqp * cqp,u64 scratch,u8 hmc_fn_id,struct irdma_dma_mem * query_fpm_mem,bool post_sq,u8 wait_type) irdma_sc_query_fpm_val() argument
3665 irdma_sc_ceq_create(struct irdma_sc_ceq * ceq,u64 scratch,bool post_sq) irdma_sc_ceq_create() argument
3739 irdma_sc_cceq_create(struct irdma_sc_ceq * ceq,u64 scratch) irdma_sc_cceq_create() argument
3764 irdma_sc_ceq_destroy(struct irdma_sc_ceq * ceq,u64 scratch,bool post_sq) irdma_sc_ceq_destroy() argument
3929 irdma_sc_aeq_create(struct irdma_sc_aeq * aeq,u64 scratch,bool post_sq) irdma_sc_aeq_create() argument
3968 irdma_sc_aeq_destroy(struct irdma_sc_aeq * aeq,u64 scratch,bool post_sq) irdma_sc_aeq_destroy() argument
4233 irdma_sc_ccq_create(struct irdma_sc_cq * ccq,u64 scratch,bool check_overflow,bool post_sq) irdma_sc_ccq_create() argument
4258 irdma_sc_ccq_destroy(struct irdma_sc_cq * ccq,u64 scratch,bool post_sq) irdma_sc_ccq_destroy() argument
4414 cqp_sds_wqe_fill(struct irdma_sc_cqp * cqp,struct irdma_update_sds_info * info,u64 scratch) cqp_sds_wqe_fill() argument
4493 irdma_update_pe_sds(struct irdma_sc_dev * dev,struct irdma_update_sds_info * info,u64 scratch) irdma_update_pe_sds() argument
4536 irdma_sc_static_hmc_pages_allocated(struct irdma_sc_cqp * cqp,u64 scratch,u8 hmc_fn_id,bool post_sq,bool poll_registers) irdma_sc_static_hmc_pages_allocated() argument
4635 irdma_sc_query_rdma_features(struct irdma_sc_cqp * cqp,struct irdma_dma_mem * buf,u64 scratch) irdma_sc_query_rdma_features() argument
[all...]
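
The irdma hits above all share one idiom: every CQP control verb takes a u64 scratch that is stamped into the work-queue entry and handed back in the CQP completion, so the caller can correlate completions with requests (the "@scratch: u64 saved to be used during cqp completion" comments state this directly). Below is a minimal userspace sketch of that cookie pattern; all struct and function names are hypothetical stand-ins, only the u64-cookie idea comes from the code above.

#include <stdint.h>
#include <stdio.h>

struct cqp_request {            /* caller-side tracking state */
	int opcode;
	int done;
};

struct cqp_wqe {                /* one hardware work-queue entry */
	uint64_t scratch;       /* opaque cookie, echoed on completion */
};

static void post_wqe(struct cqp_wqe *wqe, int opcode, struct cqp_request *req)
{
	req->opcode = opcode;
	wqe->scratch = (uint64_t)(uintptr_t)req;   /* saved for completion */
}

static void handle_completion(const struct cqp_wqe *wqe)
{
	/* recover the request from the echoed scratch value */
	struct cqp_request *req = (struct cqp_request *)(uintptr_t)wqe->scratch;

	req->done = 1;
	printf("op %d completed\n", req->opcode);
}

int main(void)
{
	struct cqp_request req = { 0 };
	struct cqp_wqe wqe;

	post_wqe(&wqe, 42, &req);
	handle_completion(&wqe);
	return 0;
}
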
/linux/arch/arc/include/asm/
irqflags-compact.h
185 .macro IRQ_DISABLE scratch
186 lr \scratch, [status32]
187 bic \scratch, \scratch, (STATUS_E1_MASK | STATUS_E2_MASK)
188 flag \scratch
192 .macro IRQ_ENABLE scratch
194 lr \scratch, [status32]
195 or \scratch, \scratch, (STATUS_E1_MASK | STATUS_E2_MASK)
196 flag \scratch
[all...]
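
A C-level sketch of what the IRQ_DISABLE/IRQ_ENABLE macros above do: read STATUS32 into a scratch register, clear or set the E1/E2 interrupt-enable bits, and write it back. read_status32()/write_status32() are hypothetical stand-ins for the lr/flag instructions, and the mask bit positions are illustrative only.

#include <stdint.h>

#define STATUS_E1_MASK (1u << 1)
#define STATUS_E2_MASK (1u << 2)

static uint32_t status32;       /* models the CPU status register */

static uint32_t read_status32(void)    { return status32; }   /* ~ lr   */
static void write_status32(uint32_t v) { status32 = v; }      /* ~ flag */

static void irq_disable(void)
{
	uint32_t s = read_status32();

	s &= ~(STATUS_E1_MASK | STATUS_E2_MASK);  /* bic */
	write_status32(s);
}

static void irq_enable(void)
{
	uint32_t s = read_status32();

	s |= STATUS_E1_MASK | STATUS_E2_MASK;     /* or */
	write_status32(s);
}

int main(void)
{
	irq_disable();
	irq_enable();
	return 0;
}
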
/linux/drivers/gpu/drm/amd/display/dc/dml2/dml21/src/dml2_pmo/
dml2_pmo_dcn3.c
124 if (pmo->scratch.pmo_dcn3.current_candidate[0] > 0) { in iterate_to_next_candidiate()
125 pmo->scratch.pmo_dcn3.current_candidate[0]--; in iterate_to_next_candidiate()
128 for (borrow_from = 1; borrow_from < size && pmo->scratch.pmo_dcn3.current_candidate[borrow_from] == 0; borrow_from++) in iterate_to_next_candidiate()
132 pmo->scratch.pmo_dcn3.current_candidate[borrow_from]--; in iterate_to_next_candidiate()
134 pmo->scratch.pmo_dcn3.current_candidate[i] = pmo->scratch.pmo_dcn3.reserved_time_candidates_count[i] - 1; in iterate_to_next_candidiate()
537 pmo->scratch.pmo_dcn3.min_latency_index = in_out->base_display_config->stage1.min_clk_index_for_latency; in pmo_dcn3_init_for_pstate_support()
538 pmo->scratch.pmo_dcn3.max_latency_index = pmo->mcg_clock_table_size - 1; in pmo_dcn3_init_for_pstate_support()
539 pmo->scratch.pmo_dcn3.cur_latency_index = in_out->base_display_config->stage1.min_clk_index_for_latency; in pmo_dcn3_init_for_pstate_support()
541 pmo->scratch in pmo_dcn3_init_for_pstate_support()
[all...]
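
The iterate_to_next_candidiate() hits above show a mixed-radix "odometer" step: current_candidate[] counts down, borrowing from the next non-zero digit and refilling all lower digits to their per-digit maximum. A standalone sketch of that iteration follows; array names and sizes are illustrative.

#include <stdbool.h>
#include <stdio.h>

#define DIGITS 4

static bool next_candidate(int cur[DIGITS], const int count[DIGITS])
{
	int borrow_from, i;

	if (cur[0] > 0) {
		cur[0]--;
		return true;
	}

	/* find the lowest digit we can borrow from */
	for (borrow_from = 1; borrow_from < DIGITS && cur[borrow_from] == 0; borrow_from++)
		;
	if (borrow_from == DIGITS)
		return false;           /* all combinations exhausted */

	cur[borrow_from]--;
	for (i = 0; i < borrow_from; i++)
		cur[i] = count[i] - 1;  /* refill lower digits to their max */
	return true;
}

int main(void)
{
	const int count[DIGITS] = { 2, 1, 3, 1 };
	int cur[DIGITS] = { 1, 0, 2, 0 };       /* start at the maximum */

	do {
		printf("%d %d %d %d\n", cur[0], cur[1], cur[2], cur[3]);
	} while (next_candidate(cur, count));
	return 0;
}
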
dml2_pmo_dcn4_fams2.c
943 struct dml2_pmo_scratch *s = &pmo->scratch; in build_synchronized_timing_groups()
980 set_bit_in_bitfield(&pmo->scratch.pmo_dcn4.synchronized_timing_group_masks[timing_group_idx], j); in build_synchronized_timing_groups()
1000 valid &= is_bit_set_in_bitfield(pmo->scratch.pmo_dcn4.stream_vactive_capability_mask, i); in all_timings_support_vactive()
1018 if (mask != pmo->scratch.pmo_dcn4.synchronized_timing_group_masks[i]) { in all_timings_support_vblank()
1049 stream_fams2_meta = &pmo->scratch.pmo_dcn4.stream_fams2_meta[i]; in all_timings_support_drr()
1127 stream_fams2_meta = &pmo->scratch.pmo_dcn4.stream_fams2_meta[i]; in all_timings_support_svp()
1133 microschedule_vlines = calc_svp_microschedule(&pmo->scratch.pmo_dcn4.stream_fams2_meta[i]); in all_timings_support_svp()
1154 static void insert_into_candidate_list(const struct dml2_pmo_pstate_strategy *pstate_strategy, int stream_count, struct dml2_pmo_scratch *scratch) in insert_into_candidate_list() argument
1156 scratch->pmo_dcn4.pstate_strategy_candidates[scratch in insert_into_candidate_list()
1986 struct dml2_pmo_scratch *scratch = &pmo->scratch; setup_planes_for_svp_by_mask() local
2009 struct dml2_pmo_scratch *scratch = &pmo->scratch; setup_planes_for_svp_drr_by_mask() local
2109 struct dml2_pmo_scratch *scratch = &pmo->scratch; setup_display_config() local
[all...]
/linux/drivers/gpu/drm/amd/display/dc/dml2/
dml2_dc_resource_mgmt.c
62 bool is_plane_duplicate = dml2->v20.scratch.plane_duplicate_exists; in get_plane_id()
132 ctx->v20.scratch.dml_to_dc_pipe_mapping.dml_pipe_idx_to_plane_index[state->res_ctx.pipe_ctx[i].pipe_idx], &plane_id_assigned_to_pipe)) { in find_master_pipe_of_plane()
155 ctx->v20.scratch.dml_to_dc_pipe_mapping.dml_pipe_idx_to_plane_index[pipe->pipe_idx], in find_pipes_assigned_to_plane()
537 struct dc_pipe_mapping_scratch *scratch, in add_odm_slice_to_odm_tree() argument
545 ASSERT(scratch->pipe_pool.num_pipes_assigned_to_plane_for_mpcc_combine == 1 || scratch->pipe_pool.num_pipes_assigned_to_plane_for_odm_combine == 1); in add_odm_slice_to_odm_tree()
547 for (i = 0; i < scratch->pipe_pool.num_pipes_assigned_to_plane_for_mpcc_combine; i++) { in add_odm_slice_to_odm_tree()
548 pipe = &state->res_ctx.pipe_ctx[scratch->pipe_pool.pipes_assigned_to_plane[odm_slice_index][i]]; in add_odm_slice_to_odm_tree()
550 if (scratch->mpc_info.prev_odm_pipe) in add_odm_slice_to_odm_tree()
551 scratch in add_odm_slice_to_odm_tree()
737 map_pipes_for_stream(struct dml2_context * ctx,struct dc_state * state,const struct dc_stream_state * stream,struct dc_pipe_mapping_scratch * scratch,const struct dc_state * existing_state) map_pipes_for_stream() argument
757 map_pipes_for_plane(struct dml2_context * ctx,struct dc_state * state,const struct dc_stream_state * stream,const struct dc_plane_state * plane,int plane_index,struct dc_pipe_mapping_scratch * scratch,const struct dc_state * existing_state) map_pipes_for_plane() argument
1050 struct dc_pipe_mapping_scratch scratch; dml2_map_dc_pipes() local
[all...]
/linux/lib/tests/
kunit_iov_iter.c
105 u8 *scratch, *buffer; in iov_kunit_copy_to_kvec() local
112 scratch = iov_kunit_create_buffer(test, &spages, npages); in iov_kunit_copy_to_kvec()
114 scratch[i] = pattern(i); in iov_kunit_copy_to_kvec()
123 copied = copy_to_iter(scratch, size, &iter); in iov_kunit_copy_to_kvec()
129 /* Build the expected image in the scratch buffer. */ in iov_kunit_copy_to_kvec()
131 memset(scratch, 0, bufsize); in iov_kunit_copy_to_kvec()
134 scratch[i] = pattern(patt++); in iov_kunit_copy_to_kvec()
138 KUNIT_EXPECT_EQ_MSG(test, buffer[i], scratch[i], "at i=%x", i); in iov_kunit_copy_to_kvec()
139 if (buffer[i] != scratch[i]) in iov_kunit_copy_to_kvec()
155 u8 *scratch, *buffe in iov_kunit_copy_from_kvec() local
264 u8 *scratch, *buffer; iov_kunit_copy_to_bvec() local
318 u8 *scratch, *buffer; iov_kunit_copy_from_bvec() local
422 u8 *scratch, *buffer; iov_kunit_copy_to_folioq() local
484 u8 *scratch, *buffer; iov_kunit_copy_from_folioq() local
584 u8 *scratch, *buffer; iov_kunit_copy_to_xarray() local
642 u8 *scratch, *buffer; iov_kunit_copy_from_xarray() local
[all...]
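
The KUnit cases above all follow the same test shape: fill a scratch buffer with a deterministic pattern, copy through the interface under test, rebuild the expected image in the scratch buffer, then compare byte-by-byte. A userspace sketch of that shape, with memcpy standing in for copy_to_iter() and a pattern() helper in the same spirit as the kernel test's:

#include <assert.h>
#include <stdint.h>
#include <stdio.h>
#include <string.h>

#define BUFSIZE 4096

static uint8_t pattern(int i) { return (uint8_t)(i & 0x3f); }

int main(void)
{
	static uint8_t scratch[BUFSIZE], buffer[BUFSIZE];
	int i;

	for (i = 0; i < BUFSIZE; i++)
		scratch[i] = pattern(i);

	/* the operation under test; copy_to_iter() in the real test */
	memcpy(buffer, scratch, BUFSIZE);

	/* build the expected image in the scratch buffer */
	memset(scratch, 0, BUFSIZE);
	for (i = 0; i < BUFSIZE; i++)
		scratch[i] = pattern(i);

	for (i = 0; i < BUFSIZE; i++)
		assert(buffer[i] == scratch[i]);

	puts("copy verified");
	return 0;
}
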
/linux/arch/arc/kernel/
ptrace.c
192 REG_IN_ONE(scratch.bta, &ptregs->bta); in genregs_set()
193 REG_IN_ONE(scratch.lp_start, &ptregs->lp_start); in genregs_set()
194 REG_IN_ONE(scratch.lp_end, &ptregs->lp_end); in genregs_set()
195 REG_IN_ONE(scratch.lp_count, &ptregs->lp_count); in genregs_set()
197 REG_IGNORE_ONE(scratch.status32); in genregs_set()
199 REG_IN_ONE(scratch.ret, &ptregs->ret); in genregs_set()
200 REG_IN_ONE(scratch.blink, &ptregs->blink); in genregs_set()
201 REG_IN_ONE(scratch.fp, &ptregs->fp); in genregs_set()
202 REG_IN_ONE(scratch.gp, &ptregs->r26); in genregs_set()
203 REG_IN_ONE(scratch in genregs_set()
[all...]
/linux/fs/xfs/libxfs/
xfs_parent.c
310 struct xfs_da_args *scratch) in xfs_parent_lookup() argument
312 memset(scratch, 0, sizeof(struct xfs_da_args)); in xfs_parent_lookup()
313 xfs_parent_da_args_init(scratch, tp, pptr, ip, ip->i_ino, parent_name); in xfs_parent_lookup()
314 return xfs_attr_get_ilocked(scratch); in xfs_parent_lookup()
346 struct xfs_da_args *scratch) in xfs_parent_set() argument
353 memset(scratch, 0, sizeof(struct xfs_da_args)); in xfs_parent_set()
354 xfs_parent_da_args_init(scratch, NULL, pptr, ip, owner, parent_name); in xfs_parent_set()
355 return xfs_attr_set(scratch, XFS_ATTRUPDATE_CREATE, false); in xfs_parent_set()
369 struct xfs_da_args *scratch) in xfs_parent_unset() argument
376 memset(scratch, in xfs_parent_unset()
[all...]
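
The xfs_parent.c hits show a caller-provided scratch-args pattern: struct xfs_da_args is large, so the caller owns the storage and each helper memsets and re-initializes it per operation instead of putting it on its own stack. A stripped-down sketch, with invented stand-in types and helpers:

#include <stdio.h>
#include <string.h>

struct da_args {                /* stand-in for struct xfs_da_args */
	const char *name;
	int op;
};

static int attr_set(const struct da_args *args)
{
	printf("set %s (op %d)\n", args->name, args->op);
	return 0;
}

static int parent_set(const char *name, struct da_args *scratch)
{
	memset(scratch, 0, sizeof(*scratch));   /* reset between uses */
	scratch->name = name;
	scratch->op = 1;
	return attr_set(scratch);
}

int main(void)
{
	struct da_args scratch;     /* one allocation, reused per call */

	parent_set("dir1", &scratch);
	parent_set("dir2", &scratch);
	return 0;
}
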
/linux/drivers/mmc/host/
mmc_spi.c
97 /* "scratch" is per-{command,block} data exchanged with the card */
98 struct scratch { struct
122 struct scratch *data; argument
227 * be stored in the scratch buffer. It's somewhere after the in mmc_spi_response_get()
402 struct scratch *data = host->data; in mmc_spi_command_send()
512 struct scratch *scratch = host->data; in mmc_spi_setup_data_message() local
524 scratch->data_token = SPI_TOKEN_MULTI_WRITE; in mmc_spi_setup_data_message()
526 scratch->data_token = SPI_TOKEN_SINGLE; in mmc_spi_setup_data_message()
527 t->tx_buf = &scratch in mmc_spi_setup_data_message()
595 struct scratch *scratch = host->data; mmc_spi_writeblock() local
689 struct scratch *scratch = host->data; mmc_spi_readblock() local
849 struct scratch *scratch = host->data; mmc_spi_data_do() local
[all...]
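
Per the comment in the first hit, mmc_spi's scratch struct is a per-{command,block} staging area exchanged with the card: token and status bytes live in one DMA-safe struct, and each SPI transfer points its buffer into it. A simplified sketch of that staging; field names are stand-ins, and the token values mirror the SD-over-SPI constants only illustratively.

#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

#define SPI_TOKEN_SINGLE       0xfe
#define SPI_TOKEN_MULTI_WRITE  0xfc

struct scratch {                /* per-{command,block} staging area */
	uint8_t data_token;
	uint8_t status[2];
};

struct spi_transfer {
	const void *tx_buf;
	size_t len;
};

static void setup_data_message(struct scratch *s, struct spi_transfer *t,
			       int multiple)
{
	s->data_token = multiple ? SPI_TOKEN_MULTI_WRITE : SPI_TOKEN_SINGLE;
	t->tx_buf = &s->data_token;     /* transfer reads from scratch */
	t->len = 1;
}

int main(void)
{
	struct scratch s;
	struct spi_transfer t;

	setup_data_message(&s, &t, 1);
	printf("token 0x%02x\n", *(const uint8_t *)t.tx_buf);
	return 0;
}
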
/linux/drivers/gpu/drm/amd/display/dmub/src/
dmub_dcn20.c
98 uint32_t in_reset, scratch, i; in dmub_dcn20_reset() local
124 scratch = dmub->hw_funcs.get_gpint_response(dmub); in dmub_dcn20_reset()
125 if (scratch == DMUB_GPINT__STOP_FW_RESPONSE) in dmub_dcn20_reset()
433 dmub->debug.scratch[0] = REG_READ(DMCUB_SCRATCH0); in dmub_dcn20_get_diagnostic_data()
434 dmub->debug.scratch[1] = REG_READ(DMCUB_SCRATCH1); in dmub_dcn20_get_diagnostic_data()
435 dmub->debug.scratch[2] = REG_READ(DMCUB_SCRATCH2); in dmub_dcn20_get_diagnostic_data()
436 dmub->debug.scratch[3] = REG_READ(DMCUB_SCRATCH3); in dmub_dcn20_get_diagnostic_data()
437 dmub->debug.scratch[4] = REG_READ(DMCUB_SCRATCH4); in dmub_dcn20_get_diagnostic_data()
438 dmub->debug.scratch[5] = REG_READ(DMCUB_SCRATCH5); in dmub_dcn20_get_diagnostic_data()
439 dmub->debug.scratch[ in dmub_dcn20_get_diagnostic_data()
[all...]
dmub_dcn31.c
87 uint32_t in_reset, is_enabled, scratch, i, pwait_mode; in dmub_dcn31_reset() local
111 scratch = REG_READ(DMCUB_SCRATCH7); in dmub_dcn31_reset()
112 if (scratch == DMUB_GPINT__STOP_FW_RESPONSE) in dmub_dcn31_reset()
433 dmub->debug.scratch[0] = REG_READ(DMCUB_SCRATCH0); in dmub_dcn31_get_diagnostic_data()
434 dmub->debug.scratch[1] = REG_READ(DMCUB_SCRATCH1); in dmub_dcn31_get_diagnostic_data()
435 dmub->debug.scratch[2] = REG_READ(DMCUB_SCRATCH2); in dmub_dcn31_get_diagnostic_data()
436 dmub->debug.scratch[3] = REG_READ(DMCUB_SCRATCH3); in dmub_dcn31_get_diagnostic_data()
437 dmub->debug.scratch[4] = REG_READ(DMCUB_SCRATCH4); in dmub_dcn31_get_diagnostic_data()
438 dmub->debug.scratch[5] = REG_READ(DMCUB_SCRATCH5); in dmub_dcn31_get_diagnostic_data()
439 dmub->debug.scratch[ in dmub_dcn31_get_diagnostic_data()
[all...]
dmub_dcn35.c
92 uint32_t in_reset, is_enabled, scratch, i, pwait_mode; in dmub_dcn35_reset() local
112 scratch = REG_READ(DMCUB_SCRATCH7); in dmub_dcn35_reset()
113 if (scratch == DMUB_GPINT__STOP_FW_RESPONSE) in dmub_dcn35_reset()
471 dmub->debug.scratch[0] = REG_READ(DMCUB_SCRATCH0); in dmub_dcn35_get_diagnostic_data()
472 dmub->debug.scratch[1] = REG_READ(DMCUB_SCRATCH1); in dmub_dcn35_get_diagnostic_data()
473 dmub->debug.scratch[2] = REG_READ(DMCUB_SCRATCH2); in dmub_dcn35_get_diagnostic_data()
474 dmub->debug.scratch[3] = REG_READ(DMCUB_SCRATCH3); in dmub_dcn35_get_diagnostic_data()
475 dmub->debug.scratch[4] = REG_READ(DMCUB_SCRATCH4); in dmub_dcn35_get_diagnostic_data()
476 dmub->debug.scratch[5] = REG_READ(DMCUB_SCRATCH5); in dmub_dcn35_get_diagnostic_data()
477 dmub->debug.scratch[ in dmub_dcn35_get_diagnostic_data()
[all...]
dmub_dcn32.c
93 uint32_t in_reset, scratch, i; in dmub_dcn32_reset() local
119 scratch = dmub->hw_funcs.get_gpint_response(dmub); in dmub_dcn32_reset()
120 if (scratch == DMUB_GPINT__STOP_FW_RESPONSE) in dmub_dcn32_reset()
436 dmub->debug.scratch[0] = REG_READ(DMCUB_SCRATCH0); in dmub_dcn32_get_diagnostic_data()
437 dmub->debug.scratch[1] = REG_READ(DMCUB_SCRATCH1); in dmub_dcn32_get_diagnostic_data()
438 dmub->debug.scratch[2] = REG_READ(DMCUB_SCRATCH2); in dmub_dcn32_get_diagnostic_data()
439 dmub->debug.scratch[3] = REG_READ(DMCUB_SCRATCH3); in dmub_dcn32_get_diagnostic_data()
440 dmub->debug.scratch[4] = REG_READ(DMCUB_SCRATCH4); in dmub_dcn32_get_diagnostic_data()
441 dmub->debug.scratch[5] = REG_READ(DMCUB_SCRATCH5); in dmub_dcn32_get_diagnostic_data()
442 dmub->debug.scratch[ in dmub_dcn32_get_diagnostic_data()
[all...]
dmub_dcn401.c
69 uint32_t enabled, in_reset, scratch, pwait_mode; in dmub_dcn401_reset() local
84 scratch = dmub->hw_funcs.get_gpint_response(dmub); in dmub_dcn401_reset()
85 if (scratch == DMUB_GPINT__STOP_FW_RESPONSE) in dmub_dcn401_reset()
430 dmub->debug.scratch[0] = REG_READ(DMCUB_SCRATCH0); in dmub_dcn401_get_diagnostic_data()
431 dmub->debug.scratch[1] = REG_READ(DMCUB_SCRATCH1); in dmub_dcn401_get_diagnostic_data()
432 dmub->debug.scratch[2] = REG_READ(DMCUB_SCRATCH2); in dmub_dcn401_get_diagnostic_data()
433 dmub->debug.scratch[3] = REG_READ(DMCUB_SCRATCH3); in dmub_dcn401_get_diagnostic_data()
434 dmub->debug.scratch[4] = REG_READ(DMCUB_SCRATCH4); in dmub_dcn401_get_diagnostic_data()
435 dmub->debug.scratch[5] = REG_READ(DMCUB_SCRATCH5); in dmub_dcn401_get_diagnostic_data()
436 dmub->debug.scratch[ in dmub_dcn401_get_diagnostic_data()
[all...]
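
Across every DMUB generation above, the diagnostic hook copies the firmware's DMCUB_SCRATCHn mailbox registers into a debug.scratch[] array so a later dump sees one consistent snapshot. A sketch of that snapshot pattern with a mocked register file; the register count is illustrative, and the real scratch registers need not be contiguous, which is why the kernel code reads each one explicitly.

#include <stdint.h>
#include <stdio.h>

#define NUM_SCRATCH 17          /* illustrative count */

static uint32_t mmio_scratch[NUM_SCRATCH];      /* mock register file */

static uint32_t reg_read(int idx) { return mmio_scratch[idx]; }

struct dmub_debug {
	uint32_t scratch[NUM_SCRATCH];
};

static void get_diagnostic_data(struct dmub_debug *dbg)
{
	for (int i = 0; i < NUM_SCRATCH; i++)
		dbg->scratch[i] = reg_read(i);  /* one read per register */
}

int main(void)
{
	struct dmub_debug dbg;

	mmio_scratch[7] = 0xdeadbeef;   /* e.g. a stop-FW response code */
	get_diagnostic_data(&dbg);
	printf("scratch7 = 0x%08x\n", dbg.scratch[7]);
	return 0;
}
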
/linux/arch/riscv/kernel/
module-sections.c
121 Elf_Rela *scratch = NULL; in module_frob_arch_sections() local
168 scratch_size_needed = (num_scratch_relas + num_relas) * sizeof(*scratch); in module_frob_arch_sections()
171 scratch = kvrealloc(scratch, scratch_size, GFP_KERNEL); in module_frob_arch_sections()
172 if (!scratch) in module_frob_arch_sections()
178 scratch[num_scratch_relas++] = relas[j]; in module_frob_arch_sections()
181 if (scratch) { in module_frob_arch_sections()
183 sort(scratch, num_scratch_relas, sizeof(*scratch), cmp_rela, NULL); in module_frob_arch_sections()
184 count_max_entries(scratch, num_scratch_rela in module_frob_arch_sections()
[all...]
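
The module_frob_arch_sections() hits show a grow-sort-count pass: a scratch array of relocations is grown with kvrealloc as matches are found, sorted, then walked once to count distinct entries. A simplified userspace sketch with realloc and qsort standing in for kvrealloc and sort(); the "count unique" step stands in for count_max_entries().

#include <stdio.h>
#include <stdlib.h>

static int cmp_u64(const void *a, const void *b)
{
	unsigned long long x = *(const unsigned long long *)a;
	unsigned long long y = *(const unsigned long long *)b;

	return (x > y) - (x < y);
}

int main(void)
{
	unsigned long long relas[] = { 40, 10, 40, 20, 10, 30 };
	unsigned long long *scratch = NULL;
	size_t n = 0, i, unique = 0;

	for (i = 0; i < sizeof(relas) / sizeof(relas[0]); i++) {
		/* grow the scratch buffer per hit; kvrealloc in-kernel */
		scratch = realloc(scratch, (n + 1) * sizeof(*scratch));
		if (!scratch)
			return 1;
		scratch[n++] = relas[i];
	}

	qsort(scratch, n, sizeof(*scratch), cmp_u64);
	for (i = 0; i < n; i++)         /* sorted, so duplicates adjoin */
		if (i == 0 || scratch[i] != scratch[i - 1])
			unique++;

	printf("%zu relocations, %zu unique\n", n, unique);
	free(scratch);
	return 0;
}
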
/linux/arch/x86/include/asm/
bootparam_utils.h
52 static struct boot_params scratch; in sanitize_boot_params() local
54 char *save_base = (char *)&scratch; in sanitize_boot_params()
68 BOOT_PARAM_PRESERVE(scratch), in sanitize_boot_params()
80 memset(&scratch, 0, sizeof(scratch)); in sanitize_boot_params()
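
sanitize_boot_params() uses its static scratch struct for a preserve-then-zero trick: the fields worth keeping are copied into scratch, the whole source struct is wiped, and the saved fields are copied back. A sketch of the same trick; the struct, its fields, and the PRESERVE/RESTORE macros are invented for illustration (the real code builds a table of offset/length entries).

#include <stddef.h>
#include <stdio.h>
#include <string.h>

struct boot_params {
	unsigned int keep_a;
	unsigned int junk;      /* field a bootloader may leave dirty */
	unsigned int keep_b;
};

static struct boot_params scratch;      /* static: too big for stack */

#define PRESERVE(field)							\
	memcpy((char *)&scratch + offsetof(struct boot_params, field),	\
	       (char *)bp + offsetof(struct boot_params, field),	\
	       sizeof(bp->field))

#define RESTORE(field)							\
	memcpy((char *)bp + offsetof(struct boot_params, field),	\
	       (char *)&scratch + offsetof(struct boot_params, field),	\
	       sizeof(bp->field))

static void sanitize(struct boot_params *bp)
{
	PRESERVE(keep_a);
	PRESERVE(keep_b);
	memset(bp, 0, sizeof(*bp));     /* everything else is zeroed */
	RESTORE(keep_a);
	RESTORE(keep_b);
}

int main(void)
{
	struct boot_params bp = { .keep_a = 1, .junk = 0xbad, .keep_b = 2 };

	sanitize(&bp);
	printf("a=%u junk=%u b=%u\n", bp.keep_a, bp.junk, bp.keep_b);
	return 0;
}
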
/linux/tools/testing/selftests/powerpc/copyloops/asm/
ppc_asm.h
50 #define DCBT_SETUP_STREAMS(from, from_parms, to, to_parms, scratch) \ argument
51 lis scratch,0x8000; /* GO=1 */ \
52 clrldi scratch,scratch,32; \
60 dcbt 0,scratch,0b01010; /* all streams GO */
/linux/drivers/media/platform/sunxi/sun4i-csi/
sun4i_dma.c
96 dma_addr_t addr = csi->scratch.paddr; in sun4i_csi_setup_scratch_buffer()
100 "No more available buffer, using the scratch buffer\n"); in sun4i_csi_setup_scratch_buffer()
245 * We need a scratch buffer in case where we'll not have any in sun4i_csi_start_streaming()
255 csi->scratch.size = 0; in sun4i_csi_start_streaming()
257 csi->scratch.size += csi->fmt.plane_fmt[i].sizeimage; in sun4i_csi_start_streaming()
259 csi->scratch.vaddr = dma_alloc_coherent(csi->dev, in sun4i_csi_start_streaming()
260 csi->scratch.size, in sun4i_csi_start_streaming()
261 &csi->scratch.paddr, in sun4i_csi_start_streaming()
263 if (!csi->scratch.vaddr) { in sun4i_csi_start_streaming()
264 dev_err(csi->dev, "Failed to allocate scratch buffe in sun4i_csi_start_streaming()
[all...]
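
The sun4i-csi hits show a scratch-DMA fallback: at start-of-streaming a frame-sized coherent buffer is allocated, and whenever userspace has no buffer queued the capture engine is pointed at that scratch buffer so it always has a legal DMA target. A sketch of the fallback; malloc stands in for dma_alloc_coherent(), addresses are plain pointers, and the helper names are invented.

#include <stdio.h>
#include <stdlib.h>

struct csi {
	size_t frame_size;
	void *scratch;          /* always-valid DMA target */
};

static void *dequeue_user_buffer(void) { return NULL; }  /* none queued */

static void *next_dma_target(struct csi *csi)
{
	void *buf = dequeue_user_buffer();

	if (!buf) {
		fprintf(stderr, "no buffer available, using scratch\n");
		buf = csi->scratch;     /* frame is captured then dropped */
	}
	return buf;
}

int main(void)
{
	struct csi csi = { .frame_size = 640 * 480 * 2 };

	csi.scratch = malloc(csi.frame_size);   /* dma_alloc_coherent() */
	if (!csi.scratch)
		return 1;

	printf("dma target %p\n", next_dma_target(&csi));
	free(csi.scratch);
	return 0;
}
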
/linux/drivers/usb/host/
ehci-dbg.c
373 u32 scratch; in qh_lines() local
396 scratch = hc32_to_cpup(ehci, &hw->hw_info1); in qh_lines()
401 qh, scratch & 0x007f, in qh_lines()
402 speed_char (scratch), in qh_lines()
403 (scratch >> 8) & 0x000f, in qh_lines()
404 scratch, hc32_to_cpup(ehci, &hw->hw_info2), in qh_lines()
420 scratch = hc32_to_cpup(ehci, &td->hw_token); in qh_lines()
426 } else if (QTD_LENGTH(scratch)) { in qh_lines()
432 switch ((scratch >> 8) & 0x03) { in qh_lines()
450 (scratch >> 1 in qh_lines()
596 u32 scratch = hc32_to_cpup(ehci, &hw->hw_info1); output_buf_tds_dir() local
761 char *next, scratch[80]; fill_registers_buffer() local
[all...]
/linux/drivers/gpu/drm/i915/gt/
gen6_ppgtt.c
23 dma_addr_t addr = pt ? px_dma(pt) : px_dma(ppgtt->base.vm.scratch[1]); in gen6_write_pde()
79 const gen6_pte_t scratch_pte = vm->scratch[0]->encode; in gen6_ppgtt_clear_range()
100 * entries back to scratch. in gen6_ppgtt_clear_range()
193 fill32_px(pt, vm->scratch[0]->encode); in gen6_alloc_va_range()
228 vm->scratch[0]->encode = in gen6_ppgtt_init_scratch()
229 vm->pte_encode(px_dma(vm->scratch[0]), in gen6_ppgtt_init_scratch()
234 vm->scratch[1] = vm->alloc_pt_dma(vm, I915_GTT_PAGE_SIZE_4K); in gen6_ppgtt_init_scratch()
235 if (IS_ERR(vm->scratch[1])) { in gen6_ppgtt_init_scratch()
236 ret = PTR_ERR(vm->scratch[1]); in gen6_ppgtt_init_scratch()
240 ret = map_pt_dma(vm, vm->scratch[ in gen6_ppgtt_init_scratch()
[all...]
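
The gen6_ppgtt.c hits show the scratch-page idea: instead of leaving cleared GPU PTEs dangling, every unused entry is pointed at one harmless scratch page (vm->scratch[0]), and a page table pre-filled with that PTE (vm->scratch[1]) stands in for unallocated tables. A sketch with simplified stand-ins for the i915 structures:

#include <stdint.h>
#include <stdio.h>

#define PTES_PER_TABLE 1024

typedef uint32_t gen6_pte_t;

struct vm {
	gen6_pte_t scratch_pte;                 /* encodes the scratch page */
	gen6_pte_t page_table[PTES_PER_TABLE];
};

static void clear_range(struct vm *vm, unsigned int first, unsigned int count)
{
	/* unmap by redirecting to scratch, never by leaving garbage */
	while (count--)
		vm->page_table[first++] = vm->scratch_pte;
}

int main(void)
{
	struct vm vm = { .scratch_pte = 0x1000 | 1 };   /* addr | valid */
	unsigned int i;

	for (i = 0; i < PTES_PER_TABLE; i++)    /* fill32_px() equivalent */
		vm.page_table[i] = vm.scratch_pte;

	clear_range(&vm, 10, 20);
	printf("pte[15] = 0x%x\n", vm.page_table[15]);
	return 0;
}
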
selftest_workarounds.c
503 struct i915_vma *scratch; in check_dirty_whitelist() local
509 scratch = __vm_create_scratch_for_read_pinned(ce->vm, sz); in check_dirty_whitelist()
510 if (IS_ERR(scratch)) in check_dirty_whitelist()
511 return PTR_ERR(scratch); in check_dirty_whitelist()
522 u64 addr = i915_vma_offset(scratch); in check_dirty_whitelist()
540 err = i915_gem_object_lock(scratch->obj, &ww); in check_dirty_whitelist()
554 results = i915_gem_object_pin_map(scratch->obj, I915_MAP_WB); in check_dirty_whitelist()
604 GEM_BUG_ON(idx * sizeof(u32) > scratch->size); in check_dirty_whitelist()
635 err = i915_vma_move_to_active(scratch, rq, in check_dirty_whitelist()
730 i915_gem_object_unpin_map(scratch in check_dirty_whitelist()
1061 struct i915_vma *scratch[2]; live_isolated_whitelist() member
[all...]
/linux/arch/alpha/kernel/
err_marvel.c
153 int scratch, i; in marvel_print_po7_uncrr_sym() local
198 scratch = EXTRACT(uncrr_sym, IO7__PO7_UNCRR_SYM__CLK); in marvel_print_po7_uncrr_sym()
199 for (i = 0; i < 4; i++, scratch >>= 2) { in marvel_print_po7_uncrr_sym()
200 if (scratch & 0x3) in marvel_print_po7_uncrr_sym()
203 clk_names[i], clk_decode[scratch & 0x3]); in marvel_print_po7_uncrr_sym()
232 if ((scratch = EXTRACT(uncrr_sym, IO7__PO7_UNCRR_SYM__VICTIM_SP))) { in marvel_print_po7_uncrr_sym()
233 int lost = scratch & (1UL << 4); in marvel_print_po7_uncrr_sym()
234 scratch &= ~lost; in marvel_print_po7_uncrr_sym()
235 for (i = 0; i < 8; i++, scratch >>= 1) { in marvel_print_po7_uncrr_sym()
236 if (!(scratch in marvel_print_po7_uncrr_sym()
[all...]
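
The err_marvel.c hits show a bitfield decode loop: EXTRACT() pulls a packed symptom field into a scratch word, and the loop peels it two bits at a time, printing a name for each non-zero code. A standalone sketch of that loop; the field layout, names, and register value are invented for illustration.

#include <stdio.h>

#define EXTRACT(val, shift, width) \
	(((val) >> (shift)) & ((1ull << (width)) - 1))

static const char *const clk_names[4]  = { "clk0", "clk1", "clk2", "clk3" };
static const char *const clk_decode[4] = { "ok", "early", "late", "stuck" };

int main(void)
{
	unsigned long long symptom = 0x9cull << 8;  /* fake error register */
	unsigned long long scratch;
	int i;

	scratch = EXTRACT(symptom, 8, 8);       /* 4 clocks x 2 bits each */
	for (i = 0; i < 4; i++, scratch >>= 2) {
		if (scratch & 0x3)
			printf("%s: %s\n", clk_names[i],
			       clk_decode[scratch & 0x3]);
	}
	return 0;
}
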
