/linux/drivers/md/
  dm-switch.c
      63: struct switch_ctx *sctx;  [alloc_switch_ctx(), local]
      65: sctx = kzalloc(struct_size(sctx, path_list, nr_paths), GFP_KERNEL);  [alloc_switch_ctx()]
      66: if (!sctx)  [alloc_switch_ctx()]
      69: sctx->ti = ti;  [alloc_switch_ctx()]
      70: sctx->region_size = region_size;  [alloc_switch_ctx()]
      72: ti->private = sctx;  [alloc_switch_ctx()]
      74: return sctx;  [alloc_switch_ctx()]
      79: struct switch_ctx *sctx = ti->private;  [alloc_region_table(), local]
      83: if (!(sctx ...  [alloc_region_table()]
     127: switch_get_position(struct switch_ctx *sctx, unsigned long region_nr, unsigned long *region_index, unsigned int *bit)  [argument]
     141: switch_region_table_read(struct switch_ctx *sctx, unsigned long region_nr)  [argument]
     155: switch_get_path_nr(struct switch_ctx *sctx, sector_t offset)  [argument]
     175: switch_region_table_write(struct switch_ctx *sctx, unsigned long region_nr, unsigned int value)  [argument]
     193: initialise_region_table(struct switch_ctx *sctx)  [argument]
     207: struct switch_ctx *sctx = ti->private;  [parse_path(), local]
     236: struct switch_ctx *sctx = ti->private;  [switch_dtr(), local]
     261: struct switch_ctx *sctx;  [switch_ctr(), local]
     322: struct switch_ctx *sctx = ti->private;  [switch_map(), local]
     374: process_set_region_mappings(struct switch_ctx *sctx, unsigned int argc, char **argv)  [argument]
     475: struct switch_ctx *sctx = ti->private;  [switch_message(), local]
     493: struct switch_ctx *sctx = ti->private;  [switch_status(), local]
     524: struct switch_ctx *sctx = ti->private;  [switch_prepare_ioctl(), local]
     543: struct switch_ctx *sctx = ti->private;  [switch_iterate_devices(), local]
     [all...]
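The dm-switch hits above outline a target that packs one small path number per region into an array of unsigned longs, with switch_get_position() turning a region number into a word index plus bit offset. A minimal sketch of that arithmetic under the same layout idea; the names are illustrative, not the actual dm-switch code:

    /*
     * Sketch of the bit-packing suggested by switch_get_position() and
     * switch_region_table_read() above.  Assumes entry_bits is small
     * (well below the width of unsigned long); unused high bits in
     * each word are simply left idle, as integer division implies.
     */
    static void region_position(unsigned int entry_bits, unsigned long region_nr,
                                unsigned long *word, unsigned int *bit)
    {
            unsigned long entries_per_word =
                    (sizeof(unsigned long) * 8) / entry_bits;

            *word = region_nr / entries_per_word;
            *bit = (unsigned int)(region_nr % entries_per_word) * entry_bits;
    }

    static unsigned int region_read(const unsigned long *table,
                                    unsigned int entry_bits,
                                    unsigned long region_nr)
    {
            unsigned long word;
            unsigned int bit;

            region_position(entry_bits, region_nr, &word, &bit);
            return (table[word] >> bit) & ((1UL << entry_bits) - 1);
    }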
/linux/arch/powerpc/crypto/
  md5-glue.c
      20: struct md5_state *sctx = shash_desc_ctx(desc);  [ppc_md5_init(), local]
      22: sctx->hash[0] = MD5_H0;  [ppc_md5_init()]
      23: sctx->hash[1] = MD5_H1;  [ppc_md5_init()]
      24: sctx->hash[2] = MD5_H2;  [ppc_md5_init()]
      25: sctx->hash[3] = MD5_H3;  [ppc_md5_init()]
      26: sctx->byte_count = 0;  [ppc_md5_init()]
      34: struct md5_state *sctx = shash_desc_ctx(desc);  [ppc_md5_update(), local]
      36: sctx->byte_count += round_down(len, MD5_HMAC_BLOCK_SIZE);  [ppc_md5_update()]
      37: ppc_md5_transform(sctx->hash, data, len >> 6);  [ppc_md5_update()]
      44: struct md5_state *sctx ...  [ppc_md5_finup(), local]
      [all...]
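The ppc_md5_update() lines show the arch-glue convention for hash updates: consume only whole 64-byte blocks (round_down(), len >> 6) and leave the tail for the generic partial-block handling. A hedged sketch of that shape; the transform pointer stands in for ppc_md5_transform():

    #include <linux/types.h>

    #define MD5_BLOCK_SIZE 64

    /* Process only full blocks; return the unprocessed remainder. */
    static unsigned int md5_update_full_blocks(u32 hash[4], u64 *byte_count,
                                               const u8 *data, unsigned int len,
                                               void (*transform)(u32 *,
                                                                 const u8 *, int))
    {
            unsigned int blocks = len / MD5_BLOCK_SIZE;

            *byte_count += (u64)blocks * MD5_BLOCK_SIZE;
            transform(hash, data, blocks);

            return len % MD5_BLOCK_SIZE;    /* tail left for the caller */
    }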
/linux/fs/btrfs/
  scrub.c
      52: * How many groups we have for each sctx.  [comment]
     139: struct scrub_ctx *sctx;  [member]
     351: stripe->sctx = NULL;  [release_scrub_stripe()]
     395: static void scrub_put_ctx(struct scrub_ctx *sctx);  [forward declaration]
     429: static noinline_for_stack void scrub_free_ctx(struct scrub_ctx *sctx)  [argument]
     433: if (!sctx)  [scrub_free_ctx()]
     437: release_scrub_stripe(&sctx->stripes[i]);  [scrub_free_ctx()]
     439: kvfree(sctx);  [scrub_free_ctx()]
     442: static void scrub_put_ctx(struct scrub_ctx *sctx)  [argument]
     444: if (refcount_dec_and_test(&sctx ...  [scrub_put_ctx()]
     451: struct scrub_ctx *sctx;  [scrub_setup_ctx(), local]
     666: fill_writer_pointer_gap(struct scrub_ctx *sctx, u64 physical)  [argument]
     972: scrub_stripe_report_errors(struct scrub_ctx *sctx, struct scrub_stripe *stripe, const struct scrub_error_records *errors)  [argument]
    1135: struct scrub_ctx *sctx = stripe->sctx;  [scrub_stripe_read_repair_worker(), local]
    1280: scrub_submit_write_bio(struct scrub_ctx *sctx, struct scrub_stripe *stripe, struct btrfs_bio *bbio, bool dev_replace)  [argument]
    1322: scrub_write_sectors(struct scrub_ctx *sctx, struct scrub_stripe *stripe, unsigned long write_bitmap, bool dev_replace)  [argument]
    1355: scrub_throttle_dev_io(struct scrub_ctx *sctx, struct btrfs_device *device, unsigned int bio_size)  [argument]
    1581: sync_write_pointer_for_zoned(struct scrub_ctx *sctx, u64 logical, u64 physical, u64 physical_end)  [argument]
    1857: scrub_submit_initial_read(struct scrub_ctx *sctx, struct scrub_stripe *stripe)  [argument]
    1918: submit_initial_group_read(struct scrub_ctx *sctx, unsigned int first_slot, unsigned int nr_stripes)  [argument]
    1940: flush_scrub_stripes(struct scrub_ctx *sctx)  [argument]
    2014: queue_scrub_stripe(struct scrub_ctx *sctx, struct btrfs_block_group *bg, struct btrfs_device *dev, int mirror_num, u64 logical, u32 length, u64 physical, u64 *found_logical_ret)  [argument]
    2055: scrub_raid56_parity_stripe(struct scrub_ctx *sctx, struct btrfs_device *scrub_dev, struct btrfs_block_group *bg, struct btrfs_chunk_map *map, u64 full_stripe_start)  [argument]
    2228: scrub_simple_mirror(struct scrub_ctx *sctx, struct btrfs_block_group *bg, u64 logical_start, u64 logical_length, struct btrfs_device *device, u64 physical, int mirror_num)  [argument]
    2328: scrub_simple_stripe(struct scrub_ctx *sctx, struct btrfs_block_group *bg, struct btrfs_chunk_map *map, struct btrfs_device *device, int stripe_index)  [argument]
    2361: scrub_stripe(struct scrub_ctx *sctx, struct btrfs_block_group *bg, struct btrfs_chunk_map *map, struct btrfs_device *scrub_dev, int stripe_index)  [argument]
    2526: scrub_chunk(struct scrub_ctx *sctx, struct btrfs_block_group *bg, struct btrfs_device *scrub_dev, u64 dev_offset, u64 dev_extent_len)  [argument]
    2585: scrub_enumerate_chunks(struct scrub_ctx *sctx, struct btrfs_device *scrub_dev, u64 start, u64 end)  [argument]
    2880: scrub_one_super(struct scrub_ctx *sctx, struct btrfs_device *dev, struct page *page, u64 physical, u64 generation)  [argument]
    2909: scrub_supers(struct scrub_ctx *sctx, struct btrfs_device *scrub_dev)  [argument]
    3019: struct scrub_ctx *sctx;  [btrfs_scrub_dev(), local]
    3231: struct scrub_ctx *sctx;  [btrfs_scrub_cancel_dev(), local]
    3256: struct scrub_ctx *sctx = NULL;  [btrfs_scrub_progress(), local]
    [all...]
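The scrub_put_ctx()/scrub_free_ctx() hits show the usual refcounted-context teardown: whoever drops the last reference frees the context. A simplified sketch with the stripe bookkeeping elided; the struct layout here is an assumption, not the btrfs definition:

    #include <linux/refcount.h>
    #include <linux/slab.h>

    struct scrub_ctx_sketch {
            refcount_t refs;
            /* stripes, progress counters, ... elided */
    };

    /* Last reference dropped => free the context (kvzalloc()'d at setup). */
    static void sketch_put_ctx(struct scrub_ctx_sketch *sctx)
    {
            if (sctx && refcount_dec_and_test(&sctx->refs))
                    kvfree(sctx);
    }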
/linux/drivers/crypto/nx/
  nx-sha512.c
      47: struct sha512_state_be *sctx = shash_desc_ctx(desc);  [nx_sha512_init(), local]
      49: sctx->state[0] = __cpu_to_be64(SHA512_H0);  [nx_sha512_init()]
      50: sctx->state[1] = __cpu_to_be64(SHA512_H1);  [nx_sha512_init()]
      51: sctx->state[2] = __cpu_to_be64(SHA512_H2);  [nx_sha512_init()]
      52: sctx->state[3] = __cpu_to_be64(SHA512_H3);  [nx_sha512_init()]
      53: sctx->state[4] = __cpu_to_be64(SHA512_H4);  [nx_sha512_init()]
      54: sctx->state[5] = __cpu_to_be64(SHA512_H5);  [nx_sha512_init()]
      55: sctx->state[6] = __cpu_to_be64(SHA512_H6);  [nx_sha512_init()]
      56: sctx->state[7] = __cpu_to_be64(SHA512_H7);  [nx_sha512_init()]
      57: sctx ...  [nx_sha512_init()]
      67: struct sha512_state_be *sctx = shash_desc_ctx(desc);  [nx_sha512_update(), local]
     147: struct sha512_state_be *sctx = shash_desc_ctx(desc);  [nx_sha512_finup(), local]
     213: struct sha512_state_be *sctx = shash_desc_ctx(desc);  [nx_sha512_export(), local]
     230: struct sha512_state_be *sctx = shash_desc_ctx(desc);  [nx_sha512_import(), local]
     [all...]
  nx-sha256.c
      47: struct sha256_state_be *sctx = shash_desc_ctx(desc);  [nx_sha256_init(), local]
      49: sctx->state[0] = __cpu_to_be32(SHA256_H0);  [nx_sha256_init()]
      50: sctx->state[1] = __cpu_to_be32(SHA256_H1);  [nx_sha256_init()]
      51: sctx->state[2] = __cpu_to_be32(SHA256_H2);  [nx_sha256_init()]
      52: sctx->state[3] = __cpu_to_be32(SHA256_H3);  [nx_sha256_init()]
      53: sctx->state[4] = __cpu_to_be32(SHA256_H4);  [nx_sha256_init()]
      54: sctx->state[5] = __cpu_to_be32(SHA256_H5);  [nx_sha256_init()]
      55: sctx->state[6] = __cpu_to_be32(SHA256_H6);  [nx_sha256_init()]
      56: sctx->state[7] = __cpu_to_be32(SHA256_H7);  [nx_sha256_init()]
      57: sctx ...  [nx_sha256_init()]
      66: struct sha256_state_be *sctx = shash_desc_ctx(desc);  [nx_sha256_update(), local]
     145: struct sha256_state_be *sctx = shash_desc_ctx(desc);  [nx_sha256_finup(), local]
     208: struct sha256_state_be *sctx = shash_desc_ctx(desc);  [nx_sha256_export(), local]
     225: struct sha256_state_be *sctx = shash_desc_ctx(desc);  [nx_sha256_import(), local]
     [all...]
  nx-aes-xcbc.c
     157: struct xcbc_state *sctx = shash_desc_ctx(desc);  [nx_xcbc_init(), local]
     159: memset(sctx, 0, sizeof *sctx);  [nx_xcbc_init()]
     169: struct xcbc_state *sctx = shash_desc_ctx(desc);  [nx_xcbc_update(), local]
     181: memcpy(csbcpb->cpb.aes_xcbc.out_cv_mac, sctx->state, AES_BLOCK_SIZE);  [nx_xcbc_update()]
     194: out_sg = nx_build_sg_list(nx_ctx->out_sg, (u8 *)sctx->state,  [nx_xcbc_update()]
     237: memcpy(sctx->state, csbcpb->cpb.aes_xcbc.out_cv_mac, AES_BLOCK_SIZE);  [nx_xcbc_update()]
     248: struct xcbc_state *sctx = shash_desc_ctx(desc);  [nx_xcbc_finup(), local]
     259: memcpy(csbcpb->cpb.aes_xcbc.cv, sctx->state, AES_BLOCK_SIZE);  [nx_xcbc_finup()]
/linux/include/crypto/
  sm3.h
      49: static inline void sm3_init(struct sm3_state *sctx)  [argument]
      51: sctx->state[0] = SM3_IVA;  [sm3_init()]
      52: sctx->state[1] = SM3_IVB;  [sm3_init()]
      53: sctx->state[2] = SM3_IVC;  [sm3_init()]
      54: sctx->state[3] = SM3_IVD;  [sm3_init()]
      55: sctx->state[4] = SM3_IVE;  [sm3_init()]
      56: sctx->state[5] = SM3_IVF;  [sm3_init()]
      57: sctx->state[6] = SM3_IVG;  [sm3_init()]
      58: sctx->state[7] = SM3_IVH;  [sm3_init()]
      59: sctx ...  [sm3_init()]
      [all...]
  sm3_base.h
      33: struct sm3_state *sctx = shash_desc_ctx(desc);  [sm3_base_do_update_blocks(), local]
      35: sctx->count += len - remain;  [sm3_base_do_update_blocks()]
      36: block_fn(sctx, data, len / SM3_BLOCK_SIZE);  [sm3_base_do_update_blocks()]
      45: struct sm3_state *sctx = shash_desc_ctx(desc);  [sm3_base_do_finup(), local]
      63: sctx->count += len;  [sm3_base_do_finup()]
      64: block.b64[bit_offset] = cpu_to_be64(sctx->count << 3);  [sm3_base_do_finup()]
      65: block_fn(sctx, block.u8, (bit_offset + 1) * 8 / SM3_BLOCK_SIZE);  [sm3_base_do_finup()]
      73: struct sm3_state *sctx = shash_desc_ctx(desc);  [sm3_base_finish(), local]
      78: put_unaligned_be32(sctx->state[i], digest++);  [sm3_base_finish()]
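sm3_base_do_finup() above is the classic Merkle-Damgård finalization: the running byte count is appended, as a big-endian count of bits, in the last 8 bytes of the final block. A sketch of the single-block case (used <= 55, so the 0x80 terminator and the length both fit in one block); this is the generic construction, not the sm3_base code itself:

    #include <linux/string.h>
    #include <linux/types.h>
    #include <asm/byteorder.h>

    static void md_pad_final_block(u8 block[64], unsigned int used,
                                   u64 total_bytes)
    {
            memset(block + used, 0, 64 - used);
            block[used] = 0x80;             /* the mandatory 1-bit terminator */
            /* message length in *bits*, big-endian, in the last 8 bytes */
            *(__be64 *)(block + 56) = cpu_to_be64(total_bytes << 3);
    }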
/linux/include/crypto/
  sha2.h
      74: static inline void sha224_block_init(struct crypto_sha256_state *sctx)  [argument]
      76: sctx->state[0] = SHA224_H0;  [sha224_block_init()]
      77: sctx->state[1] = SHA224_H1;  [sha224_block_init()]
      78: sctx->state[2] = SHA224_H2;  [sha224_block_init()]
      79: sctx->state[3] = SHA224_H3;  [sha224_block_init()]
      80: sctx->state[4] = SHA224_H4;  [sha224_block_init()]
      81: sctx->state[5] = SHA224_H5;  [sha224_block_init()]
      82: sctx->state[6] = SHA224_H6;  [sha224_block_init()]
      83: sctx->state[7] = SHA224_H7;  [sha224_block_init()]
      84: sctx ...  [sha224_block_init()]
      87: sha256_block_init(struct crypto_sha256_state *sctx)  [argument]
      [all...]
/linux/arch/s390/crypto/
  aes_s390.c
      79: struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);  [setkey_fallback_cip(), local]
      81: sctx->fallback.cip->base.crt_flags &= ~CRYPTO_TFM_REQ_MASK;  [setkey_fallback_cip()]
      82: sctx->fallback.cip->base.crt_flags |= (tfm->crt_flags &  [setkey_fallback_cip()]
      85: return crypto_cipher_setkey(sctx->fallback.cip, in_key, key_len);  [setkey_fallback_cip()]
      91: struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);  [aes_set_key(), local]
     100: sctx->fc = (fc && cpacf_test_func(&km_functions, fc)) ? fc : 0;  [aes_set_key()]
     101: if (!sctx->fc)  [aes_set_key()]
     104: sctx->key_len = key_len;  [aes_set_key()]
     105: memcpy(sctx->key, in_key, key_len);  [aes_set_key()]
     111: struct s390_aes_ctx *sctx ...  [crypto_aes_encrypt(), local]
     122: struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);  [crypto_aes_decrypt(), local]
     135: struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);  [fallback_init_cip(), local]
     151: struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);  [fallback_exit_cip(), local]
     182: struct s390_aes_ctx *sctx = crypto_skcipher_ctx(tfm);  [setkey_fallback_skcipher(), local]
     192: fallback_skcipher_crypt(struct s390_aes_ctx *sctx, struct skcipher_request *req, unsigned long modifier)  [argument]
     208: struct s390_aes_ctx *sctx = crypto_skcipher_ctx(tfm);  [ecb_aes_set_key(), local]
     229: struct s390_aes_ctx *sctx = crypto_skcipher_ctx(tfm);  [ecb_aes_crypt(), local]
     261: struct s390_aes_ctx *sctx = crypto_skcipher_ctx(tfm);  [fallback_init_skcipher(), local]
     279: struct s390_aes_ctx *sctx = crypto_skcipher_ctx(tfm);  [fallback_exit_skcipher(), local]
     304: struct s390_aes_ctx *sctx = crypto_skcipher_ctx(tfm);  [cbc_aes_set_key(), local]
     325: struct s390_aes_ctx *sctx = crypto_skcipher_ctx(tfm);  [cbc_aes_crypt(), local]
     638: struct s390_aes_ctx *sctx = crypto_skcipher_ctx(tfm);  [ctr_aes_set_key(), local]
     674: struct s390_aes_ctx *sctx = crypto_skcipher_ctx(tfm);  [ctr_aes_crypt(), local]
     [all...]
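aes_set_key() above probes CPACF for a function code matching the key length and, when the machine lacks it, routes the transform through a software fallback cipher. A sketch of the shape only; km_fc_for_keylen(), hw_supports() and sketch_fallback_setkey() are hypothetical stand-ins for the CPACF and fallback helpers, declared but not implemented here:

    #include <linux/string.h>
    #include <linux/types.h>

    struct aes_hw_ctx_sketch {
            unsigned long fc;       /* 0 => hardware unavailable, use fallback */
            u8 key[32];
            unsigned int key_len;
    };

    /* hypothetical probes standing in for the CPACF/fallback helpers: */
    unsigned long km_fc_for_keylen(unsigned int key_len);
    int hw_supports(unsigned long fc);
    int sketch_fallback_setkey(struct aes_hw_ctx_sketch *ctx, const u8 *key,
                               unsigned int key_len);

    static int sketch_aes_set_key(struct aes_hw_ctx_sketch *ctx,
                                  const u8 *in_key, unsigned int key_len)
    {
            unsigned long fc = km_fc_for_keylen(key_len);

            ctx->fc = (fc && hw_supports(fc)) ? fc : 0;
            if (!ctx->fc)
                    return sketch_fallback_setkey(ctx, in_key, key_len);

            ctx->key_len = key_len;
            memcpy(ctx->key, in_key, key_len);
            return 0;
    }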
/linux/arch/sparc/crypto/
  md5_glue.c
      52: struct sparc_md5_state *sctx = shash_desc_ctx(desc);  [md5_sparc64_update(), local]
      54: sctx->byte_count += round_down(len, MD5_HMAC_BLOCK_SIZE);  [md5_sparc64_update()]
      55: md5_sparc64_transform(sctx->hash, data, len / MD5_HMAC_BLOCK_SIZE);  [md5_sparc64_update()]
      63: struct sparc_md5_state *sctx = shash_desc_ctx(desc);  [md5_sparc64_finup(), local]
      73: sctx->byte_count += offset;  [md5_sparc64_finup()]
      75: *pbits = cpu_to_le64(sctx->byte_count << 3);  [md5_sparc64_finup()]
      76: md5_sparc64_transform(sctx->hash, src, (pbits - block + 1) / 8);  [md5_sparc64_finup()]
      81: dst[i] = sctx->hash[i];  [md5_sparc64_finup()]
      88: struct sparc_md5_state *sctx = shash_desc_ctx(desc);  [md5_sparc64_export(), local]
      97: put_unaligned(le32_to_cpu(sctx ...  [md5_sparc64_export()]
     104: struct sparc_md5_state *sctx = shash_desc_ctx(desc);  [md5_sparc64_import(), local]
     [all...]
/linux/drivers/char/tpm/
  tpm2-sessions.c
     393: static void tpm2_hmac_init(struct sha256_ctx *sctx, u8 *key, u32 key_len)  [argument]
     398: sha256_init(sctx);  [tpm2_hmac_init()]
     406: sha256_update(sctx, pad, sizeof(pad));  [tpm2_hmac_init()]
     409: static void tpm2_hmac_final(struct sha256_ctx *sctx, u8 *key, u32 key_len,  [argument]
     424: sha256_final(sctx, out);  [tpm2_hmac_final()]
     426: sha256_init(sctx);  [tpm2_hmac_final()]
     427: sha256_update(sctx, pad, sizeof(pad));  [tpm2_hmac_final()]
     428: sha256_update(sctx, out, SHA256_DIGEST_SIZE);  [tpm2_hmac_final()]
     429: sha256_final(sctx, out);  [tpm2_hmac_final()]
     443: struct sha256_ctx sctx;  [tpm2_KDFa(), local]
     470: struct sha256_ctx sctx;  [tpm2_KDFe(), local]
     595: struct sha256_ctx sctx;  [tpm_buf_fill_hmac_session(), local]
     753: struct sha256_ctx sctx;  [tpm_buf_check_hmac_response(), local]
     [all...]
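tpm2_hmac_init()/tpm2_hmac_final() above split HMAC-SHA256 into two halves around the message: absorb key XOR ipad, stream the message, then rehash with key XOR opad. A sketch using the same lib/crypto SHA-256 calls the excerpt shows; it assumes key_len <= SHA256_BLOCK_SIZE (standard HMAC first hashes longer keys down, which this sketch omits):

    #include <crypto/sha2.h>
    #include <linux/string.h>

    static void hmac_sha256_start(struct sha256_ctx *sctx,
                                  const u8 *key, unsigned int key_len)
    {
            u8 pad[SHA256_BLOCK_SIZE];
            unsigned int i;

            memset(pad, 0, sizeof(pad));
            memcpy(pad, key, key_len);
            for (i = 0; i < sizeof(pad); i++)
                    pad[i] ^= 0x36;                 /* ipad */

            sha256_init(sctx);
            sha256_update(sctx, pad, sizeof(pad));
            /* caller now streams the message via sha256_update(sctx, ...) */
    }

    static void hmac_sha256_finish(struct sha256_ctx *sctx,
                                   const u8 *key, unsigned int key_len,
                                   u8 out[SHA256_DIGEST_SIZE])
    {
            u8 pad[SHA256_BLOCK_SIZE];
            unsigned int i;

            memset(pad, 0, sizeof(pad));
            memcpy(pad, key, key_len);
            for (i = 0; i < sizeof(pad); i++)
                    pad[i] ^= 0x5c;                 /* opad */

            sha256_final(sctx, out);                /* inner hash */
            sha256_init(sctx);
            sha256_update(sctx, pad, sizeof(pad));
            sha256_update(sctx, out, SHA256_DIGEST_SIZE);
            sha256_final(sctx, out);                /* outer hash */
    }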
/linux/security/apparmor/
  crypto.c
      39: struct sha256_ctx sctx;  [aa_calc_profile_hash(), local]
      49: sha256_init(&sctx);  [aa_calc_profile_hash()]
      50: sha256_update(&sctx, (u8 *)&le32_version, 4);  [aa_calc_profile_hash()]
      51: sha256_update(&sctx, (u8 *)start, len);  [aa_calc_profile_hash()]
      52: sha256_final(&sctx, profile->hash);  [aa_calc_profile_hash()]
/linux/lib/crypto/
  sm3.c
      75: static void sm3_transform(struct sm3_state *sctx, u8 const *data, u32 W[16])  [argument]
      79: a = sctx->state[0];  [sm3_transform()]
      80: b = sctx->state[1];  [sm3_transform()]
      81: c = sctx->state[2];  [sm3_transform()]
      82: d = sctx->state[3];  [sm3_transform()]
      83: e = sctx->state[4];  [sm3_transform()]
      84: f = sctx->state[5];  [sm3_transform()]
      85: g = sctx->state[6];  [sm3_transform()]
      86: h = sctx->state[7];  [sm3_transform()]
     156: sctx ...  [sm3_transform()]
     172: sm3_block_generic(struct sm3_state *sctx, u8 const *data, int blocks)  [argument]
     [all...]
/linux/arch/s390/purgatory/
  purgatory.c
      19: struct sha256_ctx sctx;  [verify_sha256_digest(), local]
      21: sha256_init(&sctx);  [verify_sha256_digest()]
      25: sha256_update(&sctx, (uint8_t *)(ptr->start), ptr->len);  [verify_sha256_digest()]
      27: sha256_final(&sctx, digest);  [verify_sha256_digest()]
/linux/arch/riscv/purgatory/
  purgatory.c
      23: struct sha256_ctx sctx;  [verify_sha256_digest(), local]
      26: sha256_init(&sctx);  [verify_sha256_digest()]
      29: sha256_update(&sctx, (uint8_t *)(ptr->start), ptr->len);  [verify_sha256_digest()]
      30: sha256_final(&sctx, digest);  [verify_sha256_digest()]
/linux/arch/x86/purgatory/
  purgatory.c
      28: struct sha256_ctx sctx;  [verify_sha256_digest(), local]
      30: sha256_init(&sctx);  [verify_sha256_digest()]
      34: sha256_update(&sctx, (uint8_t *)(ptr->start), ptr->len);  [verify_sha256_digest()]
      36: sha256_final(&sctx, digest);  [verify_sha256_digest()]
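All three purgatory entries (s390, riscv, x86) share one pattern: after kexec, hash every preserved region and compare against the digest recorded at load time. A minimal sketch, with the region list and the expected digest as assumed inputs:

    #include <crypto/sha2.h>
    #include <linux/string.h>

    struct region_sketch {
            unsigned long start;
            unsigned long len;
    };

    /* Returns 0 when the computed digest matches the stored one. */
    static int verify_regions(const struct region_sketch *r, int nr,
                              const u8 expected[SHA256_DIGEST_SIZE])
    {
            struct sha256_ctx sctx;
            u8 digest[SHA256_DIGEST_SIZE];
            int i;

            sha256_init(&sctx);
            for (i = 0; i < nr; i++)
                    sha256_update(&sctx, (const u8 *)r[i].start, r[i].len);
            sha256_final(&sctx, digest);

            return memcmp(digest, expected, sizeof(digest)) ? 1 : 0;
    }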
/linux/crypto/
  sha3_generic.c
     163: struct sha3_state *sctx = shash_desc_ctx(desc);  [crypto_sha3_init(), local]
     165: memset(sctx->st, 0, sizeof(sctx->st));  [crypto_sha3_init()]
     174: struct sha3_state *sctx = shash_desc_ctx(desc);  [crypto_sha3_update(), local]
     181: sctx->st[i] ^= get_unaligned_le64(data + 8 * i);  [crypto_sha3_update()]
     182: keccakf(sctx->st);  [crypto_sha3_update()]
     195: struct sha3_state *sctx = shash_desc_ctx(desc);  [crypto_sha3_finup(), local]
     207: sctx->st[i] ^= le64_to_cpu(block[i]);  [crypto_sha3_finup()]
     210: keccakf(sctx->st);  [crypto_sha3_finup()]
     213: put_unaligned_le64(sctx ...  [crypto_sha3_finup()]
     [all...]
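crypto_sha3_update() above is the sponge absorb step: XOR a rate-sized chunk of message into the low state words, then run the permutation. A sketch; keccakf() is the Keccak-f[1600] permutation, declared here as an assumed external, and rsiz is the rate in bytes (136 for SHA3-256):

    #include <linux/types.h>
    #include <linux/unaligned.h>    /* <asm/unaligned.h> on older trees */

    void keccakf(u64 st[25]);       /* assumed external permutation */

    static void sha3_absorb_block(u64 st[25], const u8 *data, unsigned int rsiz)
    {
            unsigned int i;

            /* XOR rsiz bytes of input into the low lanes of the state */
            for (i = 0; i < rsiz / 8; i++)
                    st[i] ^= get_unaligned_le64(data + 8 * i);
            keccakf(st);
    }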
/linux/crypto/
  hmac.c
     163: struct hmac_ctx *sctx = crypto_shash_ctx(src);  [hmac_clone_tfm(), local]
     167: hash = crypto_clone_shash(sctx->hash);  [hmac_clone_tfm()]
     429: struct ahash_hmac_ctx *sctx = crypto_ahash_ctx(src);  [hmac_clone_ahash_tfm(), local]
     433: hash = crypto_clone_ahash(sctx->hash);  [hmac_clone_ahash_tfm()]
/linux/drivers/misc/
  fastrpc.c
     299: struct fastrpc_session_ctx *sctx;  [member]
     329: dev_err(map->fl->sctx->dev, "Failed to assign memory phys 0x%llx size 0x%llx err %d\n",  [fastrpc_free_map()]
     368: struct fastrpc_session_ctx *sess = fl->sctx;  [fastrpc_map_lookup()]
     447: if (fl->sctx && fl->sctx->sid)  [fastrpc_buf_alloc()]
     448: buf->phys += ((u64)fl->sctx->sid << 32);  [fastrpc_buf_alloc()]
     758: struct fastrpc_session_ctx *sess = fl->sctx;  [fastrpc_map_create()]
     799: map->phys += ((u64)fl->sctx->sid << 32);  [fastrpc_map_create()]
     905: struct device *dev = ctx->fl->sctx->dev;  [fastrpc_create_maps()]
     937: struct device *dev = ctx->fl->sctx ...  [fastrpc_get_args()]
    1108: fastrpc_invoke_send(struct fastrpc_session_ctx *sctx, struct fastrpc_invoke_ctx *ctx, u32 kernel, uint32_t handle)  [argument]
    [all...]
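fastrpc_buf_alloc() and fastrpc_map_create() above tag buffer addresses with the session id in the upper 32 bits before handing them to the DSP. A sketch of the encode/decode arithmetic; the decode helper assumes the untagged address fits in the low 32 bits, which the driver's "+= sid << 32" form implies, and both names are illustrative:

    #include <linux/types.h>

    static inline u64 sid_tag_phys(u64 phys, u32 sid)
    {
            return phys + ((u64)sid << 32);
    }

    static inline u32 sid_from_phys(u64 tagged)
    {
            return (u32)(tagged >> 32);
    }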
/linux/drivers/crypto/
  padlock-sha.c
      34: struct sha1_state *sctx = padlock_shash_desc_ctx(desc);  [padlock_sha1_init(), local]
      36: *sctx = (struct sha1_state){  [padlock_sha1_init()]
      45: struct crypto_sha256_state *sctx = padlock_shash_desc_ctx(desc);  [padlock_sha256_init(), local]
      47: sha256_block_init(sctx);  [padlock_sha256_init()]
/linux/arch/powerpc/perf/
  callchain_32.c
      43: struct sigcontext32 sctx;  [member]
      85: if (read_user_stack_32((unsigned int __user *) &sf->sctx.regs, &regs))  [sane_signal_32_frame()]
/linux/drivers/staging/rtl8723bs/core/
  rtw_cmd.c
     467: if (pcmd->sctx) {  [rtw_cmd_thread()]
     469: FUNC_ADPT_FMT " pcmd->sctx\n",  [rtw_cmd_thread()]
     473: rtw_sctx_done(&pcmd->sctx);  [rtw_cmd_thread()]
     475: rtw_sctx_done_err(&pcmd->sctx, RTW_SCTX_DONE_CMD_ERROR);  [rtw_cmd_thread()]
     630: struct submit_ctx sctx;  [rtw_startbss_cmd(), local]
     652: pcmd->sctx = &sctx;  [rtw_startbss_cmd()]
     653: rtw_sctx_init(&sctx, 2000);  [rtw_startbss_cmd()]
     659: rtw_sctx_wait(&sctx);  [rtw_startbss_cmd()]
     661: if (sctx ...  [rtw_startbss_cmd()]
     [all...]
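rtw_startbss_cmd() above embeds a submit_ctx in the queued command, waits on it with a 2000 ms timeout, and the command thread completes it with a status. A simplified sketch with a plain completion standing in for the driver's struct submit_ctx; field and status names are assumptions:

    #include <linux/completion.h>
    #include <linux/errno.h>
    #include <linux/jiffies.h>

    struct submit_ctx_sketch {
            struct completion done;
            int status;                 /* RTW_SCTX_DONE_* style codes */
    };

    static void sctx_sketch_init(struct submit_ctx_sketch *s)
    {
            init_completion(&s->done);
            s->status = 0;
    }

    /* Called by the consumer (e.g. a command thread) when done. */
    static void sctx_sketch_done(struct submit_ctx_sketch *s, int status)
    {
            s->status = status;
            complete(&s->done);
    }

    /* Called by the issuer; returns the status or -ETIMEDOUT. */
    static int sctx_sketch_wait(struct submit_ctx_sketch *s,
                                unsigned int timeout_ms)
    {
            if (!wait_for_completion_timeout(&s->done,
                                             msecs_to_jiffies(timeout_ms)))
                    return -ETIMEDOUT;
            return s->status;
    }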
/linux/kernel/
  kexec_file.c
     803: struct sha256_ctx sctx;  [kexec_calculate_store_digests(), local]
     822: sha256_init(&sctx);  [kexec_calculate_store_digests()]
     848: sha256_update(&sctx, ksegment->kbuf, ksegment->bufsz);  [kexec_calculate_store_digests()]
     860: sha256_update(&sctx, zero_buf, bytes);  [kexec_calculate_store_digests()]
     869: sha256_final(&sctx, digest);  [kexec_calculate_store_digests()]
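kexec_calculate_store_digests() above hashes each segment's buffer and then feeds zeroes for the bufsz..memsz tail, so the digest matches the segment as it will sit in memory (and as purgatory will later see it). A hedged sketch of one segment's contribution; the zero-buffer size is an assumption:

    #include <crypto/sha2.h>
    #include <linux/minmax.h>

    #define ZERO_BUF_SZ 4096

    static void hash_segment(struct sha256_ctx *sctx,
                             const u8 *kbuf, unsigned long bufsz,
                             unsigned long memsz)
    {
            static const u8 zero_buf[ZERO_BUF_SZ];
            unsigned long nullsz = memsz - bufsz;

            sha256_update(sctx, kbuf, bufsz);
            /* account for the zero-filled tail, one chunk at a time */
            while (nullsz) {
                    unsigned long bytes = min_t(unsigned long, nullsz,
                                                ZERO_BUF_SZ);

                    sha256_update(sctx, zero_buf, bytes);
                    nullsz -= bytes;
            }
    }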
/linux/tools/perf/util/
  metricgroup.c
      249: m->pctx->sctx.user_requested_cpu_list = NULL;  [metric__new()]
      251: m->pctx->sctx.user_requested_cpu_list = strdup(user_requested_cpu_list);  [metric__new()]
      252: if (!m->pctx->sctx.user_requested_cpu_list)  [metric__new()]
      255: m->pctx->sctx.runtime = runtime;  [metric__new()]
      256: m->pctx->sctx.system_wide = system_wide;  [metric__new()]
     1464: expr->runtime = m->pctx->sctx.runtime;  [parse_groups()]