
Searched refs:src_list (Results 1 – 12 of 12) sorted by relevance

/linux/crypto/async_tx/
async_xor.c
35 dma_addr_t *src_list = unmap->addr; in do_async_xor() local
62 tmp = src_list[0]; in do_async_xor()
63 if (src_list > unmap->addr) in do_async_xor()
64 src_list[0] = dma_dest; in do_async_xor()
65 tx = dma->device_prep_dma_xor(chan, dma_dest, src_list, in do_async_xor()
76 src_list, in do_async_xor()
80 src_list[0] = tmp; in do_async_xor()
91 src_list += xor_src_cnt - 1; in do_async_xor()
101 struct page **src_list, unsigned int *src_offs, in do_sync_xor_offs() argument
113 srcs = (void **) src_list; in do_sync_xor_offs()
183 async_xor_offs(struct page * dest,unsigned int offset,struct page ** src_list,unsigned int * src_offs,int src_cnt,size_t len,struct async_submit_ctl * submit) async_xor_offs() argument
273 async_xor(struct page * dest,struct page ** src_list,unsigned int offset,int src_cnt,size_t len,struct async_submit_ctl * submit) async_xor() argument
288 xor_val_chan(struct async_submit_ctl * submit,struct page * dest,struct page ** src_list,int src_cnt,size_t len) xor_val_chan() argument
316 async_xor_val_offs(struct page * dest,unsigned int offset,struct page ** src_list,unsigned int * src_offs,int src_cnt,size_t len,enum sum_check_flags * result,struct async_submit_ctl * submit) async_xor_val_offs() argument
[all...]
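The do_async_xor() hits above show the chunking trick: when there are more sources than one XOR operation can take, the destination is substituted as src_list[0] for every chunk after the first, and src_list is advanced by xor_src_cnt - 1. A minimal userspace sketch of that fold-the-destination-back-in idea (xor_all(), xor_blocks() and MAX_XOR_SRCS are hypothetical names, not the kernel API):

/*
 * Sketch only: assumes a per-operation source limit of MAX_XOR_SRCS.
 * The result of each chunk is fed back in as the first source of the
 * next chunk, mirroring the src_list[0] = dma_dest swap in do_async_xor().
 */
#include <stddef.h>

#define MAX_XOR_SRCS 4  /* assumed per-operation source limit */

static void xor_blocks(unsigned char *dest, unsigned char **srcs,
                       int src_cnt, size_t len)
{
        for (size_t i = 0; i < len; i++) {
                unsigned char v = 0;

                for (int j = 0; j < src_cnt; j++)
                        v ^= srcs[j][i];
                dest[i] = v;
        }
}

void xor_all(unsigned char *dest, unsigned char **src_list,
             int src_cnt, size_t len)
{
        unsigned char *chunk[MAX_XOR_SRCS];
        int first = 1;

        while (src_cnt > 0) {
                int n = 0;

                if (!first)
                        chunk[n++] = dest;  /* previous result becomes "src_list[0]" */
                while (n < MAX_XOR_SRCS && src_cnt > 0) {
                        chunk[n++] = *src_list++;
                        src_cnt--;
                }
                xor_blocks(dest, chunk, n, len);
                first = 0;
        }
}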
/linux/drivers/net/vxlan/
vxlan_mdb.c
40 struct hlist_head src_list; member
66 struct list_head src_list; member
131 if (hlist_empty(&remote->src_list)) in vxlan_mdb_entry_info_fill_srcs()
138 hlist_for_each_entry(ent, &remote->src_list, node) { in vxlan_mdb_entry_info_fill_srcs()
441 list_add_tail(&src->node, &cfg->src_list); in vxlan_mdb_config_src_entry_init()
459 const struct nlattr *src_list, in vxlan_mdb_config_src_list_init() argument
466 nla_for_each_nested(src_entry, src_list, rem) { in vxlan_mdb_config_src_list_init()
476 list_for_each_entry_safe_reverse(src, tmp, &cfg->src_list, node) in vxlan_mdb_config_src_list_init()
485 list_for_each_entry_safe_reverse(src, tmp, &cfg->src_list, node) in vxlan_mdb_config_src_list_fini()
556 if (vxlan_mdb_is_star_g(&cfg->group) && list_empty(&cfg->src_list) in vxlan_mdb_config_attrs_init()
[all...]
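The vxlan_mdb.c hits show src_list being built one entry at a time from a nested netlink attribute, with everything added so far torn down again if initialization fails partway through. A minimal userspace sketch of that append-then-roll-back pattern, using a plain singly linked list instead of the kernel list API (the real code unwinds in reverse with list_for_each_entry_safe_reverse; all names here are illustrative):

/* Sketch only: src_entry, src_cfg, init_src() and init_src_list() are made up. */
#include <stdlib.h>

struct src_entry {
        int addr;                        /* stand-in for the real source address */
        struct src_entry *next;
};

struct src_cfg {
        struct src_entry *head, *tail;   /* simple tail-append list */
};

static int init_src(struct src_cfg *cfg, int addr)
{
        struct src_entry *e = malloc(sizeof(*e));

        if (!e)
                return -1;
        e->addr = addr;
        e->next = NULL;
        if (cfg->tail)
                cfg->tail->next = e;
        else
                cfg->head = e;
        cfg->tail = e;
        return 0;
}

static void fini_src_list(struct src_cfg *cfg)
{
        /* tear down everything added so far */
        while (cfg->head) {
                struct src_entry *e = cfg->head;

                cfg->head = e->next;
                free(e);
        }
        cfg->tail = NULL;
}

int init_src_list(struct src_cfg *cfg, const int *addrs, int n)
{
        for (int i = 0; i < n; i++) {
                if (init_src(cfg, addrs[i]) < 0) {
                        fini_src_list(cfg);  /* roll back the partial list */
                        return -1;
                }
        }
        return 0;
}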
/linux/include/linux/
async_tx.h
162 async_xor(struct page *dest, struct page **src_list, unsigned int offset,
167 struct page **src_list, unsigned int *src_offset,
172 struct page **src_list, unsigned int *src_offset,
/linux/drivers/net/ethernet/mellanox/mlx5/core/
fs_pool.c
113 mlx5_fs_pool_acquire_from_list(struct list_head *src_list, in mlx5_fs_pool_acquire_from_list() argument
121 if (list_empty(src_list)) in mlx5_fs_pool_acquire_from_list()
124 fs_bulk = list_first_entry(src_list, struct mlx5_fs_bulk, pool_list); in mlx5_fs_pool_acquire_from_list()
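mlx5_fs_pool_acquire_from_list() is a small acquire-from-free-list helper: fail if the list is empty, otherwise hand out its first entry. A rough userspace equivalent under those assumptions (the detach step here is a simplification of the sketch, and all names are illustrative):

/* Sketch only: fs_bulk and acquire_from_list() are hypothetical. */
#include <stddef.h>

struct fs_bulk {
        struct fs_bulk *next;
        /* ... resource payload ... */
};

struct fs_bulk *acquire_from_list(struct fs_bulk **src_list)
{
        struct fs_bulk *bulk;

        if (!*src_list)            /* list_empty(src_list) */
                return NULL;
        bulk = *src_list;          /* list_first_entry(...) */
        *src_list = bulk->next;    /* detach it from the free list */
        return bulk;
}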
/linux/drivers/crypto/aspeed/
aspeed-hace-hash.c
219 struct aspeed_sg_list *src_list; in aspeed_ahash_dma_prepare_sg() local
240 max_sg_nents = ASPEED_HASH_SRC_DMA_BUF_LEN / sizeof(*src_list) - final; in aspeed_ahash_dma_prepare_sg()
242 src_list = (struct aspeed_sg_list *)hash_engine->ahash_src_addr; in aspeed_ahash_dma_prepare_sg()
275 src_list[i].phy_addr = cpu_to_le32(phy_addr); in aspeed_ahash_dma_prepare_sg()
276 src_list[i].len = cpu_to_le32(len); in aspeed_ahash_dma_prepare_sg()
300 src_list[i].phy_addr = cpu_to_le32(rctx->buffer_dma_addr); in aspeed_ahash_dma_prepare_sg()
301 src_list[i].len = cpu_to_le32(len); in aspeed_ahash_dma_prepare_sg()
304 src_list[i - 1].len |= cpu_to_le32(HASH_SG_LAST_LIST); in aspeed_ahash_dma_prepare_sg()
aspeed-hace-crypto.c
222 struct aspeed_sg_list *src_list, *dst_list; in aspeed_sk_start_sg() local
267 src_list = (struct aspeed_sg_list *)crypto_engine->cipher_addr; in aspeed_sk_start_sg()
284 src_list[i].phy_addr = cpu_to_le32(phy_addr); in aspeed_sk_start_sg()
285 src_list[i].len = cpu_to_le32(len); in aspeed_sk_start_sg()
294 dst_list = src_list; in aspeed_sk_start_sg()
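Both Aspeed drivers fill src_list the same way: each descriptor carries a little-endian physical address and length, and the final descriptor's length word gets a "last entry" flag OR-ed in (the crypto variant can alias dst_list to src_list for in-place operation). A minimal userspace sketch of that descriptor fill, with a made-up SG_LAST_FLAG standing in for HASH_SG_LAST_LIST and a hypothetical segment array in place of the real scatterlist walk:

/* Sketch only: sg_desc, segment and fill_src_list() are illustrative. */
#include <endian.h>
#include <stdint.h>
#include <stddef.h>

#define SG_LAST_FLAG 0x80000000u   /* stand-in for HASH_SG_LAST_LIST */

struct sg_desc {
        uint32_t phy_addr;   /* little-endian DMA address */
        uint32_t len;        /* little-endian length, flag in top bit */
};

struct segment {
        uint32_t addr;
        uint32_t len;
};

void fill_src_list(struct sg_desc *src_list,
                   const struct segment *segs, size_t nents)
{
        for (size_t i = 0; i < nents; i++) {
                src_list[i].phy_addr = htole32(segs[i].addr);
                src_list[i].len = htole32(segs[i].len);
        }
        /* mark the final descriptor so the engine stops there */
        if (nents)
                src_list[nents - 1].len |= htole32(SG_LAST_FLAG);
}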
/linux/net/bridge/
br_mdb.c
182 if (hlist_empty(&p->src_list)) in __mdb_fill_srcs()
189 hlist_for_each_entry_rcu(ent, &p->src_list, node, in __mdb_fill_srcs()
496 if (!hlist_empty(&pg->src_list)) in rtnl_mdb_nlmsg_pg_size()
499 hlist_for_each_entry(ent, &pg->src_list, node) { in rtnl_mdb_nlmsg_pg_size()
932 hlist_for_each_entry(ent, &pg->src_list, node) in br_mdb_replace_group_srcs()
939 hlist_for_each_entry_safe(ent, tmp, &pg->src_list, node) { in br_mdb_replace_group_srcs()
947 hlist_for_each_entry(ent, &pg->src_list, node) in br_mdb_replace_group_srcs()
1119 static int br_mdb_config_src_list_init(struct nlattr *src_list, in br_mdb_config_src_list_init() argument
1127 nla_for_each_nested(src_entry, src_list, rem) in br_mdb_config_src_list_init()
1141 nla_for_each_nested(src_entry, src_list, rem) in br_mdb_config_src_list_init()
[all...]
br_multicast.c
373 hlist_for_each_entry(src_ent, &pg_lst->src_list, node) { in br_multicast_star_g_handle_mode()
424 hlist_for_each_entry(src_ent, &pg->src_list, node) { in br_multicast_star_g_host_state()
797 WARN_ON(!hlist_empty(&pg->src_list)); in br_multicast_destroy_port_group()
815 hlist_for_each_entry_safe(ent, tmp, &pg->src_list, node) in br_multicast_del_pg()
872 hlist_for_each_entry_safe(src_ent, tmp, &pg->src_list, node) { in br_multicast_port_group_expired()
879 if (hlist_empty(&pg->src_list)) { in br_multicast_port_group_expired()
954 hlist_for_each_entry(ent, &pg->src_list, node) { in br_ip4_multicast_alloc_query()
1042 hlist_for_each_entry(ent, &pg->src_list, node) { in br_ip4_multicast_alloc_query()
1104 hlist_for_each_entry(ent, &pg->src_list, node) { in br_ip6_multicast_alloc_query()
1205 hlist_for_each_entry(ent, &pg->src_list, node) { in br_ip6_multicast_alloc_query()
[all...]
br_multicast_eht.c
633 hlist_for_each_entry_safe(src_ent, tmp, &pg->src_list, node) { in __eht_inc_exc()
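The bridge hits are dominated by one pattern: walking a port group's src_list with hlist_for_each_entry_safe so that expired source entries can be unlinked and freed mid-iteration. A minimal userspace sketch of that safe-removal loop on a plain singly linked list, where the next pointer is sampled before the current entry may be freed (src_entry and expired() are hypothetical):

/* Sketch only: a simplified stand-in for hlist_for_each_entry_safe(). */
#include <stdlib.h>

struct src_entry {
        struct src_entry *next;
        long expires;              /* stand-in for the real timer state */
};

static int expired(const struct src_entry *e, long now)
{
        return e->expires <= now;
}

void prune_src_list(struct src_entry **src_list, long now)
{
        struct src_entry **prev = src_list;
        struct src_entry *ent = *src_list, *tmp;

        while (ent) {
                tmp = ent->next;           /* grab next before freeing */
                if (expired(ent, now)) {
                        *prev = tmp;       /* unlink the expired source */
                        free(ent);
                } else {
                        prev = &ent->next;
                }
                ent = tmp;
        }
}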
/linux/tools/testing/selftests/net/forwarding/
bridge_mdb.sh
276 local src_list
280 src_list=${src_list},${src_prefix}${i}
283 echo $src_list | cut -c 2-
1127 local src_list
1142 src_list=$valid_src
1144 src_list=$invalid_src
1163 filter_mode $filter_mode source_list $src_list
/linux/sound/core/seq/
seq_clientmgr.c
760 list_for_each_entry(subs, &grp->list_head, src_list) { in __deliver_to_subscribers()
2064 s = list_entry(p, struct snd_seq_subscribers, src_list); in snd_seq_ioctl_query_subs()
2645 s = list_entry(p, struct snd_seq_subscribers, src_list); in snd_seq_info_dump_subscribers()
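Here src_list is the name of the embedded list linkage inside struct snd_seq_subscribers, and list_entry() recovers the containing structure from a pointer to that member. A minimal userspace sketch of the same container_of arithmetic (the subscriber type and field names are illustrative):

/* Sketch only: shows what list_entry(p, type, member) computes. */
#include <stddef.h>

struct list_node {
        struct list_node *next;
};

struct subscriber {
        int client;                  /* hypothetical payload */
        struct list_node src_list;   /* linkage used by the sender side */
};

#define list_entry(ptr, type, member) \
        ((type *)((char *)(ptr) - offsetof(type, member)))

struct subscriber *subscriber_from_link(struct list_node *p)
{
        /* walk back from the embedded node to the enclosing structure */
        return list_entry(p, struct subscriber, src_list);
}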
/linux/mm/
hugetlb.c
3995 struct list_head *src_list) in demote_free_hugetlb_folios() argument
4002 rc = hugetlb_vmemmap_restore_folios(src, src_list, &ret_list); in demote_free_hugetlb_folios()
4003 list_splice_init(&ret_list, src_list); in demote_free_hugetlb_folios()
4015 list_for_each_entry_safe(folio, next, src_list, lru) { in demote_free_hugetlb_folios()