Lines Matching full:binding
27 * the binding to remain alive. Each page pool using this binding holds
28 * a ref to keep the binding alive. Each allocated net_iov holds a
31 * The binding undoes itself and unmaps the underlying dmabuf once all
32 * those refs are dropped and the binding is no longer desired or in
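The comment fragments above describe a ref-per-holder scheme: the netlink user, every page pool using the binding, and every allocated net_iov each hold one reference, and the binding only unmaps the dmabuf once all of them are dropped. Below is a minimal sketch of how an allocation/free pair would honour that rule, built only from names that appear in this listing (net_devmem_alloc_dmabuf, the get/put helpers, the chunk-owner accessor); net_devmem_free_dmabuf() and the two net_devmem_chunk_*() calls are assumed placeholder names, not taken from the matched lines.

struct net_iov *net_devmem_alloc_dmabuf(struct net_devmem_dmabuf_binding *binding)
{
	struct net_iov *niov;

	/* Hypothetical chunk-pool carve-out; the real allocator is not
	 * visible in the matched lines.
	 */
	niov = net_devmem_chunk_alloc(binding);
	if (!niov)
		return NULL;

	/* Each allocated net_iov pins the binding, as the comment states. */
	net_devmem_dmabuf_binding_get(binding);
	return niov;
}

void net_devmem_free_dmabuf(struct net_iov *niov)
{
	struct net_devmem_dmabuf_binding *binding =
		net_devmem_iov_to_chunk_owner(niov)->binding;

	net_devmem_chunk_free(binding, niov);	/* hypothetical counterpart */

	/* Dropping the last ref lets the binding undo itself and unmap the
	 * underlying dmabuf.
	 */
	net_devmem_dmabuf_binding_put(binding);
}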
42 /* rxqs this binding is active on. */
45 /* ID of this binding. Globally unique to all bindings currently
59 struct net_devmem_dmabuf_binding *binding;
65 void __net_devmem_dmabuf_binding_free(struct net_devmem_dmabuf_binding *binding);
69 void net_devmem_unbind_dmabuf(struct net_devmem_dmabuf_binding *binding);
71 struct net_devmem_dmabuf_binding *binding,
85 return net_devmem_iov_to_chunk_owner(niov)->binding;
102 net_devmem_dmabuf_binding_get(struct net_devmem_dmabuf_binding *binding)
104 refcount_inc(&binding->ref);
108 net_devmem_dmabuf_binding_put(struct net_devmem_dmabuf_binding *binding)
110 if (!refcount_dec_and_test(&binding->ref))
113 __net_devmem_dmabuf_binding_free(binding);
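Because this view shows only the lines that match "binding", the two inline ref helpers above (source lines 102-113) appear without their declarations and braces. A reconstruction under the usual assumptions: static inline, void return, and a refcount_t member named ref, as implied by the refcount_inc()/refcount_dec_and_test() calls shown.

#include <linux/refcount.h>

static inline void
net_devmem_dmabuf_binding_get(struct net_devmem_dmabuf_binding *binding)
{
	/* Taken by each new holder: page pool, allocated net_iov, etc. */
	refcount_inc(&binding->ref);
}

static inline void
net_devmem_dmabuf_binding_put(struct net_devmem_dmabuf_binding *binding)
{
	if (!refcount_dec_and_test(&binding->ref))
		return;

	/* Last holder gone: undo the binding and unmap the dmabuf. */
	__net_devmem_dmabuf_binding_free(binding);
}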
117 net_devmem_alloc_dmabuf(struct net_devmem_dmabuf_binding *binding);
126 __net_devmem_dmabuf_binding_free(struct net_devmem_dmabuf_binding *binding)
138 net_devmem_unbind_dmabuf(struct net_devmem_dmabuf_binding *binding)
144 struct net_devmem_dmabuf_binding *binding,
152 net_devmem_alloc_dmabuf(struct net_devmem_dmabuf_binding *binding)