/linux/drivers/media/v4l2-core/v4l2-async.c
    Core implementation of the V4L2 async notifier framework. Matching
    helpers: match_i2c(), notifier_dev(), match_fwnode_one(), match_fwnode(),
    v4l2_async_find_match(). Completion handling: v4l2_async_nf_find_v4l2_dev(),
    v4l2_async_nf_can_complete(), v4l2_async_nf_try_complete(),
    v4l2_async_match_notify(), v4l2_async_nf_try_all_subdevs(). Unbind paths:
    v4l2_async_unbind_subdev_one(), v4l2_async_nf_unbind_all_subdevs(). Match
    validation: v4l2_async_nf_has_async_match_entry(),
    v4l2_async_nf_has_async_match(), v4l2_async_nf_match_valid(). Notifier
    lifecycle: v4l2_async_nf_init(), v4l2_async_subdev_nf_init(),
    __v4l2_async_nf_register()/v4l2_async_nf_register(),
    __v4l2_async_nf_unregister()/v4l2_async_nf_unregister(),
    __v4l2_async_nf_cleanup()/v4l2_async_nf_cleanup(). Connection setup:
    __v4l2_async_nf_add_connection(), __v4l2_async_nf_add_fwnode(),
    __v4l2_async_nf_add_i2c(), __v4l2_async_register_subdev(),
    v4l2_async_nf_name().
/linux/drivers/gpu/drm/drm_gpusvm.c
    GPU SVM core. The DOC comments describe how each notifier tracks the list
    of ranges that fall within its interval: ranges are allocated dynamically
    on GPU fault and removed on an MMU notifier UNMAP event, and are kept in
    the notifier's red-black tree. A global notifier lock safeguards the
    notifier's range tree and list; the comments note it could move to a
    per-notifier lock if finer-grained locking is needed. Notifier-related
    functions: drm_gpusvm_range_find(), __drm_gpusvm_notifier_next(),
    drm_gpusvm_notifier_invalidate(), drm_gpusvm_notifier_insert(),
    drm_gpusvm_notifier_remove(), drm_gpusvm_notifier_alloc(),
    drm_gpusvm_notifier_free(), drm_gpusvm_range_insert(),
    __drm_gpusvm_range_remove(), drm_gpusvm_range_alloc(),
    drm_gpusvm_check_pages(), drm_gpusvm_range_chunk_size(),
    drm_gpusvm_range_find_or_insert(), drm_gpusvm_range_remove(),
    drm_gpusvm_range_get_pages(), drm_gpusvm_range_evict(),
    drm_gpusvm_has_mapping(), drm_gpusvm_fini().
/linux/Documentation/fault-injection/notifier-error-inject.rst
    Documents the notifier-error-inject modules, which allow failing specified
    notifier chain callbacks. They are useful for testing the error handling of
    notifier call chains, a path that is rarely executed. Error injection
    modules exist for the PM notifier, the memory hotplug notifier, the powerpc
    pSeries reconfig notifier, and the netdevice notifier. The PM module is
    driven through debugfs:
    /sys/kernel/debug/notifier-error-inject/pm/actions/<notifier event>/error
/linux/include/media/v4l2-async.h
    Public V4L2 async notifier API. struct v4l2_async_connection carries a
    @notifier member pointing at the async notifier the connection is related
    to, and sits on the notifier's @waiting_list or @done_list.
    struct v4l2_async_notifier_operations defines the asynchronous V4L2
    notifier callbacks bound(), complete() and unbind(); complete() is only
    executed for the root notifier. struct v4l2_async_notifier holds the
    v4l2_device notifier data. Helper macros: v4l2_async_nf_add_fwnode(),
    v4l2_async_nf_add_fwnode_remote() and v4l2_async_nf_add_i2c(). A
    registration sketch against this API follows this entry.
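
Most of the bridge drivers listed further down (video-mux, rcar-isp, c3-isp,
dcmipp, imx8-isi, mei_csi) follow the same registration pattern against this
API. A minimal sketch under the current v4l2_async_connection based interface;
struct my_isp and all my_isp_* names are hypothetical, and a single remote
endpoint fwnode is assumed:

    #include <linux/err.h>
    #include <media/v4l2-async.h>
    #include <media/v4l2-device.h>
    #include <media/v4l2-subdev.h>

    struct my_isp {                         /* hypothetical bridge driver state */
            struct v4l2_device v4l2_dev;
            struct v4l2_async_notifier notifier;
    };

    static int my_isp_bound(struct v4l2_async_notifier *notifier,
                            struct v4l2_subdev *sd,
                            struct v4l2_async_connection *asc)
    {
            struct my_isp *isp = container_of(notifier, struct my_isp, notifier);

            /* Typically: create media links from sd to the bridge entity. */
            dev_dbg(isp->v4l2_dev.dev, "bound subdev %s\n", sd->name);
            return 0;
    }

    static int my_isp_complete(struct v4l2_async_notifier *notifier)
    {
            struct my_isp *isp = container_of(notifier, struct my_isp, notifier);

            /* All awaited subdevs are bound: expose the device nodes. */
            return v4l2_device_register_subdev_nodes(&isp->v4l2_dev);
    }

    static const struct v4l2_async_notifier_operations my_isp_notify_ops = {
            .bound          = my_isp_bound,
            .complete       = my_isp_complete,
    };

    static int my_isp_register_notifier(struct my_isp *isp,
                                        struct fwnode_handle *ep)
    {
            struct v4l2_async_connection *asc;
            int ret;

            v4l2_async_nf_init(&isp->notifier, &isp->v4l2_dev);
            isp->notifier.ops = &my_isp_notify_ops;

            asc = v4l2_async_nf_add_fwnode_remote(&isp->notifier, ep,
                                                  struct v4l2_async_connection);
            if (IS_ERR(asc)) {
                    v4l2_async_nf_cleanup(&isp->notifier);
                    return PTR_ERR(asc);
            }

            ret = v4l2_async_nf_register(&isp->notifier);
            if (ret)
                    v4l2_async_nf_cleanup(&isp->notifier);

            return ret;
    }

Teardown mirrors this in reverse: v4l2_async_nf_unregister() followed by
v4l2_async_nf_cleanup(), as the video-mux and imx entries below show.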
/linux/drivers/net/ethernet/mellanox/mlx5/core/sf/vhca_event.c
    mlx5 VHCA state event handling. A struct mlx5_vhca_state_notifier is
    allocated in mlx5_vhca_event_init(); mlx5_vhca_state_change_notifier()
    queues work, via mlx5_vhca_events_work_enqueue(), that
    mlx5_vhca_state_work_handler() later processes through work->notifier.
    mlx5_vhca_event_start() and mlx5_vhca_event_stop() operate on the same
    notifier.
/linux/drivers/gpu/drm/amd/amdgpu/amdgpu_hmm.c
    For coherent userptr handling, amdgpu registers an MMU notifier to inform
    the driver of address-space changes. amdgpu_hmm_invalidate_gfx() and
    amdgpu_hmm_invalidate_hsa() recover the buffer object with
    container_of(mni, struct amdgpu_bo, notifier). amdgpu_hmm_register() calls
    mmu_interval_notifier_insert(&bo->notifier, current->mm, addr, ...) to
    register a BO for notifier updates and clears bo->notifier.mm on failure;
    amdgpu_hmm_unregister() removes the registration, and
    amdgpu_hmm_range_get_pages() wraps the HMM page-walking path. A generic
    registration sketch follows this entry.
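
The amdgpu and radeon entries (and the Xe and GPU SVM code above) are built on
the mmu_interval_notifier API. A hedged sketch of the registration side, where
struct my_userptr, my_invalidate() and my_mni_ops are hypothetical stand-ins
for the driver's buffer object and callbacks:

    #include <linux/mmu_notifier.h>
    #include <linux/sched.h>

    struct my_userptr {                     /* hypothetical driver object */
            struct mmu_interval_notifier notifier;
            /* GPU mapping state, protected by a driver lock ... */
    };

    static bool my_invalidate(struct mmu_interval_notifier *mni,
                              const struct mmu_notifier_range *range,
                              unsigned long cur_seq)
    {
            struct my_userptr *up = container_of(mni, struct my_userptr, notifier);

            if (!mmu_notifier_range_blockable(range))
                    return false;

            /*
             * Bump the sequence so a later mmu_interval_read_retry() reports
             * the interval as stale, then tear down the GPU mapping for 'up'.
             * Real drivers take their notifier lock around this.
             */
            mmu_interval_set_seq(mni, cur_seq);
            return true;
    }

    static const struct mmu_interval_notifier_ops my_mni_ops = {
            .invalidate = my_invalidate,
    };

    static int my_userptr_register(struct my_userptr *up, unsigned long addr,
                                   unsigned long size)
    {
            /* Subscribe 'up' to invalidations of [addr, addr + size). */
            return mmu_interval_notifier_insert(&up->notifier, current->mm,
                                                addr, size, &my_mni_ops);
    }

The matching teardown is mmu_interval_notifier_remove(), as in
radeon_mn_unregister() below.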
/linux/include/drm/drm_gpusvm.h
    GPU SVM public header. The ops structure has optional notifier_alloc() and
    notifier_free() hooks for allocating and freeing a GPU SVM notifier
    (notifier_alloc() returns the new notifier or NULL on failure) and a
    required invalidate() hook. struct drm_gpusvm_notifier embeds a struct
    mmu_interval_notifier, and ranges carry a pointer back to their notifier.
    Inline helpers: drm_gpusvm_notifier_start(), drm_gpusvm_notifier_end(),
    drm_gpusvm_notifier_size(). A hedged sketch of the alloc/free hooks
    follows this entry.
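
A driver that wraps struct drm_gpusvm_notifier in its own structure can supply
the optional notifier_alloc()/notifier_free() hooks documented above. The
sketch below is an assumption-heavy illustration: struct my_notifier and the
my_* functions are hypothetical, and the exact hook signatures (notifier_alloc()
taking no arguments, the struct drm_gpusvm_ops name) should be verified against
the header in your tree:

    #include <drm/drm_gpusvm.h>
    #include <linux/slab.h>

    struct my_notifier {                    /* hypothetical wrapper */
            struct drm_gpusvm_notifier base;
            /* driver-private bookkeeping ... */
    };

    static struct drm_gpusvm_notifier *my_notifier_alloc(void)
    {
            struct my_notifier *n = kzalloc(sizeof(*n), GFP_KERNEL);

            return n ? &n->base : NULL;     /* NULL on failure, per kerneldoc */
    }

    static void my_notifier_free(struct drm_gpusvm_notifier *notifier)
    {
            kfree(container_of(notifier, struct my_notifier, base));
    }

    static const struct drm_gpusvm_ops my_gpusvm_ops = {
            .notifier_alloc = my_notifier_alloc,
            .notifier_free  = my_notifier_free,
            /* .invalidate is required by the header but omitted here. */
    };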
/linux/drivers/staging/media/imx/imx-media-dev.c
    i.MX media device driver. notifier2dev() recovers the struct imx_media_dev
    from its embedded notifier; imx_media_subdev_bound() is the async subdev
    bound callback and imx6_media_probe_complete() the complete callback, which
    calls imx_media_probe_complete(). The probe error path cleans up with
    v4l2_async_nf_cleanup(&imxmd->notifier).
/linux/drivers/gpu/drm/radeon/radeon_mn.c
    Radeon userptr MMU notifier. radeon_mn_invalidate() recovers the BO with
    container_of(mn, struct radeon_bo, notifier). radeon_mn_register() registers
    an MMU notifier for the given BO at the specified address via
    mmu_interval_notifier_insert() and then calls mmu_interval_read_begin();
    radeon_mn_unregister() returns early if bo->notifier.mm is unset and
    otherwise calls mmu_interval_notifier_remove().
/linux/drivers/media/platform/renesas/rzg2l-cru/rzg2l-core.c
    RZ/G2L CRU group async notifier. notifier_to_cru() maps the notifier back
    to struct rzg2l_cru_dev; rzg2l_cru_group_notify_bound(), _notify_unbind()
    and _notify_complete() implement the callbacks. rzg2l_cru_mc_parse_of()
    adds the remote subdev with v4l2_async_nf_add_fwnode(), and
    rzg2l_cru_mc_parse_of_graph() initializes the notifier with
    v4l2_async_nf_init().
/linux/drivers/gpu/drm/nouveau/nouveau_svm.c
    Nouveau SVM. struct nouveau_svmm embeds an mmu_notifier:
    nouveau_svmm_init() sets svmm->notifier.ops = &nouveau_mn_ops and calls
    __mmu_notifier_register(); nouveau_svmm_fini() drops it with
    mmu_notifier_put(), and nouveau_svmm_free_notifier() frees the container.
    Page faults use a stack-allocated struct svm_notifier wrapping a struct
    mmu_interval_notifier, with nouveau_svm_range_invalidate() as its
    invalidate callback; a comment notes that the notifier doing invalidation
    is always called after the range notifier. nouveau_atomic_range_fault()
    and nouveau_range_fault() take the svm_notifier, and nouveau_svm_fault()
    declares it on the stack.
/linux/drivers/gpu/drm/xe/xe_hmm.c
    Xe userptr HMM population. xe_hmm_userptr_populate_range() fills a struct
    hmm_range with .notifier = &uvma->userptr.notifier, reads the MMU notifier
    sequence number with mmu_interval_read_begin(), takes a reference on
    userptr->notifier.mm with mmget_not_zero() and faults pages under
    mmap_read_lock(). xe_alloc_sg() later checks mmu_interval_read_retry()
    against the recorded notifier_seq, and @notifier_sem is documented as the
    xe notifier lock. A sketch of the fault/retry loop follows this entry.
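
The Xe and nouveau fault paths follow the standard mmu_interval_read_begin() /
hmm_range_fault() / mmu_interval_read_retry() protocol. A hedged sketch of that
loop; struct my_userptr (the same hypothetical object as in the amdgpu/radeon
sketch, plus a driver lock) and my_populate() are stand-ins:

    #include <linux/hmm.h>
    #include <linux/mm.h>
    #include <linux/mmu_notifier.h>
    #include <linux/mutex.h>
    #include <linux/sched/mm.h>

    struct my_userptr {                     /* hypothetical driver object */
            struct mmu_interval_notifier notifier;
            struct mutex lock;              /* driver notifier lock */
    };

    static int my_populate(struct my_userptr *up, struct mm_struct *mm,
                           unsigned long start, unsigned long end,
                           unsigned long *pfns)
    {
            struct hmm_range range = {
                    .notifier       = &up->notifier,
                    .start          = start,
                    .end            = end,
                    .hmm_pfns       = pfns,
                    .default_flags  = HMM_PFN_REQ_FAULT,
            };
            int ret;

            if (!mmget_not_zero(mm))
                    return -EFAULT;

    again:
            range.notifier_seq = mmu_interval_read_begin(&up->notifier);

            mmap_read_lock(mm);
            ret = hmm_range_fault(&range);
            mmap_read_unlock(mm);
            if (ret) {
                    if (ret == -EBUSY)      /* collided with an invalidation */
                            goto again;
                    goto out;
            }

            /*
             * Publish the result under the driver lock; if the interval was
             * invalidated since read_begin(), drop it and start over.
             */
            mutex_lock(&up->lock);
            if (mmu_interval_read_retry(&up->notifier, range.notifier_seq)) {
                    mutex_unlock(&up->lock);
                    goto again;
            }
            /* ... hand 'pfns' to the GPU mapping code here ... */
            mutex_unlock(&up->lock);
    out:
            mmput(mm);
            return ret;
    }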
/linux/drivers/md/dm-vdo/vdo.c
    dm-vdo read-only notifier. vdo_enable_read_only_entry() initializes
    vdo->read_only_notifier: read_only_error = VDO_READ_ONLY, state (NOTIFIED
    or MAY_NOT_NOTIFY), the spinlock and the completion.
    vdo_wait_until_not_entering_read_only_mode(),
    finish_entering_read_only_mode(), make_thread_read_only(),
    vdo_allow_read_only_mode_entry() and vdo_enter_read_only_mode() coordinate
    entering read-only mode under notifier->lock.
/linux/drivers/media/platform/amlogic/c3/isp/c3-isp-dev.c
    Amlogic C3 ISP subdev notifier registration. c3_isp_notify_bound() and
    c3_isp_notify_complete() recover the struct c3_isp_device with
    container_of(notifier, struct c3_isp_device, notifier).
    c3_isp_async_nf_register() calls v4l2_async_nf_init(&isp->notifier,
    &isp->v4l2_dev), adds the remote endpoint with
    v4l2_async_nf_add_fwnode_remote() and sets the notifier ops.
/linux/include/linux/preempt.h
    Preempt notifier API. struct preempt_notifier_ops provides sched_in(),
    called with the struct preempt_notifier of the task being scheduled in and
    the CPU, and sched_out(), called with the notifier of the task being
    preempted and the next task. preempt_notifier_register() and
    preempt_notifier_unregister() attach and detach a notifier for the current
    task, and the inline preempt_notifier_init() clears notifier->link.next
    and records @ops, the notifier functions to be called. A usage sketch
    follows this entry.
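
A hedged sketch of hooking these callbacks, modelled on in-tree usage (KVM);
struct my_ctx and the my_* functions are hypothetical, and the
preempt_notifier_inc()/preempt_notifier_dec() static-key pairing plus
registering with preemption disabled follow what current in-tree users do
(verify against your kernel):

    #include <linux/preempt.h>
    #include <linux/sched.h>

    struct my_ctx {                         /* hypothetical per-thread context */
            struct preempt_notifier preempt_notifier;
            u64 sched_out_count;
    };

    static void my_sched_in(struct preempt_notifier *notifier, int cpu)
    {
            /* The registering task was just scheduled back in on 'cpu'. */
    }

    static void my_sched_out(struct preempt_notifier *notifier,
                             struct task_struct *next)
    {
            struct my_ctx *ctx = container_of(notifier, struct my_ctx,
                                              preempt_notifier);

            /* The registering task is being preempted in favour of 'next'. */
            ctx->sched_out_count++;
    }

    static struct preempt_notifier_ops my_preempt_ops = {
            .sched_in       = my_sched_in,
            .sched_out      = my_sched_out,
    };

    static void my_ctx_attach(struct my_ctx *ctx)
    {
            /*
             * Assumption from in-tree users: enable the notifier static key
             * first, and register with preemption disabled.
             */
            preempt_notifier_inc();
            preempt_notifier_init(&ctx->preempt_notifier, &my_preempt_ops);

            preempt_disable();
            preempt_notifier_register(&ctx->preempt_notifier);
            preempt_enable();
    }

    static void my_ctx_detach(struct my_ctx *ctx)
    {
            preempt_disable();
            preempt_notifier_unregister(&ctx->preempt_notifier);
            preempt_enable();

            preempt_notifier_dec();
    }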
/linux/drivers/media/platform/renesas/rcar-isp/csisp.c
    R-Car ISP CSI subdev notifier. struct rcar_isp embeds a
    v4l2_async_notifier; risp_notify_bound() and risp_notify_unbind() recover
    it with notifier_to_isp(). risp_parse_dt() initializes it with
    v4l2_async_subdev_nf_init(&isp->notifier, &isp->subdev), sets
    risp_notify_ops, adds the remote fwnode with v4l2_async_nf_add_fwnode()
    and registers with v4l2_async_nf_register().
/linux/drivers/pwm/pwm-iqs620a.c
    IQS620A PWM driver. struct iqs620_pwm_private embeds a struct
    notifier_block (from <linux/notifier.h>); iqs620_pwm_notifier() recovers
    the private data with container_of(). Probe sets
    notifier.notifier_call = iqs620_pwm_notifier and registers it, logging
    "Failed to register notifier" on error; iqs620_pwm_notifier_unregister()
    unregisters it and logs "Failed to unregister notifier: %d" on failure.
/linux/drivers/edac/sifive_edac.c
    SiFive EDAC driver. struct sifive_edac_priv embeds a notifier_block;
    ecc_err_event() recovers it with container_of(). ecc_register() sets
    notifier_call = ecc_err_event and calls
    register_sifive_ccache_error_notifier(); ecc_unregister() calls
    unregister_sifive_ccache_error_notifier().
/linux/drivers/edac/octeon_edac-pc.c
    Octeon EDAC driver. Its private struct also embeds a notifier_block;
    co_cache_error_event() uses container_of(), co_cache_error_probe() sets
    notifier_call = co_cache_error_event and calls
    register_co_cache_error_notifier(); co_cache_error_remove() calls
    unregister_co_cache_error_notifier(). A generic notifier_block sketch
    follows this entry.
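
Both EDAC drivers above, like the IQS620A PWM driver, are built on the generic
struct notifier_block pattern from <linux/notifier.h>. A minimal sketch against
a hypothetical blocking notifier chain my_error_chain (the real drivers
register through subsystem helpers such as
register_sifive_ccache_error_notifier() instead):

    #include <linux/notifier.h>

    /* Hypothetical chain; real drivers use subsystem registration helpers. */
    static BLOCKING_NOTIFIER_HEAD(my_error_chain);

    struct my_edac_priv {
            struct notifier_block notifier;
            unsigned long error_count;
    };

    static int my_error_event(struct notifier_block *this, unsigned long action,
                              void *data)
    {
            /* Recover the private data from the embedded notifier_block. */
            struct my_edac_priv *p = container_of(this, struct my_edac_priv,
                                                  notifier);

            p->error_count++;
            return NOTIFY_OK;
    }

    static int my_edac_register(struct my_edac_priv *p)
    {
            p->notifier.notifier_call = my_error_event;
            return blocking_notifier_chain_register(&my_error_chain,
                                                    &p->notifier);
    }

    static void my_edac_unregister(struct my_edac_priv *p)
    {
            blocking_notifier_chain_unregister(&my_error_chain, &p->notifier);
    }

A producer would then fire the chain with
blocking_notifier_call_chain(&my_error_chain, action, data).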
/linux/drivers/media/platform/video-mux.c
    Video mux subdev. struct video_mux embeds a v4l2_async_notifier;
    video_mux_notify_bound() recovers it with notifier_to_video_mux().
    video_mux_async_register() initializes the subdev notifier with
    v4l2_async_subdev_nf_init(), adds remote endpoints with
    v4l2_async_nf_add_fwnode_remote(), sets video_mux_notify_ops and calls
    v4l2_async_nf_register(); the error path unregisters and cleans up the
    notifier.
/linux/drivers/gpu/drm/display/drm_hdmi_cec_notifier_helper.c
    DRM HDMI CEC notifier helper. drmm_connector_hdmi_cec_notifier_register()
    creates a CEC notifier with cec_notifier_conn_register(dev, port_name,
    &conn_info), fails if the call returns NULL, and stores the notifier in
    connector->cec.data. A lifecycle sketch follows this entry.
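
A hedged sketch of the lifecycle an HDMI encoder driver typically builds around
cec_notifier_conn_register(); struct my_hdmi and the my_hdmi_* functions are
hypothetical, and the hotplug handling is a simplified assumption rather than
what this helper does internally:

    #include <drm/drm_connector.h>
    #include <drm/drm_edid.h>
    #include <media/cec.h>
    #include <media/cec-notifier.h>

    struct my_hdmi {                        /* hypothetical encoder state */
            struct device *dev;
            struct drm_connector connector;
            struct cec_notifier *cec_notifier;
    };

    static int my_hdmi_cec_init(struct my_hdmi *hdmi)
    {
            struct cec_connector_info conn_info;

            cec_fill_conn_info_from_drm(&conn_info, &hdmi->connector);

            /* NULL port name: assume a single HDMI output on this device. */
            hdmi->cec_notifier = cec_notifier_conn_register(hdmi->dev, NULL,
                                                            &conn_info);
            return hdmi->cec_notifier ? 0 : -ENOMEM;
    }

    static void my_hdmi_hotplug(struct my_hdmi *hdmi, const struct edid *edid)
    {
            if (edid)
                    cec_notifier_set_phys_addr_from_edid(hdmi->cec_notifier,
                                                         edid);
            else
                    cec_notifier_phys_addr_invalidate(hdmi->cec_notifier);
    }

    static void my_hdmi_cec_exit(struct my_hdmi *hdmi)
    {
            cec_notifier_conn_unregister(hdmi->cec_notifier);
    }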
/linux/drivers/media/platform/st/stm32/stm32-dcmipp/dcmipp-core.c
    STM32 DCMIPP core. struct dcmipp_device embeds a v4l2_async_notifier, and
    dcmipp_graph_notify_bound(), _unbind() and _complete() recover it with
    notifier_to_dcmipp(). dcmipp_graph_init() calls
    v4l2_async_nf_init(&dcmipp->notifier, &dcmipp->v4l2_dev) and adds the
    remote endpoint with v4l2_async_nf_add_fwnode_remote().
/linux/drivers/iommu/intel/svm.c
    Intel IOMMU SVA support. struct dmar_domain embeds a struct mmu_notifier:
    intel_arch_invalidate_secondary_tlbs(), intel_mm_release() and
    intel_mm_free_notifier() recover the domain with container_of().
    intel_svm_domain_alloc() sets domain->notifier.ops = &intel_mmuops and
    calls mmu_notifier_register(&domain->notifier, mm);
    intel_svm_domain_free() drops the notifier with mmu_notifier_put(). A
    registration sketch follows this entry.
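
Unlike the interval notifiers used by the GPU drivers above, this driver
registers a whole-mm struct mmu_notifier. A hedged sketch of that pattern,
where struct my_domain and the my_* callbacks are hypothetical; the ops field
names mirror what the svm.c entry shows, but verify them against
<linux/mmu_notifier.h> in your tree:

    #include <linux/err.h>
    #include <linux/mmu_notifier.h>
    #include <linux/slab.h>

    struct my_domain {                      /* hypothetical per-mm object */
            struct mmu_notifier notifier;
            /* translation state shared with the device ... */
    };

    static void my_arch_invalidate_secondary_tlbs(struct mmu_notifier *mn,
                                                  struct mm_struct *mm,
                                                  unsigned long start,
                                                  unsigned long end)
    {
            /* Flush device TLB entries covering [start, end). */
    }

    static void my_mm_release(struct mmu_notifier *mn, struct mm_struct *mm)
    {
            /* The mm is going away; stop walking its page tables. */
    }

    static void my_free_notifier(struct mmu_notifier *mn)
    {
            /* Runs after the final mmu_notifier_put() and an RCU grace period. */
            kfree(container_of(mn, struct my_domain, notifier));
    }

    static const struct mmu_notifier_ops my_mmuops = {
            .arch_invalidate_secondary_tlbs = my_arch_invalidate_secondary_tlbs,
            .release                        = my_mm_release,
            .free_notifier                  = my_free_notifier,
    };

    static struct my_domain *my_domain_alloc(struct mm_struct *mm)
    {
            struct my_domain *d = kzalloc(sizeof(*d), GFP_KERNEL);
            int ret;

            if (!d)
                    return ERR_PTR(-ENOMEM);

            d->notifier.ops = &my_mmuops;
            ret = mmu_notifier_register(&d->notifier, mm);
            if (ret) {
                    kfree(d);
                    return ERR_PTR(ret);
            }
            return d;
    }

    static void my_domain_free(struct my_domain *d)
    {
            /* Drops the registration; my_free_notifier() frees 'd' later. */
            mmu_notifier_put(&d->notifier);
    }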
/linux/drivers/media/platform/nxp/imx8-isi/imx8-isi-core.c
    i.MX8 ISI core. mxc_isi_async_notifier_bound() and _complete() recover the
    struct mxc_isi_dev with notifier_to_mxc_isi_dev(). mxc_isi_v4l2_init()
    initializes, fills and registers the async notifier: v4l2_async_nf_init(),
    mxc_isi_async_notifier_ops, v4l2_async_nf_add_fwnode_remote() and
    v4l2_async_nf_register().
/linux/drivers/media/pci/intel/ivsc/mei_csi.c
    Intel IVSC MEI CSI bridge. struct mei_csi embeds a v4l2_async_notifier;
    mei_csi_notify_bound() and mei_csi_notify_unbind() recover it with
    notifier_to_csi(). mei_csi_parse_firmware() initializes the subdev notifier
    with v4l2_async_subdev_nf_init(&csi->notifier, &csi->subdev), sets
    mei_csi_notify_ops, adds the sink endpoint with
    v4l2_async_nf_add_fwnode_remote() and registers with
    v4l2_async_nf_register().