
Searched full:ctrl (Results 1 – 25 of 2729) sorted by relevance


/linux/drivers/pci/hotplug/
pciehp_ctrl.c
33 static void set_slot_off(struct controller *ctrl) in set_slot_off() argument
39 if (POWER_CTRL(ctrl)) { in set_slot_off()
40 pciehp_power_off_slot(ctrl); in set_slot_off()
50 pciehp_set_indicators(ctrl, PCI_EXP_SLTCTL_PWR_IND_OFF, in set_slot_off()
56 * @ctrl: PCIe hotplug controller where board is added
61 static int board_added(struct controller *ctrl) in board_added() argument
64 struct pci_bus *parent = ctrl->pcie->port->subordinate; in board_added()
66 if (POWER_CTRL(ctrl)) { in board_added()
68 retval = pciehp_power_on_slot(ctrl); in board_added()
73 pciehp_set_indicators(ctrl, PCI_EXP_SLTCTL_PWR_IND_BLINK, in board_added()
[all …]
pciehp_hpc.c
49 static inline struct pci_dev *ctrl_dev(struct controller *ctrl) in ctrl_dev() argument
51 return ctrl->pcie->port; in ctrl_dev()
58 static inline int pciehp_request_irq(struct controller *ctrl) in pciehp_request_irq() argument
60 int retval, irq = ctrl->pcie->irq; in pciehp_request_irq()
63 ctrl->poll_thread = kthread_run(&pciehp_poll, ctrl, in pciehp_request_irq()
65 slot_name(ctrl)); in pciehp_request_irq()
66 return PTR_ERR_OR_ZERO(ctrl->poll_thread); in pciehp_request_irq()
71 IRQF_SHARED, "pciehp", ctrl); in pciehp_request_irq()
73 ctrl_err(ctrl, "Cannot get irq %d for the hotplug controller\n", in pciehp_request_irq()
78 static inline void pciehp_free_irq(struct controller *ctrl) in pciehp_free_irq() argument
[all …]
shpchp_hpc.c
169 static void start_int_poll_timer(struct controller *ctrl, int sec);
171 static inline u8 shpc_readb(struct controller *ctrl, int reg) in shpc_readb() argument
173 return readb(ctrl->creg + reg); in shpc_readb()
176 static inline u16 shpc_readw(struct controller *ctrl, int reg) in shpc_readw() argument
178 return readw(ctrl->creg + reg); in shpc_readw()
181 static inline void shpc_writew(struct controller *ctrl, int reg, u16 val) in shpc_writew() argument
183 writew(val, ctrl->creg + reg); in shpc_writew()
186 static inline u32 shpc_readl(struct controller *ctrl, int reg) in shpc_readl() argument
188 return readl(ctrl->creg + reg); in shpc_readl()
191 static inline void shpc_writel(struct controller *ctrl, int reg, u32 val) in shpc_writel() argument
[all …]
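
The shpchp_hpc.c excerpt above illustrates a common controller-driver idiom: tiny inline wrappers that fold a register offset onto an ioremap()ed base so the rest of the driver never does raw MMIO pointer arithmetic. Below is a minimal sketch of that wrapper pattern; the structure and helper names are invented for illustration and are not part of the driver.

#include <linux/io.h>
#include <linux/types.h>

/* Hypothetical controller with an ioremap()ed register block. */
struct my_hp_ctrl {
	void __iomem *creg;
};

/* Read/write helpers keep all MMIO access in one place. */
static inline u32 my_hp_readl(struct my_hp_ctrl *ctrl, int reg)
{
	return readl(ctrl->creg + reg);
}

static inline void my_hp_writel(struct my_hp_ctrl *ctrl, int reg, u32 val)
{
	writel(val, ctrl->creg + reg);
}

Keeping access behind such helpers also gives a single place to add tracing or endianness handling later.
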
pciehp_core.c
51 static int init_slot(struct controller *ctrl) in init_slot() argument
67 if (MRL_SENS(ctrl)) in init_slot()
69 if (ATTN_LED(ctrl)) { in init_slot()
72 } else if (ctrl->pcie->port->hotplug_user_indicators) { in init_slot()
78 ctrl->hotplug_slot.ops = ops; in init_slot()
79 snprintf(name, SLOT_NAME_SIZE, "%u", PSN(ctrl)); in init_slot()
81 retval = pci_hp_initialize(&ctrl->hotplug_slot, in init_slot()
82 ctrl->pcie->port->subordinate, 0, name); in init_slot()
84 ctrl_err(ctrl, "pci_hp_initialize failed: error %d\n", retval); in init_slot()
90 static void cleanup_slot(struct controller *ctrl) in cleanup_slot() argument
[all …]
cpqphp_core.c
116 * @ctrl: controller to use
120 static int init_SERR(struct controller *ctrl) in init_SERR() argument
125 if (!ctrl) in init_SERR()
128 tempdword = ctrl->first_slot; in init_SERR()
130 number_of_slots = readb(ctrl->hpc_reg + SLOT_MASK) & 0x0F; in init_SERR()
133 writeb(0, ctrl->hpc_reg + SLOT_SERR); in init_SERR()
266 static int ctrl_slot_cleanup(struct controller *ctrl) in ctrl_slot_cleanup() argument
270 old_slot = ctrl->slot; in ctrl_slot_cleanup()
271 ctrl->slot = NULL; in ctrl_slot_cleanup()
280 cpqhp_remove_debugfs_files(ctrl); in ctrl_slot_cleanup()
[all …]
/linux/drivers/nvme/target/
loop.c
36 struct nvme_ctrl ctrl; member
44 static inline struct nvme_loop_ctrl *to_loop_ctrl(struct nvme_ctrl *ctrl) in to_loop_ctrl() argument
46 return container_of(ctrl, struct nvme_loop_ctrl, ctrl); in to_loop_ctrl()
56 struct nvme_loop_ctrl *ctrl; member
67 static void nvme_loop_delete_ctrl(struct nvmet_ctrl *ctrl);
73 return queue - queue->ctrl->queues; in nvme_loop_queue_idx()
89 return queue->ctrl->admin_tag_set.tags[queue_idx]; in nvme_loop_tagset()
90 return queue->ctrl->tag_set.tags[queue_idx - 1]; in nvme_loop_tagset()
107 nvme_complete_async_event(&queue->ctrl->ctrl, cqe->status, in nvme_loop_queue_response()
114 dev_err(queue->ctrl->ctrl.device, in nvme_loop_queue_response()
[all …]
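
The "struct nvme_ctrl ctrl; member" hit and the to_loop_ctrl() helper in the loop.c excerpt above show the standard container_of() upcast: the transport-specific controller embeds the generic controller, and a one-line helper recovers the outer object from a pointer to the embedded member. The same shape appears in the NVMe host rdma.c, fc.c and tcp.c results further down. A generic sketch, with hypothetical type names rather than the driver's own:

#include <linux/container_of.h>

struct base_ctrl {
	const char *name;
};

/* Hypothetical transport-specific controller embedding the generic one. */
struct my_loop_ctrl {
	int queue_count;
	struct base_ctrl ctrl;
};

/* Recover the containing my_loop_ctrl from a pointer to its ctrl member. */
static inline struct my_loop_ctrl *to_my_loop_ctrl(struct base_ctrl *ctrl)
{
	return container_of(ctrl, struct my_loop_ctrl, ctrl);
}
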
auth.c
74 int nvmet_setup_dhgroup(struct nvmet_ctrl *ctrl, u8 dhgroup_id) in nvmet_setup_dhgroup() argument
79 pr_debug("%s: ctrl %d selecting dhgroup %d\n", in nvmet_setup_dhgroup()
80 __func__, ctrl->cntlid, dhgroup_id); in nvmet_setup_dhgroup()
82 if (ctrl->dh_tfm) { in nvmet_setup_dhgroup()
83 if (ctrl->dh_gid == dhgroup_id) { in nvmet_setup_dhgroup()
84 pr_debug("%s: ctrl %d reuse existing DH group %d\n", in nvmet_setup_dhgroup()
85 __func__, ctrl->cntlid, dhgroup_id); in nvmet_setup_dhgroup()
88 crypto_free_kpp(ctrl->dh_tfm); in nvmet_setup_dhgroup()
89 ctrl->dh_tfm = NULL; in nvmet_setup_dhgroup()
90 ctrl->dh_gid = 0; in nvmet_setup_dhgroup()
[all …]
core.c
134 static void nvmet_async_events_failall(struct nvmet_ctrl *ctrl) in nvmet_async_events_failall() argument
138 mutex_lock(&ctrl->lock); in nvmet_async_events_failall()
139 while (ctrl->nr_async_event_cmds) { in nvmet_async_events_failall()
140 req = ctrl->async_event_cmds[--ctrl->nr_async_event_cmds]; in nvmet_async_events_failall()
141 mutex_unlock(&ctrl->lock); in nvmet_async_events_failall()
143 mutex_lock(&ctrl->lock); in nvmet_async_events_failall()
145 mutex_unlock(&ctrl->lock); in nvmet_async_events_failall()
148 static void nvmet_async_events_process(struct nvmet_ctrl *ctrl) in nvmet_async_events_process() argument
153 mutex_lock(&ctrl->lock); in nvmet_async_events_process()
154 while (ctrl->nr_async_event_cmds && !list_empty(&ctrl->async_events)) { in nvmet_async_events_process()
[all …]
pci-epf.c
84 struct nvmet_pci_epf_ctrl *ctrl; member
129 struct nvmet_pci_epf_ctrl *ctrl; member
209 struct nvmet_pci_epf_ctrl ctrl; member
226 static inline u32 nvmet_pci_epf_bar_read32(struct nvmet_pci_epf_ctrl *ctrl, in nvmet_pci_epf_bar_read32() argument
229 __le32 *bar_reg = ctrl->bar + off; in nvmet_pci_epf_bar_read32()
234 static inline void nvmet_pci_epf_bar_write32(struct nvmet_pci_epf_ctrl *ctrl, in nvmet_pci_epf_bar_write32() argument
237 __le32 *bar_reg = ctrl->bar + off; in nvmet_pci_epf_bar_write32()
242 static inline u64 nvmet_pci_epf_bar_read64(struct nvmet_pci_epf_ctrl *ctrl, in nvmet_pci_epf_bar_read64() argument
245 return (u64)nvmet_pci_epf_bar_read32(ctrl, off) | in nvmet_pci_epf_bar_read64()
246 ((u64)nvmet_pci_epf_bar_read32(ctrl, off + 4) << 32); in nvmet_pci_epf_bar_read64()
[all …]
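
nvmet_pci_epf_bar_read64() in the pci-epf.c excerpt above assembles a 64-bit BAR register from two 32-bit reads of the low and high words. A hedged, generic sketch of that composition follows; the 32-bit accessor below is an invented stand-in, not the driver's own helper:

#include <linux/io.h>
#include <linux/types.h>

/* Hypothetical 32-bit BAR accessor standing in for the driver's helper. */
static u32 my_bar_read32(void __iomem *bar, u32 off)
{
	return readl(bar + off);
}

/* Compose the 64-bit value from its low word and high word. */
static u64 my_bar_read64(void __iomem *bar, u32 off)
{
	return (u64)my_bar_read32(bar, off) |
	       ((u64)my_bar_read32(bar, off + 4) << 32);
}
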
/linux/drivers/soundwire/
qcom.c
208 int (*reg_read)(struct qcom_swrm_ctrl *ctrl, int reg, u32 *val);
209 int (*reg_write)(struct qcom_swrm_ctrl *ctrl, int reg, int val);
280 static int qcom_swrm_ahb_reg_read(struct qcom_swrm_ctrl *ctrl, int reg, in qcom_swrm_ahb_reg_read() argument
283 struct regmap *wcd_regmap = ctrl->regmap; in qcom_swrm_ahb_reg_read()
300 static int qcom_swrm_ahb_reg_write(struct qcom_swrm_ctrl *ctrl, in qcom_swrm_ahb_reg_write() argument
303 struct regmap *wcd_regmap = ctrl->regmap; in qcom_swrm_ahb_reg_write()
320 static int qcom_swrm_cpu_reg_read(struct qcom_swrm_ctrl *ctrl, int reg, in qcom_swrm_cpu_reg_read() argument
323 *val = readl(ctrl->mmio + reg); in qcom_swrm_cpu_reg_read()
327 static int qcom_swrm_cpu_reg_write(struct qcom_swrm_ctrl *ctrl, int reg, in qcom_swrm_cpu_reg_write() argument
330 writel(val, ctrl->mmio + reg); in qcom_swrm_cpu_reg_write()
[all …]
/linux/drivers/gpu/drm/nouveau/nvkm/subdev/gsp/rm/r570/
disp.c
36 NV2080_CTRL_INTERNAL_DISPLAY_CHANNEL_PUSHBUFFER_PARAMS *ctrl; in r570_disp_chan_set_pushbuf() local
38 ctrl = nvkm_gsp_rm_ctrl_get(&gsp->internal.device.subdevice, in r570_disp_chan_set_pushbuf()
40 sizeof(*ctrl)); in r570_disp_chan_set_pushbuf()
41 if (IS_ERR(ctrl)) in r570_disp_chan_set_pushbuf()
42 return PTR_ERR(ctrl); in r570_disp_chan_set_pushbuf()
47 ctrl->addressSpace = ADDR_SYSMEM; in r570_disp_chan_set_pushbuf()
48 ctrl->cacheSnoop = 0; in r570_disp_chan_set_pushbuf()
49 ctrl->pbTargetAperture = PHYS_PCI; in r570_disp_chan_set_pushbuf()
52 ctrl->addressSpace = ADDR_SYSMEM; in r570_disp_chan_set_pushbuf()
53 ctrl->cacheSnoop = 1; in r570_disp_chan_set_pushbuf()
[all …]
/linux/drivers/slimbus/
qcom-ctrl.c
103 struct slim_controller ctrl; member
120 static void qcom_slim_queue_tx(struct qcom_slim_ctrl *ctrl, void *buf, in qcom_slim_queue_tx() argument
125 __iowrite32_copy(ctrl->base + tx_reg, buf, count); in qcom_slim_queue_tx()
131 static void *slim_alloc_rxbuf(struct qcom_slim_ctrl *ctrl) in slim_alloc_rxbuf() argument
136 spin_lock_irqsave(&ctrl->rx.lock, flags); in slim_alloc_rxbuf()
137 if ((ctrl->rx.tail + 1) % ctrl->rx.n == ctrl->rx.head) { in slim_alloc_rxbuf()
138 spin_unlock_irqrestore(&ctrl->rx.lock, flags); in slim_alloc_rxbuf()
139 dev_err(ctrl->dev, "RX QUEUE full!"); in slim_alloc_rxbuf()
142 idx = ctrl->rx.tail; in slim_alloc_rxbuf()
143 ctrl->rx.tail = (ctrl->rx.tail + 1) % ctrl->rx.n; in slim_alloc_rxbuf()
[all …]
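
slim_alloc_rxbuf() in the qcom-ctrl.c excerpt above guards a fixed-size RX queue with the classic one-slot-free ring convention: the ring counts as full when advancing the tail would land on the head. A small standalone sketch of that index arithmetic, with invented names and without the driver's locking:

#include <stdbool.h>

/* Hypothetical ring descriptor mirroring the head/tail/n fields above. */
struct my_ring {
	unsigned int head;	/* next slot to consume */
	unsigned int tail;	/* next slot to fill */
	unsigned int n;		/* total number of slots */
};

/* Full when advancing tail would collide with head (one slot stays free). */
static bool my_ring_full(const struct my_ring *r)
{
	return (r->tail + 1) % r->n == r->head;
}

/* Claim the current tail slot and advance; caller checks my_ring_full() first. */
static unsigned int my_ring_claim_tail(struct my_ring *r)
{
	unsigned int idx = r->tail;

	r->tail = (r->tail + 1) % r->n;
	return idx;
}
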
qcom-ngd-ctrl.c
135 struct qcom_slim_ngd_ctrl *ctrl; member
150 struct slim_controller ctrl; member
347 static int qcom_slim_qmi_send_select_inst_req(struct qcom_slim_ngd_ctrl *ctrl, in qcom_slim_qmi_send_select_inst_req() argument
354 rc = qmi_txn_init(ctrl->qmi.handle, &txn, in qcom_slim_qmi_send_select_inst_req()
357 dev_err(ctrl->dev, "QMI TXN init fail: %d\n", rc); in qcom_slim_qmi_send_select_inst_req()
361 rc = qmi_send_request(ctrl->qmi.handle, NULL, &txn, in qcom_slim_qmi_send_select_inst_req()
366 dev_err(ctrl->dev, "QMI send req fail %d\n", rc); in qcom_slim_qmi_send_select_inst_req()
373 dev_err(ctrl->dev, "QMI TXN wait fail: %d\n", rc); in qcom_slim_qmi_send_select_inst_req()
378 dev_err(ctrl->dev, "QMI request failed 0x%x\n", in qcom_slim_qmi_send_select_inst_req()
400 static int qcom_slim_qmi_send_power_request(struct qcom_slim_ngd_ctrl *ctrl, in qcom_slim_qmi_send_power_request() argument
[all …]
/linux/drivers/gpu/drm/nouveau/nvkm/subdev/gsp/rm/r535/
disp.c
76 NV2080_CTRL_INTERNAL_DISPLAY_CHANNEL_PUSHBUFFER_PARAMS *ctrl; in r535_disp_chan_set_pushbuf() local
78 ctrl = nvkm_gsp_rm_ctrl_get(&gsp->internal.device.subdevice, in r535_disp_chan_set_pushbuf()
80 sizeof(*ctrl)); in r535_disp_chan_set_pushbuf()
81 if (IS_ERR(ctrl)) in r535_disp_chan_set_pushbuf()
82 return PTR_ERR(ctrl); in r535_disp_chan_set_pushbuf()
87 ctrl->addressSpace = ADDR_SYSMEM; in r535_disp_chan_set_pushbuf()
88 ctrl->cacheSnoop = 0; in r535_disp_chan_set_pushbuf()
91 ctrl->addressSpace = ADDR_SYSMEM; in r535_disp_chan_set_pushbuf()
92 ctrl->cacheSnoop = 1; in r535_disp_chan_set_pushbuf()
95 ctrl->addressSpace = ADDR_FBMEM; in r535_disp_chan_set_pushbuf()
[all …]
/linux/drivers/gpu/drm/msm/dp/
dp_ctrl.c
145 static inline u32 msm_dp_read_ahb(const struct msm_dp_ctrl_private *ctrl, u32 offset) in msm_dp_read_ahb() argument
147 return readl_relaxed(ctrl->ahb_base + offset); in msm_dp_read_ahb()
150 static inline void msm_dp_write_ahb(struct msm_dp_ctrl_private *ctrl, in msm_dp_write_ahb() argument
157 writel(data, ctrl->ahb_base + offset); in msm_dp_write_ahb()
160 static inline u32 msm_dp_read_link(struct msm_dp_ctrl_private *ctrl, u32 offset) in msm_dp_read_link() argument
162 return readl_relaxed(ctrl->link_base + offset); in msm_dp_read_link()
165 static inline void msm_dp_write_link(struct msm_dp_ctrl_private *ctrl, in msm_dp_write_link() argument
172 writel(data, ctrl->link_base + offset); in msm_dp_write_link()
199 struct msm_dp_ctrl_private *ctrl = in msm_dp_ctrl_reset() local
203 sw_reset = msm_dp_read_ahb(ctrl, REG_DP_SW_RESET); in msm_dp_ctrl_reset()
[all …]
/linux/drivers/nvme/host/
rdma.c
89 struct nvme_rdma_ctrl *ctrl; member
125 struct nvme_ctrl ctrl; member
130 static inline struct nvme_rdma_ctrl *to_rdma_ctrl(struct nvme_ctrl *ctrl) in to_rdma_ctrl() argument
132 return container_of(ctrl, struct nvme_rdma_ctrl, ctrl); in to_rdma_ctrl()
161 return queue - queue->ctrl->queues; in nvme_rdma_queue_idx()
167 queue->ctrl->io_queues[HCTX_TYPE_DEFAULT] + in nvme_rdma_poll_queue()
168 queue->ctrl->io_queues[HCTX_TYPE_READ]; in nvme_rdma_poll_queue()
297 struct nvme_rdma_ctrl *ctrl = to_rdma_ctrl(set->driver_data); in nvme_rdma_init_request() local
299 int queue_idx = (set == &ctrl->tag_set) ? hctx_idx + 1 : 0; in nvme_rdma_init_request()
300 struct nvme_rdma_queue *queue = &ctrl->queues[queue_idx]; in nvme_rdma_init_request()
[all …]
fc.c
35 struct nvme_fc_ctrl *ctrl; member
99 struct nvme_fc_ctrl *ctrl; member
179 struct nvme_ctrl ctrl; member
183 to_fc_ctrl(struct nvme_ctrl *ctrl) in to_fc_ctrl() argument
185 return container_of(ctrl, struct nvme_fc_ctrl, ctrl); in to_fc_ctrl()
550 nvme_fc_resume_controller(struct nvme_fc_ctrl *ctrl) in nvme_fc_resume_controller() argument
552 switch (nvme_ctrl_state(&ctrl->ctrl)) { in nvme_fc_resume_controller()
559 dev_info(ctrl->ctrl.device, in nvme_fc_resume_controller()
561 "Attempting reconnect\n", ctrl->cnum); in nvme_fc_resume_controller()
563 queue_delayed_work(nvme_wq, &ctrl->connect_work, 0); in nvme_fc_resume_controller()
[all …]
core.c
151 static void nvme_remove_invalid_namespaces(struct nvme_ctrl *ctrl,
153 static void nvme_update_keep_alive(struct nvme_ctrl *ctrl,
155 static int nvme_get_log_lsi(struct nvme_ctrl *ctrl, u32 nsid, u8 log_page,
158 void nvme_queue_scan(struct nvme_ctrl *ctrl) in nvme_queue_scan() argument
163 if (nvme_ctrl_state(ctrl) == NVME_CTRL_LIVE && ctrl->tagset) in nvme_queue_scan()
164 queue_work(nvme_wq, &ctrl->scan_work); in nvme_queue_scan()
173 int nvme_try_sched_reset(struct nvme_ctrl *ctrl) in nvme_try_sched_reset() argument
175 if (nvme_ctrl_state(ctrl) != NVME_CTRL_RESETTING) in nvme_try_sched_reset()
177 if (!queue_work(nvme_reset_wq, &ctrl->reset_work)) in nvme_try_sched_reset()
185 struct nvme_ctrl *ctrl = container_of(to_delayed_work(work), in nvme_failfast_work() local
[all …]
tcp.c
164 struct nvme_tcp_ctrl *ctrl; member
194 struct nvme_ctrl ctrl; member
209 static inline struct nvme_tcp_ctrl *to_tcp_ctrl(struct nvme_ctrl *ctrl) in to_tcp_ctrl() argument
211 return container_of(ctrl, struct nvme_tcp_ctrl, ctrl); in to_tcp_ctrl()
216 return queue - queue->ctrl->queues; in nvme_tcp_queue_id()
246 static inline bool nvme_tcp_tls_configured(struct nvme_ctrl *ctrl) in nvme_tcp_tls_configured() argument
251 return ctrl->opts->tls || ctrl->opts->concat; in nvme_tcp_tls_configured()
259 return queue->ctrl->admin_tag_set.tags[queue_idx]; in nvme_tcp_tagset()
260 return queue->ctrl->tag_set.tags[queue_idx - 1]; in nvme_tcp_tagset()
294 return req == &req->queue->ctrl->async_req; in nvme_tcp_async_req()
[all …]
auth.c
24 struct nvme_ctrl *ctrl; member
53 static inline int ctrl_max_dhchaps(struct nvme_ctrl *ctrl) in ctrl_max_dhchaps() argument
55 return ctrl->opts->nr_io_queues + ctrl->opts->nr_write_queues + in ctrl_max_dhchaps()
56 ctrl->opts->nr_poll_queues + 1; in ctrl_max_dhchaps()
59 static int nvme_auth_submit(struct nvme_ctrl *ctrl, int qid, in nvme_auth_submit() argument
64 struct request_queue *q = ctrl->fabrics_q; in nvme_auth_submit()
69 q = ctrl->connect_q; in nvme_auth_submit()
87 dev_warn(ctrl->device, in nvme_auth_submit()
90 dev_err(ctrl->device, in nvme_auth_submit()
95 static int nvme_auth_receive_validate(struct nvme_ctrl *ctrl, int qid, in nvme_auth_receive_validate() argument
[all …]
/linux/drivers/media/v4l2-core/
v4l2-ctrls-api.c
41 struct v4l2_ctrl *ctrl, in ptr_to_user() argument
46 if (ctrl->is_ptr && !ctrl->is_string) in ptr_to_user()
50 switch (ctrl->type) { in ptr_to_user()
54 c->size = ctrl->elem_size; in ptr_to_user()
70 static int cur_to_user(struct v4l2_ext_control *c, struct v4l2_ctrl *ctrl) in cur_to_user() argument
72 return ptr_to_user(c, ctrl, ctrl->p_cur); in cur_to_user()
77 struct v4l2_ctrl *ctrl) in new_to_user() argument
79 return ptr_to_user(c, ctrl, ctrl->p_new); in new_to_user()
86 return ptr_to_user(c, ref->ctrl, ref->p_req); in req_to_user()
90 static int def_to_user(struct v4l2_ext_control *c, struct v4l2_ctrl *ctrl) in def_to_user() argument
[all …]
v4l2-ctrls-core.c
19 static void fill_event(struct v4l2_event *ev, struct v4l2_ctrl *ctrl, in fill_event() argument
24 ev->id = ctrl->id; in fill_event()
25 ev->u.ctrl.changes = changes; in fill_event()
26 ev->u.ctrl.type = ctrl->type; in fill_event()
27 ev->u.ctrl.flags = user_flags(ctrl); in fill_event()
28 if (ctrl->is_ptr) in fill_event()
29 ev->u.ctrl.value64 = 0; in fill_event()
31 ev->u.ctrl.value64 = *ctrl->p_cur.p_s64; in fill_event()
32 ev->u.ctrl.minimum = ctrl->minimum; in fill_event()
33 ev->u.ctrl.maximum = ctrl->maximum; in fill_event()
[all …]
/linux/drivers/mtd/nand/raw/brcmnand/
brcmnand.c
346 struct brcmnand_controller *ctrl; member
666 static inline bool brcmnand_non_mmio_ops(struct brcmnand_controller *ctrl) in brcmnand_non_mmio_ops() argument
675 static inline u32 nand_readreg(struct brcmnand_controller *ctrl, u32 offs) in nand_readreg() argument
677 if (brcmnand_non_mmio_ops(ctrl)) in nand_readreg()
678 return brcmnand_soc_read(ctrl->soc, offs); in nand_readreg()
679 return brcmnand_readl(ctrl->nand_base + offs); in nand_readreg()
682 static inline void nand_writereg(struct brcmnand_controller *ctrl, u32 offs, in nand_writereg() argument
685 if (brcmnand_non_mmio_ops(ctrl)) in nand_writereg()
686 brcmnand_soc_write(ctrl->soc, val, offs); in nand_writereg()
688 brcmnand_writel(val, ctrl->nand_base + offs); in nand_writereg()
[all …]
/linux/drivers/clk/bcm/
clk-iproc-pll.c
68 const struct iproc_pll_ctrl *ctrl; member
76 const struct iproc_clk_ctrl *ctrl; member
150 const struct iproc_pll_ctrl *ctrl = pll->ctrl; in pll_wait_for_lock() local
153 u32 val = readl(pll->status_base + ctrl->status.offset); in pll_wait_for_lock()
155 if (val & (1 << ctrl->status.shift)) in pll_wait_for_lock()
166 const struct iproc_pll_ctrl *ctrl = pll->ctrl; in iproc_pll_write() local
170 if (unlikely(ctrl->flags & IPROC_CLK_NEEDS_READ_BACK && in iproc_pll_write()
177 const struct iproc_pll_ctrl *ctrl = pll->ctrl; in __pll_disable() local
180 if (ctrl->flags & IPROC_CLK_PLL_ASIU) { in __pll_disable()
181 val = readl(pll->asiu_base + ctrl->asiu.offset); in __pll_disable()
[all …]
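
pll_wait_for_lock() in the clk-iproc-pll.c excerpt above polls a status register until the PLL's lock bit comes up. The sketch below shows the general shape of such a bounded poll loop; the retry count, delay and register layout are assumptions for illustration, not the driver's actual values:

#include <linux/bits.h>
#include <linux/delay.h>
#include <linux/errno.h>
#include <linux/io.h>
#include <linux/types.h>

#define MY_PLL_LOCK_RETRIES	100

/* Poll status_reg until lock_bit is set, or time out after a bounded wait. */
static int my_pll_wait_for_lock(void __iomem *status_reg, unsigned int lock_bit)
{
	int i;

	for (i = 0; i < MY_PLL_LOCK_RETRIES; i++) {
		u32 val = readl(status_reg);

		if (val & BIT(lock_bit))
			return 0;
		udelay(10);
	}

	return -ETIMEDOUT;
}
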
/linux/drivers/media/platform/qcom/venus/
vdec_ctrls.c
13 static int vdec_op_s_ctrl(struct v4l2_ctrl *ctrl) in vdec_op_s_ctrl() argument
15 struct venus_inst *inst = ctrl_to_inst(ctrl); in vdec_op_s_ctrl()
18 switch (ctrl->id) { in vdec_op_s_ctrl()
20 ctr->post_loop_deb_mode = ctrl->val; in vdec_op_s_ctrl()
26 ctr->profile = ctrl->val; in vdec_op_s_ctrl()
31 ctr->level = ctrl->val; in vdec_op_s_ctrl()
34 ctr->display_delay = ctrl->val; in vdec_op_s_ctrl()
37 ctr->display_delay_enable = ctrl->val; in vdec_op_s_ctrl()
40 ctr->conceal_color = *ctrl->p_new.p_s64; in vdec_op_s_ctrl()
49 static int vdec_op_g_volatile_ctrl(struct v4l2_ctrl *ctrl) in vdec_op_g_volatile_ctrl() argument
[all …]
