Lines Matching full:intr (code-search results from the msm DRM driver's DPU interrupt handling, dpu_hw_interrupts.c; each entry shows the source line number, the matching line, and its enclosing function)

209 static inline struct dpu_hw_intr_entry *dpu_core_irq_get_entry(struct dpu_hw_intr *intr,  in dpu_core_irq_get_entry()  argument
212 return &intr->irq_tbl[irq_idx - 1]; in dpu_core_irq_get_entry()
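dpu_core_irq_get_entry() translates a 1-based irq_idx into the 0-based irq_tbl array, which is why the lookup subtracts one. A minimal standalone sketch of that indexing convention; the struct layout, table size, and bounds check here are illustrative assumptions, not the driver's actual definitions:

```c
#include <stddef.h>
#include <stdio.h>

/* Hypothetical stand-in for the driver's per-IRQ callback entry. */
struct fake_intr_entry { void (*cb)(void *arg); void *arg; };

struct fake_intr {
	struct fake_intr_entry irq_tbl[8];	/* assumed size */
};

/* irq_idx 0 is treated as "no interrupt", so valid indices start at 1
 * and the table lookup subtracts one, mirroring the kernel helper. */
static struct fake_intr_entry *get_entry(struct fake_intr *intr, int irq_idx)
{
	if (irq_idx < 1 || irq_idx > 8)
		return NULL;
	return &intr->irq_tbl[irq_idx - 1];
}

int main(void)
{
	struct fake_intr intr = {0};

	printf("irq_idx 1 maps to slot %td\n",
	       get_entry(&intr, 1) - intr.irq_tbl);	/* prints 0 */
	return 0;
}
```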
248 struct dpu_hw_intr *intr = dpu_kms->hw_intr; in dpu_core_irq() local
256 if (!intr) in dpu_core_irq()
259 spin_lock_irqsave(&intr->irq_lock, irq_flags); in dpu_core_irq()
261 if (!test_bit(reg_idx, &intr->irq_mask)) in dpu_core_irq()
265 irq_status = DPU_REG_READ(&intr->hw, intr->intr_set[reg_idx].status_off); in dpu_core_irq()
268 enable_mask = DPU_REG_READ(&intr->hw, intr->intr_set[reg_idx].en_off); in dpu_core_irq()
272 DPU_REG_WRITE(&intr->hw, intr->intr_set[reg_idx].clr_off, in dpu_core_irq()
282 * Search through matching intr status. in dpu_core_irq()
301 spin_unlock_irqrestore(&intr->irq_lock, irq_flags); in dpu_core_irq()
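dpu_core_irq() is the top-level dispatcher: for each register group flagged in irq_mask it reads the raw status under irq_lock, ANDs it with the enable mask so masked sources are ignored, acks the surviving bits through the write-1-to-clear register, and then walks the bits to invoke the matching handlers. A compact userspace sketch of that control flow against a fake register file (the struct, field, and function names here are assumptions; the real code also takes irq_lock and dispatches through irq_tbl callbacks):

```c
#include <stdint.h>
#include <stdio.h>

/* Assumed miniature register file: one status/enable/clear word per group. */
struct fake_group { uint32_t status, enable, clear; };

static void dispatch(int group, int bit)
{
	printf("IRQ group %d bit %d fired\n", group, bit);
}

/* Mirrors the shape of dpu_core_irq(): for each active register group,
 * read the raw status, drop bits that are not enabled, ack what is left
 * by writing it to the clear register, then call one handler per bit. */
static void core_irq(struct fake_group *groups, unsigned long group_mask,
		     int ngroups)
{
	for (int g = 0; g < ngroups; g++) {
		uint32_t status;

		if (!(group_mask & (1UL << g)))
			continue;

		status = groups[g].status & groups[g].enable;
		if (!status)
			continue;

		groups[g].clear = status;	/* write-1-to-clear ack */

		for (int bit = 0; bit < 32; bit++)
			if (status & (1U << bit))
				dispatch(g, bit);
	}
}

int main(void)
{
	struct fake_group groups[2] = {
		{ .status = 0x5, .enable = 0x4 },	/* only bit 2 enabled */
		{ .status = 0x1, .enable = 0x1 },
	};

	core_irq(groups, 0x3, 2);
	return 0;
}
```

Acking before dispatch means a source that re-fires while its handler runs is latched again rather than lost.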
306 static int dpu_hw_intr_enable_irq_locked(struct dpu_hw_intr *intr, in dpu_hw_intr_enable_irq_locked() argument
314 if (!intr) in dpu_hw_intr_enable_irq_locked()
328 assert_spin_locked(&intr->irq_lock); in dpu_hw_intr_enable_irq_locked()
331 reg = &intr->intr_set[reg_idx]; in dpu_hw_intr_enable_irq_locked()
337 cache_irq_mask = intr->cache_irq_mask[reg_idx]; in dpu_hw_intr_enable_irq_locked()
345 DPU_REG_WRITE(&intr->hw, reg->clr_off, DPU_IRQ_MASK(irq_idx)); in dpu_hw_intr_enable_irq_locked()
347 DPU_REG_WRITE(&intr->hw, reg->en_off, cache_irq_mask); in dpu_hw_intr_enable_irq_locked()
352 intr->cache_irq_mask[reg_idx] = cache_irq_mask; in dpu_hw_intr_enable_irq_locked()
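dpu_hw_intr_enable_irq_locked() keeps a software copy of each enable register (cache_irq_mask) so it can update the mask without reading the hardware back, and it clears any stale pending bit before unmasking so an interrupt latched while disabled is not serviced immediately. A sketch of that sequence under assumed names (the caller is presumed to hold the equivalent of irq_lock, matching the assert_spin_locked() above):

```c
#include <stdint.h>
#include <stdio.h>

/* Assumed per-group register stand-ins. */
struct fake_group { uint32_t en, clr; };

/* Mirrors dpu_hw_intr_enable_irq_locked(): update the cached copy of the
 * enable mask, clear any stale pending bit first, then write the new
 * enable mask to hardware. */
static void enable_irq_locked(struct fake_group *grp, uint32_t *cache, int bit)
{
	uint32_t mask = *cache;

	if (mask & (1U << bit))
		return;			/* already enabled, nothing to do */

	mask |= 1U << bit;

	grp->clr = 1U << bit;		/* drop a stale pending interrupt */
	grp->en = mask;			/* then unmask the source */

	*cache = mask;			/* remember what we wrote */
}

int main(void)
{
	struct fake_group grp = {0};
	uint32_t cache = 0;

	enable_irq_locked(&grp, &cache, 3);
	printf("en=0x%x cache=0x%x\n", grp.en, cache);	/* both 0x8 */
	return 0;
}
```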
362 static int dpu_hw_intr_disable_irq_locked(struct dpu_hw_intr *intr, in dpu_hw_intr_disable_irq_locked() argument
370 if (!intr) in dpu_hw_intr_disable_irq_locked()
384 assert_spin_locked(&intr->irq_lock); in dpu_hw_intr_disable_irq_locked()
387 reg = &intr->intr_set[reg_idx]; in dpu_hw_intr_disable_irq_locked()
389 cache_irq_mask = intr->cache_irq_mask[reg_idx]; in dpu_hw_intr_disable_irq_locked()
397 DPU_REG_WRITE(&intr->hw, reg->en_off, cache_irq_mask); in dpu_hw_intr_disable_irq_locked()
399 DPU_REG_WRITE(&intr->hw, reg->clr_off, DPU_IRQ_MASK(irq_idx)); in dpu_hw_intr_disable_irq_locked()
404 intr->cache_irq_mask[reg_idx] = cache_irq_mask; in dpu_hw_intr_disable_irq_locked()
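dpu_hw_intr_disable_irq_locked() mirrors the enable path with the order reversed: mask the source first, then flush whatever latched before the mask took effect, so no pending bit survives the disable. A sketch under the same assumed names:

```c
#include <stdint.h>
#include <stdio.h>

struct fake_group { uint32_t en, clr; };	/* assumed register stand-ins */

/* Mirrors dpu_hw_intr_disable_irq_locked(): the write ordering is the
 * reverse of enable - mask the interrupt first, then clear anything that
 * latched before the mask took effect. */
static void disable_irq_locked(struct fake_group *grp, uint32_t *cache, int bit)
{
	uint32_t mask = *cache;

	if (!(mask & (1U << bit)))
		return;			/* already disabled */

	mask &= ~(1U << bit);

	grp->en = mask;			/* mask the source first */
	grp->clr = 1U << bit;		/* then flush any pending state */

	*cache = mask;
}

int main(void)
{
	struct fake_group grp = { .en = 0x8 };
	uint32_t cache = 0x8;

	disable_irq_locked(&grp, &cache, 3);
	printf("en=0x%x cache=0x%x\n", grp.en, cache);	/* both 0x0 */
	return 0;
}
```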
416 struct dpu_hw_intr *intr = dpu_kms->hw_intr; in dpu_clear_irqs() local
419 if (!intr) in dpu_clear_irqs()
423 if (test_bit(i, &intr->irq_mask)) in dpu_clear_irqs()
424 DPU_REG_WRITE(&intr->hw, in dpu_clear_irqs()
425 intr->intr_set[i].clr_off, 0xffffffff); in dpu_clear_irqs()
434 struct dpu_hw_intr *intr = dpu_kms->hw_intr; in dpu_disable_all_irqs() local
437 if (!intr) in dpu_disable_all_irqs()
441 if (test_bit(i, &intr->irq_mask)) in dpu_disable_all_irqs()
442 DPU_REG_WRITE(&intr->hw, in dpu_disable_all_irqs()
443 intr->intr_set[i].en_off, 0x00000000); in dpu_disable_all_irqs()
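dpu_clear_irqs() and dpu_disable_all_irqs() are the bulk versions: they walk every register group present on this SoC (the bits set in irq_mask) and either ack everything by writing all-ones to the clear register or mask everything by zeroing the enable register, leaving the hardware quiet. A combined sketch; the names and group count are assumptions:

```c
#include <stdint.h>
#include <stdio.h>

#define NGROUPS 4	/* assumed group count */

struct fake_group { uint32_t en, clr; };

/* Mirrors dpu_clear_irqs()/dpu_disable_all_irqs(): walk every register
 * group that exists on this SoC (group_mask) and both mask everything
 * (en = 0) and ack everything (clr = all ones). */
static void quiesce(struct fake_group *groups, unsigned long group_mask)
{
	for (int g = 0; g < NGROUPS; g++) {
		if (!(group_mask & (1UL << g)))
			continue;
		groups[g].en = 0x00000000;	/* dpu_disable_all_irqs() */
		groups[g].clr = 0xffffffff;	/* dpu_clear_irqs() */
	}
}

int main(void)
{
	struct fake_group groups[NGROUPS] = { { .en = 0xff } };

	quiesce(groups, 0x3);	/* only groups 0 and 1 present */
	printf("group0 en=0x%x\n", groups[0].en);	/* prints 0x0 */
	return 0;
}
```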
459 struct dpu_hw_intr *intr = dpu_kms->hw_intr; in dpu_core_irq_read() local
464 if (!intr) in dpu_core_irq_read()
472 spin_lock_irqsave(&intr->irq_lock, irq_flags); in dpu_core_irq_read()
475 intr_status = DPU_REG_READ(&intr->hw, in dpu_core_irq_read()
476 intr->intr_set[reg_idx].status_off) & in dpu_core_irq_read()
479 DPU_REG_WRITE(&intr->hw, intr->intr_set[reg_idx].clr_off, in dpu_core_irq_read()
485 spin_unlock_irqrestore(&intr->irq_lock, irq_flags); in dpu_core_irq_read()
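dpu_core_irq_read() samples a single interrupt's status inside the same irq_lock critical section the ISR uses, masks the read down to the requested bit, and acks it if pending so the next read starts clean. A sketch with assumed names:

```c
#include <stdint.h>
#include <stdio.h>

struct fake_group { uint32_t status, clr; };	/* assumed stand-ins */

/* Mirrors dpu_core_irq_read(): sample the status register, keep only the
 * bit belonging to this interrupt, and ack it in the same critical
 * section so the read and the clear are atomic w.r.t. the ISR. */
static uint32_t irq_read(struct fake_group *grp, int bit)
{
	uint32_t status = grp->status & (1U << bit);

	if (status)
		grp->clr = status;	/* ack so the bit reads clean next time */

	return status;
}

int main(void)
{
	struct fake_group grp = { .status = 0x10 };

	printf("pending: 0x%x\n", irq_read(&grp, 4));	/* prints 0x10 */
	return 0;
}
```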
500 struct dpu_hw_intr *intr; in dpu_hw_intr_init() local
506 intr = drmm_kzalloc(dev, sizeof(*intr), GFP_KERNEL); in dpu_hw_intr_init()
507 if (!intr) in dpu_hw_intr_init()
511 intr->intr_set = dpu_intr_set_7xxx; in dpu_hw_intr_init()
513 intr->intr_set = dpu_intr_set_legacy; in dpu_hw_intr_init()
515 intr->hw.blk_addr = addr + m->mdp[0].base; in dpu_hw_intr_init()
517 intr->irq_mask = BIT(MDP_SSPP_TOP0_INTR) | in dpu_hw_intr_init()
526 intr->irq_mask |= BIT(MDP_INTFn_INTR(intf->id)); in dpu_hw_intr_init()
529 intr->irq_mask |= BIT(DPU_IRQ_REG(intf->intr_tear_rd_ptr)); in dpu_hw_intr_init()
532 spin_lock_init(&intr->irq_lock); in dpu_hw_intr_init()
534 return intr; in dpu_hw_intr_init()
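dpu_hw_intr_init() ties it together: it allocates the context with drmm_kzalloc() (so it is freed along with the DRM device), selects dpu_intr_set_7xxx or dpu_intr_set_legacy as the register layout based on the hardware generation, and builds irq_mask by setting one bit per interrupt block that actually exists on the SoC (the TOP block plus each INTF, including its tear-interrupt group when present). A loose sketch of that shape; the enum values, revision cutoff, and field types are illustrative assumptions:

```c
#include <stdint.h>
#include <stdio.h>

/* Hypothetical stand-ins for the driver's per-generation register tables
 * and block IDs; the real names live in dpu_hw_interrupts.c/.h. */
enum { BLK_TOP0 = 0, BLK_INTF0 = 4, BLK_INTF1 = 5 };

struct fake_intr {
	const char *intr_set;		/* which register layout to use */
	unsigned long irq_mask;		/* which groups this SoC has */
};

/* Mirrors the shape of dpu_hw_intr_init(): choose the register table from
 * the core revision, then OR in one bit per block that actually exists on
 * this SoC so the ISR only touches populated register groups. */
static void intr_init(struct fake_intr *intr, int major_rev,
		      const int *intfs, int nintfs)
{
	intr->intr_set = (major_rev >= 7) ? "7xxx layout" : "legacy layout";

	intr->irq_mask = 1UL << BLK_TOP0;
	for (int i = 0; i < nintfs; i++)
		intr->irq_mask |= 1UL << intfs[i];
}

int main(void)
{
	struct fake_intr intr;
	int intfs[] = { BLK_INTF0, BLK_INTF1 };

	intr_init(&intr, 7, intfs, 2);
	printf("%s, irq_mask=0x%lx\n", intr.intr_set, intr.irq_mask);
	return 0;
}
```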