Lines Matching defs:mdp4_crtc (MSM DRM driver, mdp4_crtc.c; the leading number on each entry is its line number in that source file)

17 struct mdp4_crtc {
60 #define to_mdp4_crtc(x) container_of(x, struct mdp4_crtc, base)
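
The to_mdp4_crtc() macro is the standard container_of() upcast: struct drm_crtc is embedded as the base member of struct mdp4_crtc, so any drm_crtc pointer handed back by the DRM core can be converted to the driver-private struct. A minimal userspace sketch of the same pattern, with illustrative struct names rather than the kernel's:

#include <stddef.h>
#include <stdio.h>

/* same idea as the kernel's definition, minus type checking */
#define container_of(ptr, type, member) \
        ((type *)((char *)(ptr) - offsetof(type, member)))

struct base {
        int id;
};

struct derived {                        /* plays the role of mdp4_crtc */
        int priv;
        struct base base;               /* embedded "base class" */
};

#define to_derived(x) container_of(x, struct derived, base)

int main(void)
{
        struct derived d = { .priv = 42, .base = { .id = 7 } };
        struct base *b = &d.base;       /* only the base pointer circulates */

        /* recover the containing struct, as to_mdp4_crtc() does */
        printf("priv=%d id=%d\n", to_derived(b)->priv, b->id);
        return 0;
}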
70 struct mdp4_crtc *mdp4_crtc = to_mdp4_crtc(crtc);
72 atomic_or(pending, &mdp4_crtc->pending);
73 mdp_irq_register(&get_kms(crtc)->base, &mdp4_crtc->vblank);
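
request_pending() ORs work flags into an atomic mask and arms the vblank interrupt; the vblank handler later drains the whole mask in one shot with atomic_xchg() (the atomic_xchg(&mdp4_crtc->pending, 0) entry further down). A compilable C11-atomics sketch of that producer/consumer handshake; the flag names and printf actions are illustrative:

#include <stdatomic.h>
#include <stdio.h>

#define PENDING_CURSOR 0x1
#define PENDING_FLIP   0x2

static atomic_uint pending;

static void request_pending(unsigned bits)
{
        atomic_fetch_or(&pending, bits);        /* like atomic_or(pending, ...) */
        /* ...then enable the vblank irq, as mdp_irq_register() does */
}

static void vblank_irq(void)
{
        /* drain everything accumulated since the last vblank */
        unsigned bits = atomic_exchange(&pending, 0);

        if (bits & PENDING_FLIP)
                printf("complete flip\n");
        if (bits & PENDING_CURSOR)
                printf("commit deferred cursor unref\n");
}

int main(void)
{
        request_pending(PENDING_FLIP | PENDING_CURSOR);
        vblank_irq();
        return 0;
}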
78 struct mdp4_crtc *mdp4_crtc = to_mdp4_crtc(crtc);
88 flush |= ovlp2flush(mdp4_crtc->ovlp);
90 DBG("%s: flush=%08x", mdp4_crtc->name, flush);
92 mdp4_crtc->flushed_mask = flush;
100 struct mdp4_crtc *mdp4_crtc = to_mdp4_crtc(crtc);
106 event = mdp4_crtc->event;
108 mdp4_crtc->event = NULL;
109 DBG("%s: send event: %p", mdp4_crtc->name, event);
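
complete_flip() hands the pageflip event parked by atomic_flush() (the mdp4_crtc->event = crtc->state->event entry below) to userspace exactly once: the slot is read and NULLed under the event lock before delivery. A userspace sketch of this consume-once handoff, with a pthread mutex standing in for the spinlock and a string standing in for drm_pending_vblank_event:

#include <pthread.h>
#include <stdio.h>

static pthread_mutex_t event_lock = PTHREAD_MUTEX_INITIALIZER;
static const char *pending_event;

static void atomic_flush(const char *ev)
{
        pthread_mutex_lock(&event_lock);
        pending_event = ev;             /* park the event for the vblank path */
        pthread_mutex_unlock(&event_lock);
}

static void complete_flip(void)
{
        const char *ev;

        pthread_mutex_lock(&event_lock);
        ev = pending_event;
        pending_event = NULL;           /* guarantees single delivery */
        pthread_mutex_unlock(&event_lock);

        if (ev)
                printf("send event: %s\n", ev);
}

int main(void)
{
        atomic_flush("flip-done");
        complete_flip();                /* delivers the event */
        complete_flip();                /* slot already empty: no-op */
        return 0;
}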
117 struct mdp4_crtc *mdp4_crtc =
118 container_of(work, struct mdp4_crtc, unref_cursor_work);
119 struct mdp4_kms *mdp4_kms = get_kms(&mdp4_crtc->base);
153 struct mdp4_crtc *mdp4_crtc = to_mdp4_crtc(crtc);
159 mixer_cfg = mixercfg(mixer_cfg, mdp4_crtc->mixer,
169 struct mdp4_crtc *mdp4_crtc = to_mdp4_crtc(crtc);
172 int i, ovlp = mdp4_crtc->ovlp;
217 struct mdp4_crtc *mdp4_crtc = to_mdp4_crtc(crtc);
219 enum mdp4_dma dma = mdp4_crtc->dma;
220 int ovlp = mdp4_crtc->ovlp;
229 mdp4_crtc->name, DRM_MODE_ARG(mode));
260 struct mdp4_crtc *mdp4_crtc = to_mdp4_crtc(crtc);
264 DBG("%s", mdp4_crtc->name);
266 if (WARN_ON(!mdp4_crtc->enabled))
272 mdp_irq_unregister(&mdp4_kms->base, &mdp4_crtc->err);
276 WARN_ON(mdp4_crtc->event);
283 mdp4_crtc->enabled = false;
289 struct mdp4_crtc *mdp4_crtc = to_mdp4_crtc(crtc);
292 DBG("%s", mdp4_crtc->name);
294 if (WARN_ON(mdp4_crtc->enabled))
302 mdp_irq_register(&mdp4_kms->base, &mdp4_crtc->err);
306 mdp4_crtc->enabled = true;
312 struct mdp4_crtc *mdp4_crtc = to_mdp4_crtc(crtc);
313 DBG("%s: check", mdp4_crtc->name);
321 struct mdp4_crtc *mdp4_crtc = to_mdp4_crtc(crtc);
322 DBG("%s: begin", mdp4_crtc->name);
328 struct mdp4_crtc *mdp4_crtc = to_mdp4_crtc(crtc);
332 DBG("%s: event: %p", mdp4_crtc->name, crtc->state->event);
334 WARN_ON(mdp4_crtc->event);
337 mdp4_crtc->event = crtc->state->event;
356 struct mdp4_crtc *mdp4_crtc = to_mdp4_crtc(crtc);
359 enum mdp4_dma dma = mdp4_crtc->dma;
362 spin_lock_irqsave(&mdp4_crtc->cursor.lock, flags);
363 if (mdp4_crtc->cursor.stale) {
364 struct drm_gem_object *next_bo = mdp4_crtc->cursor.next_bo;
365 struct drm_gem_object *prev_bo = mdp4_crtc->cursor.scanout_bo;
366 uint64_t iova = mdp4_crtc->cursor.next_iova;
375 MDP4_DMA_CURSOR_SIZE_WIDTH(mdp4_crtc->cursor.width) |
376 MDP4_DMA_CURSOR_SIZE_HEIGHT(mdp4_crtc->cursor.height));
389 drm_flip_work_queue(&mdp4_crtc->unref_cursor_work, prev_bo);
391 mdp4_crtc->cursor.scanout_bo = next_bo;
392 mdp4_crtc->cursor.stale = false;
396 MDP4_DMA_CURSOR_POS_X(mdp4_crtc->cursor.x) |
397 MDP4_DMA_CURSOR_POS_Y(mdp4_crtc->cursor.y));
399 spin_unlock_irqrestore(&mdp4_crtc->cursor.lock, flags);
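
The MDP4_DMA_CURSOR_SIZE_* and MDP4_DMA_CURSOR_POS_* helpers pack two fields into a single 32-bit register write. A sketch of how such generated field builders typically look; the shift and mask values below are invented, not the real MDP4 register layout:

#include <inttypes.h>
#include <stdint.h>
#include <stdio.h>

/* invented layout: width in bits 0..15, height in bits 16..31 */
#define CURSOR_SIZE_WIDTH(v)    (((uint32_t)(v) & 0xffff) << 0)
#define CURSOR_SIZE_HEIGHT(v)   (((uint32_t)(v) & 0xffff) << 16)

int main(void)
{
        /* one register write carries both fields, as in the cursor code */
        uint32_t reg = CURSOR_SIZE_WIDTH(64) | CURSOR_SIZE_HEIGHT(64);

        printf("cursor size reg = %08" PRIx32 "\n", reg);
        return 0;
}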
406 struct mdp4_crtc *mdp4_crtc = to_mdp4_crtc(crtc);
436 spin_lock_irqsave(&mdp4_crtc->cursor.lock, flags);
437 old_bo = mdp4_crtc->cursor.next_bo;
438 mdp4_crtc->cursor.next_bo = cursor_bo;
439 mdp4_crtc->cursor.next_iova = iova;
440 mdp4_crtc->cursor.width = width;
441 mdp4_crtc->cursor.height = height;
442 mdp4_crtc->cursor.stale = true;
443 spin_unlock_irqrestore(&mdp4_crtc->cursor.lock, flags);
447 drm_flip_work_queue(&mdp4_crtc->unref_cursor_work, old_bo);
461 struct mdp4_crtc *mdp4_crtc = to_mdp4_crtc(crtc);
464 spin_lock_irqsave(&mdp4_crtc->cursor.lock, flags);
465 mdp4_crtc->cursor.x = x;
466 mdp4_crtc->cursor.y = y;
467 spin_unlock_irqrestore(&mdp4_crtc->cursor.lock, flags);
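
Cursor state is double-buffered under mdp4_crtc->cursor.lock: cursor_set() and cursor_move() only stage the next buffer and position and mark the state stale, while update_cursor() performs the swap and hands the displaced buffer to drm_flip_work for deferred unref. A userspace sketch of that split, using a pthread mutex instead of a spinlock, strings instead of GEM objects, and printf instead of the flip-work queue:

#include <pthread.h>
#include <stdbool.h>
#include <stdio.h>

struct cursor_state {
        pthread_mutex_t lock;
        bool stale;
        const char *next_bo, *scanout_bo;
};

static void cursor_set(struct cursor_state *c, const char *bo)
{
        pthread_mutex_lock(&c->lock);
        c->next_bo = bo;
        c->stale = true;                /* committed later, in the flush path */
        pthread_mutex_unlock(&c->lock);
}

static void update_cursor(struct cursor_state *c)
{
        pthread_mutex_lock(&c->lock);
        if (c->stale) {
                const char *prev = c->scanout_bo;

                c->scanout_bo = c->next_bo;     /* program the hardware... */
                c->stale = false;
                if (prev)                       /* ...and defer the unref */
                        printf("queue unref of %s\n", prev);
        }
        pthread_mutex_unlock(&c->lock);
}

int main(void)
{
        struct cursor_state c = { .lock = PTHREAD_MUTEX_INITIALIZER };

        cursor_set(&c, "bo-A");
        update_cursor(&c);              /* scans out bo-A */
        cursor_set(&c, "bo-B");
        update_cursor(&c);              /* swaps to bo-B, queues bo-A */
        return 0;
}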
498 struct mdp4_crtc *mdp4_crtc = container_of(irq, struct mdp4_crtc, vblank);
499 struct drm_crtc *crtc = &mdp4_crtc->base;
503 mdp_irq_unregister(&get_kms(crtc)->base, &mdp4_crtc->vblank);
505 pending = atomic_xchg(&mdp4_crtc->pending, 0);
513 drm_flip_work_commit(&mdp4_crtc->unref_cursor_work, priv->kms->wq);
519 struct mdp4_crtc *mdp4_crtc = container_of(irq, struct mdp4_crtc, err);
520 struct drm_crtc *crtc = &mdp4_crtc->base;
521 DBG("%s: error: %08x", mdp4_crtc->name, irqstatus);
528 struct mdp4_crtc *mdp4_crtc = to_mdp4_crtc(crtc);
538 mdp4_crtc->flushed_mask),
541 dev_warn(dev->dev, "vblank time out, crtc=%s\n", mdp4_crtc->base.name);
543 mdp4_crtc->flushed_mask = 0;
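
crtc_flush() remembers the bits it wrote to the overlay flush register in flushed_mask, and this wait path sleeps until the hardware has consumed them, warning and clearing the mask on timeout. A polling userspace sketch of the idiom; the fake register, timeout handling, and 1 ms poll interval are inventions (the kernel path uses wait_event_timeout() on the vblank queue instead):

#include <stdio.h>
#include <time.h>

static unsigned fake_flush_reg;         /* stands in for REG_MDP4_OVERLAY_FLUSH */

static int wait_for_flush_done(unsigned flushed_mask, int timeout_ms)
{
        struct timespec ts = { .tv_nsec = 1000 * 1000 };        /* 1 ms */

        while (timeout_ms-- > 0) {
                if (!(fake_flush_reg & flushed_mask))
                        return 0;       /* hardware latched the flush */
                nanosleep(&ts, NULL);
        }
        return -1;                      /* caller prints "vblank time out" */
}

int main(void)
{
        fake_flush_reg = 0x7;           /* flush written, bits pending */
        fake_flush_reg &= ~0x7u;        /* pretend vblank consumed it */

        if (wait_for_flush_done(0x7, 50))
                fprintf(stderr, "vblank time out\n");
        else
                printf("flush complete, clear flushed_mask\n");
        return 0;
}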
550 struct mdp4_crtc *mdp4_crtc = to_mdp4_crtc(crtc);
551 return mdp4_crtc->vblank.irqmask;
557 struct mdp4_crtc *mdp4_crtc = to_mdp4_crtc(crtc);
560 mdp4_write(mdp4_kms, REG_MDP4_DMA_CONFIG(mdp4_crtc->dma), config);
566 struct mdp4_crtc *mdp4_crtc = to_mdp4_crtc(crtc);
572 switch (mdp4_crtc->dma) {
595 mdp4_crtc->mixer = mixer;
599 DBG("%s: intf_sel=%08x", mdp4_crtc->name, intf_sel);
619 struct mdp4_crtc *mdp4_crtc = ptr;
621 drm_flip_work_cleanup(&mdp4_crtc->unref_cursor_work);
630 struct mdp4_crtc *mdp4_crtc;
633 mdp4_crtc = drmm_crtc_alloc_with_planes(dev, struct mdp4_crtc, base,
636 if (IS_ERR(mdp4_crtc))
637 return ERR_CAST(mdp4_crtc);
639 crtc = &mdp4_crtc->base;
641 mdp4_crtc->ovlp = ovlp_id;
642 mdp4_crtc->dma = dma_id;
644 mdp4_crtc->vblank.irqmask = dma2irq(mdp4_crtc->dma);
645 mdp4_crtc->vblank.irq = mdp4_crtc_vblank_irq;
647 mdp4_crtc->err.irqmask = dma2err(mdp4_crtc->dma);
648 mdp4_crtc->err.irq = mdp4_crtc_err_irq;
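
The vblank and error interrupts are wired up as small descriptors, each an irqmask of status bits plus a callback; shared MDP irq dispatch code invokes every registered descriptor whose mask intersects the latched status. A self-contained sketch of that shape; the dispatcher and its registration array are simplified stand-ins for mdp_irq_register() and friends:

#include <stdio.h>

struct irq_desc {
        unsigned irqmask;
        void (*irq)(struct irq_desc *desc, unsigned status);
};

static void vblank_irq(struct irq_desc *desc, unsigned status)
{
        (void)desc;
        printf("vblank: status=%08x\n", status);
}

static void err_irq(struct irq_desc *desc, unsigned status)
{
        (void)desc;
        printf("error: status=%08x\n", status);
}

static void dispatch(struct irq_desc **descs, int n, unsigned status)
{
        for (int i = 0; i < n; i++)
                if (status & descs[i]->irqmask)
                        descs[i]->irq(descs[i], status & descs[i]->irqmask);
}

int main(void)
{
        struct irq_desc vblank = { .irqmask = 0x1, .irq = vblank_irq };
        struct irq_desc err    = { .irqmask = 0x2, .irq = err_irq };
        struct irq_desc *descs[] = { &vblank, &err };

        dispatch(descs, 2, 0x3);        /* both bits latched: both run */
        return 0;
}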
650 snprintf(mdp4_crtc->name, sizeof(mdp4_crtc->name), "%s:%d",
653 spin_lock_init(&mdp4_crtc->cursor.lock);
655 drm_flip_work_init(&mdp4_crtc->unref_cursor_work,
657 ret = drmm_add_action_or_reset(dev, mdp4_crtc_flip_cleanup, mdp4_crtc);
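
The init path allocates the CRTC with drmm_crtc_alloc_with_planes() and registers mdp4_crtc_flip_cleanup() through drmm_add_action_or_reset(), so drm_flip_work_cleanup() runs automatically when the DRM device is released, and immediately if registration itself fails. A userspace sketch of the managed-cleanup idea; the fixed-size action list is a simplified stand-in for DRM's managed-resource machinery:

#include <stdio.h>

#define MAX_ACTIONS 8

struct device {
        void (*actions[MAX_ACTIONS])(void *ptr);
        void *data[MAX_ACTIONS];
        int n;
};

static int add_action_or_reset(struct device *dev, void (*fn)(void *), void *ptr)
{
        if (dev->n >= MAX_ACTIONS) {
                fn(ptr);                /* "or_reset": clean up on failure */
                return -1;
        }
        dev->actions[dev->n] = fn;
        dev->data[dev->n++] = ptr;
        return 0;
}

static void device_release(struct device *dev)
{
        while (dev->n--)                /* LIFO, mirroring managed release */
                dev->actions[dev->n](dev->data[dev->n]);
}

static void flip_cleanup(void *ptr)
{
        printf("drm_flip_work_cleanup(%s)\n", (const char *)ptr);
}

int main(void)
{
        struct device dev = { .n = 0 };

        add_action_or_reset(&dev, flip_cleanup, (void *)"unref_cursor_work");
        device_release(&dev);           /* cleanup runs without explicit call */
        return 0;
}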