Lines Matching +full:sync +full:-+update +full:-+mask (drivers/gpu/drm/nouveau/dispnv50/disp.c)

31 #include <linux/dma-mapping.h>
83 chan->device = device; in nv50_chan_create()
94 &chan->user); in nv50_chan_create()
96 nvif_object_map(&chan->user, NULL, 0); in nv50_chan_create()
105 return -ENOSYS; in nv50_chan_create()
111 nvif_object_dtor(&chan->user); in nv50_chan_destroy()
121 nvif_object_dtor(&dmac->vram); in nv50_dmac_destroy()
122 nvif_object_dtor(&dmac->sync); in nv50_dmac_destroy()
124 nv50_chan_destroy(&dmac->base); in nv50_dmac_destroy()
126 nvif_mem_dtor(&dmac->_push.mem); in nv50_dmac_destroy()
134 dmac->cur = push->cur - (u32 __iomem *)dmac->_push.mem.object.map.ptr; in nv50_dmac_kick()
135 if (dmac->put != dmac->cur) { in nv50_dmac_kick()
139 if (dmac->push->mem.type & NVIF_MEM_VRAM) { in nv50_dmac_kick()
140 struct nvif_device *device = dmac->base.device; in nv50_dmac_kick()
141 nvif_wr32(&device->object, 0x070000, 0x00000001); in nv50_dmac_kick()
143 if (!(nvif_rd32(&device->object, 0x070000) & 0x00000002)) in nv50_dmac_kick()
148 NVIF_WV32(&dmac->base.user, NV507C, PUT, PTR, dmac->cur); in nv50_dmac_kick()
149 dmac->put = dmac->cur; in nv50_dmac_kick()
152 push->bgn = push->cur; in nv50_dmac_kick()
158 u32 get = NVIF_RV32(&dmac->base.user, NV507C, GET, PTR); in nv50_dmac_free()
159 if (get > dmac->cur) /* NVIDIA stay 5 away from GET, do the same. */ in nv50_dmac_free()
160 return get - dmac->cur - 5; in nv50_dmac_free()
161 return dmac->max - dmac->cur; in nv50_dmac_free()
170 u32 get = NVIF_RV32(&dmac->base.user, NV507C, GET, PTR); in nv50_dmac_wind()
172 /* Corner-case, HW idle, but non-committed work pending. */ in nv50_dmac_wind()
173 if (dmac->put == 0) in nv50_dmac_wind()
174 nv50_dmac_kick(dmac->push); in nv50_dmac_wind()
176 if (nvif_msec(dmac->base.device, 2000, in nv50_dmac_wind()
177 if (NVIF_TV32(&dmac->base.user, NV507C, GET, PTR, >, 0)) in nv50_dmac_wind()
180 return -ETIMEDOUT; in nv50_dmac_wind()
183 PUSH_RSVD(dmac->push, PUSH_JUMP(dmac->push, 0)); in nv50_dmac_wind()
184 dmac->cur = 0; in nv50_dmac_wind()
194 if (WARN_ON(size > dmac->max)) in nv50_dmac_wait()
195 return -EINVAL; in nv50_dmac_wait()
197 dmac->cur = push->cur - (u32 __iomem *)dmac->_push.mem.object.map.ptr; in nv50_dmac_wait()
198 if (dmac->cur + size >= dmac->max) { in nv50_dmac_wait()
203 push->cur = dmac->_push.mem.object.map.ptr; in nv50_dmac_wait()
204 push->cur = push->cur + dmac->cur; in nv50_dmac_wait()
208 if (nvif_msec(dmac->base.device, 2000, in nv50_dmac_wait()
213 return -ETIMEDOUT; in nv50_dmac_wait()
216 push->bgn = dmac->_push.mem.object.map.ptr; in nv50_dmac_wait()
217 push->bgn = push->bgn + dmac->cur; in nv50_dmac_wait()
218 push->cur = push->bgn; in nv50_dmac_wait()
219 push->end = push->cur + free; in nv50_dmac_wait()
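
The nv50_dmac_kick()/nv50_dmac_free()/nv50_dmac_wind()/nv50_dmac_wait() fragments above manage the display DMA push buffer as a ring of 32-bit words: dmac->cur is the driver's write offset, PUT is published to hardware and GET read back through the NV507C channel registers, the driver stays 5 words clear of GET, and it jumps back to offset 0 when the tail cannot hold the next request. Below is a minimal userspace model of that space accounting, with invented names; it is illustrative only, not driver code.

/*
 * Minimal model of the push-buffer space management shown in
 * nv50_dmac_free()/nv50_dmac_wait() above.  "get"/"cur"/"max" mirror the
 * dmac fields in the listing; everything else (struct and function names,
 * the demo in main) is made up for illustration.
 */
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

struct ring {
	uint32_t get;	/* hardware read offset, in 32-bit words */
	uint32_t cur;	/* driver write offset, in 32-bit words  */
	uint32_t max;	/* last usable word index                */
};

/* Free words at the current write position (stay 5 words away from GET). */
static uint32_t ring_free(const struct ring *r)
{
	if (r->get > r->cur)
		return r->get - r->cur - 5;
	return r->max - r->cur;
}

/* Does a request of 'size' words force a jump back to offset 0 first? */
static bool ring_must_wrap(const struct ring *r, uint32_t size)
{
	return r->cur + size >= r->max;
}

int main(void)
{
	/* 0x1000-byte buffer => 0x1000/4 - 1 usable words, as in the listing */
	struct ring r = { .get = 0x040, .cur = 0x3f0, .max = 0x1000 / 4 - 1 };

	printf("free=%u wrap=%d\n", ring_free(&r), ring_must_wrap(&r, 0x20));
	return 0;
}
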
224 static int nv50_dmac_vram_pushbuf = -1;
232 struct nouveau_cli *cli = (void *)device->object.client; in nv50_dmac_create()
237 mutex_init(&dmac->lock); in nv50_dmac_create()
239 /* Pascal added support for 47-bit physical addresses, but some in nv50_dmac_create()
240 * parts of EVO still only accept 40-bit PAs. in nv50_dmac_create()
249 (nv50_dmac_vram_pushbuf < 0 && device->info.family == NV_DEVICE_INFO_V0_PASCAL)) in nv50_dmac_create()
252 ret = nvif_mem_ctor_map(&cli->mmu, "kmsChanPush", type, 0x1000, in nv50_dmac_create()
253 &dmac->_push.mem); in nv50_dmac_create()
257 dmac->ptr = dmac->_push.mem.object.map.ptr; in nv50_dmac_create()
258 dmac->_push.wait = nv50_dmac_wait; in nv50_dmac_create()
259 dmac->_push.kick = nv50_dmac_kick; in nv50_dmac_create()
260 dmac->push = &dmac->_push; in nv50_dmac_create()
261 dmac->push->bgn = dmac->_push.mem.object.map.ptr; in nv50_dmac_create()
262 dmac->push->cur = dmac->push->bgn; in nv50_dmac_create()
263 dmac->push->end = dmac->push->bgn; in nv50_dmac_create()
264 dmac->max = 0x1000/4 - 1; in nv50_dmac_create()
269 if (disp->oclass < GV100_DISP) in nv50_dmac_create()
270 dmac->max -= 12; in nv50_dmac_create()
272 args->pushbuf = nvif_handle(&dmac->_push.mem.object); in nv50_dmac_create()
275 &dmac->base); in nv50_dmac_create()
282 ret = nvif_object_ctor(&dmac->base.user, "kmsSyncCtxDma", NV50_DISP_HANDLE_SYNCBUF, in nv50_dmac_create()
290 &dmac->sync); in nv50_dmac_create()
294 ret = nvif_object_ctor(&dmac->base.user, "kmsVramCtxDma", NV50_DISP_HANDLE_VRAM, in nv50_dmac_create()
300 .limit = device->info.ram_user - 1, in nv50_dmac_create()
302 &dmac->vram); in nv50_dmac_create()
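
nv50_dmac_create() above decides where that push buffer lives: the nv50_dmac_vram_pushbuf module parameter (-1 means auto) can force the choice, and the in-code comment explains that parts of EVO still only accept 40-bit physical addresses even on Pascal, so VRAM is preferred there. The branch that applies the choice is elided from the listing; the sketch below is the natural reading of the condition shown, with invented names standing in for the NVIF memory-type flags.

/*
 * Sketch of the push-buffer placement decision in nv50_dmac_create() above:
 * -1 means "auto" (VRAM on Pascal because of the 40-bit EVO limitation),
 * 0 forces coherent system memory, 1 forces VRAM.  The enum stands in for
 * the real NVIF memory-type flags and is not the driver's definition.
 */
#include <stdio.h>

enum pushbuf_mem { PUSHBUF_COHERENT_SYSRAM, PUSHBUF_VRAM };

static enum pushbuf_mem pick_pushbuf_mem(int vram_pushbuf_param, int is_pascal)
{
	if (vram_pushbuf_param == 1 ||
	    (vram_pushbuf_param < 0 && is_pascal))
		return PUSHBUF_VRAM;
	return PUSHBUF_COHERENT_SYSRAM;
}

int main(void)
{
	printf("%d\n", pick_pushbuf_mem(-1, 1));	/* auto on Pascal -> VRAM   */
	printf("%d\n", pick_pushbuf_mem(-1, 0));	/* auto elsewhere -> sysram */
	printf("%d\n", pick_pushbuf_mem(0, 1));		/* forced off -> sysram     */
	return 0;
}
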
317 outp->base.base.name, outp->caps.dp_interlace); in nv50_outp_dump_caps()
326 struct drm_display_mode *adjusted_mode = &crtc_state->adjusted_mode; in nv50_outp_atomic_check_view()
327 struct drm_display_mode *mode = &crtc_state->mode; in nv50_outp_atomic_check_view()
328 struct drm_connector *connector = conn_state->connector; in nv50_outp_atomic_check_view()
330 struct nouveau_drm *drm = nouveau_drm(encoder->dev); in nv50_outp_atomic_check_view()
332 NV_ATOMIC(drm, "%s atomic_check\n", encoder->name); in nv50_outp_atomic_check_view()
333 asyc->scaler.full = false; in nv50_outp_atomic_check_view()
337 if (asyc->scaler.mode == DRM_MODE_SCALE_NONE) { in nv50_outp_atomic_check_view()
338 switch (connector->connector_type) { in nv50_outp_atomic_check_view()
345 if (mode->hdisplay == native_mode->hdisplay && in nv50_outp_atomic_check_view()
346 mode->vdisplay == native_mode->vdisplay && in nv50_outp_atomic_check_view()
347 mode->type & DRM_MODE_TYPE_DRIVER) in nv50_outp_atomic_check_view()
350 asyc->scaler.full = true; in nv50_outp_atomic_check_view()
361 crtc_state->mode_changed = true; in nv50_outp_atomic_check_view()
372 struct drm_display_mode *mode = &asyh->state.adjusted_mode; in nv50_outp_atomic_fix_depth()
375 switch (nv_encoder->dcb->type) { in nv50_outp_atomic_fix_depth()
377 max_rate = nv_encoder->dp.link_nr * nv_encoder->dp.link_bw; in nv50_outp_atomic_fix_depth()
380 asyh->or.bpc = min_t(u8, asyh->or.bpc, 10); in nv50_outp_atomic_fix_depth()
383 while (asyh->or.bpc > 6) { in nv50_outp_atomic_fix_depth()
384 mode_rate = DIV_ROUND_UP(mode->clock * asyh->or.bpc * 3, 8); in nv50_outp_atomic_fix_depth()
388 asyh->or.bpc -= 2; in nv50_outp_atomic_fix_depth()
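
nv50_outp_atomic_fix_depth() above clamps the DP depth to what the link can carry: max_rate is lane count times per-lane bandwidth, mode_rate is the mode's payload in the same units (pixel clock times 3 components times bpc, divided by 8), and bpc drops in steps of 2, never below 6, until the mode fits. The loop's exit test is elided from the listing, so the "mode_rate <= max_rate" check in the standalone model below is an assumption; all names here are illustrative.

/*
 * Standalone model of the bpc-reduction loop in nv50_outp_atomic_fix_depth()
 * above.  Only the arithmetic mirrors the listing; the exit condition is an
 * assumption, and the function and parameter names are invented.
 */
#include <stdio.h>

#define DIV_ROUND_UP(n, d)	(((n) + (d) - 1) / (d))

static unsigned int dp_fix_bpc(unsigned int bpc, unsigned long clock_khz,
			       unsigned int link_nr, unsigned long link_bw)
{
	unsigned long max_rate = (unsigned long)link_nr * link_bw;

	if (bpc > 10)			/* DP output is capped at 10 bpc */
		bpc = 10;

	while (bpc > 6) {
		unsigned long mode_rate =
			DIV_ROUND_UP(clock_khz * bpc * 3, 8);

		if (mode_rate <= max_rate)	/* assumed exit condition */
			break;
		bpc -= 2;
	}
	return bpc;
}

int main(void)
{
	/* illustrative numbers: a large mode against a modest 2-lane link */
	printf("bpc = %u\n", dp_fix_bpc(10, 594000, 2, 270000));
	return 0;
}
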
401 struct drm_connector *connector = conn_state->connector; in nv50_outp_atomic_check()
407 nv_connector->native_mode); in nv50_outp_atomic_check()
411 if (crtc_state->mode_changed || crtc_state->connectors_changed) in nv50_outp_atomic_check()
412 asyh->or.bpc = connector->display_info.bpc; in nv50_outp_atomic_check()
429 if (connector_state->best_encoder == encoder) in nv50_outp_get_new_connector()
445 if (connector_state->best_encoder == encoder) in nv50_outp_get_old_connector()
457 const u32 mask = drm_encoder_mask(&outp->base.base); in nv50_outp_get_new_crtc() local
461 if (crtc_state->encoder_mask & mask) in nv50_outp_get_new_crtc()
475 struct nv50_core *core = nv50_disp(encoder->dev)->core; in nv50_dac_atomic_disable()
478 core->func->dac->ctrl(core, nv_encoder->outp.or.id, ctrl, NULL); in nv50_dac_atomic_disable()
479 nv_encoder->crtc = NULL; in nv50_dac_atomic_disable()
488 nv50_head_atom(drm_atomic_get_new_crtc_state(state, &nv_crtc->base)); in nv50_dac_atomic_enable()
489 struct nv50_core *core = nv50_disp(encoder->dev)->core; in nv50_dac_atomic_enable()
492 switch (nv_crtc->index) { in nv50_dac_atomic_enable()
504 if (!nvif_outp_acquired(&nv_encoder->outp)) in nv50_dac_atomic_enable()
505 nvif_outp_acquire_dac(&nv_encoder->outp); in nv50_dac_atomic_enable()
507 core->func->dac->ctrl(core, nv_encoder->outp.or.id, ctrl, asyh); in nv50_dac_atomic_enable()
508 asyh->or.depth = 0; in nv50_dac_atomic_enable()
510 nv_encoder->crtc = &nv_crtc->base; in nv50_dac_atomic_enable()
520 loadval = nouveau_drm(encoder->dev)->vbios.dactestval; in nv50_dac_detect()
524 ret = nvif_outp_load_detect(&nv_encoder->outp, loadval); in nv50_dac_detect()
544 nvif_outp_dtor(&nv_encoder->outp); in nv50_dac_destroy()
558 struct drm_connector *connector = &nv_encoder->conn->base; in nv50_dac_create()
559 struct nouveau_drm *drm = nouveau_drm(connector->dev); in nv50_dac_create()
560 struct nvkm_i2c *i2c = nvxx_i2c(&drm->client.device); in nv50_dac_create()
563 struct dcb_output *dcbe = nv_encoder->dcb; in nv50_dac_create()
566 bus = nvkm_i2c_bus_find(i2c, dcbe->i2c_index); in nv50_dac_create()
568 nv_encoder->i2c = &bus->i2c; in nv50_dac_create()
571 drm_encoder_init(connector->dev, encoder, &nv50_dac_func, type, in nv50_dac_create()
572 "dac-%04x-%04x", dcbe->hasht, dcbe->hashm); in nv50_dac_create()
586 if (acomp && acomp->audio_ops && acomp->audio_ops->pin_eld_notify) in nv50_audio_component_eld_notify()
587 acomp->audio_ops->pin_eld_notify(acomp->audio_ops->audio_ptr, in nv50_audio_component_eld_notify()
604 mutex_lock(&drm->audio.lock); in nv50_audio_component_get_eld()
606 drm_for_each_encoder(encoder, drm->dev) { in nv50_audio_component_get_eld()
609 if (encoder->encoder_type == DRM_MODE_ENCODER_DPMST) in nv50_audio_component_get_eld()
613 nv_connector = nv_encoder->conn; in nv50_audio_component_get_eld()
614 nv_crtc = nouveau_crtc(nv_encoder->crtc); in nv50_audio_component_get_eld()
616 if (!nv_crtc || nv_encoder->outp.or.id != port || nv_crtc->index != dev_id) in nv50_audio_component_get_eld()
619 *enabled = nv_encoder->audio.enabled; in nv50_audio_component_get_eld()
621 ret = drm_eld_size(nv_connector->base.eld); in nv50_audio_component_get_eld()
622 memcpy(buf, nv_connector->base.eld, in nv50_audio_component_get_eld()
628 mutex_unlock(&drm->audio.lock); in nv50_audio_component_get_eld()
646 return -ENOMEM; in nv50_audio_component_bind()
649 acomp->ops = &nv50_audio_component_ops; in nv50_audio_component_bind()
650 acomp->dev = kdev; in nv50_audio_component_bind()
651 drm->audio.component = acomp; in nv50_audio_component_bind()
665 drm->audio.component = NULL; in nv50_audio_component_unbind()
666 acomp->ops = NULL; in nv50_audio_component_unbind()
667 acomp->dev = NULL; in nv50_audio_component_unbind()
679 if (component_add(drm->dev->dev, &nv50_audio_component_bind_ops)) in nv50_audio_component_init()
682 drm->audio.component_registered = true; in nv50_audio_component_init()
683 mutex_init(&drm->audio.lock); in nv50_audio_component_init()
689 if (!drm->audio.component_registered) in nv50_audio_component_fini()
692 component_del(drm->dev->dev, &nv50_audio_component_bind_ops); in nv50_audio_component_fini()
693 drm->audio.component_registered = false; in nv50_audio_component_fini()
694 mutex_destroy(&drm->audio.lock); in nv50_audio_component_fini()
703 struct nv50_disp *disp = nv50_disp(encoder->dev); in nv50_audio_supported()
705 if (disp->disp->object.oclass <= GT200_DISP || in nv50_audio_supported()
706 disp->disp->object.oclass == GT206_DISP) in nv50_audio_supported()
709 if (encoder->encoder_type != DRM_MODE_ENCODER_DPMST) { in nv50_audio_supported()
712 switch (nv_encoder->dcb->type) { in nv50_audio_supported()
727 struct nouveau_drm *drm = nouveau_drm(encoder->dev); in nv50_audio_disable()
729 struct nvif_outp *outp = &nv_encoder->outp; in nv50_audio_disable()
734 mutex_lock(&drm->audio.lock); in nv50_audio_disable()
735 if (nv_encoder->audio.enabled) { in nv50_audio_disable()
736 nv_encoder->audio.enabled = false; in nv50_audio_disable()
737 nvif_outp_hda_eld(&nv_encoder->outp, nv_crtc->index, NULL, 0); in nv50_audio_disable()
739 mutex_unlock(&drm->audio.lock); in nv50_audio_disable()
741 nv50_audio_component_eld_notify(drm->audio.component, outp->or.id, nv_crtc->index); in nv50_audio_disable()
749 struct nouveau_drm *drm = nouveau_drm(encoder->dev); in nv50_audio_enable()
751 struct nvif_outp *outp = &nv_encoder->outp; in nv50_audio_enable()
753 if (!nv50_audio_supported(encoder) || !drm_detect_monitor_audio(nv_connector->edid)) in nv50_audio_enable()
756 mutex_lock(&drm->audio.lock); in nv50_audio_enable()
758 nvif_outp_hda_eld(&nv_encoder->outp, nv_crtc->index, nv_connector->base.eld, in nv50_audio_enable()
759 drm_eld_size(nv_connector->base.eld)); in nv50_audio_enable()
760 nv_encoder->audio.enabled = true; in nv50_audio_enable()
762 mutex_unlock(&drm->audio.lock); in nv50_audio_enable()
764 nv50_audio_component_eld_notify(drm->audio.component, outp->or.id, nv_crtc->index); in nv50_audio_enable()
775 struct nouveau_drm *drm = nouveau_drm(encoder->dev); in nv50_hdmi_enable()
777 struct drm_hdmi_info *hdmi = &nv_connector->base.display_info.hdmi; in nv50_hdmi_enable()
787 max_ac_packet = mode->htotal - mode->hdisplay; in nv50_hdmi_enable()
788 max_ac_packet -= rekey; in nv50_hdmi_enable()
789 max_ac_packet -= 18; /* constant from tegra */ in nv50_hdmi_enable()
792 if (nv_encoder->i2c && hdmi->scdc.scrambling.supported) { in nv50_hdmi_enable()
793 const bool high_tmds_clock_ratio = mode->clock > 340000; in nv50_hdmi_enable()
796 ret = drm_scdc_readb(nv_encoder->i2c, SCDC_TMDS_CONFIG, &scdc); in nv50_hdmi_enable()
803 if (high_tmds_clock_ratio || hdmi->scdc.scrambling.low_rates) in nv50_hdmi_enable()
808 ret = drm_scdc_writeb(nv_encoder->i2c, SCDC_TMDS_CONFIG, scdc); in nv50_hdmi_enable()
814 ret = nvif_outp_hdmi(&nv_encoder->outp, nv_crtc->index, true, max_ac_packet, rekey, in nv50_hdmi_enable()
815 mode->clock, hdmi->scdc.supported, hdmi->scdc.scrambling.supported, in nv50_hdmi_enable()
816 hdmi->scdc.scrambling.low_rates); in nv50_hdmi_enable()
822 args.infoframe.head = nv_crtc->index; in nv50_hdmi_enable()
824 if (!drm_hdmi_avi_infoframe_from_display_mode(&infoframe.avi, &nv_connector->base, mode)) { in nv50_hdmi_enable()
825 drm_hdmi_avi_infoframe_quant_range(&infoframe.avi, &nv_connector->base, mode, in nv50_hdmi_enable()
833 nvif_outp_infoframe(&nv_encoder->outp, NVIF_OUTP_INFOFRAME_V0_AVI, &args.infoframe, size); in nv50_hdmi_enable()
838 &nv_connector->base, mode)) in nv50_hdmi_enable()
843 nvif_outp_infoframe(&nv_encoder->outp, NVIF_OUTP_INFOFRAME_V0_VSI, &args.infoframe, size); in nv50_hdmi_enable()
845 nv_encoder->hdmi.enabled = true; in nv50_hdmi_enable()
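
nv50_hdmi_enable() above derives max_ac_packet from the horizontal blanking (htotal minus hdisplay, minus the rekey window and an 18-symbol constant) and, when the sink advertises SCDC scrambling support, rewrites the SCDC TMDS_CONFIG register so that scrambling and the 1/40 bit-clock ratio match the mode: scrambling is required above 340 MHz, and below that only if the sink wants low-rate scrambling. The exact register bits are elided from the listing; the sketch below models only the decision, with invented names and a placeholder rekey value.

/*
 * Sketch of the HDMI decisions made in nv50_hdmi_enable() above.  The
 * arithmetic and the 340 MHz threshold come from the listing; the rekey
 * value is a placeholder, and the actual SCDC register write (elided in
 * the listing) is reduced to two booleans.
 */
#include <stdbool.h>
#include <stdio.h>

struct hdmi_cfg {
	unsigned int max_ac_packet;	/* audio/aux packet budget in hblank */
	bool scramble;			/* enable SCDC scrambling            */
	bool clock_ratio_1_40;		/* TMDS bit clock ratio for >340 MHz */
};

static struct hdmi_cfg hdmi_pick_cfg(unsigned int htotal, unsigned int hdisplay,
				     unsigned int rekey, unsigned int clock_khz,
				     bool scdc_scrambling_supported,
				     bool scramble_low_rates)
{
	struct hdmi_cfg cfg = {
		/* hblank minus the rekey window minus an 18-symbol constant */
		.max_ac_packet = htotal - hdisplay - rekey - 18,
	};

	if (scdc_scrambling_supported) {
		cfg.clock_ratio_1_40 = clock_khz > 340000;
		cfg.scramble = cfg.clock_ratio_1_40 || scramble_low_rates;
	}
	return cfg;
}

int main(void)
{
	/* 4k60-ish timings; rekey value is illustrative only */
	struct hdmi_cfg cfg = hdmi_pick_cfg(4400, 3840, 56, 594000, true, false);

	printf("max_ac_packet=%u scramble=%d ratio_1_40=%d\n",
	       cfg.max_ac_packet, cfg.scramble, cfg.clock_ratio_1_40);
	return 0;
}
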
880 if (encoder->encoder_type != DRM_MODE_ENCODER_DPMST) in nv50_real_outp()
884 if (!msto->mstc) in nv50_real_outp()
886 return msto->mstc->mstm->outp; in nv50_real_outp()
895 struct nouveau_drm *drm = nouveau_drm(msto->encoder.dev); in nv50_msto_cleanup()
897 drm_atomic_get_mst_payload_state(new_mst_state, msto->mstc->port); in nv50_msto_cleanup()
901 drm_atomic_get_mst_payload_state(old_mst_state, msto->mstc->port); in nv50_msto_cleanup()
902 struct nv50_mstc *mstc = msto->mstc; in nv50_msto_cleanup()
903 struct nv50_mstm *mstm = mstc->mstm; in nv50_msto_cleanup()
905 NV_ATOMIC(drm, "%s: msto cleanup\n", msto->encoder.name); in nv50_msto_cleanup()
907 if (msto->disabled) { in nv50_msto_cleanup()
908 if (msto->head->func->display_id) { in nv50_msto_cleanup()
909 nvif_outp_dp_mst_id_put(&mstm->outp->outp, msto->display_id); in nv50_msto_cleanup()
910 msto->display_id = 0; in nv50_msto_cleanup()
913 msto->mstc = NULL; in nv50_msto_cleanup()
914 msto->disabled = false; in nv50_msto_cleanup()
916 } else if (msto->enabled) { in nv50_msto_cleanup()
918 msto->enabled = false; in nv50_msto_cleanup()
928 struct nouveau_drm *drm = nouveau_drm(msto->encoder.dev); in nv50_msto_prepare()
929 struct nv50_mstc *mstc = msto->mstc; in nv50_msto_prepare()
930 struct nv50_mstm *mstm = mstc->mstm; in nv50_msto_prepare()
934 NV_ATOMIC(drm, "%s: msto prepare\n", msto->encoder.name); in nv50_msto_prepare()
936 payload = drm_atomic_get_mst_payload_state(mst_state, mstc->port); in nv50_msto_prepare()
938 if (msto->disabled) { in nv50_msto_prepare()
940 nvif_outp_dp_mst_vcpi(&mstm->outp->outp, msto->head->base.index, 0, 0, 0, 0); in nv50_msto_prepare()
943 if (msto->enabled) in nv50_msto_prepare()
948 nvif_outp_dp_mst_vcpi(&mstm->outp->outp, msto->head->base.index, in nv50_msto_prepare()
949 payload->vc_start_slot, payload->time_slots, in nv50_msto_prepare()
950 payload->pbn, in nv50_msto_prepare()
951 payload->time_slots * dfixed_trunc(mst_state->pbn_div)); in nv50_msto_prepare()
953 nvif_outp_dp_mst_vcpi(&mstm->outp->outp, msto->head->base.index, 0, 0, 0, 0); in nv50_msto_prepare()
962 struct drm_atomic_state *state = crtc_state->state; in nv50_msto_atomic_check()
963 struct drm_connector *connector = conn_state->connector; in nv50_msto_atomic_check()
966 struct nv50_mstm *mstm = mstc->mstm; in nv50_msto_atomic_check()
972 mstc->native); in nv50_msto_atomic_check()
984 if (!state->duplicated) { in nv50_msto_atomic_check()
985 const int clock = crtc_state->adjusted_mode.clock; in nv50_msto_atomic_check()
987 asyh->or.bpc = connector->display_info.bpc; in nv50_msto_atomic_check()
988 asyh->dp.pbn = drm_dp_calc_pbn_mode(clock, asyh->or.bpc * 3 << 4); in nv50_msto_atomic_check()
991 mst_state = drm_atomic_get_mst_topology_state(state, &mstm->mgr); in nv50_msto_atomic_check()
995 if (!mst_state->pbn_div.full) { in nv50_msto_atomic_check()
996 struct nouveau_encoder *outp = mstc->mstm->outp; in nv50_msto_atomic_check()
998 mst_state->pbn_div = drm_dp_get_vc_payload_bw(&mstm->mgr, in nv50_msto_atomic_check()
999 outp->dp.link_bw, outp->dp.link_nr); in nv50_msto_atomic_check()
1002 slots = drm_dp_atomic_find_time_slots(state, &mstm->mgr, mstc->port, asyh->dp.pbn); in nv50_msto_atomic_check()
1006 asyh->dp.tu = slots; in nv50_msto_atomic_check()
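
nv50_msto_atomic_check() above sizes the MST stream: drm_dp_calc_pbn_mode() turns the pixel clock and a bpp value pre-scaled by 16 (hence "bpc * 3 << 4") into a PBN figure, pbn_div is filled in from the link rate and lane count when the topology state does not have one yet, and drm_dp_atomic_find_time_slots() returns the slot count stored in asyh->dp.tu. The sketch below only models the slot-count step conceptually; the real helper also records the request in the atomic MST topology state, and the names here are invented.

/*
 * Conceptual model of the MST time-slot sizing above: a stream occupies
 * ceil(pbn / pbn_div) of the link's 64 MTP time slots, where pbn_div is
 * the PBN one slot can carry at the current link rate and lane count.
 * This is the meaning of the DRM helper used in the listing, not a copy
 * of its implementation.
 */
#include <stdio.h>

#define DIV_ROUND_UP(n, d)	(((n) + (d) - 1) / (d))

static unsigned int mst_time_slots(unsigned int pbn, unsigned int pbn_div)
{
	return DIV_ROUND_UP(pbn, pbn_div);
}

int main(void)
{
	/* illustrative numbers only */
	printf("slots = %u\n", mst_time_slots(1000, 60));
	return 0;
}
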
1026 struct nv50_head *head = msto->head; in nv50_msto_atomic_enable()
1028 nv50_head_atom(drm_atomic_get_new_crtc_state(state, &head->base.base)); in nv50_msto_atomic_enable()
1035 drm_connector_list_iter_begin(encoder->dev, &conn_iter); in nv50_msto_atomic_enable()
1037 if (connector->state->best_encoder == &msto->encoder) { in nv50_msto_atomic_enable()
1039 mstm = mstc->mstm; in nv50_msto_atomic_enable()
1048 if (!mstm->links++) { in nv50_msto_atomic_enable()
1049 nvif_outp_acquire_sor(&mstm->outp->outp, false /*TODO: MST audio... */); in nv50_msto_atomic_enable()
1050 nouveau_dp_train(mstm->outp, true, 0, 0); in nv50_msto_atomic_enable()
1053 if (head->func->display_id) { in nv50_msto_atomic_enable()
1054 if (!WARN_ON(nvif_outp_dp_mst_id_get(&mstm->outp->outp, &msto->display_id))) in nv50_msto_atomic_enable()
1055 head->func->display_id(head, msto->display_id); in nv50_msto_atomic_enable()
1058 if (mstm->outp->outp.or.link & 1) in nv50_msto_atomic_enable()
1063 mstm->outp->update(mstm->outp, head->base.index, asyh, proto, in nv50_msto_atomic_enable()
1064 nv50_dp_bpc_to_depth(asyh->or.bpc)); in nv50_msto_atomic_enable()
1066 msto->mstc = mstc; in nv50_msto_atomic_enable()
1067 msto->enabled = true; in nv50_msto_atomic_enable()
1068 mstm->modified = true; in nv50_msto_atomic_enable()
1075 struct nv50_mstc *mstc = msto->mstc; in nv50_msto_atomic_disable()
1076 struct nv50_mstm *mstm = mstc->mstm; in nv50_msto_atomic_disable()
1078 if (msto->head->func->display_id) in nv50_msto_atomic_disable()
1079 msto->head->func->display_id(msto->head, 0); in nv50_msto_atomic_disable()
1081 mstm->outp->update(mstm->outp, msto->head->base.index, NULL, 0, 0); in nv50_msto_atomic_disable()
1082 mstm->modified = true; in nv50_msto_atomic_disable()
1083 if (!--mstm->links) in nv50_msto_atomic_disable()
1084 mstm->disabled = true; in nv50_msto_atomic_disable()
1085 msto->disabled = true; in nv50_msto_atomic_disable()
1099 drm_encoder_cleanup(&msto->encoder); in nv50_msto_destroy()
1116 return ERR_PTR(-ENOMEM); in nv50_msto_new()
1118 ret = drm_encoder_init(dev, &msto->encoder, &nv50_msto, in nv50_msto_new()
1119 DRM_MODE_ENCODER_DPMST, "mst-%d", id); in nv50_msto_new()
1125 drm_encoder_helper_add(&msto->encoder, &nv50_msto_help); in nv50_msto_new()
1126 msto->encoder.possible_crtcs = drm_crtc_mask(&head->base.base); in nv50_msto_new()
1127 msto->head = head; in nv50_msto_new()
1138 struct drm_crtc *crtc = connector_state->crtc; in nv50_mstc_atomic_best_encoder()
1140 if (!(mstc->mstm->outp->dcb->heads & drm_crtc_mask(crtc))) in nv50_mstc_atomic_best_encoder()
1143 return &nv50_head(crtc)->msto->encoder; in nv50_mstc_atomic_best_encoder()
1151 struct nouveau_encoder *outp = mstc->mstm->outp; in nv50_mstc_mode_valid()
1166 mstc->edid = drm_dp_mst_get_edid(&mstc->connector, mstc->port->mgr, mstc->port); in nv50_mstc_get_modes()
1167 drm_connector_update_edid_property(&mstc->connector, mstc->edid); in nv50_mstc_get_modes()
1168 if (mstc->edid) in nv50_mstc_get_modes()
1169 ret = drm_add_edid_modes(&mstc->connector, mstc->edid); in nv50_mstc_get_modes()
1177 if (connector->display_info.bpc) in nv50_mstc_get_modes()
1178 connector->display_info.bpc = in nv50_mstc_get_modes()
1179 clamp(connector->display_info.bpc, 6U, 8U); in nv50_mstc_get_modes()
1181 connector->display_info.bpc = 8; in nv50_mstc_get_modes()
1183 if (mstc->native) in nv50_mstc_get_modes()
1184 drm_mode_destroy(mstc->connector.dev, mstc->native); in nv50_mstc_get_modes()
1185 mstc->native = nouveau_conn_native_mode(&mstc->connector); in nv50_mstc_get_modes()
1194 struct drm_dp_mst_topology_mgr *mgr = &mstc->mstm->mgr; in nv50_mstc_atomic_check()
1196 return drm_dp_atomic_release_time_slots(state, mgr, mstc->port); in nv50_mstc_atomic_check()
1209 ret = pm_runtime_get_sync(connector->dev->dev); in nv50_mstc_detect()
1210 if (ret < 0 && ret != -EACCES) { in nv50_mstc_detect()
1211 pm_runtime_put_autosuspend(connector->dev->dev); in nv50_mstc_detect()
1215 ret = drm_dp_mst_detect_port(connector, ctx, mstc->port->mgr, in nv50_mstc_detect()
1216 mstc->port); in nv50_mstc_detect()
1221 pm_runtime_mark_last_busy(connector->dev->dev); in nv50_mstc_detect()
1222 pm_runtime_put_autosuspend(connector->dev->dev); in nv50_mstc_detect()
1240 drm_connector_cleanup(&mstc->connector); in nv50_mstc_destroy()
1241 drm_dp_mst_put_port_malloc(mstc->port); in nv50_mstc_destroy()
1261 struct drm_device *dev = mstm->outp->base.base.dev; in nv50_mstc_new()
1267 return -ENOMEM; in nv50_mstc_new()
1268 mstc->mstm = mstm; in nv50_mstc_new()
1269 mstc->port = port; in nv50_mstc_new()
1271 ret = drm_connector_init(dev, &mstc->connector, &nv50_mstc, in nv50_mstc_new()
1279 drm_connector_helper_add(&mstc->connector, &nv50_mstc_help); in nv50_mstc_new()
1281 mstc->connector.funcs->reset(&mstc->connector); in nv50_mstc_new()
1282 nouveau_conn_attach_properties(&mstc->connector); in nv50_mstc_new()
1285 if (!(mstm->outp->dcb->heads & drm_crtc_mask(crtc))) in nv50_mstc_new()
1288 drm_connector_attach_encoder(&mstc->connector, in nv50_mstc_new()
1289 &nv50_head(crtc)->msto->encoder); in nv50_mstc_new()
1292 drm_object_attach_property(&mstc->connector.base, dev->mode_config.path_property, 0); in nv50_mstc_new()
1293 drm_object_attach_property(&mstc->connector.base, dev->mode_config.tile_property, 0); in nv50_mstc_new()
1294 drm_connector_set_path_property(&mstc->connector, path); in nv50_mstc_new()
1304 struct nouveau_drm *drm = nouveau_drm(mstm->outp->base.base.dev); in nv50_mstm_cleanup()
1307 NV_ATOMIC(drm, "%s: mstm cleanup\n", mstm->outp->base.base.name); in nv50_mstm_cleanup()
1308 drm_dp_check_act_status(&mstm->mgr); in nv50_mstm_cleanup()
1310 drm_for_each_encoder(encoder, mstm->outp->base.base.dev) { in nv50_mstm_cleanup()
1311 if (encoder->encoder_type == DRM_MODE_ENCODER_DPMST) { in nv50_mstm_cleanup()
1313 struct nv50_mstc *mstc = msto->mstc; in nv50_mstm_cleanup()
1314 if (mstc && mstc->mstm == mstm) in nv50_mstm_cleanup()
1315 nv50_msto_cleanup(state, mst_state, &mstm->mgr, msto); in nv50_mstm_cleanup()
1319 if (mstm->disabled) { in nv50_mstm_cleanup()
1320 nouveau_dp_power_down(mstm->outp); in nv50_mstm_cleanup()
1321 nvif_outp_release(&mstm->outp->outp); in nv50_mstm_cleanup()
1322 mstm->disabled = false; in nv50_mstm_cleanup()
1325 mstm->modified = false; in nv50_mstm_cleanup()
1333 struct nouveau_drm *drm = nouveau_drm(mstm->outp->base.base.dev); in nv50_mstm_prepare()
1336 NV_ATOMIC(drm, "%s: mstm prepare\n", mstm->outp->base.base.name); in nv50_mstm_prepare()
1339 drm_for_each_encoder(encoder, mstm->outp->base.base.dev) { in nv50_mstm_prepare()
1340 if (encoder->encoder_type == DRM_MODE_ENCODER_DPMST) { in nv50_mstm_prepare()
1342 struct nv50_mstc *mstc = msto->mstc; in nv50_mstm_prepare()
1343 if (mstc && mstc->mstm == mstm && msto->disabled) in nv50_mstm_prepare()
1344 nv50_msto_prepare(state, mst_state, &mstm->mgr, msto); in nv50_mstm_prepare()
1351 drm_for_each_encoder(encoder, mstm->outp->base.base.dev) { in nv50_mstm_prepare()
1352 if (encoder->encoder_type == DRM_MODE_ENCODER_DPMST) { in nv50_mstm_prepare()
1354 struct nv50_mstc *mstc = msto->mstc; in nv50_mstm_prepare()
1355 if (mstc && mstc->mstm == mstm && !msto->disabled) in nv50_mstm_prepare()
1356 nv50_msto_prepare(state, mst_state, &mstm->mgr, msto); in nv50_mstm_prepare()
1373 return &mstc->connector; in nv50_mstm_add_connector()
1386 struct drm_dp_aux *aux = &nv_connector->aux; in nv50_mstm_service()
1400 drm_dp_mst_hpd_irq_handle_event(&mstm->mgr, esi, ack, &handled); in nv50_mstm_service()
1411 drm_dp_mst_hpd_irq_send_new_request(&mstm->mgr); in nv50_mstm_service()
1416 nv_connector->base.name, rc); in nv50_mstm_service()
1424 mstm->is_mst = false; in nv50_mstm_remove()
1425 drm_dp_mst_topology_mgr_set_mst(&mstm->mgr, false); in nv50_mstm_remove()
1431 struct nv50_mstm *mstm = outp->dp.mstm; in nv50_mstm_detect()
1435 if (!mstm || !mstm->can_mst) in nv50_mstm_detect()
1438 aux = mstm->mgr.aux; in nv50_mstm_detect()
1448 ret = drm_dp_mst_topology_mgr_set_mst(&mstm->mgr, true); in nv50_mstm_detect()
1452 mstm->is_mst = true; in nv50_mstm_detect()
1459 struct nv50_mstm *mstm = outp->dp.mstm; in nv50_mstm_fini()
1466 * path to protect mstm->is_mst without potentially deadlocking in nv50_mstm_fini()
1468 mutex_lock(&outp->dp.hpd_irq_lock); in nv50_mstm_fini()
1469 mstm->suspended = true; in nv50_mstm_fini()
1470 mutex_unlock(&outp->dp.hpd_irq_lock); in nv50_mstm_fini()
1472 if (mstm->is_mst) in nv50_mstm_fini()
1473 drm_dp_mst_topology_mgr_suspend(&mstm->mgr); in nv50_mstm_fini()
1479 struct nv50_mstm *mstm = outp->dp.mstm; in nv50_mstm_init()
1485 if (mstm->is_mst) { in nv50_mstm_init()
1486 ret = drm_dp_mst_topology_mgr_resume(&mstm->mgr, !runtime); in nv50_mstm_init()
1487 if (ret == -1) in nv50_mstm_init()
1491 mutex_lock(&outp->dp.hpd_irq_lock); in nv50_mstm_init()
1492 mstm->suspended = false; in nv50_mstm_init()
1493 mutex_unlock(&outp->dp.hpd_irq_lock); in nv50_mstm_init()
1495 if (ret == -1) in nv50_mstm_init()
1496 drm_kms_helper_hotplug_event(mstm->mgr.dev); in nv50_mstm_init()
1504 drm_dp_mst_topology_mgr_destroy(&mstm->mgr); in nv50_mstm_del()
1514 const int max_payloads = hweight8(outp->dcb->heads); in nv50_mstm_new()
1515 struct drm_device *dev = outp->base.base.dev; in nv50_mstm_new()
1520 return -ENOMEM; in nv50_mstm_new()
1521 mstm->outp = outp; in nv50_mstm_new()
1522 mstm->mgr.cbs = &nv50_mstm; in nv50_mstm_new()
1524 ret = drm_dp_mst_topology_mgr_init(&mstm->mgr, dev, aux, aux_max, in nv50_mstm_new()
1539 struct nv50_disp *disp = nv50_disp(nv_encoder->base.base.dev); in nv50_sor_update()
1540 struct nv50_core *core = disp->core; in nv50_sor_update()
1543 nv_encoder->ctrl &= ~BIT(head); in nv50_sor_update()
1544 if (NVDEF_TEST(nv_encoder->ctrl, NV507D, SOR_SET_CONTROL, OWNER, ==, NONE)) in nv50_sor_update()
1545 nv_encoder->ctrl = 0; in nv50_sor_update()
1547 nv_encoder->ctrl |= NVVAL(NV507D, SOR_SET_CONTROL, PROTOCOL, proto); in nv50_sor_update()
1548 nv_encoder->ctrl |= BIT(head); in nv50_sor_update()
1549 asyh->or.depth = depth; in nv50_sor_update()
1552 core->func->sor->ctrl(core, nv_encoder->outp.or.id, nv_encoder->ctrl, asyh); in nv50_sor_update()
1555 /* TODO: Should we extend this to PWM-only backlights?
1564 struct nv50_head *head = nv50_head(nv_encoder->crtc); in nv50_sor_atomic_disable()
1567 struct nouveau_drm *drm = nouveau_drm(nv_encoder->base.base.dev); in nv50_sor_atomic_disable()
1568 struct nouveau_backlight *backlight = nv_connector->backlight; in nv50_sor_atomic_disable()
1569 struct drm_dp_aux *aux = &nv_connector->aux; in nv50_sor_atomic_disable()
1572 if (backlight && backlight->uses_dpcd) { in nv50_sor_atomic_disable()
1573 ret = drm_edp_backlight_disable(aux, &backlight->edp_info); in nv50_sor_atomic_disable()
1576 nv_connector->base.base.id, nv_connector->base.name, ret); in nv50_sor_atomic_disable()
1580 if (nv_encoder->dcb->type == DCB_OUTPUT_TMDS && nv_encoder->hdmi.enabled) { in nv50_sor_atomic_disable()
1581 nvif_outp_hdmi(&nv_encoder->outp, head->base.index, in nv50_sor_atomic_disable()
1583 nv_encoder->hdmi.enabled = false; in nv50_sor_atomic_disable()
1586 if (nv_encoder->dcb->type == DCB_OUTPUT_DP) in nv50_sor_atomic_disable()
1589 if (head->func->display_id) in nv50_sor_atomic_disable()
1590 head->func->display_id(head, 0); in nv50_sor_atomic_disable()
1592 nv_encoder->update(nv_encoder, head->base.index, NULL, 0, 0); in nv50_sor_atomic_disable()
1593 nv50_audio_disable(encoder, &head->base); in nv50_sor_atomic_disable()
1594 nv_encoder->crtc = NULL; in nv50_sor_atomic_disable()
1607 bool enhancedFraming = outp->dp.dpcd[DP_MAX_LANE_COUNT] & DP_ENHANCED_FRAME_CAP; in nv50_sor_dp_watermark_sst()
1608 u64 minRate = outp->dp.link_bw * 1000; in nv50_sor_dp_watermark_sst()
1620 unsigned surfaceWidth = asyh->mode.h.blanks - asyh->mode.h.blanke; in nv50_sor_dp_watermark_sst()
1621 unsigned rasterWidth = asyh->mode.h.active; in nv50_sor_dp_watermark_sst()
1622 unsigned depth = asyh->or.bpc * 3; in nv50_sor_dp_watermark_sst()
1624 u64 pixelClockHz = asyh->mode.clock * 1000; in nv50_sor_dp_watermark_sst()
1626 u32 numLanesPerLink = outp->dp.link_nr; in nv50_sor_dp_watermark_sst()
1634 if (outp->outp.info.dp.increased_wm) { in nv50_sor_dp_watermark_sst()
1639 if ((pixelClockHz * depth) >= (8 * minRate * outp->dp.link_nr * DSC_FACTOR)) in nv50_sor_dp_watermark_sst()
1649 ((pixelClockHz * depth) < div_u64(8 * minRate * outp->dp.link_nr * DSC_FACTOR, 64))) in nv50_sor_dp_watermark_sst()
1661 ratioF = div_u64(ratioF, 8 * (u64) minRate * outp->dp.link_nr); in nv50_sor_dp_watermark_sst()
1666 watermarkF = div_u64(ratioF * tuSize * (PrecisionFactor - ratioF), PrecisionFactor); in nv50_sor_dp_watermark_sst()
1672 numSymbolsPerLine = div_u64(surfaceWidth * depth, 8 * outp->dp.link_nr * DSC_FACTOR); in nv50_sor_dp_watermark_sst()
1695 PixelSteeringBits = remain ? div_u64((numLanesPerLink - remain) * depth, DSC_FACTOR) : 0; in nv50_sor_dp_watermark_sst()
1702 if (WARN_ON(MinHBlank > rasterWidth - surfaceWidth)) in nv50_sor_dp_watermark_sst()
1705 // Bug 702290 - Active Width should be greater than 60 in nv50_sor_dp_watermark_sst()
1710 …hblank_symbols = (s32)(div_u64((u64)(rasterWidth - surfaceWidth - MinHBlank) * minRate, pixelClock… in nv50_sor_dp_watermark_sst()
1713 hblank_symbols -= 1; //Stuffer latency to send BS in nv50_sor_dp_watermark_sst()
1714 hblank_symbols -= 3; //SPKT latency to send data to stuffer in nv50_sor_dp_watermark_sst()
1716 hblank_symbols -= numLanesPerLink == 1 ? 9 : numLanesPerLink == 2 ? 6 : 3; in nv50_sor_dp_watermark_sst()
1721 …nk = ((SetRasterBlankEnd.X + SetRasterSize.Width - SetRasterBlankStart.X - 40) * link_clk / pclk) in nv50_sor_dp_watermark_sst()
1729 vblank_symbols = (s32)((div_u64((u64)(surfaceWidth - 40) * minRate, pixelClockHz))) - 1; in nv50_sor_dp_watermark_sst()
1731 vblank_symbols -= numLanesPerLink == 1 ? 39 : numLanesPerLink == 2 ? 21 : 12; in nv50_sor_dp_watermark_sst()
1736 return nvif_outp_dp_sst(&outp->outp, head->base.index, waterMark, hBlankSym, vBlankSym); in nv50_sor_dp_watermark_sst()
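
nv50_sor_dp_watermark_sst() above works in fixed point: ratioF is the fraction of link bandwidth the stream consumes (pixel clock times depth, over 8 times link rate times lane count), scaled by PrecisionFactor, and the watermark core is ratio * tuSize * (1 - ratio); hardware-specific adjustments plus the hblank/vblank symbol budgets derived from the spare raster width are added on top. A minimal sketch of just that core follows; PrecisionFactor, tuSize and the adjustment terms are not shown in the listing, so the values used here are placeholders.

/*
 * Fixed-point core of the SST watermark math shown above:
 *   ratio     = (pixel_clock * depth) / (8 * link_rate * lanes)
 *   watermark = ratio * tu_size * (1 - ratio)
 * with 'precision' standing in for the listing's PrecisionFactor.  The real
 * function adds further hardware-dependent terms and blanking-symbol
 * budgets that are not reproduced here.
 */
#include <stdint.h>
#include <stdio.h>

static uint64_t dp_sst_watermark(uint64_t pixel_clock_hz, unsigned int depth,
				 uint64_t link_rate_hz, unsigned int lanes,
				 unsigned int tu_size, uint64_t precision)
{
	uint64_t ratio = pixel_clock_hz * depth * precision /
			 (8 * link_rate_hz * lanes);

	return ratio * tu_size * (precision - ratio) / (precision * precision);
}

int main(void)
{
	/* placeholders: ~148.5 MHz pixel clock, 24 bpp, 2 lanes at 270 MHz */
	printf("watermark ~= %llu symbols\n",
	       (unsigned long long)dp_sst_watermark(148500000ULL, 24,
						    270000000ULL, 2, 64, 100000));
	return 0;
}
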
1745 nv50_head_atom(drm_atomic_get_new_crtc_state(state, &nv_crtc->base)); in nv50_sor_atomic_enable()
1746 struct drm_display_mode *mode = &asyh->state.adjusted_mode; in nv50_sor_atomic_enable()
1747 struct nv50_disp *disp = nv50_disp(encoder->dev); in nv50_sor_atomic_enable()
1748 struct nv50_head *head = nv50_head(&nv_crtc->base); in nv50_sor_atomic_enable()
1749 struct nvif_outp *outp = &nv_encoder->outp; in nv50_sor_atomic_enable()
1750 struct drm_device *dev = encoder->dev; in nv50_sor_atomic_enable()
1756 struct nvbios *bios = &drm->vbios; in nv50_sor_atomic_enable()
1762 nv_encoder->crtc = &nv_crtc->base; in nv50_sor_atomic_enable()
1764 if ((disp->disp->object.oclass == GT214_DISP || in nv50_sor_atomic_enable()
1765 disp->disp->object.oclass >= GF110_DISP) && in nv50_sor_atomic_enable()
1766 nv_encoder->dcb->type != DCB_OUTPUT_LVDS && in nv50_sor_atomic_enable()
1767 drm_detect_monitor_audio(nv_connector->edid)) in nv50_sor_atomic_enable()
1773 switch (nv_encoder->dcb->type) { in nv50_sor_atomic_enable()
1775 if (disp->disp->object.oclass != NV50_DISP && in nv50_sor_atomic_enable()
1776 drm_detect_hdmi_monitor(nv_connector->edid)) in nv50_sor_atomic_enable()
1779 if (nv_encoder->outp.or.link & 1) { in nv50_sor_atomic_enable()
1781 /* Only enable dual-link if: in nv50_sor_atomic_enable()
1782 * - Need to (i.e. rate > 165MHz) in nv50_sor_atomic_enable()
1783 * - DCB says we can in nv50_sor_atomic_enable()
1784 * - Not an HDMI monitor, since there's no dual-link in nv50_sor_atomic_enable()
1787 if (mode->clock >= 165000 && in nv50_sor_atomic_enable()
1788 nv_encoder->dcb->duallink_possible && in nv50_sor_atomic_enable()
1789 !drm_detect_hdmi_monitor(nv_connector->edid)) in nv50_sor_atomic_enable()
1798 if (bios->fp_no_ddc) { in nv50_sor_atomic_enable()
1799 lvds_dual = bios->fp.dual_link; in nv50_sor_atomic_enable()
1800 lvds_8bpc = bios->fp.if_is_24bit; in nv50_sor_atomic_enable()
1802 if (nv_connector->type == DCB_CONNECTOR_LVDS_SPWG) { in nv50_sor_atomic_enable()
1803 if (((u8 *)nv_connector->edid)[121] == 2) in nv50_sor_atomic_enable()
1806 if (mode->clock >= bios->fp.duallink_transition_clk) { in nv50_sor_atomic_enable()
1811 if (bios->fp.strapless_is_24bit & 2) in nv50_sor_atomic_enable()
1814 if (bios->fp.strapless_is_24bit & 1) in nv50_sor_atomic_enable()
1818 if (asyh->or.bpc == 8) in nv50_sor_atomic_enable()
1822 nvif_outp_lvds(&nv_encoder->outp, lvds_dual, lvds_8bpc); in nv50_sor_atomic_enable()
1825 nouveau_dp_train(nv_encoder, false, mode->clock, asyh->or.bpc); in nv50_sor_atomic_enable()
1827 depth = nv50_dp_bpc_to_depth(asyh->or.bpc); in nv50_sor_atomic_enable()
1829 if (nv_encoder->outp.or.link & 1) in nv50_sor_atomic_enable()
1835 backlight = nv_connector->backlight; in nv50_sor_atomic_enable()
1836 if (backlight && backlight->uses_dpcd) in nv50_sor_atomic_enable()
1837 drm_edp_backlight_enable(&nv_connector->aux, &backlight->edp_info, in nv50_sor_atomic_enable()
1838 (u16)backlight->dev->props.brightness); in nv50_sor_atomic_enable()
1847 if (head->func->display_id) in nv50_sor_atomic_enable()
1848 head->func->display_id(head, BIT(nv_encoder->outp.id)); in nv50_sor_atomic_enable()
1850 nv_encoder->update(nv_encoder, nv_crtc->index, asyh, proto, depth); in nv50_sor_atomic_enable()
1865 nv50_mstm_del(&nv_encoder->dp.mstm); in nv50_sor_destroy()
1868 if (nv_encoder->dcb->type == DCB_OUTPUT_DP) in nv50_sor_destroy()
1869 mutex_destroy(&nv_encoder->dp.hpd_irq_lock); in nv50_sor_destroy()
1871 nvif_outp_dtor(&nv_encoder->outp); in nv50_sor_destroy()
1883 struct drm_connector *connector = &nv_encoder->conn->base; in nv50_sor_create()
1885 struct nouveau_drm *drm = nouveau_drm(connector->dev); in nv50_sor_create()
1886 struct nvkm_i2c *i2c = nvxx_i2c(&drm->client.device); in nv50_sor_create()
1888 struct dcb_output *dcbe = nv_encoder->dcb; in nv50_sor_create()
1889 struct nv50_disp *disp = nv50_disp(connector->dev); in nv50_sor_create()
1892 switch (dcbe->type) { in nv50_sor_create()
1901 nv_encoder->update = nv50_sor_update; in nv50_sor_create()
1904 drm_encoder_init(connector->dev, encoder, &nv50_sor_func, type, in nv50_sor_create()
1905 "sor-%04x-%04x", dcbe->hasht, dcbe->hashm); in nv50_sor_create()
1910 disp->core->func->sor->get_caps(disp, nv_encoder, ffs(dcbe->or) - 1); in nv50_sor_create()
1913 if (dcbe->type == DCB_OUTPUT_DP) { in nv50_sor_create()
1914 mutex_init(&nv_encoder->dp.hpd_irq_lock); in nv50_sor_create()
1916 if (disp->disp->object.oclass < GF110_DISP) { in nv50_sor_create()
1917 /* HW has no support for address-only in nv50_sor_create()
1919 * use custom I2C-over-AUX code. in nv50_sor_create()
1923 aux = nvkm_i2c_aux_find(i2c, dcbe->i2c_index); in nv50_sor_create()
1925 return -EINVAL; in nv50_sor_create()
1927 nv_encoder->i2c = &aux->i2c; in nv50_sor_create()
1929 nv_encoder->i2c = &nv_connector->aux.ddc; in nv50_sor_create()
1932 if (nv_connector->type != DCB_CONNECTOR_eDP && nv_encoder->outp.info.dp.mst) { in nv50_sor_create()
1933 ret = nv50_mstm_new(nv_encoder, &nv_connector->aux, in nv50_sor_create()
1934 16, nv_connector->base.base.id, in nv50_sor_create()
1935 &nv_encoder->dp.mstm); in nv50_sor_create()
1940 if (nv_encoder->outp.info.ddc != NVIF_OUTP_DDC_INVALID) { in nv50_sor_create()
1942 nvkm_i2c_bus_find(i2c, dcbe->i2c_index); in nv50_sor_create()
1944 nv_encoder->i2c = &bus->i2c; in nv50_sor_create()
1961 crtc_state->adjusted_mode.clock *= 2; in nv50_pior_atomic_check()
1969 struct nv50_core *core = nv50_disp(encoder->dev)->core; in nv50_pior_atomic_disable()
1972 core->func->pior->ctrl(core, nv_encoder->outp.or.id, ctrl, NULL); in nv50_pior_atomic_disable()
1973 nv_encoder->crtc = NULL; in nv50_pior_atomic_disable()
1982 nv50_head_atom(drm_atomic_get_new_crtc_state(state, &nv_crtc->base)); in nv50_pior_atomic_enable()
1983 struct nv50_core *core = nv50_disp(encoder->dev)->core; in nv50_pior_atomic_enable()
1986 switch (nv_crtc->index) { in nv50_pior_atomic_enable()
1994 switch (asyh->or.bpc) { in nv50_pior_atomic_enable()
1995 case 10: asyh->or.depth = NV837D_PIOR_SET_CONTROL_PIXEL_DEPTH_BPP_30_444; break; in nv50_pior_atomic_enable()
1996 case 8: asyh->or.depth = NV837D_PIOR_SET_CONTROL_PIXEL_DEPTH_BPP_24_444; break; in nv50_pior_atomic_enable()
1997 case 6: asyh->or.depth = NV837D_PIOR_SET_CONTROL_PIXEL_DEPTH_BPP_18_444; break; in nv50_pior_atomic_enable()
1998 default: asyh->or.depth = NV837D_PIOR_SET_CONTROL_PIXEL_DEPTH_DEFAULT; break; in nv50_pior_atomic_enable()
2001 if (!nvif_outp_acquired(&nv_encoder->outp)) in nv50_pior_atomic_enable()
2002 nvif_outp_acquire_pior(&nv_encoder->outp); in nv50_pior_atomic_enable()
2004 switch (nv_encoder->dcb->type) { in nv50_pior_atomic_enable()
2010 nouveau_dp_train(nv_encoder, false, asyh->state.adjusted_mode.clock, 6); in nv50_pior_atomic_enable()
2017 core->func->pior->ctrl(core, nv_encoder->outp.or.id, ctrl, asyh); in nv50_pior_atomic_enable()
2018 nv_encoder->crtc = &nv_crtc->base; in nv50_pior_atomic_enable()
2033 nvif_outp_dtor(&nv_encoder->outp); in nv50_pior_destroy()
2037 mutex_destroy(&nv_encoder->dp.hpd_irq_lock); in nv50_pior_destroy()
2049 struct drm_connector *connector = &nv_encoder->conn->base; in nv50_pior_create()
2050 struct drm_device *dev = connector->dev; in nv50_pior_create()
2053 struct nvkm_i2c *i2c = nvxx_i2c(&drm->client.device); in nv50_pior_create()
2058 struct dcb_output *dcbe = nv_encoder->dcb; in nv50_pior_create()
2061 switch (dcbe->type) { in nv50_pior_create()
2063 bus = nvkm_i2c_bus_find(i2c, nv_encoder->outp.info.ddc); in nv50_pior_create()
2064 ddc = bus ? &bus->i2c : NULL; in nv50_pior_create()
2068 aux = nvkm_i2c_aux_find(i2c, nv_encoder->outp.info.dp.aux); in nv50_pior_create()
2069 ddc = aux ? &aux->i2c : NULL; in nv50_pior_create()
2073 return -ENODEV; in nv50_pior_create()
2076 nv_encoder->i2c = ddc; in nv50_pior_create()
2078 mutex_init(&nv_encoder->dp.hpd_irq_lock); in nv50_pior_create()
2081 drm_encoder_init(connector->dev, encoder, &nv50_pior_func, type, in nv50_pior_create()
2082 "pior-%04x-%04x", dcbe->hasht, dcbe->hashm); in nv50_pior_create()
2087 disp->core->func->pior->get_caps(disp, nv_encoder, ffs(dcbe->or) - 1); in nv50_pior_create()
2102 struct nouveau_drm *drm = nouveau_drm(state->dev); in nv50_disp_atomic_commit_core()
2103 struct nv50_disp *disp = nv50_disp(drm->dev); in nv50_disp_atomic_commit_core()
2105 struct nv50_core *core = disp->core; in nv50_disp_atomic_commit_core()
2114 if (mstm->modified) in nv50_disp_atomic_commit_core()
2118 core->func->ntfy_init(disp->sync, NV50_DISP_CORE_NTFY); in nv50_disp_atomic_commit_core()
2119 core->func->update(core, interlock, true); in nv50_disp_atomic_commit_core()
2120 if (core->func->ntfy_wait_done(disp->sync, NV50_DISP_CORE_NTFY, in nv50_disp_atomic_commit_core()
2121 disp->core->chan.base.device)) in nv50_disp_atomic_commit_core()
2126 if (mstm->modified) in nv50_disp_atomic_commit_core()
2130 list_for_each_entry(outp, &atom->outp, head) { in nv50_disp_atomic_commit_core()
2131 if (outp->encoder->encoder_type != DRM_MODE_ENCODER_DPMST) { in nv50_disp_atomic_commit_core()
2132 struct nouveau_encoder *nv_encoder = nouveau_encoder(outp->encoder); in nv50_disp_atomic_commit_core()
2134 if (outp->enabled) { in nv50_disp_atomic_commit_core()
2135 nv50_audio_enable(outp->encoder, nouveau_crtc(nv_encoder->crtc), in nv50_disp_atomic_commit_core()
2136 nv_encoder->conn, NULL, NULL); in nv50_disp_atomic_commit_core()
2137 outp->enabled = outp->disabled = false; in nv50_disp_atomic_commit_core()
2139 if (outp->disabled) { in nv50_disp_atomic_commit_core()
2140 nvif_outp_release(&nv_encoder->outp); in nv50_disp_atomic_commit_core()
2141 outp->disabled = false; in nv50_disp_atomic_commit_core()
2157 if (interlock[wndw->interlock.type] & wndw->interlock.data) { in nv50_disp_atomic_commit_wndw()
2158 if (wndw->func->update) in nv50_disp_atomic_commit_wndw()
2159 wndw->func->update(wndw, interlock); in nv50_disp_atomic_commit_wndw()
2167 struct drm_device *dev = state->dev; in nv50_disp_atomic_commit_tail()
2175 struct nv50_core *core = disp->core; in nv50_disp_atomic_commit_tail()
2181 NV_ATOMIC(drm, "commit %d %d\n", atom->lock_core, atom->flush_disable); in nv50_disp_atomic_commit_tail()
2189 if (atom->lock_core) in nv50_disp_atomic_commit_tail()
2190 mutex_lock(&disp->mutex); in nv50_disp_atomic_commit_tail()
2197 NV_ATOMIC(drm, "%s: clr %04x (set %04x)\n", crtc->name, in nv50_disp_atomic_commit_tail()
2198 asyh->clr.mask, asyh->set.mask); in nv50_disp_atomic_commit_tail()
2200 if (old_crtc_state->active && !new_crtc_state->active) { in nv50_disp_atomic_commit_tail()
2201 pm_runtime_put_noidle(dev->dev); in nv50_disp_atomic_commit_tail()
2205 if (asyh->clr.mask) { in nv50_disp_atomic_commit_tail()
2206 nv50_head_flush_clr(head, asyh, atom->flush_disable); in nv50_disp_atomic_commit_tail()
2216 NV_ATOMIC(drm, "%s: clr %02x (set %02x)\n", plane->name, in nv50_disp_atomic_commit_tail()
2217 asyw->clr.mask, asyw->set.mask); in nv50_disp_atomic_commit_tail()
2218 if (!asyw->clr.mask) in nv50_disp_atomic_commit_tail()
2221 nv50_wndw_flush_clr(wndw, interlock, atom->flush_disable, asyw); in nv50_disp_atomic_commit_tail()
2225 list_for_each_entry(outp, &atom->outp, head) { in nv50_disp_atomic_commit_tail()
2229 encoder = outp->encoder; in nv50_disp_atomic_commit_tail()
2230 help = encoder->helper_private; in nv50_disp_atomic_commit_tail()
2232 NV_ATOMIC(drm, "%s: clr %02x (set %02x)\n", encoder->name, in nv50_disp_atomic_commit_tail()
2233 outp->clr.mask, outp->set.mask); in nv50_disp_atomic_commit_tail()
2235 if (outp->clr.mask) { in nv50_disp_atomic_commit_tail()
2236 help->atomic_disable(encoder, state); in nv50_disp_atomic_commit_tail()
2237 outp->disabled = true; in nv50_disp_atomic_commit_tail()
2244 if (atom->flush_disable) { in nv50_disp_atomic_commit_tail()
2257 /* Update output path(s). */ in nv50_disp_atomic_commit_tail()
2258 list_for_each_entry(outp, &atom->outp, head) { in nv50_disp_atomic_commit_tail()
2262 encoder = outp->encoder; in nv50_disp_atomic_commit_tail()
2263 help = encoder->helper_private; in nv50_disp_atomic_commit_tail()
2265 NV_ATOMIC(drm, "%s: set %02x (clr %02x)\n", encoder->name, in nv50_disp_atomic_commit_tail()
2266 outp->set.mask, outp->clr.mask); in nv50_disp_atomic_commit_tail()
2268 if (outp->set.mask) { in nv50_disp_atomic_commit_tail()
2269 help->atomic_enable(encoder, state); in nv50_disp_atomic_commit_tail()
2270 outp->enabled = true; in nv50_disp_atomic_commit_tail()
2275 /* Update head(s). */ in nv50_disp_atomic_commit_tail()
2280 NV_ATOMIC(drm, "%s: set %04x (clr %04x)\n", crtc->name, in nv50_disp_atomic_commit_tail()
2281 asyh->set.mask, asyh->clr.mask); in nv50_disp_atomic_commit_tail()
2283 if (asyh->set.mask) { in nv50_disp_atomic_commit_tail()
2288 if (new_crtc_state->active) { in nv50_disp_atomic_commit_tail()
2289 if (!old_crtc_state->active) { in nv50_disp_atomic_commit_tail()
2291 pm_runtime_get_noresume(dev->dev); in nv50_disp_atomic_commit_tail()
2293 if (new_crtc_state->event) in nv50_disp_atomic_commit_tail()
2298 /* Update window->head assignment. in nv50_disp_atomic_commit_tail()
2300 * This has to happen in an update that's not interlocked with in nv50_disp_atomic_commit_tail()
2304 * supports non-fixed mappings). in nv50_disp_atomic_commit_tail()
2306 if (core->assign_windows) { in nv50_disp_atomic_commit_tail()
2307 core->func->wndw.owner(core); in nv50_disp_atomic_commit_tail()
2309 core->assign_windows = false; in nv50_disp_atomic_commit_tail()
2319 * different output format to what we do, and the core channel update in nv50_disp_atomic_commit_tail()
2322 * Delay some of the head update until after that point to workaround in nv50_disp_atomic_commit_tail()
2331 NV_ATOMIC(drm, "%s: set %04x (clr %04x)\n", crtc->name, in nv50_disp_atomic_commit_tail()
2332 asyh->set.mask, asyh->clr.mask); in nv50_disp_atomic_commit_tail()
2334 if (asyh->set.mask) { in nv50_disp_atomic_commit_tail()
2340 /* Update plane(s). */ in nv50_disp_atomic_commit_tail()
2345 NV_ATOMIC(drm, "%s: set %02x (clr %02x)\n", plane->name, in nv50_disp_atomic_commit_tail()
2346 asyw->set.mask, asyw->clr.mask); in nv50_disp_atomic_commit_tail()
2347 if ( !asyw->set.mask && in nv50_disp_atomic_commit_tail()
2348 (!asyw->clr.mask || atom->flush_disable)) in nv50_disp_atomic_commit_tail()
2354 /* Flush update. */ in nv50_disp_atomic_commit_tail()
2361 !atom->state.legacy_cursor_update) in nv50_disp_atomic_commit_tail()
2364 disp->core->func->update(disp->core, interlock, false); in nv50_disp_atomic_commit_tail()
2367 if (atom->lock_core) in nv50_disp_atomic_commit_tail()
2368 mutex_unlock(&disp->mutex); in nv50_disp_atomic_commit_tail()
2370 list_for_each_entry_safe(outp, outt, &atom->outp, head) { in nv50_disp_atomic_commit_tail()
2371 list_del(&outp->head); in nv50_disp_atomic_commit_tail()
2381 NV_ERROR(drm, "%s: timeout\n", plane->name); in nv50_disp_atomic_commit_tail()
2385 if (new_crtc_state->event) { in nv50_disp_atomic_commit_tail()
2388 if (new_crtc_state->active) in nv50_disp_atomic_commit_tail()
2390 spin_lock_irqsave(&crtc->dev->event_lock, flags); in nv50_disp_atomic_commit_tail()
2391 drm_crtc_send_vblank_event(crtc, new_crtc_state->event); in nv50_disp_atomic_commit_tail()
2392 spin_unlock_irqrestore(&crtc->dev->event_lock, flags); in nv50_disp_atomic_commit_tail()
2394 new_crtc_state->event = NULL; in nv50_disp_atomic_commit_tail()
2395 if (new_crtc_state->active) in nv50_disp_atomic_commit_tail()
2410 pm_runtime_mark_last_busy(dev->dev); in nv50_disp_atomic_commit_tail()
2411 pm_runtime_put_autosuspend(dev->dev); in nv50_disp_atomic_commit_tail()
2430 ret = pm_runtime_get_sync(dev->dev); in nv50_disp_atomic_commit()
2431 if (ret < 0 && ret != -EACCES) { in nv50_disp_atomic_commit()
2432 pm_runtime_put_autosuspend(dev->dev); in nv50_disp_atomic_commit()
2440 INIT_WORK(&state->commit_work, nv50_disp_atomic_commit_work); in nv50_disp_atomic_commit()
2460 if (asyw->set.image) in nv50_disp_atomic_commit()
2470 pm_runtime_get_noresume(dev->dev); in nv50_disp_atomic_commit()
2473 queue_work(system_unbound_wq, &state->commit_work); in nv50_disp_atomic_commit()
2481 pm_runtime_put_autosuspend(dev->dev); in nv50_disp_atomic_commit()
2490 list_for_each_entry(outp, &atom->outp, head) { in nv50_disp_outp_atomic_add()
2491 if (outp->encoder == encoder) in nv50_disp_outp_atomic_add()
2497 return ERR_PTR(-ENOMEM); in nv50_disp_outp_atomic_add()
2499 list_add(&outp->head, &atom->outp); in nv50_disp_outp_atomic_add()
2500 outp->encoder = encoder; in nv50_disp_outp_atomic_add()
2508 struct drm_encoder *encoder = old_connector_state->best_encoder; in nv50_disp_outp_atomic_check_clr()
2513 if (!(crtc = old_connector_state->crtc)) in nv50_disp_outp_atomic_check_clr()
2516 old_crtc_state = drm_atomic_get_old_crtc_state(&atom->state, crtc); in nv50_disp_outp_atomic_check_clr()
2517 new_crtc_state = drm_atomic_get_new_crtc_state(&atom->state, crtc); in nv50_disp_outp_atomic_check_clr()
2518 if (old_crtc_state->active && drm_atomic_crtc_needs_modeset(new_crtc_state)) { in nv50_disp_outp_atomic_check_clr()
2523 if (outp->encoder->encoder_type == DRM_MODE_ENCODER_DPMST || in nv50_disp_outp_atomic_check_clr()
2524 nouveau_encoder(outp->encoder)->dcb->type == DCB_OUTPUT_DP) in nv50_disp_outp_atomic_check_clr()
2525 atom->flush_disable = true; in nv50_disp_outp_atomic_check_clr()
2526 outp->clr.ctrl = true; in nv50_disp_outp_atomic_check_clr()
2527 atom->lock_core = true; in nv50_disp_outp_atomic_check_clr()
2537 struct drm_encoder *encoder = connector_state->best_encoder; in nv50_disp_outp_atomic_check_set()
2542 if (!(crtc = connector_state->crtc)) in nv50_disp_outp_atomic_check_set()
2545 new_crtc_state = drm_atomic_get_new_crtc_state(&atom->state, crtc); in nv50_disp_outp_atomic_check_set()
2546 if (new_crtc_state->active && drm_atomic_crtc_needs_modeset(new_crtc_state)) { in nv50_disp_outp_atomic_check_set()
2551 outp->set.ctrl = true; in nv50_disp_outp_atomic_check_set()
2552 atom->lock_core = true; in nv50_disp_outp_atomic_check_set()
2562 struct nv50_core *core = nv50_disp(dev)->core; in nv50_disp_atomic_check()
2571 if (core->assign_windows && core->func->head->static_wndw_map) { in nv50_disp_atomic_check()
2580 core->func->head->static_wndw_map(head, asyh); in nv50_disp_atomic_check()
2584 /* We need to handle colour management on a per-plane basis. */ in nv50_disp_atomic_check()
2586 if (new_crtc_state->color_mgmt_changed) { in nv50_disp_atomic_check()
2622 list_for_each_entry_safe(outp, outt, &atom->outp, head) { in nv50_disp_atomic_state_clear()
2623 list_del(&outp->head); in nv50_disp_atomic_state_clear()
2634 drm_atomic_state_default_release(&atom->state); in nv50_disp_atomic_state_free()
2643 drm_atomic_state_init(dev, &atom->state) < 0) { in nv50_disp_atomic_state_alloc()
2647 INIT_LIST_HEAD(&atom->outp); in nv50_disp_atomic_state_alloc()
2648 return &atom->state; in nv50_disp_atomic_state_alloc()
2677 list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) { in nv50_display_fini()
2678 if (encoder->encoder_type != DRM_MODE_ENCODER_DPMST) in nv50_display_fini()
2683 cancel_work_sync(&drm->hpd_work); in nv50_display_fini()
2694 const u32 encoder_mask = drm_encoder_mask(&outp->base.base); in nv50_display_read_hw_or_state()
2700 switch (outp->dcb->type) { in nv50_display_read_hw_or_state()
2702 ret = nvif_outp_inherit_tmds(&outp->outp, &proto); in nv50_display_read_hw_or_state()
2705 ret = nvif_outp_inherit_dp(&outp->outp, &proto); in nv50_display_read_hw_or_state()
2708 ret = nvif_outp_inherit_lvds(&outp->outp, &proto); in nv50_display_read_hw_or_state()
2711 ret = nvif_outp_inherit_rgb_crt(&outp->outp, &proto); in nv50_display_read_hw_or_state()
2715 outp->base.base.name); in nv50_display_read_hw_or_state()
2726 if (crtc->index != head_idx) in nv50_display_read_hw_or_state()
2729 armh = nv50_head_atom(crtc->state); in nv50_display_read_hw_or_state()
2739 if (nouveau_connector(conn)->index == outp->dcb->connector) { in nv50_display_read_hw_or_state()
2748 armh->state.encoder_mask = encoder_mask; in nv50_display_read_hw_or_state()
2749 armh->state.connector_mask = drm_connector_mask(conn); in nv50_display_read_hw_or_state()
2750 armh->state.active = true; in nv50_display_read_hw_or_state()
2751 armh->state.enable = true; in nv50_display_read_hw_or_state()
2752 pm_runtime_get_noresume(dev->dev); in nv50_display_read_hw_or_state()
2754 outp->crtc = crtc; in nv50_display_read_hw_or_state()
2755 outp->ctrl = NVVAL(NV507D, SOR_SET_CONTROL, PROTOCOL, proto) | BIT(crtc->index); in nv50_display_read_hw_or_state()
2758 conn->state->crtc = crtc; in nv50_display_read_hw_or_state()
2759 conn->state->best_encoder = &outp->base.base; in nv50_display_read_hw_or_state()
2766 struct drm_device *dev = drm->dev; in nv50_display_read_hw_state()
2775 if (encoder->encoder_type == DRM_MODE_ENCODER_DPMST) in nv50_display_read_hw_state()
2787 struct nv50_core *core = nv50_disp(dev)->core; in nv50_display_init()
2791 core->func->init(core); in nv50_display_init()
2793 list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) { in nv50_display_init()
2794 if (encoder->encoder_type != DRM_MODE_ENCODER_DPMST) { in nv50_display_init()
2814 nvif_object_unmap(&disp->caps); in nv50_display_destroy()
2815 nvif_object_dtor(&disp->caps); in nv50_display_destroy()
2816 nv50_core_del(&disp->core); in nv50_display_destroy()
2818 nouveau_bo_unmap(disp->sync); in nv50_display_destroy()
2819 if (disp->sync) in nv50_display_destroy()
2820 nouveau_bo_unpin(disp->sync); in nv50_display_destroy()
2821 nouveau_bo_ref(NULL, &disp->sync); in nv50_display_destroy()
2823 nouveau_display(dev)->priv = NULL; in nv50_display_destroy()
2838 return -ENOMEM; in nv50_display_create()
2840 mutex_init(&disp->mutex); in nv50_display_create()
2842 nouveau_display(dev)->priv = disp; in nv50_display_create()
2843 nouveau_display(dev)->dtor = nv50_display_destroy; in nv50_display_create()
2844 nouveau_display(dev)->init = nv50_display_init; in nv50_display_create()
2845 nouveau_display(dev)->fini = nv50_display_fini; in nv50_display_create()
2846 disp->disp = &nouveau_display(dev)->disp; in nv50_display_create()
2847 dev->mode_config.funcs = &nv50_disp_func; in nv50_display_create()
2848 dev->mode_config.helper_private = &nv50_disp_helper_func; in nv50_display_create()
2849 dev->mode_config.quirk_addfb_prefer_xbgr_30bpp = true; in nv50_display_create()
2850 dev->mode_config.normalize_zpos = true; in nv50_display_create()
2853 ret = nouveau_bo_new(&drm->client, 4096, 0x1000, in nv50_display_create()
2855 0, 0x0000, NULL, NULL, &disp->sync); in nv50_display_create()
2857 ret = nouveau_bo_pin(disp->sync, NOUVEAU_GEM_DOMAIN_VRAM, true); in nv50_display_create()
2859 ret = nouveau_bo_map(disp->sync); in nv50_display_create()
2861 nouveau_bo_unpin(disp->sync); in nv50_display_create()
2864 nouveau_bo_ref(NULL, &disp->sync); in nv50_display_create()
2871 ret = nv50_core_new(drm, &disp->core); in nv50_display_create()
2875 disp->core->func->init(disp->core); in nv50_display_create()
2876 if (disp->core->func->caps_init) { in nv50_display_create()
2877 ret = disp->core->func->caps_init(drm, disp); in nv50_display_create()
2883 if (disp->disp->object.oclass >= TU102_DISP) in nv50_display_create()
2884 nouveau_display(dev)->format_modifiers = wndwc57e_modifiers; in nv50_display_create()
2886 if (drm->client.device.info.family >= NV_DEVICE_INFO_V0_FERMI) in nv50_display_create()
2887 nouveau_display(dev)->format_modifiers = disp90xx_modifiers; in nv50_display_create()
2889 nouveau_display(dev)->format_modifiers = disp50xx_modifiers; in nv50_display_create()
2896 * But until then, just limit cursors to 128x128 - which is small enough to avoid ever using in nv50_display_create()
2899 if (disp->disp->object.oclass >= GM107_DISP) { in nv50_display_create()
2900 dev->mode_config.cursor_width = 256; in nv50_display_create()
2901 dev->mode_config.cursor_height = 256; in nv50_display_create()
2902 } else if (disp->disp->object.oclass >= GK104_DISP) { in nv50_display_create()
2903 dev->mode_config.cursor_width = 128; in nv50_display_create()
2904 dev->mode_config.cursor_height = 128; in nv50_display_create()
2906 dev->mode_config.cursor_width = 64; in nv50_display_create()
2907 dev->mode_config.cursor_height = 64; in nv50_display_create()
2911 for_each_set_bit(i, &disp->disp->outp_mask, sizeof(disp->disp->outp_mask) * 8) { in nv50_display_create()
2918 ret = nvif_outp_ctor(disp->disp, "kmsOutp", i, &outp->outp); in nv50_display_create()
2924 connector = nouveau_connector_create(dev, outp->outp.info.conn); in nv50_display_create()
2926 nvif_outp_dtor(&outp->outp); in nv50_display_create()
2931 outp->base.base.possible_crtcs = outp->outp.info.heads; in nv50_display_create()
2932 outp->base.base.possible_clones = 0; in nv50_display_create()
2933 outp->conn = nouveau_connector(connector); in nv50_display_create()
2935 outp->dcb = kzalloc(sizeof(*outp->dcb), GFP_KERNEL); in nv50_display_create()
2936 if (!outp->dcb) in nv50_display_create()
2939 switch (outp->outp.info.proto) { in nv50_display_create()
2941 outp->dcb->type = DCB_OUTPUT_ANALOG; in nv50_display_create()
2942 outp->dcb->crtconf.maxfreq = outp->outp.info.rgb_crt.freq_max; in nv50_display_create()
2945 outp->dcb->type = DCB_OUTPUT_TMDS; in nv50_display_create()
2946 outp->dcb->duallink_possible = outp->outp.info.tmds.dual; in nv50_display_create()
2949 outp->dcb->type = DCB_OUTPUT_LVDS; in nv50_display_create()
2950 outp->dcb->lvdsconf.use_acpi_for_edid = outp->outp.info.lvds.acpi_edid; in nv50_display_create()
2953 outp->dcb->type = DCB_OUTPUT_DP; in nv50_display_create()
2954 outp->dcb->dpconf.link_nr = outp->outp.info.dp.link_nr; in nv50_display_create()
2955 outp->dcb->dpconf.link_bw = outp->outp.info.dp.link_bw; in nv50_display_create()
2956 if (outp->outp.info.dp.mst) in nv50_display_create()
2964 outp->dcb->heads = outp->outp.info.heads; in nv50_display_create()
2965 outp->dcb->connector = outp->outp.info.conn; in nv50_display_create()
2966 outp->dcb->i2c_index = outp->outp.info.ddc; in nv50_display_create()
2968 switch (outp->outp.info.type) { in nv50_display_create()
2979 i, outp->outp.info.type, outp->outp.info.proto, ret); in nv50_display_create()
2984 list_for_each_entry_safe(connector, tmp, &dev->mode_config.connector_list, head) { in nv50_display_create()
2985 if (connector->possible_encoders) in nv50_display_create()
2989 connector->name); in nv50_display_create()
2990 connector->funcs->destroy(connector); in nv50_display_create()
2994 for_each_set_bit(i, &disp->disp->head_mask, sizeof(disp->disp->head_mask) * 8) { in nv50_display_create()
3004 head->msto = nv50_msto_new(dev, head, i); in nv50_display_create()
3005 if (IS_ERR(head->msto)) { in nv50_display_create()
3006 ret = PTR_ERR(head->msto); in nv50_display_create()
3007 head->msto = NULL; in nv50_display_create()
3021 head->msto->encoder.possible_crtcs = disp->disp->head_mask; in nv50_display_create()
3025 /* Disable vblank irqs aggressively for power-saving, safe on nv50+ */ in nv50_display_create()
3026 dev->vblank_disable_immediate = true; in nv50_display_create()
3041 * Log2(block height) ----------------------------+ *
3042 * Page Kind ----------------------------------+ | *
3043 * Gob Height/Page Kind Generation ------+ | | *
3044 * Sector layout -------+ | | | *
3045 * Compression ------+ | | | | */
3070 * Log2(block height) ----------------------------+ *
3071 * Page Kind ----------------------------------+ | *
3072 * Gob Height/Page Kind Generation ------+ | | *
3073 * Sector layout -------+ | | | *
3074 * Compression ------+ | | | | */