xref: /linux/drivers/gpu/drm/nouveau/nvkm/subdev/gsp/rm/r535/disp.c (revision 7b1166dee847d5018c1f3cc781218e806078f752)
1 /*
2  * Copyright 2023 Red Hat Inc.
3  *
4  * Permission is hereby granted, free of charge, to any person obtaining a
5  * copy of this software and associated documentation files (the "Software"),
6  * to deal in the Software without restriction, including without limitation
7  * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8  * and/or sell copies of the Software, and to permit persons to whom the
9  * Software is furnished to do so, subject to the following conditions:
10  *
11  * The above copyright notice and this permission notice shall be included in
12  * all copies or substantial portions of the Software.
13  *
14  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
17  * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
18  * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19  * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20  * OTHER DEALINGS IN THE SOFTWARE.
21  */
22 #include <engine/disp/priv.h>
23 #include <engine/disp/chan.h>
24 #include <engine/disp/conn.h>
25 #include <engine/disp/dp.h>
26 #include <engine/disp/head.h>
27 #include <engine/disp/ior.h>
28 #include <engine/disp/outp.h>
29 
30 #include <core/ramht.h>
31 #include <subdev/bios.h>
32 #include <subdev/bios/conn.h>
33 #include <subdev/gsp.h>
34 #include <subdev/mmu.h>
35 #include <subdev/vfn.h>
36 
37 #include <rm/gpu.h>
38 
39 #include <nvhw/drf.h>
40 
41 #include "nvrm/disp.h"
42 
43 #include <linux/acpi.h>
44 
45 static u64
r535_chan_user(struct nvkm_disp_chan * chan,u64 * psize)46 r535_chan_user(struct nvkm_disp_chan *chan, u64 *psize)
47 {
48 	switch (chan->object.oclass & 0xff) {
49 	case 0x7d: *psize = 0x10000; return 0x680000;
50 	case 0x7e: *psize = 0x01000; return 0x690000 + (chan->head * *psize);
51 	case 0x7b: *psize = 0x01000; return 0x6b0000 + (chan->head * *psize);
52 	case 0x7a: *psize = 0x01000; return 0x6d8000 + (chan->head * *psize);
53 	default:
54 		BUG_ON(1);
55 		break;
56 	}
57 
58 	return 0ULL;
59 }
60 
61 static void
r535_chan_intr(struct nvkm_disp_chan * chan,bool en)62 r535_chan_intr(struct nvkm_disp_chan *chan, bool en)
63 {
64 }
65 
66 static void
r535_chan_fini(struct nvkm_disp_chan * chan)67 r535_chan_fini(struct nvkm_disp_chan *chan)
68 {
69 	nvkm_gsp_rm_free(&chan->rm.object);
70 }
71 
72 static int
r535_disp_chan_set_pushbuf(struct nvkm_disp * disp,s32 oclass,int inst,struct nvkm_memory * memory)73 r535_disp_chan_set_pushbuf(struct nvkm_disp *disp, s32 oclass, int inst, struct nvkm_memory *memory)
74 {
75 	struct nvkm_gsp *gsp = disp->rm.objcom.client->gsp;
76 	NV2080_CTRL_INTERNAL_DISPLAY_CHANNEL_PUSHBUFFER_PARAMS *ctrl;
77 
78 	ctrl = nvkm_gsp_rm_ctrl_get(&gsp->internal.device.subdevice,
79 				    NV2080_CTRL_CMD_INTERNAL_DISPLAY_CHANNEL_PUSHBUFFER,
80 				    sizeof(*ctrl));
81 	if (IS_ERR(ctrl))
82 		return PTR_ERR(ctrl);
83 
84 	if (memory) {
85 		switch (nvkm_memory_target(memory)) {
86 		case NVKM_MEM_TARGET_NCOH:
87 			ctrl->addressSpace = ADDR_SYSMEM;
88 			ctrl->cacheSnoop = 0;
89 			break;
90 		case NVKM_MEM_TARGET_HOST:
91 			ctrl->addressSpace = ADDR_SYSMEM;
92 			ctrl->cacheSnoop = 1;
93 			break;
94 		case NVKM_MEM_TARGET_VRAM:
95 			ctrl->addressSpace = ADDR_FBMEM;
96 			break;
97 		default:
98 			WARN_ON(1);
99 			return -EINVAL;
100 		}
101 
102 		ctrl->physicalAddr = nvkm_memory_addr(memory);
103 		ctrl->limit = nvkm_memory_size(memory) - 1;
104 	}
105 
106 	ctrl->hclass = oclass;
107 	ctrl->channelInstance = inst;
108 	ctrl->valid = ((oclass & 0xff) != 0x7a) ? 1 : 0;
109 
110 	return nvkm_gsp_rm_ctrl_wr(&gsp->internal.device.subdevice, ctrl);
111 }
112 
113 static int
r535_curs_init(struct nvkm_disp_chan * chan)114 r535_curs_init(struct nvkm_disp_chan *chan)
115 {
116 	const struct nvkm_rm_api *rmapi = chan->disp->rm.objcom.client->gsp->rm->api;
117 	NV50VAIO_CHANNELPIO_ALLOCATION_PARAMETERS *args;
118 	int ret;
119 
120 	ret = rmapi->disp->chan.set_pushbuf(chan->disp, chan->object.oclass, chan->head, NULL);
121 	if (ret)
122 		return ret;
123 
124 	args = nvkm_gsp_rm_alloc_get(&chan->disp->rm.object,
125 				     (chan->object.oclass << 16) | chan->head,
126 				     chan->object.oclass, sizeof(*args), &chan->rm.object);
127 	if (IS_ERR(args))
128 		return PTR_ERR(args);
129 
130 	args->channelInstance = chan->head;
131 
132 	return nvkm_gsp_rm_alloc_wr(&chan->rm.object, args);
133 }
134 
135 static const struct nvkm_disp_chan_func
136 r535_curs_func = {
137 	.init = r535_curs_init,
138 	.fini = r535_chan_fini,
139 	.intr = r535_chan_intr,
140 	.user = r535_chan_user,
141 };
142 
143 static const struct nvkm_disp_chan_user
144 r535_curs = {
145 	.func = &r535_curs_func,
146 	.user = 73,
147 };
148 
149 static int
r535_dmac_bind(struct nvkm_disp_chan * chan,struct nvkm_object * object,u32 handle)150 r535_dmac_bind(struct nvkm_disp_chan *chan, struct nvkm_object *object, u32 handle)
151 {
152 	return nvkm_ramht_insert(chan->disp->ramht, object, chan->chid.user, -9, handle,
153 				 chan->chid.user << 25 |
154 				 (chan->disp->rm.client.object.handle & 0x3fff));
155 }
156 
157 static void
r535_dmac_fini(struct nvkm_disp_chan * chan)158 r535_dmac_fini(struct nvkm_disp_chan *chan)
159 {
160 	struct nvkm_device *device = chan->disp->engine.subdev.device;
161 	const u32 uoff = (chan->chid.user - 1) * 0x1000;
162 
163 	chan->suspend_put = nvkm_rd32(device, 0x690000 + uoff);
164 	r535_chan_fini(chan);
165 }
166 
167 static int
r535_dmac_alloc(struct nvkm_disp * disp,u32 oclass,int inst,u32 put_offset,struct nvkm_gsp_object * dmac)168 r535_dmac_alloc(struct nvkm_disp *disp, u32 oclass, int inst, u32 put_offset,
169 		struct nvkm_gsp_object *dmac)
170 {
171 	NV50VAIO_CHANNELDMA_ALLOCATION_PARAMETERS *args;
172 
173 	args = nvkm_gsp_rm_alloc_get(&disp->rm.object, (oclass << 16) | inst, oclass,
174 				     sizeof(*args), dmac);
175 	if (IS_ERR(args))
176 		return PTR_ERR(args);
177 
178 	args->channelInstance = inst;
179 	args->offset = put_offset;
180 
181 	return nvkm_gsp_rm_alloc_wr(dmac, args);
182 }
183 
184 static int
r535_dmac_init(struct nvkm_disp_chan * chan)185 r535_dmac_init(struct nvkm_disp_chan *chan)
186 {
187 	const struct nvkm_rm_api *rmapi = chan->disp->rm.objcom.client->gsp->rm->api;
188 	int ret;
189 
190 	ret = rmapi->disp->chan.set_pushbuf(chan->disp, chan->object.oclass, chan->head, chan->memory);
191 	if (ret)
192 		return ret;
193 
194 	return rmapi->disp->chan.dmac_alloc(chan->disp, chan->object.oclass, chan->head,
195 					    chan->suspend_put, &chan->rm.object);
196 }
197 
198 static int
r535_dmac_push(struct nvkm_disp_chan * chan,u64 memory)199 r535_dmac_push(struct nvkm_disp_chan *chan, u64 memory)
200 {
201 	chan->memory = nvkm_umem_search(chan->object.client, memory);
202 	if (IS_ERR(chan->memory))
203 		return PTR_ERR(chan->memory);
204 
205 	return 0;
206 }
207 
208 static const struct nvkm_disp_chan_func
209 r535_dmac_func = {
210 	.push = r535_dmac_push,
211 	.init = r535_dmac_init,
212 	.fini = r535_dmac_fini,
213 	.intr = r535_chan_intr,
214 	.user = r535_chan_user,
215 	.bind = r535_dmac_bind,
216 };
217 
218 static const struct nvkm_disp_chan_func
219 r535_wimm_func = {
220 	.push = r535_dmac_push,
221 	.init = r535_dmac_init,
222 	.fini = r535_dmac_fini,
223 	.intr = r535_chan_intr,
224 	.user = r535_chan_user,
225 };
226 
227 static const struct nvkm_disp_chan_user
228 r535_wimm = {
229 	.func = &r535_wimm_func,
230 	.user = 33,
231 };
232 
233 static const struct nvkm_disp_chan_user
234 r535_wndw = {
235 	.func = &r535_dmac_func,
236 	.user = 1,
237 };
238 
239 static void
r535_core_fini(struct nvkm_disp_chan * chan)240 r535_core_fini(struct nvkm_disp_chan *chan)
241 {
242 	struct nvkm_device *device = chan->disp->engine.subdev.device;
243 
244 	chan->suspend_put = nvkm_rd32(device, 0x680000);
245 	r535_chan_fini(chan);
246 }
247 
248 static const struct nvkm_disp_chan_func
249 r535_core_func = {
250 	.push = r535_dmac_push,
251 	.init = r535_dmac_init,
252 	.fini = r535_core_fini,
253 	.intr = r535_chan_intr,
254 	.user = r535_chan_user,
255 	.bind = r535_dmac_bind,
256 };
257 
258 static const struct nvkm_disp_chan_user
259 r535_core = {
260 	.func = &r535_core_func,
261 	.user = 0,
262 };
263 
264 static int
r535_bl_ctrl(struct nvkm_disp * disp,unsigned display_id,bool set,int * pval)265 r535_bl_ctrl(struct nvkm_disp *disp, unsigned display_id, bool set, int *pval)
266 {
267 	u32 cmd = set ? NV0073_CTRL_CMD_SPECIFIC_SET_BACKLIGHT_BRIGHTNESS :
268 			NV0073_CTRL_CMD_SPECIFIC_GET_BACKLIGHT_BRIGHTNESS;
269 	NV0073_CTRL_SPECIFIC_BACKLIGHT_BRIGHTNESS_PARAMS *ctrl;
270 	int ret;
271 
272 	ctrl = nvkm_gsp_rm_ctrl_get(&disp->rm.objcom, cmd, sizeof(*ctrl));
273 	if (IS_ERR(ctrl))
274 		return PTR_ERR(ctrl);
275 
276 	ctrl->displayId = BIT(display_id);
277 	ctrl->brightness = *pval;
278 
279 	ret = nvkm_gsp_rm_ctrl_push(&disp->rm.objcom, &ctrl, sizeof(*ctrl));
280 	if (ret)
281 		return ret;
282 
283 	*pval = ctrl->brightness;
284 
285 	nvkm_gsp_rm_ctrl_done(&disp->rm.objcom, ctrl);
286 	return 0;
287 }
288 
289 static int
r535_sor_bl_set(struct nvkm_ior * sor,int lvl)290 r535_sor_bl_set(struct nvkm_ior *sor, int lvl)
291 {
292 	struct nvkm_disp *disp = sor->disp;
293 	const struct nvkm_rm_api *rmapi = disp->engine.subdev.device->gsp->rm->api;
294 
295 	return rmapi->disp->bl_ctrl(disp, sor->asy.outp->index, true, &lvl);
296 }
297 
298 static int
r535_sor_bl_get(struct nvkm_ior * sor)299 r535_sor_bl_get(struct nvkm_ior *sor)
300 {
301 	struct nvkm_disp *disp = sor->disp;
302 	const struct nvkm_rm_api *rmapi = disp->engine.subdev.device->gsp->rm->api;
303 	int lvl, ret = rmapi->disp->bl_ctrl(disp, sor->asy.outp->index, false, &lvl);
304 
305 	return (ret == 0) ? lvl : ret;
306 }
307 
308 static const struct nvkm_ior_func_bl
309 r535_sor_bl = {
310 	.get = r535_sor_bl_get,
311 	.set = r535_sor_bl_set,
312 };
313 
314 static void
r535_sor_hda_eld(struct nvkm_ior * sor,int head,u8 * data,u8 size)315 r535_sor_hda_eld(struct nvkm_ior *sor, int head, u8 *data, u8 size)
316 {
317 	struct nvkm_disp *disp = sor->disp;
318 	NV0073_CTRL_DFP_SET_ELD_AUDIO_CAP_PARAMS *ctrl;
319 
320 	if (WARN_ON(size > sizeof(ctrl->bufferELD)))
321 		return;
322 
323 	ctrl = nvkm_gsp_rm_ctrl_get(&disp->rm.objcom,
324 				    NV0073_CTRL_CMD_DFP_SET_ELD_AUDIO_CAPS, sizeof(*ctrl));
325 	if (WARN_ON(IS_ERR(ctrl)))
326 		return;
327 
328 	ctrl->displayId = BIT(sor->asy.outp->index);
329 	ctrl->numELDSize = size;
330 	memcpy(ctrl->bufferELD, data, size);
331 	ctrl->maxFreqSupported = 0; //XXX
332 	ctrl->ctrl  = NVDEF(NV0073, CTRL_DFP_ELD_AUDIO_CAPS_CTRL, PD, TRUE);
333 	ctrl->ctrl |= NVDEF(NV0073, CTRL_DFP_ELD_AUDIO_CAPS_CTRL, ELDV, TRUE);
334 	ctrl->deviceEntry = head;
335 
336 	WARN_ON(nvkm_gsp_rm_ctrl_wr(&disp->rm.objcom, ctrl));
337 }
338 
339 static void
r535_sor_hda_hpd(struct nvkm_ior * sor,int head,bool present)340 r535_sor_hda_hpd(struct nvkm_ior *sor, int head, bool present)
341 {
342 	struct nvkm_disp *disp = sor->disp;
343 	NV0073_CTRL_DFP_SET_ELD_AUDIO_CAP_PARAMS *ctrl;
344 
345 	if (present)
346 		return;
347 
348 	ctrl = nvkm_gsp_rm_ctrl_get(&disp->rm.objcom,
349 				    NV0073_CTRL_CMD_DFP_SET_ELD_AUDIO_CAPS, sizeof(*ctrl));
350 	if (WARN_ON(IS_ERR(ctrl)))
351 		return;
352 
353 	ctrl->displayId = BIT(sor->asy.outp->index);
354 	ctrl->deviceEntry = head;
355 
356 	WARN_ON(nvkm_gsp_rm_ctrl_wr(&disp->rm.objcom, ctrl));
357 }
358 
359 static const struct nvkm_ior_func_hda
360 r535_sor_hda = {
361 	.hpd = r535_sor_hda_hpd,
362 	.eld = r535_sor_hda_eld,
363 };
364 
365 static void
r535_sor_dp_audio_mute(struct nvkm_ior * sor,bool mute)366 r535_sor_dp_audio_mute(struct nvkm_ior *sor, bool mute)
367 {
368 	struct nvkm_disp *disp = sor->disp;
369 	NV0073_CTRL_DP_SET_AUDIO_MUTESTREAM_PARAMS *ctrl;
370 
371 	ctrl = nvkm_gsp_rm_ctrl_get(&disp->rm.objcom,
372 				    NV0073_CTRL_CMD_DP_SET_AUDIO_MUTESTREAM, sizeof(*ctrl));
373 	if (WARN_ON(IS_ERR(ctrl)))
374 		return;
375 
376 	ctrl->displayId = BIT(sor->asy.outp->index);
377 	ctrl->mute = mute;
378 	WARN_ON(nvkm_gsp_rm_ctrl_wr(&disp->rm.objcom, ctrl));
379 }
380 
381 static void
r535_sor_dp_audio(struct nvkm_ior * sor,int head,bool enable)382 r535_sor_dp_audio(struct nvkm_ior *sor, int head, bool enable)
383 {
384 	struct nvkm_disp *disp = sor->disp;
385 	NV0073_CTRL_DFP_SET_AUDIO_ENABLE_PARAMS *ctrl;
386 
387 	if (!enable)
388 		r535_sor_dp_audio_mute(sor, true);
389 
390 	ctrl = nvkm_gsp_rm_ctrl_get(&disp->rm.objcom,
391 				    NV0073_CTRL_CMD_DFP_SET_AUDIO_ENABLE, sizeof(*ctrl));
392 	if (WARN_ON(IS_ERR(ctrl)))
393 		return;
394 
395 	ctrl->displayId = BIT(sor->asy.outp->index);
396 	ctrl->enable = enable;
397 	WARN_ON(nvkm_gsp_rm_ctrl_wr(&disp->rm.objcom, ctrl));
398 
399 	if (enable)
400 		r535_sor_dp_audio_mute(sor, false);
401 }
402 
403 static void
r535_sor_dp_vcpi(struct nvkm_ior * sor,int head,u8 slot,u8 slot_nr,u16 pbn,u16 aligned_pbn)404 r535_sor_dp_vcpi(struct nvkm_ior *sor, int head, u8 slot, u8 slot_nr, u16 pbn, u16 aligned_pbn)
405 {
406 	struct nvkm_disp *disp = sor->disp;
407 	struct NV0073_CTRL_CMD_DP_CONFIG_STREAM_PARAMS *ctrl;
408 
409 	ctrl = nvkm_gsp_rm_ctrl_get(&disp->rm.objcom,
410 				    NV0073_CTRL_CMD_DP_CONFIG_STREAM, sizeof(*ctrl));
411 	if (WARN_ON(IS_ERR(ctrl)))
412 		return;
413 
414 	ctrl->subDeviceInstance = 0;
415 	ctrl->head = head;
416 	ctrl->sorIndex = sor->id;
417 	ctrl->dpLink = sor->asy.link == 2;
418 	ctrl->bEnableOverride = 1;
419 	ctrl->bMST = 1;
420 	ctrl->hBlankSym = 0;
421 	ctrl->vBlankSym = 0;
422 	ctrl->colorFormat = 0;
423 	ctrl->bEnableTwoHeadOneOr = 0;
424 	ctrl->singleHeadMultistreamMode = 0;
425 	ctrl->MST.slotStart = slot;
426 	ctrl->MST.slotEnd = slot + slot_nr - 1;
427 	ctrl->MST.PBN = pbn;
428 	ctrl->MST.Timeslice = aligned_pbn;
429 	ctrl->MST.sendACT = 0;
430 	ctrl->MST.singleHeadMSTPipeline = 0;
431 	ctrl->MST.bEnableAudioOverRightPanel = 0;
432 	WARN_ON(nvkm_gsp_rm_ctrl_wr(&disp->rm.objcom, ctrl));
433 }
434 
435 static int
r535_sor_dp_sst(struct nvkm_ior * sor,int head,bool ef,u32 watermark,u32 hblanksym,u32 vblanksym)436 r535_sor_dp_sst(struct nvkm_ior *sor, int head, bool ef,
437 		u32 watermark, u32 hblanksym, u32 vblanksym)
438 {
439 	struct nvkm_disp *disp = sor->disp;
440 	struct NV0073_CTRL_CMD_DP_CONFIG_STREAM_PARAMS *ctrl;
441 
442 	ctrl = nvkm_gsp_rm_ctrl_get(&disp->rm.objcom,
443 				    NV0073_CTRL_CMD_DP_CONFIG_STREAM, sizeof(*ctrl));
444 	if (IS_ERR(ctrl))
445 		return PTR_ERR(ctrl);
446 
447 	ctrl->subDeviceInstance = 0;
448 	ctrl->head = head;
449 	ctrl->sorIndex = sor->id;
450 	ctrl->dpLink = sor->asy.link == 2;
451 	ctrl->bEnableOverride = 1;
452 	ctrl->bMST = 0;
453 	ctrl->hBlankSym = hblanksym;
454 	ctrl->vBlankSym = vblanksym;
455 	ctrl->colorFormat = 0;
456 	ctrl->bEnableTwoHeadOneOr = 0;
457 	ctrl->SST.bEnhancedFraming = ef;
458 	ctrl->SST.tuSize = 64;
459 	ctrl->SST.waterMark = watermark;
460 	ctrl->SST.bEnableAudioOverRightPanel = 0;
461 	return nvkm_gsp_rm_ctrl_wr(&disp->rm.objcom, ctrl);
462 }
463 
464 static const struct nvkm_ior_func_dp
465 r535_sor_dp = {
466 	.sst = r535_sor_dp_sst,
467 	.vcpi = r535_sor_dp_vcpi,
468 	.audio = r535_sor_dp_audio,
469 };
470 
471 static void
r535_sor_hdmi_scdc(struct nvkm_ior * sor,u32 khz,bool support,bool scrambling,bool scrambling_low_rates)472 r535_sor_hdmi_scdc(struct nvkm_ior *sor, u32 khz, bool support, bool scrambling,
473 		   bool scrambling_low_rates)
474 {
475 	struct nvkm_outp *outp = sor->asy.outp;
476 	struct nvkm_disp *disp = outp->disp;
477 	NV0073_CTRL_SPECIFIC_SET_HDMI_SINK_CAPS_PARAMS *ctrl;
478 
479 	ctrl = nvkm_gsp_rm_ctrl_get(&disp->rm.objcom,
480 				    NV0073_CTRL_CMD_SPECIFIC_SET_HDMI_SINK_CAPS, sizeof(*ctrl));
481 	if (WARN_ON(IS_ERR(ctrl)))
482 		return;
483 
484 	ctrl->displayId = BIT(outp->index);
485 	ctrl->caps = 0;
486 	if (support)
487 		ctrl->caps |= NVDEF(NV0073_CTRL_CMD_SPECIFIC, SET_HDMI_SINK_CAPS, SCDC_SUPPORTED, TRUE);
488 	if (scrambling)
489 		ctrl->caps |= NVDEF(NV0073_CTRL_CMD_SPECIFIC, SET_HDMI_SINK_CAPS, GT_340MHZ_CLOCK_SUPPORTED, TRUE);
490 	if (scrambling_low_rates)
491 		ctrl->caps |= NVDEF(NV0073_CTRL_CMD_SPECIFIC, SET_HDMI_SINK_CAPS, LTE_340MHZ_SCRAMBLING_SUPPORTED, TRUE);
492 
493 	WARN_ON(nvkm_gsp_rm_ctrl_wr(&disp->rm.objcom, ctrl));
494 }
495 
496 static void
r535_sor_hdmi_ctrl_audio_mute(struct nvkm_outp * outp,bool mute)497 r535_sor_hdmi_ctrl_audio_mute(struct nvkm_outp *outp, bool mute)
498 {
499 	struct nvkm_disp *disp = outp->disp;
500 	NV0073_CTRL_CMD_SPECIFIC_SET_HDMI_AUDIO_MUTESTREAM_PARAMS *ctrl;
501 
502 	ctrl = nvkm_gsp_rm_ctrl_get(&disp->rm.objcom,
503 				    NV0073_CTRL_CMD_SPECIFIC_SET_HDMI_AUDIO_MUTESTREAM, sizeof(*ctrl));
504 	if (WARN_ON(IS_ERR(ctrl)))
505 		return;
506 
507 	ctrl->displayId = BIT(outp->index);
508 	ctrl->mute = mute;
509 	WARN_ON(nvkm_gsp_rm_ctrl_wr(&disp->rm.objcom, ctrl));
510 }
511 
512 static void
r535_sor_hdmi_ctrl_audio(struct nvkm_outp * outp,bool enable)513 r535_sor_hdmi_ctrl_audio(struct nvkm_outp *outp, bool enable)
514 {
515 	struct nvkm_disp *disp = outp->disp;
516 	NV0073_CTRL_SPECIFIC_SET_OD_PACKET_PARAMS *ctrl;
517 
518 	ctrl = nvkm_gsp_rm_ctrl_get(&disp->rm.objcom,
519 				    NV0073_CTRL_CMD_SPECIFIC_SET_OD_PACKET, sizeof(*ctrl));
520 	if (WARN_ON(IS_ERR(ctrl)))
521 		return;
522 
523 	ctrl->displayId = BIT(outp->index);
524 	ctrl->transmitControl =
525 		NVDEF(NV0073_CTRL_SPECIFIC, SET_OD_PACKET_TRANSMIT_CONTROL, ENABLE, YES) |
526 		NVDEF(NV0073_CTRL_SPECIFIC, SET_OD_PACKET_TRANSMIT_CONTROL, OTHER_FRAME, DISABLE) |
527 		NVDEF(NV0073_CTRL_SPECIFIC, SET_OD_PACKET_TRANSMIT_CONTROL, SINGLE_FRAME, DISABLE) |
528 		NVDEF(NV0073_CTRL_SPECIFIC, SET_OD_PACKET_TRANSMIT_CONTROL, ON_HBLANK, DISABLE) |
529 		NVDEF(NV0073_CTRL_SPECIFIC, SET_OD_PACKET_TRANSMIT_CONTROL, VIDEO_FMT, SW_CONTROLLED) |
530 		NVDEF(NV0073_CTRL_SPECIFIC, SET_OD_PACKET_TRANSMIT_CONTROL, RESERVED_LEGACY_MODE, NO);
531 	ctrl->packetSize = 10;
532 	ctrl->aPacket[0] = 0x03;
533 	ctrl->aPacket[1] = 0x00;
534 	ctrl->aPacket[2] = 0x00;
535 	ctrl->aPacket[3] = enable ? 0x10 : 0x01;
536 	ctrl->aPacket[4] = 0x00;
537 	ctrl->aPacket[5] = 0x00;
538 	ctrl->aPacket[6] = 0x00;
539 	ctrl->aPacket[7] = 0x00;
540 	ctrl->aPacket[8] = 0x00;
541 	ctrl->aPacket[9] = 0x00;
542 	WARN_ON(nvkm_gsp_rm_ctrl_wr(&disp->rm.objcom, ctrl));
543 }
544 
545 static void
r535_sor_hdmi_audio(struct nvkm_ior * sor,int head,bool enable)546 r535_sor_hdmi_audio(struct nvkm_ior *sor, int head, bool enable)
547 {
548 	struct nvkm_device *device = sor->disp->engine.subdev.device;
549 	const u32 hdmi = head * 0x400;
550 
551 	r535_sor_hdmi_ctrl_audio(sor->asy.outp, enable);
552 	r535_sor_hdmi_ctrl_audio_mute(sor->asy.outp, !enable);
553 
554 	/* General Control (GCP). */
555 	nvkm_mask(device, 0x6f00c0 + hdmi, 0x00000001, 0x00000000);
556 	nvkm_wr32(device, 0x6f00cc + hdmi, !enable ? 0x00000001 : 0x00000010);
557 	nvkm_mask(device, 0x6f00c0 + hdmi, 0x00000001, 0x00000001);
558 }
559 
560 static void
r535_sor_hdmi_ctrl(struct nvkm_ior * sor,int head,bool enable,u8 max_ac_packet,u8 rekey)561 r535_sor_hdmi_ctrl(struct nvkm_ior *sor, int head, bool enable, u8 max_ac_packet, u8 rekey)
562 {
563 	struct nvkm_disp *disp = sor->disp;
564 	NV0073_CTRL_SPECIFIC_SET_HDMI_ENABLE_PARAMS *ctrl;
565 
566 	if (!enable)
567 		return;
568 
569 	ctrl = nvkm_gsp_rm_ctrl_get(&disp->rm.objcom,
570 				    NV0073_CTRL_CMD_SPECIFIC_SET_HDMI_ENABLE, sizeof(*ctrl));
571 	if (WARN_ON(IS_ERR(ctrl)))
572 		return;
573 
574 	ctrl->displayId = BIT(sor->asy.outp->index);
575 	ctrl->enable = enable;
576 
577 	WARN_ON(nvkm_gsp_rm_ctrl_wr(&disp->rm.objcom, ctrl));
578 }
579 
580 static const struct nvkm_ior_func_hdmi
581 r535_sor_hdmi = {
582 	.ctrl = r535_sor_hdmi_ctrl,
583 	.scdc = r535_sor_hdmi_scdc,
584 	/*TODO: SF_USER -> KMS. */
585 	.infoframe_avi = gv100_sor_hdmi_infoframe_avi,
586 	.infoframe_vsi = gv100_sor_hdmi_infoframe_vsi,
587 	.audio = r535_sor_hdmi_audio,
588 };
589 
590 static const struct nvkm_ior_func
591 r535_sor = {
592 	.hdmi = &r535_sor_hdmi,
593 	.dp = &r535_sor_dp,
594 	.hda = &r535_sor_hda,
595 	.bl = &r535_sor_bl,
596 };
597 
598 static int
r535_sor_new(struct nvkm_disp * disp,int id)599 r535_sor_new(struct nvkm_disp *disp, int id)
600 {
601 	return nvkm_ior_new_(&r535_sor, disp, SOR, id, true/*XXX: hda cap*/);
602 }
603 
604 static int
r535_sor_cnt(struct nvkm_disp * disp,unsigned long * pmask)605 r535_sor_cnt(struct nvkm_disp *disp, unsigned long *pmask)
606 {
607 	*pmask = 0xf;
608 	return 4;
609 }
610 
611 static void
r535_head_vblank_put(struct nvkm_head * head)612 r535_head_vblank_put(struct nvkm_head *head)
613 {
614 	struct nvkm_device *device = head->disp->engine.subdev.device;
615 
616 	nvkm_mask(device, 0x611d80 + (head->id * 4), 0x00000002, 0x00000000);
617 }
618 
619 static void
r535_head_vblank_get(struct nvkm_head * head)620 r535_head_vblank_get(struct nvkm_head *head)
621 {
622 	struct nvkm_device *device = head->disp->engine.subdev.device;
623 
624 	nvkm_wr32(device, 0x611800 + (head->id * 4), 0x00000002);
625 	nvkm_mask(device, 0x611d80 + (head->id * 4), 0x00000002, 0x00000002);
626 }
627 
628 static void
r535_head_state(struct nvkm_head * head,struct nvkm_head_state * state)629 r535_head_state(struct nvkm_head *head, struct nvkm_head_state *state)
630 {
631 }
632 
633 static const struct nvkm_head_func
634 r535_head = {
635 	.state = r535_head_state,
636 	.vblank_get = r535_head_vblank_get,
637 	.vblank_put = r535_head_vblank_put,
638 };
639 
640 static struct nvkm_conn *
r535_conn_new(struct nvkm_disp * disp,u32 id)641 r535_conn_new(struct nvkm_disp *disp, u32 id)
642 {
643 	NV0073_CTRL_SPECIFIC_GET_CONNECTOR_DATA_PARAMS *ctrl;
644 	struct nvbios_connE dcbE = {};
645 	struct nvkm_conn *conn;
646 	int ret, index;
647 
648 	ctrl = nvkm_gsp_rm_ctrl_get(&disp->rm.objcom,
649 				    NV0073_CTRL_CMD_SPECIFIC_GET_CONNECTOR_DATA, sizeof(*ctrl));
650 	if (IS_ERR(ctrl))
651 		return ERR_CAST(ctrl);
652 
653 	ctrl->subDeviceInstance = 0;
654 	ctrl->displayId = BIT(id);
655 
656 	ret = nvkm_gsp_rm_ctrl_push(&disp->rm.objcom, &ctrl, sizeof(*ctrl));
657 	if (ret) {
658 		nvkm_gsp_rm_ctrl_done(&disp->rm.objcom, ctrl);
659 		return ERR_PTR(ret);
660 	}
661 
662 	list_for_each_entry(conn, &disp->conns, head) {
663 		if (conn->index == ctrl->data[0].index) {
664 			nvkm_gsp_rm_ctrl_done(&disp->rm.objcom, ctrl);
665 			return conn;
666 		}
667 	}
668 
669 	dcbE.type = ctrl->data[0].type;
670 	index = ctrl->data[0].index;
671 	nvkm_gsp_rm_ctrl_done(&disp->rm.objcom, ctrl);
672 
673 	ret = nvkm_conn_new(disp, index, &dcbE, &conn);
674 	if (ret)
675 		return ERR_PTR(ret);
676 
677 	list_add_tail(&conn->head, &disp->conns);
678 	return conn;
679 }
680 
681 static void
r535_outp_release(struct nvkm_outp * outp)682 r535_outp_release(struct nvkm_outp *outp)
683 {
684 	outp->disp->rm.assigned_sors &= ~BIT(outp->ior->id);
685 	outp->ior->asy.outp = NULL;
686 	outp->ior = NULL;
687 }
688 
689 static int
r535_outp_acquire(struct nvkm_outp * outp,bool hda)690 r535_outp_acquire(struct nvkm_outp *outp, bool hda)
691 {
692 	struct nvkm_disp *disp = outp->disp;
693 	struct nvkm_ior *ior;
694 	NV0073_CTRL_DFP_ASSIGN_SOR_PARAMS *ctrl;
695 	int ret, or;
696 
697 	ctrl = nvkm_gsp_rm_ctrl_get(&disp->rm.objcom,
698 				    NV0073_CTRL_CMD_DFP_ASSIGN_SOR, sizeof(*ctrl));
699 	if (IS_ERR(ctrl))
700 		return PTR_ERR(ctrl);
701 
702 	ctrl->subDeviceInstance = 0;
703 	ctrl->displayId = BIT(outp->index);
704 	ctrl->sorExcludeMask = disp->rm.assigned_sors;
705 	if (hda)
706 		ctrl->flags |= NVDEF(NV0073_CTRL, DFP_ASSIGN_SOR_FLAGS, AUDIO, OPTIMAL);
707 
708 	ret = nvkm_gsp_rm_ctrl_push(&disp->rm.objcom, &ctrl, sizeof(*ctrl));
709 	if (ret) {
710 		nvkm_gsp_rm_ctrl_done(&disp->rm.objcom, ctrl);
711 		return ret;
712 	}
713 
714 	for (or = 0; or < ARRAY_SIZE(ctrl->sorAssignListWithTag); or++) {
715 		if (ctrl->sorAssignListWithTag[or].displayMask & BIT(outp->index)) {
716 			disp->rm.assigned_sors |= BIT(or);
717 			break;
718 		}
719 	}
720 
721 	nvkm_gsp_rm_ctrl_done(&disp->rm.objcom, ctrl);
722 
723 	if (WARN_ON(or == ARRAY_SIZE(ctrl->sorAssignListWithTag)))
724 		return -EINVAL;
725 
726 	ior = nvkm_ior_find(disp, SOR, or);
727 	if (WARN_ON(!ior))
728 		return -EINVAL;
729 
730 	nvkm_outp_acquire_ior(outp, NVKM_OUTP_USER, ior);
731 	return 0;
732 }
733 
734 static int
r535_disp_get_active(struct nvkm_disp * disp,unsigned head,u32 * displayid)735 r535_disp_get_active(struct nvkm_disp *disp, unsigned head, u32 *displayid)
736 {
737 	NV0073_CTRL_SYSTEM_GET_ACTIVE_PARAMS *ctrl;
738 	int ret;
739 
740 	ctrl = nvkm_gsp_rm_ctrl_get(&disp->rm.objcom,
741 				    NV0073_CTRL_CMD_SYSTEM_GET_ACTIVE, sizeof(*ctrl));
742 	if (IS_ERR(ctrl))
743 		return PTR_ERR(ctrl);
744 
745 	ctrl->subDeviceInstance = 0;
746 	ctrl->head = head;
747 
748 	ret = nvkm_gsp_rm_ctrl_push(&disp->rm.objcom, &ctrl, sizeof(*ctrl));
749 	if (ret) {
750 		nvkm_gsp_rm_ctrl_done(&disp->rm.objcom, ctrl);
751 		return ret;
752 	}
753 
754 	*displayid = ctrl->displayId;
755 	nvkm_gsp_rm_ctrl_done(&disp->rm.objcom, ctrl);
756 	return 0;
757 }
758 
759 static struct nvkm_ior *
r535_outp_inherit(struct nvkm_outp * outp)760 r535_outp_inherit(struct nvkm_outp *outp)
761 {
762 	struct nvkm_disp *disp = outp->disp;
763 	struct nvkm_head *head;
764 	u32 displayid;
765 	int ret;
766 
767 	list_for_each_entry(head, &disp->heads, head) {
768 		const struct nvkm_rm_api *rmapi = disp->rm.objcom.client->gsp->rm->api;
769 
770 		ret = rmapi->disp->get_active(disp, head->id, &displayid);
771 		if (WARN_ON(ret))
772 			return NULL;
773 
774 		if (displayid == BIT(outp->index)) {
775 			NV0073_CTRL_SPECIFIC_OR_GET_INFO_PARAMS *ctrl;
776 			u32 id, proto;
777 			struct nvkm_ior *ior;
778 
779 			ctrl = nvkm_gsp_rm_ctrl_get(&disp->rm.objcom,
780 						    NV0073_CTRL_CMD_SPECIFIC_OR_GET_INFO,
781 						    sizeof(*ctrl));
782 			if (IS_ERR(ctrl))
783 				return NULL;
784 
785 			ctrl->subDeviceInstance = 0;
786 			ctrl->displayId = displayid;
787 
788 			ret = nvkm_gsp_rm_ctrl_push(&disp->rm.objcom, &ctrl, sizeof(*ctrl));
789 			if (ret) {
790 				nvkm_gsp_rm_ctrl_done(&disp->rm.objcom, ctrl);
791 				return NULL;
792 			}
793 
794 			id = ctrl->index;
795 			proto = ctrl->protocol;
796 			nvkm_gsp_rm_ctrl_done(&disp->rm.objcom, ctrl);
797 
798 			ior = nvkm_ior_find(disp, SOR, id);
799 			if (WARN_ON(!ior))
800 				return NULL;
801 
802 			switch (proto) {
803 			case NV0073_CTRL_SPECIFIC_OR_PROTOCOL_SOR_SINGLE_TMDS_A:
804 				ior->arm.proto = TMDS;
805 				ior->arm.link = 1;
806 				break;
807 			case NV0073_CTRL_SPECIFIC_OR_PROTOCOL_SOR_SINGLE_TMDS_B:
808 				ior->arm.proto = TMDS;
809 				ior->arm.link = 2;
810 				break;
811 			case NV0073_CTRL_SPECIFIC_OR_PROTOCOL_SOR_DUAL_TMDS:
812 				ior->arm.proto = TMDS;
813 				ior->arm.link = 3;
814 				break;
815 			case NV0073_CTRL_SPECIFIC_OR_PROTOCOL_SOR_DP_A:
816 				ior->arm.proto = DP;
817 				ior->arm.link = 1;
818 				break;
819 			case NV0073_CTRL_SPECIFIC_OR_PROTOCOL_SOR_DP_B:
820 				ior->arm.proto = DP;
821 				ior->arm.link = 2;
822 				break;
823 			default:
824 				WARN_ON(1);
825 				return NULL;
826 			}
827 
828 			ior->arm.proto_evo = proto;
829 			ior->arm.head = BIT(head->id);
830 			disp->rm.assigned_sors |= BIT(ior->id);
831 			return ior;
832 		}
833 	}
834 
835 	return NULL;
836 }
837 
838 static int
r535_outp_dfp_get_info(struct nvkm_outp * outp)839 r535_outp_dfp_get_info(struct nvkm_outp *outp)
840 {
841 	NV0073_CTRL_DFP_GET_INFO_PARAMS *ctrl;
842 	struct nvkm_disp *disp = outp->disp;
843 	int ret;
844 
845 	ctrl = nvkm_gsp_rm_ctrl_get(&disp->rm.objcom, NV0073_CTRL_CMD_DFP_GET_INFO, sizeof(*ctrl));
846 	if (IS_ERR(ctrl))
847 		return PTR_ERR(ctrl);
848 
849 	ctrl->displayId = BIT(outp->index);
850 
851 	ret = nvkm_gsp_rm_ctrl_push(&disp->rm.objcom, &ctrl, sizeof(*ctrl));
852 	if (ret) {
853 		nvkm_gsp_rm_ctrl_done(&disp->rm.objcom, ctrl);
854 		return ret;
855 	}
856 
857 	nvkm_debug(&disp->engine.subdev, "DFP %08x: flags:%08x flags2:%08x\n",
858 		   ctrl->displayId, ctrl->flags, ctrl->flags2);
859 
860 	nvkm_gsp_rm_ctrl_done(&disp->rm.objcom, ctrl);
861 	return 0;
862 }
863 
864 static int
r535_disp_get_connect_state(struct nvkm_disp * disp,unsigned display_id)865 r535_disp_get_connect_state(struct nvkm_disp *disp, unsigned display_id)
866 {
867 	NV0073_CTRL_SYSTEM_GET_CONNECT_STATE_PARAMS *ctrl;
868 	int ret;
869 
870 	ctrl = nvkm_gsp_rm_ctrl_get(&disp->rm.objcom,
871 				    NV0073_CTRL_CMD_SYSTEM_GET_CONNECT_STATE, sizeof(*ctrl));
872 	if (IS_ERR(ctrl))
873 		return PTR_ERR(ctrl);
874 
875 	ctrl->subDeviceInstance = 0;
876 	ctrl->displayMask = BIT(display_id);
877 
878 	ret = nvkm_gsp_rm_ctrl_push(&disp->rm.objcom, &ctrl, sizeof(*ctrl));
879 	if (ret == 0 && (ctrl->displayMask & BIT(display_id)))
880 		ret = 1;
881 
882 	nvkm_gsp_rm_ctrl_done(&disp->rm.objcom, ctrl);
883 	return ret;
884 }
885 
886 static int
r535_outp_detect(struct nvkm_outp * outp)887 r535_outp_detect(struct nvkm_outp *outp)
888 {
889 	const struct nvkm_rm_api *rmapi = outp->disp->rm.objcom.client->gsp->rm->api;
890 	int ret;
891 
892 	ret = rmapi->disp->get_connect_state(outp->disp, outp->index);
893 	if (ret == 1) {
894 		ret = r535_outp_dfp_get_info(outp);
895 		if (ret == 0)
896 			ret = 1;
897 	}
898 
899 	return ret;
900 }
901 
902 static int
r535_dp_mst_id_put(struct nvkm_outp * outp,u32 id)903 r535_dp_mst_id_put(struct nvkm_outp *outp, u32 id)
904 {
905 	NV0073_CTRL_CMD_DP_TOPOLOGY_FREE_DISPLAYID_PARAMS *ctrl;
906 	struct nvkm_disp *disp = outp->disp;
907 
908 	ctrl = nvkm_gsp_rm_ctrl_get(&disp->rm.objcom,
909 				    NV0073_CTRL_CMD_DP_TOPOLOGY_FREE_DISPLAYID, sizeof(*ctrl));
910 	if (IS_ERR(ctrl))
911 		return PTR_ERR(ctrl);
912 
913 	ctrl->subDeviceInstance = 0;
914 	ctrl->displayId = id;
915 	return nvkm_gsp_rm_ctrl_wr(&disp->rm.objcom, ctrl);
916 }
917 
918 static int
r535_dp_mst_id_get(struct nvkm_outp * outp,u32 * pid)919 r535_dp_mst_id_get(struct nvkm_outp *outp, u32 *pid)
920 {
921 	NV0073_CTRL_CMD_DP_TOPOLOGY_ALLOCATE_DISPLAYID_PARAMS *ctrl;
922 	struct nvkm_disp *disp = outp->disp;
923 	int ret;
924 
925 	ctrl = nvkm_gsp_rm_ctrl_get(&disp->rm.objcom,
926 				    NV0073_CTRL_CMD_DP_TOPOLOGY_ALLOCATE_DISPLAYID,
927 				    sizeof(*ctrl));
928 	if (IS_ERR(ctrl))
929 		return PTR_ERR(ctrl);
930 
931 	ctrl->subDeviceInstance = 0;
932 	ctrl->displayId = BIT(outp->index);
933 	ret = nvkm_gsp_rm_ctrl_push(&disp->rm.objcom, &ctrl, sizeof(*ctrl));
934 	if (ret) {
935 		nvkm_gsp_rm_ctrl_done(&disp->rm.objcom, ctrl);
936 		return ret;
937 	}
938 
939 	*pid = ctrl->displayIdAssigned;
940 	nvkm_gsp_rm_ctrl_done(&disp->rm.objcom, ctrl);
941 	return 0;
942 }
943 
944 static int
r535_dp_drive(struct nvkm_outp * outp,u8 lanes,u8 pe[4],u8 vs[4])945 r535_dp_drive(struct nvkm_outp *outp, u8 lanes, u8 pe[4], u8 vs[4])
946 {
947 	NV0073_CTRL_DP_LANE_DATA_PARAMS *ctrl;
948 	struct nvkm_disp *disp = outp->disp;
949 
950 	ctrl = nvkm_gsp_rm_ctrl_get(&disp->rm.objcom,
951 				    NV0073_CTRL_CMD_DP_SET_LANE_DATA, sizeof(*ctrl));
952 	if (IS_ERR(ctrl))
953 		return PTR_ERR(ctrl);
954 
955 	ctrl->displayId = BIT(outp->index);
956 	ctrl->numLanes = lanes;
957 	for (int i = 0; i < lanes; i++)
958 		ctrl->data[i] = NVVAL(NV0073_CTRL, DP_LANE_DATA,  PREEMPHASIS, pe[i]) |
959 				NVVAL(NV0073_CTRL, DP_LANE_DATA, DRIVECURRENT, vs[i]);
960 
961 	return nvkm_gsp_rm_ctrl_wr(&disp->rm.objcom, ctrl);
962 }
963 
/* Program the link configuration and kick off DP link-training via
 * GSP-RM's NV0073_CTRL_CMD_DP_CTRL, for a single training target
 * (TARGET field of DP_DATA; target 0 presumably addresses the sink,
 * higher values an LTTPR — TODO confirm against RM docs).
 *
 * Returns 0 on success, -EIO if RM reports a link-training error, or a
 * negative errno from the RPC itself.
 */
static int
r535_dp_train_target(struct nvkm_outp *outp, u8 target, bool mst, u8 link_nr, u8 link_bw)
{
	struct nvkm_disp *disp = outp->disp;
	NV0073_CTRL_DP_CTRL_PARAMS *ctrl;
	int ret, retries;
	u32 cmd, data;

	/* Request lane-count + link-bw programming, training through any
	 * PHY repeaters present.
	 */
	cmd = NVDEF(NV0073_CTRL, DP_CMD, SET_LANE_COUNT, TRUE) |
	      NVDEF(NV0073_CTRL, DP_CMD, SET_LINK_BW, TRUE) |
	      NVDEF(NV0073_CTRL, DP_CMD, TRAIN_PHY_REPEATER, YES);
	data = NVVAL(NV0073_CTRL, DP_DATA, SET_LANE_COUNT, link_nr) |
	       NVVAL(NV0073_CTRL, DP_DATA, SET_LINK_BW, link_bw) |
	       NVVAL(NV0073_CTRL, DP_DATA, TARGET, target);

	if (mst)
		cmd |= NVDEF(NV0073_CTRL, DP_CMD, SET_FORMAT_MODE, MULTI_STREAM);

	if (outp->dp.dpcd[DPCD_RC02] & DPCD_RC02_ENHANCED_FRAME_CAP)
		cmd |= NVDEF(NV0073_CTRL, DP_CMD, SET_ENHANCED_FRAMING, TRUE);

	/* Only grant POST_LT_ADJ when training the sink directly and TPS4
	 * isn't supported.  NOTE(review): 0x20 in DPCD_RC02 looks like
	 * DPCD 0x002 bit 5 (POST_LT_ADJ_REQ_SUPPORTED) — confirm.
	 */
	if (target == 0 &&
	     (outp->dp.dpcd[DPCD_RC02] & 0x20) &&
	    !(outp->dp.dpcd[DPCD_RC03] & DPCD_RC03_TPS4_SUPPORTED))
		cmd |= NVDEF(NV0073_CTRL, DP_CMD, POST_LT_ADJ_REQ_GRANTED, YES);

	/* We should retry up to 3 times, but only if GSP asks politely */
	for (retries = 0; retries < 3; ++retries) {
		ctrl = nvkm_gsp_rm_ctrl_get(&disp->rm.objcom, NV0073_CTRL_CMD_DP_CTRL,
					    sizeof(*ctrl));
		if (IS_ERR(ctrl))
			return PTR_ERR(ctrl);

		ctrl->subDeviceInstance = 0;
		ctrl->displayId = BIT(outp->index);
		ctrl->retryTimeMs = 0;
		ctrl->cmd = cmd;
		ctrl->data = data;

		ret = nvkm_gsp_rm_ctrl_push(&disp->rm.objcom, &ctrl, sizeof(*ctrl));
		if ((ret == -EAGAIN || ret == -EBUSY) && ctrl->retryTimeMs) {
			/*
			 * Device (likely an eDP panel) isn't ready yet, wait for the time specified
			 * by GSP before retrying again
			 */
			nvkm_debug(&disp->engine.subdev,
				   "Waiting %dms for GSP LT panel delay before retrying\n",
				   ctrl->retryTimeMs);
			msleep(ctrl->retryTimeMs);
			nvkm_gsp_rm_ctrl_done(&disp->rm.objcom, ctrl);
		} else {
			/* GSP didn't say to retry, or we were successful */
			if (ctrl->err)
				ret = -EIO;
			nvkm_gsp_rm_ctrl_done(&disp->rm.objcom, ctrl);
			break;
		}
	}

	return ret;
}
1025 
1026 static int
r535_dp_train(struct nvkm_outp * outp,bool retrain)1027 r535_dp_train(struct nvkm_outp *outp, bool retrain)
1028 {
1029 	for (int target = outp->dp.lttprs; target >= 0; target--) {
1030 		int ret = r535_dp_train_target(outp, target, outp->dp.lt.mst,
1031 							     outp->dp.lt.nr,
1032 							     outp->dp.lt.bw);
1033 		if (ret)
1034 			return ret;
1035 	}
1036 
1037 	return 0;
1038 }
1039 
1040 static int
r535_dp_set_indexed_link_rates(struct nvkm_outp * outp)1041 r535_dp_set_indexed_link_rates(struct nvkm_outp *outp)
1042 {
1043 	NV0073_CTRL_CMD_DP_CONFIG_INDEXED_LINK_RATES_PARAMS *ctrl;
1044 	struct nvkm_disp *disp = outp->disp;
1045 
1046 	if (WARN_ON(outp->dp.rates > ARRAY_SIZE(ctrl->linkRateTbl)))
1047 		return -EINVAL;
1048 
1049 	ctrl = nvkm_gsp_rm_ctrl_get(&disp->rm.objcom,
1050 				    NV0073_CTRL_CMD_DP_CONFIG_INDEXED_LINK_RATES, sizeof(*ctrl));
1051 	if (IS_ERR(ctrl))
1052 		return PTR_ERR(ctrl);
1053 
1054 	ctrl->displayId = BIT(outp->index);
1055 	for (int i = 0; i < outp->dp.rates; i++)
1056 		ctrl->linkRateTbl[outp->dp.rate[i].dpcd] = outp->dp.rate[i].rate * 10 / 200;
1057 
1058 	return nvkm_gsp_rm_ctrl_wr(&disp->rm.objcom, ctrl);
1059 }
1060 
1061 static int
r535_dp_rates(struct nvkm_outp * outp)1062 r535_dp_rates(struct nvkm_outp *outp)
1063 {
1064 	struct nvkm_rm *rm = outp->disp->rm.objcom.client->gsp->rm;
1065 
1066 	if (outp->conn->info.type != DCB_CONNECTOR_eDP ||
1067 	    !outp->dp.rates || outp->dp.rate[0].dpcd < 0)
1068 		return 0;
1069 
1070 	return rm->api->disp->dp.set_indexed_link_rates(outp);
1071 }
1072 
/* Perform a DP AUX channel transaction through GSP-RM.
 *
 * @type:  raw AUX request type (may be modified for address-only xfers)
 * @addr:  AUX address
 * @data:  in: bytes to write; out: bytes read back
 * @psize: in: transfer length; out: length RM reports back
 *
 * Returns the AUX reply type (>= 0) on success, or a negative errno.
 * Like r535_dp_train_target(), retries up to 3 times when GSP reports
 * the device is busy and supplies a retry delay.
 */
static int
r535_dp_aux_xfer(struct nvkm_outp *outp, u8 type, u32 addr, u8 *data, u8 *psize)
{
	struct nvkm_disp *disp = outp->disp;
	NV0073_CTRL_DP_AUXCH_CTRL_PARAMS *ctrl;
	u8 size = *psize;
	int ret;
	int retries;

	for (retries = 0; retries < 3; ++retries) {
		ctrl = nvkm_gsp_rm_ctrl_get(&disp->rm.objcom, NV0073_CTRL_CMD_DP_AUXCH_CTRL, sizeof(*ctrl));
		if (IS_ERR(ctrl))
			return PTR_ERR(ctrl);

		ctrl->subDeviceInstance = 0;
		ctrl->displayId = BIT(outp->index);
		/* A zero-length transfer is an address-only transaction. */
		ctrl->bAddrOnly = !size;
		ctrl->cmd = type;
		if (ctrl->bAddrOnly) {
			ctrl->cmd = NVDEF_SET(ctrl->cmd, NV0073_CTRL, DP_AUXCH_CMD, REQ_TYPE, WRITE);
			ctrl->cmd = NVDEF_SET(ctrl->cmd, NV0073_CTRL, DP_AUXCH_CMD,  I2C_MOT, FALSE);
		}
		ctrl->addr = addr;
		/* size - 1: RM appears to use the AUX-style N-1 length
		 * encoding — NOTE(review): confirm against ctrl0073dp.h.
		 */
		ctrl->size = !ctrl->bAddrOnly ? (size - 1) : 0;
		memcpy(ctrl->data, data, size);

		ret = nvkm_gsp_rm_ctrl_push(&disp->rm.objcom, &ctrl, sizeof(*ctrl));
		if ((ret == -EAGAIN || ret == -EBUSY) && ctrl->retryTimeMs) {
			/*
			 * Device (likely an eDP panel) isn't ready yet, wait for the time specified
			 * by GSP before retrying again
			 */
			nvkm_debug(&disp->engine.subdev,
				   "Waiting %dms for GSP LT panel delay before retrying in AUX\n",
				   ctrl->retryTimeMs);
			msleep(ctrl->retryTimeMs);
			nvkm_gsp_rm_ctrl_done(&disp->rm.objcom, ctrl);
		} else {
			/* Done (or a non-retryable error): copy the data and
			 * reply type back to the caller.
			 */
			memcpy(data, ctrl->data, size);
			*psize = ctrl->size;
			ret = ctrl->replyType;
			nvkm_gsp_rm_ctrl_done(&disp->rm.objcom, ctrl);
			break;
		}
	}
	return ret;
}
1120 
/* No-op: always succeeds.  AUX power is presumably managed by GSP-RM,
 * so there is nothing to do host-side.
 */
static int
r535_dp_aux_pwr(struct nvkm_outp *outp, bool pu)
{
	return 0;
}
1126 
1127 static void
r535_dp_release(struct nvkm_outp * outp)1128 r535_dp_release(struct nvkm_outp *outp)
1129 {
1130 	if (!outp->dp.lt.bw) {
1131 		if (!WARN_ON(!outp->dp.rates))
1132 			outp->dp.lt.bw = outp->dp.rate[0].rate / 27000;
1133 		else
1134 			outp->dp.lt.bw = 0x06;
1135 	}
1136 
1137 	outp->dp.lt.nr = 0;
1138 
1139 	r535_dp_train_target(outp, 0, outp->dp.lt.mst, outp->dp.lt.nr, outp->dp.lt.bw);
1140 	r535_outp_release(outp);
1141 }
1142 
1143 static int
r535_dp_acquire(struct nvkm_outp * outp,bool hda)1144 r535_dp_acquire(struct nvkm_outp *outp, bool hda)
1145 {
1146 	int ret;
1147 
1148 	ret = r535_outp_acquire(outp, hda);
1149 	if (ret)
1150 		return ret;
1151 
1152 	return 0;
1153 }
1154 
/* Output hooks for DisplayPort outputs managed by GSP-RM. */
static const struct nvkm_outp_func
r535_dp = {
	.detect = r535_outp_detect,
	.inherit = r535_outp_inherit,
	.acquire = r535_dp_acquire,
	.release = r535_dp_release,
	.dp.aux_pwr = r535_dp_aux_pwr,
	.dp.aux_xfer = r535_dp_aux_xfer,
	.dp.mst_id_get = r535_dp_mst_id_get,
	.dp.mst_id_put = r535_dp_mst_id_put,
	.dp.rates = r535_dp_rates,
	.dp.train = r535_dp_train,
	.dp.drive = r535_dp_drive,
};
1169 
1170 static int
r535_dp_get_caps(struct nvkm_disp * disp,int * plink_bw,bool * pmst,bool * pwm)1171 r535_dp_get_caps(struct nvkm_disp *disp, int *plink_bw, bool *pmst, bool *pwm)
1172 {
1173 	NV0073_CTRL_CMD_DP_GET_CAPS_PARAMS *ctrl;
1174 	int ret;
1175 
1176 	ctrl = nvkm_gsp_rm_ctrl_get(&disp->rm.objcom,
1177 				    NV0073_CTRL_CMD_DP_GET_CAPS, sizeof(*ctrl));
1178 	if (IS_ERR(ctrl))
1179 		return PTR_ERR(ctrl);
1180 
1181 	ctrl->sorIndex = ~0;
1182 
1183 	ret = nvkm_gsp_rm_ctrl_push(&disp->rm.objcom, &ctrl, sizeof(*ctrl));
1184 	if (ret) {
1185 		nvkm_gsp_rm_ctrl_done(&disp->rm.objcom, ctrl);
1186 		return ret;
1187 	}
1188 
1189 	switch (NVVAL_GET(ctrl->maxLinkRate, NV0073_CTRL_CMD, DP_GET_CAPS, MAX_LINK_RATE)) {
1190 	case NV0073_CTRL_CMD_DP_GET_CAPS_MAX_LINK_RATE_1_62:
1191 		*plink_bw = 0x06;
1192 		break;
1193 	case NV0073_CTRL_CMD_DP_GET_CAPS_MAX_LINK_RATE_2_70:
1194 		*plink_bw = 0x0a;
1195 		break;
1196 	case NV0073_CTRL_CMD_DP_GET_CAPS_MAX_LINK_RATE_5_40:
1197 		*plink_bw = 0x14;
1198 		break;
1199 	case NV0073_CTRL_CMD_DP_GET_CAPS_MAX_LINK_RATE_8_10:
1200 		*plink_bw = 0x1e;
1201 		break;
1202 	default:
1203 		*plink_bw = 0x00;
1204 		break;
1205 	}
1206 
1207 	*pmst = ctrl->bIsMultistreamSupported;
1208 	*pwm = ctrl->bHasIncreasedWatermarkLimits;
1209 	nvkm_gsp_rm_ctrl_done(&disp->rm.objcom, ctrl);
1210 	return 0;
1211 }
1212 
1213 static int
r535_tmds_edid_get(struct nvkm_outp * outp,u8 * data,u16 * psize)1214 r535_tmds_edid_get(struct nvkm_outp *outp, u8 *data, u16 *psize)
1215 {
1216 	NV0073_CTRL_SPECIFIC_GET_EDID_V2_PARAMS *ctrl;
1217 	struct nvkm_disp *disp = outp->disp;
1218 	int ret = -E2BIG;
1219 
1220 	ctrl = nvkm_gsp_rm_ctrl_get(&disp->rm.objcom,
1221 				    NV0073_CTRL_CMD_SPECIFIC_GET_EDID_V2, sizeof(*ctrl));
1222 	if (IS_ERR(ctrl))
1223 		return PTR_ERR(ctrl);
1224 
1225 	ctrl->subDeviceInstance = 0;
1226 	ctrl->displayId = BIT(outp->index);
1227 
1228 	ret = nvkm_gsp_rm_ctrl_push(&disp->rm.objcom, &ctrl, sizeof(*ctrl));
1229 	if (ret) {
1230 		nvkm_gsp_rm_ctrl_done(&disp->rm.objcom, ctrl);
1231 		return ret;
1232 	}
1233 
1234 	ret = -E2BIG;
1235 	if (ctrl->bufferSize <= *psize) {
1236 		memcpy(data, ctrl->edidBuffer, ctrl->bufferSize);
1237 		*psize = ctrl->bufferSize;
1238 		ret = 0;
1239 	}
1240 
1241 	nvkm_gsp_rm_ctrl_done(&disp->rm.objcom, ctrl);
1242 	return ret;
1243 }
1244 
/* Output hooks for TMDS (DVI/HDMI) outputs managed by GSP-RM. */
static const struct nvkm_outp_func
r535_tmds = {
	.detect = r535_outp_detect,
	.inherit = r535_outp_inherit,
	.acquire = r535_outp_acquire,
	.release = r535_outp_release,
	.edid_get = r535_tmds_edid_get,
};
1253 
1254 static int
r535_outp_new(struct nvkm_disp * disp,u32 id)1255 r535_outp_new(struct nvkm_disp *disp, u32 id)
1256 {
1257 	const struct nvkm_rm_api *rmapi = disp->rm.objcom.client->gsp->rm->api;
1258 	NV0073_CTRL_SPECIFIC_OR_GET_INFO_PARAMS *ctrl;
1259 	enum nvkm_ior_proto proto;
1260 	struct dcb_output dcbE = {};
1261 	struct nvkm_conn *conn;
1262 	struct nvkm_outp *outp;
1263 	u8 locn, link = 0;
1264 	int ret;
1265 
1266 	ctrl = nvkm_gsp_rm_ctrl_get(&disp->rm.objcom,
1267 				    NV0073_CTRL_CMD_SPECIFIC_OR_GET_INFO, sizeof(*ctrl));
1268 	if (IS_ERR(ctrl))
1269 		return PTR_ERR(ctrl);
1270 
1271 	ctrl->subDeviceInstance = 0;
1272 	ctrl->displayId = BIT(id);
1273 
1274 	ret = nvkm_gsp_rm_ctrl_push(&disp->rm.objcom, &ctrl, sizeof(*ctrl));
1275 	if (ret) {
1276 		nvkm_gsp_rm_ctrl_done(&disp->rm.objcom, ctrl);
1277 		return ret;
1278 	}
1279 
1280 	switch (ctrl->type) {
1281 	case NV0073_CTRL_SPECIFIC_OR_TYPE_NONE:
1282 		return 0;
1283 	case NV0073_CTRL_SPECIFIC_OR_TYPE_SOR:
1284 		switch (ctrl->protocol) {
1285 		case NV0073_CTRL_SPECIFIC_OR_PROTOCOL_SOR_SINGLE_TMDS_A:
1286 			proto = TMDS;
1287 			link = 1;
1288 			break;
1289 		case NV0073_CTRL_SPECIFIC_OR_PROTOCOL_SOR_SINGLE_TMDS_B:
1290 			proto = TMDS;
1291 			link = 2;
1292 			break;
1293 		case NV0073_CTRL_SPECIFIC_OR_PROTOCOL_SOR_DUAL_TMDS:
1294 			proto = TMDS;
1295 			link = 3;
1296 			break;
1297 		case NV0073_CTRL_SPECIFIC_OR_PROTOCOL_SOR_DP_A:
1298 			proto = DP;
1299 			link = 1;
1300 			break;
1301 		case NV0073_CTRL_SPECIFIC_OR_PROTOCOL_SOR_DP_B:
1302 			proto = DP;
1303 			link = 2;
1304 			break;
1305 		default:
1306 			WARN_ON(1);
1307 			return -EINVAL;
1308 		}
1309 
1310 		break;
1311 	default:
1312 		WARN_ON(1);
1313 		return -EINVAL;
1314 	}
1315 
1316 	locn = ctrl->location;
1317 	nvkm_gsp_rm_ctrl_done(&disp->rm.objcom, ctrl);
1318 
1319 	conn = r535_conn_new(disp, id);
1320 	if (IS_ERR(conn))
1321 		return PTR_ERR(conn);
1322 
1323 	switch (proto) {
1324 	case TMDS: dcbE.type = DCB_OUTPUT_TMDS; break;
1325 	case   DP: dcbE.type = DCB_OUTPUT_DP; break;
1326 	default:
1327 		WARN_ON(1);
1328 		return -EINVAL;
1329 	}
1330 
1331 	dcbE.location = locn;
1332 	dcbE.connector = conn->index;
1333 	dcbE.heads = disp->head.mask;
1334 	dcbE.i2c_index = 0xff;
1335 	dcbE.link = dcbE.sorconf.link = link;
1336 
1337 	if (proto == TMDS) {
1338 		ret = nvkm_outp_new_(&r535_tmds, disp, id, &dcbE, &outp);
1339 		if (ret)
1340 			return ret;
1341 	} else {
1342 		bool mst, wm;
1343 
1344 		ret = rmapi->disp->dp.get_caps(disp, &dcbE.dpconf.link_bw, &mst, &wm);
1345 		if (ret)
1346 			return ret;
1347 
1348 		if (WARN_ON(!dcbE.dpconf.link_bw))
1349 			return -EINVAL;
1350 
1351 		dcbE.dpconf.link_nr = 4;
1352 
1353 		ret = nvkm_outp_new_(&r535_dp, disp, id, &dcbE, &outp);
1354 		if (ret)
1355 			return ret;
1356 
1357 		outp->dp.mst = mst;
1358 		outp->dp.increased_wm = wm;
1359 	}
1360 
1361 
1362 	outp->conn = conn;
1363 	list_add_tail(&outp->head, &disp->outps);
1364 	return 0;
1365 }
1366 
1367 static void
r535_disp_irq(struct nvkm_gsp_event * event,void * repv,u32 repc)1368 r535_disp_irq(struct nvkm_gsp_event *event, void *repv, u32 repc)
1369 {
1370 	struct nvkm_disp *disp = container_of(event, typeof(*disp), rm.irq);
1371 	Nv2080DpIrqNotification *irq = repv;
1372 
1373 	if (WARN_ON(repc < sizeof(*irq)))
1374 		return;
1375 
1376 	nvkm_debug(&disp->engine.subdev, "event: dp irq displayId %08x\n", irq->displayId);
1377 
1378 	if (irq->displayId)
1379 		nvkm_event_ntfy(&disp->rm.event, fls(irq->displayId) - 1, NVKM_DPYID_IRQ);
1380 }
1381 
1382 static void
r535_disp_hpd(struct nvkm_gsp_event * event,void * repv,u32 repc)1383 r535_disp_hpd(struct nvkm_gsp_event *event, void *repv, u32 repc)
1384 {
1385 	struct nvkm_disp *disp = container_of(event, typeof(*disp), rm.hpd);
1386 	Nv2080HotplugNotification *hpd = repv;
1387 
1388 	if (WARN_ON(repc < sizeof(*hpd)))
1389 		return;
1390 
1391 	nvkm_debug(&disp->engine.subdev, "event: hpd plug %08x unplug %08x\n",
1392 		   hpd->plugDisplayMask, hpd->unplugDisplayMask);
1393 
1394 	for (int i = 0; i < 31; i++) {
1395 		u32 mask = 0;
1396 
1397 		if (hpd->plugDisplayMask & BIT(i))
1398 			mask |= NVKM_DPYID_PLUG;
1399 		if (hpd->unplugDisplayMask & BIT(i))
1400 			mask |= NVKM_DPYID_UNPLUG;
1401 
1402 		if (mask)
1403 			nvkm_event_ntfy(&disp->rm.event, i, mask);
1404 	}
1405 }
1406 
/* Event ops for disp->rm.event; no hooks required, only identity. */
static const struct nvkm_event_func
r535_disp_event = {
};
1410 
/* Handle a per-head timing interrupt: dispatch vblank and acknowledge
 * it in hardware.
 */
static void
r535_disp_intr_head_timing(struct nvkm_disp *disp, int head)
{
	struct nvkm_subdev *subdev = &disp->engine.subdev;
	struct nvkm_device *device = subdev->device;
	/* Per-head interrupt status; stride 4 bytes per head. */
	u32 stat = nvkm_rd32(device, 0x611c00 + (head * 0x04));

	/* Bit 1 is handled as vblank here — NOTE(review): confirm bit
	 * meaning against the hardware manuals.
	 */
	if (stat & 0x00000002) {
		nvkm_disp_vblank(disp, head);

		/* Acknowledge/clear the interrupt. */
		nvkm_wr32(device, 0x611800 + (head * 0x04), 0x00000002);
	}
}
1424 
1425 static irqreturn_t
r535_disp_intr(struct nvkm_inth * inth)1426 r535_disp_intr(struct nvkm_inth *inth)
1427 {
1428 	struct nvkm_disp *disp = container_of(inth, typeof(*disp), engine.subdev.inth);
1429 	struct nvkm_subdev *subdev = &disp->engine.subdev;
1430 	struct nvkm_device *device = subdev->device;
1431 	unsigned long mask = nvkm_rd32(device, 0x611ec0) & 0x000000ff;
1432 	int head;
1433 
1434 	for_each_set_bit(head, &mask, 8)
1435 		r535_disp_intr_head_timing(disp, head);
1436 
1437 	return IRQ_HANDLED;
1438 }
1439 
/* Tear down RM display state.  The display object itself is always
 * freed; the events, client and device objects are only destroyed on a
 * full teardown (presumably kept across suspend so resume can reuse
 * them — NOTE(review): confirm against the init/resume path).
 */
static void
r535_disp_fini(struct nvkm_disp *disp, bool suspend)
{
	if (!disp->engine.subdev.use.enabled)
		return;

	nvkm_gsp_rm_free(&disp->rm.object);

	if (!suspend) {
		nvkm_gsp_event_dtor(&disp->rm.irq);
		nvkm_gsp_event_dtor(&disp->rm.hpd);
		nvkm_event_fini(&disp->rm.event);

		nvkm_gsp_rm_free(&disp->rm.objcom);
		nvkm_gsp_device_dtor(&disp->rm.device);
		nvkm_gsp_client_dtor(&disp->rm.client);
	}
}
1458 
1459 static int
r535_disp_init(struct nvkm_disp * disp)1460 r535_disp_init(struct nvkm_disp *disp)
1461 {
1462 	int ret;
1463 
1464 	ret = nvkm_gsp_rm_alloc(&disp->rm.device.object, disp->func->root.oclass << 16,
1465 				disp->func->root.oclass, 0, &disp->rm.object);
1466 	if (ret)
1467 		return ret;
1468 
1469 	return 0;
1470 }
1471 
1472 static int
r535_disp_get_supported(struct nvkm_disp * disp,unsigned long * pmask)1473 r535_disp_get_supported(struct nvkm_disp *disp, unsigned long *pmask)
1474 {
1475 	NV0073_CTRL_SYSTEM_GET_SUPPORTED_PARAMS *ctrl;
1476 
1477 	ctrl = nvkm_gsp_rm_ctrl_rd(&disp->rm.objcom,
1478 				   NV0073_CTRL_CMD_SYSTEM_GET_SUPPORTED, sizeof(*ctrl));
1479 	if (IS_ERR(ctrl))
1480 		return PTR_ERR(ctrl);
1481 
1482 	*pmask = ctrl->displayMask;
1483 	nvkm_gsp_rm_ctrl_done(&disp->rm.objcom, ctrl);
1484 	return 0;
1485 }
1486 
1487 static int
r535_disp_get_static_info(struct nvkm_disp * disp)1488 r535_disp_get_static_info(struct nvkm_disp *disp)
1489 {
1490 	NV2080_CTRL_INTERNAL_DISPLAY_GET_STATIC_INFO_PARAMS *ctrl;
1491 	struct nvkm_gsp *gsp = disp->rm.objcom.client->gsp;
1492 
1493 	ctrl = nvkm_gsp_rm_ctrl_rd(&gsp->internal.device.subdevice,
1494 				   NV2080_CTRL_CMD_INTERNAL_DISPLAY_GET_STATIC_INFO,
1495 				   sizeof(*ctrl));
1496 	if (IS_ERR(ctrl))
1497 		return PTR_ERR(ctrl);
1498 
1499 	disp->wndw.mask = ctrl->windowPresentMask;
1500 	disp->wndw.nr = fls(disp->wndw.mask);
1501 
1502 	nvkm_gsp_rm_ctrl_done(&gsp->internal.device.subdevice, ctrl);
1503 	return 0;
1504 }
1505 
/* One-time RM display setup: display instance memory, RM client/device/
 * display objects, backlight _DSM hand-off to RM, head/SOR/output
 * enumeration, hotplug/DP-IRQ event registration, RAMHT, and the
 * CPU-side display interrupt handler.
 */
static int
r535_disp_oneinit(struct nvkm_disp *disp)
{
	struct nvkm_device *device = disp->engine.subdev.device;
	struct nvkm_gsp *gsp = device->gsp;
	const struct nvkm_rm_api *rmapi = gsp->rm->api;
	NV2080_CTRL_INTERNAL_DISPLAY_WRITE_INST_MEM_PARAMS *ctrl;
	unsigned long mask;
	int ret, i;

	/* RAMIN. */
	ret = nvkm_gpuobj_new(device, 0x10000, 0x10000, false, NULL, &disp->inst);
	if (ret)
		return ret;

	/* RM is told below the instance memory lives in FBMEM, so it must
	 * actually be in VRAM.
	 */
	if (WARN_ON(nvkm_memory_target(disp->inst->memory) != NVKM_MEM_TARGET_VRAM))
		return -EINVAL;

	ctrl = nvkm_gsp_rm_ctrl_get(&gsp->internal.device.subdevice,
				    NV2080_CTRL_CMD_INTERNAL_DISPLAY_WRITE_INST_MEM,
				    sizeof(*ctrl));
	if (IS_ERR(ctrl))
		return PTR_ERR(ctrl);

	ctrl->instMemPhysAddr = nvkm_memory_addr(disp->inst->memory);
	ctrl->instMemSize = nvkm_memory_size(disp->inst->memory);
	ctrl->instMemAddrSpace = ADDR_FBMEM;
	ctrl->instMemCpuCacheAttr = NV_MEMORY_WRITECOMBINED;

	ret = nvkm_gsp_rm_ctrl_wr(&gsp->internal.device.subdevice, ctrl);
	if (ret)
		return ret;

	/* OBJs. */
	ret = nvkm_gsp_client_device_ctor(gsp, &disp->rm.client, &disp->rm.device);
	if (ret)
		return ret;

	ret = nvkm_gsp_rm_alloc(&disp->rm.device.object, NVKM_RM_DISP, NV04_DISPLAY_COMMON, 0,
				&disp->rm.objcom);
	if (ret)
		return ret;

	ret = rmapi->disp->get_static_info(disp);
	if (ret)
		return ret;

	/* Feed ACPI backlight _DSM data (NBCI or NVHG) to RM on x86. */
	{
#if defined(CONFIG_ACPI) && defined(CONFIG_X86)
		NV2080_CTRL_INTERNAL_INIT_BRIGHTC_STATE_LOAD_PARAMS *ctrl;
		struct nvkm_gsp_object *subdevice = &disp->rm.client.gsp->internal.device.subdevice;

		ctrl = nvkm_gsp_rm_ctrl_get(subdevice,
					    NV2080_CTRL_CMD_INTERNAL_INIT_BRIGHTC_STATE_LOAD,
					    sizeof(*ctrl));
		if (IS_ERR(ctrl))
			return PTR_ERR(ctrl);

		ctrl->status = 0x56; /* NV_ERR_NOT_SUPPORTED */

		{
			const guid_t NBCI_DSM_GUID =
				GUID_INIT(0xD4A50B75, 0x65C7, 0x46F7,
					  0xBF, 0xB7, 0x41, 0x51, 0x4C, 0xEA, 0x02, 0x44);
			u64 NBCI_DSM_REV = 0x00000102;
			const guid_t NVHG_DSM_GUID =
				GUID_INIT(0x9D95A0A0, 0x0060, 0x4D48,
					  0xB3, 0x4D, 0x7E, 0x5F, 0xEA, 0x12, 0x9F, 0xD4);
			u64 NVHG_DSM_REV = 0x00000102;
			acpi_handle handle = ACPI_HANDLE(device->dev);

			if (handle && acpi_has_method(handle, "_DSM")) {
				/* Function index 0x14 on both GUIDs. */
				bool nbci = acpi_check_dsm(handle, &NBCI_DSM_GUID, NBCI_DSM_REV,
						           1ULL << 0x00000014);
				bool nvhg = acpi_check_dsm(handle, &NVHG_DSM_GUID, NVHG_DSM_REV,
						           1ULL << 0x00000014);

				if (nbci || nvhg) {
					/* NOTE(review): argv4.buffer.length is read
					 * inside argv4's own initializer, and the
					 * kmalloc() result isn't checked — confirm
					 * both are acceptable here.
					 */
					union acpi_object argv4 = {
						.buffer.type    = ACPI_TYPE_BUFFER,
						.buffer.length  = sizeof(ctrl->backLightData),
						.buffer.pointer = kmalloc(argv4.buffer.length, GFP_KERNEL),
					}, *obj;

					obj = acpi_evaluate_dsm(handle, nbci ? &NBCI_DSM_GUID : &NVHG_DSM_GUID,
								0x00000102, 0x14, &argv4);
					if (!obj) {
						acpi_handle_info(handle, "failed to evaluate _DSM\n");
					} else {
						/* Pack the returned integers (4 or
						 * 8 bytes each) into backLightData.
						 */
						for (int i = 0; i < obj->package.count; i++) {
							union acpi_object *elt = &obj->package.elements[i];
							u32 size;

							if (elt->integer.value & ~0xffffffffULL)
								size = 8;
							else
								size = 4;

							memcpy(&ctrl->backLightData[ctrl->backLightDataSize], &elt->integer.value, size);
							ctrl->backLightDataSize += size;
						}

						ctrl->status = 0;
						ACPI_FREE(obj);
					}

					kfree(argv4.buffer.pointer);
				}
			}
		}

		ret = nvkm_gsp_rm_ctrl_wr(subdevice, ctrl);
		if (ret)
			return ret;
#endif
	}

	/* Take manual control of DisplayPort from RM. */
	{
		NV0073_CTRL_CMD_DP_SET_MANUAL_DISPLAYPORT_PARAMS *ctrl;

		ctrl = nvkm_gsp_rm_ctrl_get(&disp->rm.objcom,
					    NV0073_CTRL_CMD_DP_SET_MANUAL_DISPLAYPORT,
					    sizeof(*ctrl));
		if (IS_ERR(ctrl))
			return PTR_ERR(ctrl);

		ret = nvkm_gsp_rm_ctrl_wr(&disp->rm.objcom, ctrl);
		if (ret)
			return ret;
	}

	/* Head count. */
	{
		NV0073_CTRL_SYSTEM_GET_NUM_HEADS_PARAMS *ctrl;

		ctrl = nvkm_gsp_rm_ctrl_rd(&disp->rm.objcom,
					   NV0073_CTRL_CMD_SYSTEM_GET_NUM_HEADS, sizeof(*ctrl));
		if (IS_ERR(ctrl))
			return PTR_ERR(ctrl);

		disp->head.nr = ctrl->numHeads;
		nvkm_gsp_rm_ctrl_done(&disp->rm.objcom, ctrl);
	}

	/* Head mask, and construction of each present head. */
	{
		NV0073_CTRL_SPECIFIC_GET_ALL_HEAD_MASK_PARAMS *ctrl;

		ctrl = nvkm_gsp_rm_ctrl_rd(&disp->rm.objcom,
					   NV0073_CTRL_CMD_SPECIFIC_GET_ALL_HEAD_MASK,
					   sizeof(*ctrl));
		if (IS_ERR(ctrl))
			return PTR_ERR(ctrl);

		disp->head.mask = ctrl->headMask;
		nvkm_gsp_rm_ctrl_done(&disp->rm.objcom, ctrl);

		for_each_set_bit(i, &disp->head.mask, disp->head.nr) {
			ret = nvkm_head_new_(&r535_head, disp, i);
			if (ret)
				return ret;
		}
	}

	/* SORs. */
	disp->sor.nr = disp->func->sor.cnt(disp, &disp->sor.mask);
	nvkm_debug(&disp->engine.subdev, "   SOR(s): %d (%02lx)\n", disp->sor.nr, disp->sor.mask);
	for_each_set_bit(i, &disp->sor.mask, disp->sor.nr) {
		ret = disp->func->sor.new(disp, i);
		if (ret)
			return ret;
	}

	/* Outputs, one per supported displayId. */
	ret = rmapi->disp->get_supported(disp, &mask);
	if (ret)
		return ret;

	for_each_set_bit(i, &mask, 32) {
		ret = r535_outp_new(disp, i);
		if (ret)
			return ret;
	}

	/* Hotplug and DP-IRQ notifications from GSP. */
	ret = nvkm_event_init(&r535_disp_event, &gsp->subdev, 3, 32, &disp->rm.event);
	if (WARN_ON(ret))
		return ret;

	ret = nvkm_gsp_device_event_ctor(&disp->rm.device, 0x007e0000, NV2080_NOTIFIERS_HOTPLUG,
					 r535_disp_hpd, &disp->rm.hpd);
	if (ret)
		return ret;

	ret = nvkm_gsp_device_event_ctor(&disp->rm.device, 0x007e0001, NV2080_NOTIFIERS_DP_IRQ,
					 r535_disp_irq, &disp->rm.irq);
	if (ret)
		return ret;

	/* RAMHT. */
	ret = nvkm_ramht_new(device, disp->func->ramht_size ? disp->func->ramht_size :
			     0x1000, 0, disp->inst, &disp->ramht);
	if (ret)
		return ret;

	/* Hook up the CPU-side stall interrupt for this subdev. */
	ret = nvkm_gsp_intr_stall(gsp, disp->engine.subdev.type, disp->engine.subdev.inst);
	if (ret < 0)
		return ret;

	ret = nvkm_inth_add(&device->vfn->intr, ret, NVKM_INTR_PRIO_NORMAL, &disp->engine.subdev,
			    r535_disp_intr, &disp->engine.subdev.inth);
	if (ret)
		return ret;

	nvkm_inth_allow(&disp->engine.subdev.inth);
	return 0;
}
1722 
/* Free the nvkm_disp_func that r535_disp_new() allocated. */
static void
r535_disp_dtor(struct nvkm_disp *disp)
{
	kfree(disp->func);
}
1728 
1729 int
r535_disp_new(const struct nvkm_disp_func * hw,struct nvkm_device * device,enum nvkm_subdev_type type,int inst,struct nvkm_disp ** pdisp)1730 r535_disp_new(const struct nvkm_disp_func *hw, struct nvkm_device *device,
1731 	      enum nvkm_subdev_type type, int inst, struct nvkm_disp **pdisp)
1732 {
1733 	const struct nvkm_rm_gpu *gpu = device->gsp->rm->gpu;
1734 	struct nvkm_disp_func *rm;
1735 	int ret;
1736 
1737 	if (!(rm = kzalloc(sizeof(*rm) + 6 * sizeof(rm->user[0]), GFP_KERNEL)))
1738 		return -ENOMEM;
1739 
1740 	rm->dtor = r535_disp_dtor;
1741 	rm->oneinit = r535_disp_oneinit;
1742 	rm->init = r535_disp_init;
1743 	rm->fini = r535_disp_fini;
1744 	rm->uevent = hw->uevent;
1745 	rm->sor.cnt = r535_sor_cnt;
1746 	rm->sor.new = r535_sor_new;
1747 	rm->ramht_size = hw->ramht_size;
1748 
1749 	rm->root.oclass = gpu->disp.class.root;
1750 
1751 	rm->user[0].base.oclass = gpu->disp.class.caps;
1752 	rm->user[0].ctor = gv100_disp_caps_new;
1753 
1754 	rm->user[1].base.oclass = gpu->disp.class.core;
1755 	rm->user[1].ctor = nvkm_disp_core_new;
1756 	rm->user[1].chan = &r535_core;
1757 
1758 	rm->user[2].base.oclass = gpu->disp.class.wndw;
1759 	rm->user[2].ctor = nvkm_disp_wndw_new;
1760 	rm->user[2].chan = &r535_wndw;
1761 
1762 	rm->user[3].base.oclass = gpu->disp.class.wimm;
1763 	rm->user[3].ctor = nvkm_disp_wndw_new;
1764 	rm->user[3].chan = &r535_wimm;
1765 
1766 	rm->user[4].base.oclass = gpu->disp.class.curs;
1767 	rm->user[4].ctor = nvkm_disp_chan_new;
1768 	rm->user[4].chan = &r535_curs;
1769 
1770 	ret = nvkm_disp_new_(rm, device, type, inst, pdisp);
1771 	if (ret)
1772 		kfree(rm);
1773 
1774 	mutex_init(&(*pdisp)->super.mutex); //XXX
1775 	return ret;
1776 }
1777 
/* r535 implementation of the RM display API. */
const struct nvkm_rm_api_disp
r535_disp = {
	.get_static_info = r535_disp_get_static_info,
	.get_supported = r535_disp_get_supported,
	.get_connect_state = r535_disp_get_connect_state,
	.get_active = r535_disp_get_active,
	.bl_ctrl = r535_bl_ctrl,
	.dp = {
		.get_caps = r535_dp_get_caps,
		.set_indexed_link_rates = r535_dp_set_indexed_link_rates,
	},
	.chan = {
		.set_pushbuf = r535_disp_chan_set_pushbuf,
		.dmac_alloc = r535_dmac_alloc,
	}
};
1794