/*
 * Copyright (C) 2008 Maarten Maathuis.
 * All Rights Reserved.
 *
 * Permission is hereby granted, free of charge, to any person obtaining
 * a copy of this software and associated documentation files (the
 * "Software"), to deal in the Software without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sublicense, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so, subject to
 * the following conditions:
 *
 * The above copyright notice and this permission notice (including the
 * next paragraph) shall be included in all copies or substantial
 * portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
 * IN NO EVENT SHALL THE COPYRIGHT OWNER(S) AND/OR ITS SUPPLIERS BE
 * LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
 * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
 * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 *
 */

#include "drmP.h"
#include "drm_mode.h"
#include "drm_crtc_helper.h"

#define NOUVEAU_DMA_DEBUG (nouveau_reg_debug & NOUVEAU_REG_DEBUG_EVO)
#include "nouveau_reg.h"
#include "nouveau_drv.h"
#include "nouveau_hw.h"
#include "nouveau_encoder.h"
#include "nouveau_crtc.h"
#include "nouveau_fb.h"
#include "nouveau_connector.h"
#include "nv50_display.h"
static void
nv50_crtc_lut_load(struct drm_crtc *crtc)
{
        struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
        void __iomem *lut = nvbo_kmap_obj_iovirtual(nv_crtc->lut.nvbo);
        int i;

        NV_DEBUG_KMS(crtc->dev, "\n");

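        /* Each hardware LUT entry is 8 bytes: 16-bit red, green and blue
         * components at offsets 0, 2 and 4.  The stored gamma values are
         * 16-bit, so shift them down by two bits for the hardware.
         */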
        for (i = 0; i < 256; i++) {
                writew(nv_crtc->lut.r[i] >> 2, lut + 8*i + 0);
                writew(nv_crtc->lut.g[i] >> 2, lut + 8*i + 2);
                writew(nv_crtc->lut.b[i] >> 2, lut + 8*i + 4);
        }

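        /* 30-bit (10 bpc) LUTs are read with one extra entry; replicate the
         * last one so index 256 holds a valid value.
         */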
        if (nv_crtc->lut.depth == 30) {
                writew(nv_crtc->lut.r[i - 1] >> 2, lut + 8*i + 0);
                writew(nv_crtc->lut.g[i - 1] >> 2, lut + 8*i + 2);
                writew(nv_crtc->lut.b[i - 1] >> 2, lut + 8*i + 4);
        }
}

int
nv50_crtc_blank(struct nouveau_crtc *nv_crtc, bool blanked)
{
        struct drm_device *dev = nv_crtc->base.dev;
        struct drm_nouveau_private *dev_priv = dev->dev_private;
        struct nouveau_channel *evo = nv50_display(dev)->master;
        int index = nv_crtc->index, ret;

        NV_DEBUG_KMS(dev, "index %d\n", nv_crtc->index);
        NV_DEBUG_KMS(dev, "%s\n", blanked ? "blanked" : "unblanked");

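        /* Blanking hides the cursor and detaches the CLUT and framebuffer
         * DMA objects; unblanking restores the cursor state and points the
         * head back at the current framebuffer.
         */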
        if (blanked) {
                nv_crtc->cursor.hide(nv_crtc, false);

                ret = RING_SPACE(evo, dev_priv->chipset != 0x50 ? 7 : 5);
                if (ret) {
                        NV_ERROR(dev, "no space while blanking crtc\n");
                        return ret;
                }
                BEGIN_RING(evo, 0, NV50_EVO_CRTC(index, CLUT_MODE), 2);
                OUT_RING(evo, NV50_EVO_CRTC_CLUT_MODE_BLANK);
                OUT_RING(evo, 0);
                if (dev_priv->chipset != 0x50) {
                        BEGIN_RING(evo, 0, NV84_EVO_CRTC(index, CLUT_DMA), 1);
                        OUT_RING(evo, NV84_EVO_CRTC_CLUT_DMA_HANDLE_NONE);
                }

                BEGIN_RING(evo, 0, NV50_EVO_CRTC(index, FB_DMA), 1);
                OUT_RING(evo, NV50_EVO_CRTC_FB_DMA_HANDLE_NONE);
        } else {
                if (nv_crtc->cursor.visible)
                        nv_crtc->cursor.show(nv_crtc, false);
                else
                        nv_crtc->cursor.hide(nv_crtc, false);

                ret = RING_SPACE(evo, dev_priv->chipset != 0x50 ? 10 : 8);
                if (ret) {
                        NV_ERROR(dev, "no space while unblanking crtc\n");
                        return ret;
                }
                BEGIN_RING(evo, 0, NV50_EVO_CRTC(index, CLUT_MODE), 2);
                OUT_RING(evo, nv_crtc->lut.depth == 8 ?
                              NV50_EVO_CRTC_CLUT_MODE_OFF :
                              NV50_EVO_CRTC_CLUT_MODE_ON);
                OUT_RING(evo, nv_crtc->lut.nvbo->bo.offset >> 8);
                if (dev_priv->chipset != 0x50) {
                        BEGIN_RING(evo, 0, NV84_EVO_CRTC(index, CLUT_DMA), 1);
                        OUT_RING(evo, NvEvoVRAM);
                }

                BEGIN_RING(evo, 0, NV50_EVO_CRTC(index, FB_OFFSET), 2);
                OUT_RING(evo, nv_crtc->fb.offset >> 8);
                OUT_RING(evo, 0);
                BEGIN_RING(evo, 0, NV50_EVO_CRTC(index, FB_DMA), 1);
                if (dev_priv->chipset != 0x50)
                        if (nv_crtc->fb.tile_flags == 0x7a00 ||
                            nv_crtc->fb.tile_flags == 0xfe00)
                                OUT_RING(evo, NvEvoFB32);
                        else
                        if (nv_crtc->fb.tile_flags == 0x7000)
                                OUT_RING(evo, NvEvoFB16);
                        else
                                OUT_RING(evo, NvEvoVRAM_LP);
                else
                        OUT_RING(evo, NvEvoVRAM_LP);
        }

        nv_crtc->fb.blanked = blanked;
        return 0;
}

static int
nv50_crtc_set_dither(struct nouveau_crtc *nv_crtc, bool update)
{
        struct nouveau_channel *evo = nv50_display(nv_crtc->base.dev)->master;
        struct nouveau_connector *nv_connector;
        struct drm_connector *connector;
        int head = nv_crtc->index, ret;
        u32 mode = 0x00;

        nv_connector = nouveau_crtc_connector_get(nv_crtc);
        connector = &nv_connector->base;
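        /* In AUTO mode, dither only when the framebuffer carries more bits
         * per pixel than the panel can actually display.
         */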
        if (nv_connector->dithering_mode == DITHERING_MODE_AUTO) {
                if (nv_crtc->base.fb->depth > connector->display_info.bpc * 3)
                        mode = DITHERING_MODE_DYNAMIC2X2;
        } else {
                mode = nv_connector->dithering_mode;
        }

        if (nv_connector->dithering_depth == DITHERING_DEPTH_AUTO) {
                if (connector->display_info.bpc >= 8)
                        mode |= DITHERING_DEPTH_8BPC;
        } else {
                mode |= nv_connector->dithering_depth;
        }

        ret = RING_SPACE(evo, 2 + (update ? 2 : 0));
        if (ret == 0) {
                BEGIN_RING(evo, 0, NV50_EVO_CRTC(head, DITHER_CTRL), 1);
                OUT_RING (evo, mode);
                if (update) {
                        BEGIN_RING(evo, 0, NV50_EVO_UPDATE, 1);
                        OUT_RING (evo, 0);
                        FIRE_RING (evo);
                }
        }

        return ret;
}

struct nouveau_connector *
nouveau_crtc_connector_get(struct nouveau_crtc *nv_crtc)
{
        struct drm_device *dev = nv_crtc->base.dev;
        struct drm_connector *connector;
        struct drm_crtc *crtc = to_drm_crtc(nv_crtc);

        /* The safest approach is to find an encoder with the right crtc that
         * is also linked to a connector.
         */
        list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
                if (connector->encoder)
                        if (connector->encoder->crtc == crtc)
                                return nouveau_connector(connector);
        }

        return NULL;
}

static int
nv50_crtc_set_scale(struct nouveau_crtc *nv_crtc, bool update)
{
        struct nouveau_connector *nv_connector;
        struct drm_crtc *crtc = &nv_crtc->base;
        struct drm_device *dev = crtc->dev;
        struct nouveau_channel *evo = nv50_display(dev)->master;
        struct drm_display_mode *umode = &crtc->mode;
        struct drm_display_mode *omode;
        int scaling_mode, ret;
        u32 ctrl = 0, oX, oY;

        NV_DEBUG_KMS(dev, "\n");

        nv_connector = nouveau_crtc_connector_get(nv_crtc);
        if (!nv_connector || !nv_connector->native_mode) {
                NV_ERROR(dev, "no native mode, forcing panel scaling\n");
                scaling_mode = DRM_MODE_SCALE_NONE;
        } else {
                scaling_mode = nv_connector->scaling_mode;
        }

        /* Start off at the resolution we programmed the crtc for; this
         * effectively handles NONE/FULL scaling.
         */
        if (scaling_mode != DRM_MODE_SCALE_NONE)
                omode = nv_connector->native_mode;
        else
                omode = umode;

        oX = omode->hdisplay;
        oY = omode->vdisplay;
        if (omode->flags & DRM_MODE_FLAG_DBLSCAN)
                oY *= 2;

        /* Add overscan compensation if necessary; this keeps the aspect
         * ratio the same as the backend mode unless the user overrides it
         * by setting both the hborder and vborder properties.
         */
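        /* The aspect ratio is carried as a 19-bit fixed-point fraction so
         * the border maths below can recompute the height from the reduced
         * width without losing precision.
         */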
        if (nv_connector && (nv_connector->underscan == UNDERSCAN_ON ||
                            (nv_connector->underscan == UNDERSCAN_AUTO &&
                             nv_connector->edid &&
                             drm_detect_hdmi_monitor(nv_connector->edid)))) {
                u32 bX = nv_connector->underscan_hborder;
                u32 bY = nv_connector->underscan_vborder;
                u32 aspect = (oY << 19) / oX;

                if (bX) {
                        oX -= (bX * 2);
                        if (bY) oY -= (bY * 2);
                        else    oY = ((oX * aspect) + (aspect / 2)) >> 19;
                } else {
                        oX -= (oX >> 4) + 32;
                        if (bY) oY -= (bY * 2);
                        else    oY = ((oX * aspect) + (aspect / 2)) >> 19;
                }
        }

        /* Handle CENTER/ASPECT scaling, taking into account the areas
         * already removed for overscan compensation.
         */
        switch (scaling_mode) {
        case DRM_MODE_SCALE_CENTER:
                oX = min((u32)umode->hdisplay, oX);
                oY = min((u32)umode->vdisplay, oY);
                /* fall-through */
        case DRM_MODE_SCALE_ASPECT:
                if (oY < oX) {
                        u32 aspect = (umode->hdisplay << 19) / umode->vdisplay;
                        oX = ((oY * aspect) + (aspect / 2)) >> 19;
                } else {
                        u32 aspect = (umode->vdisplay << 19) / umode->hdisplay;
                        oY = ((oX * aspect) + (aspect / 2)) >> 19;
                }
                break;
        default:
                break;
        }

        if (umode->hdisplay != oX || umode->vdisplay != oY ||
            umode->flags & DRM_MODE_FLAG_INTERLACE ||
            umode->flags & DRM_MODE_FLAG_DBLSCAN)
                ctrl |= NV50_EVO_CRTC_SCALE_CTRL_ACTIVE;

        ret = RING_SPACE(evo, 5);
        if (ret)
                return ret;

        BEGIN_RING(evo, 0, NV50_EVO_CRTC(nv_crtc->index, SCALE_CTRL), 1);
        OUT_RING (evo, ctrl);
        BEGIN_RING(evo, 0, NV50_EVO_CRTC(nv_crtc->index, SCALE_RES1), 2);
        OUT_RING (evo, oY << 16 | oX);
        OUT_RING (evo, oY << 16 | oX);

        if (update) {
                nv50_display_flip_stop(crtc);
                nv50_display_sync(dev);
                nv50_display_flip_next(crtc, crtc->fb, NULL);
        }

        return 0;
}

int
nv50_crtc_set_clock(struct drm_device *dev, int head, int pclk)
{
        struct drm_nouveau_private *dev_priv = dev->dev_private;
        struct pll_lims pll;
        uint32_t reg1, reg2;
        int ret, N1, M1, N2, M2, P;

        ret = get_pll_limits(dev, PLL_VPLL0 + head, &pll);
        if (ret)
                return ret;

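        /* Three register layouts exist: the original two-stage VPLLs (which
         * report a second VCO in their limits), the single-stage PLLs on
         * later pre-Fermi chips, and the Fermi (NVC0+) layout.
         */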
        if (pll.vco2.maxfreq) {
                ret = nv50_calc_pll(dev, &pll, pclk, &N1, &M1, &N2, &M2, &P);
                if (ret <= 0)
                        return 0;

                NV_DEBUG(dev, "pclk %d out %d NM1 %d %d NM2 %d %d P %d\n",
                         pclk, ret, N1, M1, N2, M2, P);

                reg1 = nv_rd32(dev, pll.reg + 4) & 0xff00ff00;
                reg2 = nv_rd32(dev, pll.reg + 8) & 0x8000ff00;
                nv_wr32(dev, pll.reg + 0, 0x10000611);
                nv_wr32(dev, pll.reg + 4, reg1 | (M1 << 16) | N1);
                nv_wr32(dev, pll.reg + 8, reg2 | (P << 28) | (M2 << 16) | N2);
        } else
        if (dev_priv->chipset < NV_C0) {
                ret = nva3_calc_pll(dev, &pll, pclk, &N1, &N2, &M1, &P);
                if (ret <= 0)
                        return 0;

                NV_DEBUG(dev, "pclk %d out %d N %d fN 0x%04x M %d P %d\n",
                         pclk, ret, N1, N2, M1, P);

                reg1 = nv_rd32(dev, pll.reg + 4) & 0xffc00000;
                nv_wr32(dev, pll.reg + 0, 0x50000610);
                nv_wr32(dev, pll.reg + 4, reg1 | (P << 16) | (M1 << 8) | N1);
                nv_wr32(dev, pll.reg + 8, N2);
        } else {
                ret = nva3_calc_pll(dev, &pll, pclk, &N1, &N2, &M1, &P);
                if (ret <= 0)
                        return 0;

                NV_DEBUG(dev, "pclk %d out %d N %d fN 0x%04x M %d P %d\n",
                         pclk, ret, N1, N2, M1, P);

                nv_mask(dev, pll.reg + 0x0c, 0x00000000, 0x00000100);
                nv_wr32(dev, pll.reg + 0x04, (P << 16) | (N1 << 8) | M1);
                nv_wr32(dev, pll.reg + 0x10, N2 << 16);
        }

        return 0;
}

static void
nv50_crtc_destroy(struct drm_crtc *crtc)
{
        struct drm_device *dev;
        struct nouveau_crtc *nv_crtc;

        if (!crtc)
                return;

        dev = crtc->dev;
        nv_crtc = nouveau_crtc(crtc);

        NV_DEBUG_KMS(dev, "\n");

        drm_crtc_cleanup(&nv_crtc->base);

        nouveau_bo_unmap(nv_crtc->lut.nvbo);
        nouveau_bo_ref(NULL, &nv_crtc->lut.nvbo);
        nouveau_bo_unmap(nv_crtc->cursor.nvbo);
        nouveau_bo_ref(NULL, &nv_crtc->cursor.nvbo);
        kfree(nv_crtc);
}

int
nv50_crtc_cursor_set(struct drm_crtc *crtc, struct drm_file *file_priv,
                     uint32_t buffer_handle, uint32_t width, uint32_t height)
{
        struct drm_device *dev = crtc->dev;
        struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
        struct nouveau_bo *cursor = NULL;
        struct drm_gem_object *gem;
        int ret = 0, i;

        if (!buffer_handle) {
                nv_crtc->cursor.hide(nv_crtc, true);
                return 0;
        }

        if (width != 64 || height != 64)
                return -EINVAL;

        gem = drm_gem_object_lookup(dev, file_priv, buffer_handle);
        if (!gem)
                return -ENOENT;
        cursor = nouveau_gem_object(gem);

        ret = nouveau_bo_map(cursor);
        if (ret)
                goto out;

        /* A simple word-by-word copy into the cursor BO will do for now. */
        for (i = 0; i < 64 * 64; i++)
                nouveau_bo_wr32(nv_crtc->cursor.nvbo, i, nouveau_bo_rd32(cursor, i));

        nouveau_bo_unmap(cursor);

        nv_crtc->cursor.set_offset(nv_crtc, nv_crtc->cursor.nvbo->bo.offset);
        nv_crtc->cursor.show(nv_crtc, true);

out:
        drm_gem_object_unreference_unlocked(gem);
        return ret;
}

int
nv50_crtc_cursor_move(struct drm_crtc *crtc, int x, int y)
{
        struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);

        nv_crtc->cursor.set_pos(nv_crtc, x, y);
        return 0;
}

static void
nv50_crtc_gamma_set(struct drm_crtc *crtc, u16 *r, u16 *g, u16 *b,
                    uint32_t start, uint32_t size)
{
        int end = (start + size > 256) ? 256 : start + size, i;
        struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);

        for (i = start; i < end; i++) {
                nv_crtc->lut.r[i] = r[i];
                nv_crtc->lut.g[i] = g[i];
                nv_crtc->lut.b[i] = b[i];
        }

        /* We need to know the depth before we upload, but it's possible to
         * get called before a framebuffer is bound.  If this is the case,
         * mark the lut values as dirty by setting depth==0, and they'll be
         * uploaded on the first mode_set_base().
         */
        if (!nv_crtc->base.fb) {
                nv_crtc->lut.depth = 0;
                return;
        }

        nv50_crtc_lut_load(crtc);
}

static void
nv50_crtc_save(struct drm_crtc *crtc)
{
        NV_ERROR(crtc->dev, "!!\n");
}

static void
nv50_crtc_restore(struct drm_crtc *crtc)
{
        NV_ERROR(crtc->dev, "!!\n");
}

static const struct drm_crtc_funcs nv50_crtc_funcs = {
        .save = nv50_crtc_save,
        .restore = nv50_crtc_restore,
        .cursor_set = nv50_crtc_cursor_set,
        .cursor_move = nv50_crtc_cursor_move,
        .gamma_set = nv50_crtc_gamma_set,
        .set_config = drm_crtc_helper_set_config,
        .page_flip = nouveau_crtc_page_flip,
        .destroy = nv50_crtc_destroy,
};

static void
nv50_crtc_dpms(struct drm_crtc *crtc, int mode)
{
}

static void
nv50_crtc_prepare(struct drm_crtc *crtc)
{
        struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
        struct drm_device *dev = crtc->dev;

        NV_DEBUG_KMS(dev, "index %d\n", nv_crtc->index);

        nv50_display_flip_stop(crtc);
        drm_vblank_pre_modeset(dev, nv_crtc->index);
        nv50_crtc_blank(nv_crtc, true);
}

static void
nv50_crtc_commit(struct drm_crtc *crtc)
{
        struct drm_device *dev = crtc->dev;
        struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);

        NV_DEBUG_KMS(dev, "index %d\n", nv_crtc->index);

        nv50_crtc_blank(nv_crtc, false);
        drm_vblank_post_modeset(dev, nv_crtc->index);
        nv50_display_sync(dev);
        nv50_display_flip_next(crtc, crtc->fb, NULL);
}

static bool
nv50_crtc_mode_fixup(struct drm_crtc *crtc, struct drm_display_mode *mode,
                     struct drm_display_mode *adjusted_mode)
{
        return true;
}

static int
nv50_crtc_do_mode_set_base(struct drm_crtc *crtc,
                           struct drm_framebuffer *passed_fb,
                           int x, int y, bool atomic)
{
        struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
        struct drm_device *dev = nv_crtc->base.dev;
        struct drm_nouveau_private *dev_priv = dev->dev_private;
        struct nouveau_channel *evo = nv50_display(dev)->master;
        struct drm_framebuffer *drm_fb;
        struct nouveau_framebuffer *fb;
        int ret;

        NV_DEBUG_KMS(dev, "index %d\n", nv_crtc->index);

        /* no fb bound */
        if (!atomic && !crtc->fb) {
                NV_DEBUG_KMS(dev, "No FB bound\n");
                return 0;
        }

        /* If atomic, we want to switch to the fb we were passed, so
         * now we update pointers to do that.  (We don't pin; just
         * assume we're already pinned and update the base address.)
         */
        if (atomic) {
                drm_fb = passed_fb;
                fb = nouveau_framebuffer(passed_fb);
        } else {
                drm_fb = crtc->fb;
                fb = nouveau_framebuffer(crtc->fb);
                /* If not atomic, we can go ahead and pin, and unpin the
                 * old fb we were passed.
                 */
                ret = nouveau_bo_pin(fb->nvbo, TTM_PL_FLAG_VRAM);
                if (ret)
                        return ret;

                if (passed_fb) {
                        struct nouveau_framebuffer *ofb = nouveau_framebuffer(passed_fb);
                        nouveau_bo_unpin(ofb->nvbo);
                }
        }

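        /* Cache the new scanout parameters and, on NV84 and later, re-point
         * the framebuffer DMA object if the head is currently unblanked.
         */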
        nv_crtc->fb.offset = fb->nvbo->bo.offset;
        nv_crtc->fb.tile_flags = nouveau_bo_tile_layout(fb->nvbo);
        nv_crtc->fb.cpp = drm_fb->bits_per_pixel / 8;
        if (!nv_crtc->fb.blanked && dev_priv->chipset != 0x50) {
                ret = RING_SPACE(evo, 2);
                if (ret)
                        return ret;

                BEGIN_RING(evo, 0, NV50_EVO_CRTC(nv_crtc->index, FB_DMA), 1);
                OUT_RING (evo, fb->r_dma);
        }

        ret = RING_SPACE(evo, 12);
        if (ret)
                return ret;

        BEGIN_RING(evo, 0, NV50_EVO_CRTC(nv_crtc->index, FB_OFFSET), 5);
        OUT_RING (evo, nv_crtc->fb.offset >> 8);
        OUT_RING (evo, 0);
        OUT_RING (evo, (drm_fb->height << 16) | drm_fb->width);
        OUT_RING (evo, fb->r_pitch);
        OUT_RING (evo, fb->r_format);

        BEGIN_RING(evo, 0, NV50_EVO_CRTC(nv_crtc->index, CLUT_MODE), 1);
        OUT_RING (evo, fb->base.depth == 8 ?
                  NV50_EVO_CRTC_CLUT_MODE_OFF : NV50_EVO_CRTC_CLUT_MODE_ON);

        BEGIN_RING(evo, 0, NV50_EVO_CRTC(nv_crtc->index, COLOR_CTRL), 1);
        OUT_RING (evo, NV50_EVO_CRTC_COLOR_CTRL_COLOR);
        BEGIN_RING(evo, 0, NV50_EVO_CRTC(nv_crtc->index, FB_POS), 1);
        OUT_RING (evo, (y << 16) | x);

        if (nv_crtc->lut.depth != fb->base.depth) {
                nv_crtc->lut.depth = fb->base.depth;
                nv50_crtc_lut_load(crtc);
        }

        return 0;
}

static int
nv50_crtc_mode_set(struct drm_crtc *crtc, struct drm_display_mode *umode,
                   struct drm_display_mode *mode, int x, int y,
                   struct drm_framebuffer *old_fb)
{
        struct drm_device *dev = crtc->dev;
        struct nouveau_channel *evo = nv50_display(dev)->master;
        struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
        u32 head = nv_crtc->index * 0x400;
        u32 ilace = (mode->flags & DRM_MODE_FLAG_INTERLACE) ? 2 : 1;
        u32 vscan = (mode->flags & DRM_MODE_FLAG_DBLSCAN) ? 2 : 1;
        u32 hactive, hsynce, hbackp, hfrontp, hblanke, hblanks;
        u32 vactive, vsynce, vbackp, vfrontp, vblanke, vblanks;
        u32 vblan2e = 0, vblan2s = 1;
        int ret;

        /* hw timing description looks like this:
         *
         * <sync> <back porch> <---------display---------> <front porch>
         *  ______
         * |____________|---------------------------|____________|
         *
         * ^ synce      ^ blanke                     ^ blanks     ^ active
         *
         * interlaced modes also have 2 additional values pointing at the end
         * and start of the next field's blanking period.
         */

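        /* All values below are measured from the start of the sync pulse:
         * horizontal values in pixels, vertical values in lines scaled by
         * vscan/ilace.
         */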
        hactive = mode->htotal;
        hsynce = mode->hsync_end - mode->hsync_start - 1;
        hbackp = mode->htotal - mode->hsync_end;
        hblanke = hsynce + hbackp;
        hfrontp = mode->hsync_start - mode->hdisplay;
        hblanks = mode->htotal - hfrontp - 1;

        vactive = mode->vtotal * vscan / ilace;
        vsynce = ((mode->vsync_end - mode->vsync_start) * vscan / ilace) - 1;
        vbackp = (mode->vtotal - mode->vsync_end) * vscan / ilace;
        vblanke = vsynce + vbackp;
        vfrontp = (mode->vsync_start - mode->vdisplay) * vscan / ilace;
        vblanks = vactive - vfrontp - 1;
        if (mode->flags & DRM_MODE_FLAG_INTERLACE) {
                vblan2e = vactive + vsynce + vbackp;
                vblan2s = vblan2e + (mode->vdisplay * vscan / ilace);
                vactive = (vactive * 2) + 1;
        }

        ret = RING_SPACE(evo, 18);
        if (ret == 0) {
                BEGIN_RING(evo, 0, 0x0804 + head, 2);
                OUT_RING (evo, 0x00800000 | mode->clock);
                OUT_RING (evo, (ilace == 2) ? 2 : 0);
                BEGIN_RING(evo, 0, 0x0810 + head, 6);
                OUT_RING (evo, 0x00000000); /* border colour */
                OUT_RING (evo, (vactive << 16) | hactive);
                OUT_RING (evo, ( vsynce << 16) | hsynce);
                OUT_RING (evo, (vblanke << 16) | hblanke);
                OUT_RING (evo, (vblanks << 16) | hblanks);
                OUT_RING (evo, (vblan2e << 16) | vblan2s);
                BEGIN_RING(evo, 0, 0x082c + head, 1);
                OUT_RING (evo, 0x00000000);
                BEGIN_RING(evo, 0, 0x0900 + head, 1);
                OUT_RING (evo, 0x00000311); /* makes sync channel work */
                BEGIN_RING(evo, 0, 0x08c8 + head, 1);
                OUT_RING (evo, (umode->vdisplay << 16) | umode->hdisplay);
                BEGIN_RING(evo, 0, 0x08d4 + head, 1);
                OUT_RING (evo, 0x00000000); /* screen position */
        }

        nv_crtc->set_dither(nv_crtc, false);
        nv_crtc->set_scale(nv_crtc, false);

        return nv50_crtc_do_mode_set_base(crtc, old_fb, x, y, false);
}

static int
nv50_crtc_mode_set_base(struct drm_crtc *crtc, int x, int y,
                        struct drm_framebuffer *old_fb)
{
        int ret;

        nv50_display_flip_stop(crtc);
        ret = nv50_crtc_do_mode_set_base(crtc, old_fb, x, y, false);
        if (ret)
                return ret;

        ret = nv50_display_sync(crtc->dev);
        if (ret)
                return ret;

        return nv50_display_flip_next(crtc, crtc->fb, NULL);
}

static int
nv50_crtc_mode_set_base_atomic(struct drm_crtc *crtc,
                               struct drm_framebuffer *fb,
                               int x, int y, enum mode_set_atomic state)
{
        int ret;

        nv50_display_flip_stop(crtc);
        ret = nv50_crtc_do_mode_set_base(crtc, fb, x, y, true);
        if (ret)
                return ret;

        return nv50_display_sync(crtc->dev);
}

static const struct drm_crtc_helper_funcs nv50_crtc_helper_funcs = {
        .dpms = nv50_crtc_dpms,
        .prepare = nv50_crtc_prepare,
        .commit = nv50_crtc_commit,
        .mode_fixup = nv50_crtc_mode_fixup,
        .mode_set = nv50_crtc_mode_set,
        .mode_set_base = nv50_crtc_mode_set_base,
        .mode_set_base_atomic = nv50_crtc_mode_set_base_atomic,
        .load_lut = nv50_crtc_lut_load,
};

int
nv50_crtc_create(struct drm_device *dev, int index)
{
        struct nouveau_crtc *nv_crtc = NULL;
        int ret, i;

        NV_DEBUG_KMS(dev, "\n");

        nv_crtc = kzalloc(sizeof(*nv_crtc), GFP_KERNEL);
        if (!nv_crtc)
                return -ENOMEM;

        /* Default CLUT parameters; they will be activated on the hw upon
         * the first mode set.
         */
        for (i = 0; i < 256; i++) {
                nv_crtc->lut.r[i] = i << 8;
                nv_crtc->lut.g[i] = i << 8;
                nv_crtc->lut.b[i] = i << 8;
        }
        nv_crtc->lut.depth = 0;

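        /* The LUT lives in a small VRAM buffer; 4 KiB comfortably covers
         * 257 entries of 8 bytes each.
         */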
        ret = nouveau_bo_new(dev, 4096, 0x100, TTM_PL_FLAG_VRAM,
                             0, 0x0000, &nv_crtc->lut.nvbo);
        if (!ret) {
                ret = nouveau_bo_pin(nv_crtc->lut.nvbo, TTM_PL_FLAG_VRAM);
                if (!ret)
                        ret = nouveau_bo_map(nv_crtc->lut.nvbo);
                if (ret)
                        nouveau_bo_ref(NULL, &nv_crtc->lut.nvbo);
        }

        if (ret) {
                kfree(nv_crtc);
                return ret;
        }

        nv_crtc->index = index;

        /* set function pointers */
        nv_crtc->set_dither = nv50_crtc_set_dither;
        nv_crtc->set_scale = nv50_crtc_set_scale;

        drm_crtc_init(dev, &nv_crtc->base, &nv50_crtc_funcs);
        drm_crtc_helper_add(&nv_crtc->base, &nv50_crtc_helper_funcs);
        drm_mode_crtc_set_gamma_size(&nv_crtc->base, 256);

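        /* 64x64 ARGB8888 cursor image, pinned and mapped so the cursor_set
         * ioctl can copy straight into it.
         */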
        ret = nouveau_bo_new(dev, 64*64*4, 0x100, TTM_PL_FLAG_VRAM,
                             0, 0x0000, &nv_crtc->cursor.nvbo);
        if (!ret) {
                ret = nouveau_bo_pin(nv_crtc->cursor.nvbo, TTM_PL_FLAG_VRAM);
                if (!ret)
                        ret = nouveau_bo_map(nv_crtc->cursor.nvbo);
                if (ret)
                        nouveau_bo_ref(NULL, &nv_crtc->cursor.nvbo);
        }

        nv50_cursor_init(nv_crtc);
        return 0;
}