/*
 * Copyright (C) 2008 Maarten Maathuis.
 * All Rights Reserved.
 *
 * Permission is hereby granted, free of charge, to any person obtaining
 * a copy of this software and associated documentation files (the
 * "Software"), to deal in the Software without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sublicense, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so, subject to
 * the following conditions:
 *
 * The above copyright notice and this permission notice (including the
 * next paragraph) shall be included in all copies or substantial
 * portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
 * IN NO EVENT SHALL THE COPYRIGHT OWNER(S) AND/OR ITS SUPPLIERS BE
 * LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
 * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
 * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 *
 */

#include "drmP.h"
#include "drm_mode.h"
#include "drm_crtc_helper.h"

#define NOUVEAU_DMA_DEBUG (nouveau_reg_debug & NOUVEAU_REG_DEBUG_EVO)
#include "nouveau_reg.h"
#include "nouveau_drv.h"
#include "nouveau_hw.h"
#include "nouveau_encoder.h"
#include "nouveau_crtc.h"
#include "nouveau_fb.h"
#include "nouveau_connector.h"
#include "nv50_display.h"

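/* Upload the CRTC's software gamma table into its mapped LUT buffer
 * object.  Each hardware entry is 8 bytes, with the 16-bit software
 * R/G/B values written as 14-bit words; for a 30-bit LUT depth one
 * extra 257th entry is written, repeating the last table value.
 */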
static void
nv50_crtc_lut_load(struct drm_crtc *crtc)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	void __iomem *lut = nvbo_kmap_obj_iovirtual(nv_crtc->lut.nvbo);
	int i;

	NV_DEBUG_KMS(crtc->dev, "\n");

	for (i = 0; i < 256; i++) {
		writew(nv_crtc->lut.r[i] >> 2, lut + 8*i + 0);
		writew(nv_crtc->lut.g[i] >> 2, lut + 8*i + 2);
		writew(nv_crtc->lut.b[i] >> 2, lut + 8*i + 4);
	}

	if (nv_crtc->lut.depth == 30) {
		writew(nv_crtc->lut.r[i - 1] >> 2, lut + 8*i + 0);
		writew(nv_crtc->lut.g[i - 1] >> 2, lut + 8*i + 2);
		writew(nv_crtc->lut.b[i - 1] >> 2, lut + 8*i + 4);
	}
}

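/* Blank or unblank a CRTC by rebinding its CLUT and framebuffer DMA
 * objects through the EVO channel.  Blanking detaches both and hides
 * the cursor; unblanking restores the cursor state, the LUT, the
 * framebuffer offset and the DMA object matching the fb's tiling layout.
 */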
int
nv50_crtc_blank(struct nouveau_crtc *nv_crtc, bool blanked)
{
	struct drm_device *dev = nv_crtc->base.dev;
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nouveau_channel *evo = nv50_display(dev)->master;
	int index = nv_crtc->index, ret;

	NV_DEBUG_KMS(dev, "index %d\n", nv_crtc->index);
	NV_DEBUG_KMS(dev, "%s\n", blanked ? "blanked" : "unblanked");

	if (blanked) {
		nv_crtc->cursor.hide(nv_crtc, false);

		ret = RING_SPACE(evo, dev_priv->chipset != 0x50 ? 7 : 5);
		if (ret) {
			NV_ERROR(dev, "no space while blanking crtc\n");
			return ret;
		}
		BEGIN_RING(evo, 0, NV50_EVO_CRTC(index, CLUT_MODE), 2);
		OUT_RING(evo, NV50_EVO_CRTC_CLUT_MODE_BLANK);
		OUT_RING(evo, 0);
		if (dev_priv->chipset != 0x50) {
			BEGIN_RING(evo, 0, NV84_EVO_CRTC(index, CLUT_DMA), 1);
			OUT_RING(evo, NV84_EVO_CRTC_CLUT_DMA_HANDLE_NONE);
		}

		BEGIN_RING(evo, 0, NV50_EVO_CRTC(index, FB_DMA), 1);
		OUT_RING(evo, NV50_EVO_CRTC_FB_DMA_HANDLE_NONE);
	} else {
		if (nv_crtc->cursor.visible)
			nv_crtc->cursor.show(nv_crtc, false);
		else
			nv_crtc->cursor.hide(nv_crtc, false);

		ret = RING_SPACE(evo, dev_priv->chipset != 0x50 ? 10 : 8);
		if (ret) {
			NV_ERROR(dev, "no space while unblanking crtc\n");
			return ret;
		}
		BEGIN_RING(evo, 0, NV50_EVO_CRTC(index, CLUT_MODE), 2);
		OUT_RING(evo, nv_crtc->lut.depth == 8 ?
			      NV50_EVO_CRTC_CLUT_MODE_OFF :
			      NV50_EVO_CRTC_CLUT_MODE_ON);
		OUT_RING(evo, nv_crtc->lut.nvbo->bo.offset >> 8);
		if (dev_priv->chipset != 0x50) {
			BEGIN_RING(evo, 0, NV84_EVO_CRTC(index, CLUT_DMA), 1);
			OUT_RING(evo, NvEvoVRAM);
		}

		BEGIN_RING(evo, 0, NV50_EVO_CRTC(index, FB_OFFSET), 2);
		OUT_RING(evo, nv_crtc->fb.offset >> 8);
		OUT_RING(evo, 0);
		BEGIN_RING(evo, 0, NV50_EVO_CRTC(index, FB_DMA), 1);
		if (dev_priv->chipset != 0x50)
			if (nv_crtc->fb.tile_flags == 0x7a00 ||
			    nv_crtc->fb.tile_flags == 0xfe00)
				OUT_RING(evo, NvEvoFB32);
			else
			if (nv_crtc->fb.tile_flags == 0x7000)
				OUT_RING(evo, NvEvoFB16);
			else
				OUT_RING(evo, NvEvoVRAM_LP);
		else
			OUT_RING(evo, NvEvoVRAM_LP);
	}

	nv_crtc->fb.blanked = blanked;
	return 0;
}

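/* Program the dither control method for this head from the connector's
 * dithering mode/depth properties, resolving the AUTO settings from the
 * bound framebuffer depth and the sink's reported bpc.
 */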
static int
nv50_crtc_set_dither(struct nouveau_crtc *nv_crtc, bool update)
{
	struct nouveau_channel *evo = nv50_display(nv_crtc->base.dev)->master;
	struct nouveau_connector *nv_connector;
	struct drm_connector *connector;
	int head = nv_crtc->index, ret;
	u32 mode = 0x00;

	nv_connector = nouveau_crtc_connector_get(nv_crtc);
	connector = &nv_connector->base;
	if (nv_connector->dithering_mode == DITHERING_MODE_AUTO) {
		if (nv_crtc->base.fb->depth > connector->display_info.bpc * 3)
			mode = DITHERING_MODE_DYNAMIC2X2;
	} else {
		mode = nv_connector->dithering_mode;
	}

	if (nv_connector->dithering_depth == DITHERING_DEPTH_AUTO) {
		if (connector->display_info.bpc >= 8)
			mode |= DITHERING_DEPTH_8BPC;
	} else {
		mode |= nv_connector->dithering_depth;
	}

	ret = RING_SPACE(evo, 2 + (update ? 2 : 0));
	if (ret == 0) {
		BEGIN_RING(evo, 0, NV50_EVO_CRTC(head, DITHER_CTRL), 1);
		OUT_RING  (evo, mode);
		if (update) {
			BEGIN_RING(evo, 0, NV50_EVO_UPDATE, 1);
			OUT_RING  (evo, 0);
			FIRE_RING (evo);
		}
	}

	return ret;
}

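/* Update the COLOR_CTRL method with the CRTC's colour vibrance and
 * vibrant hue settings, optionally firing an UPDATE afterwards.
 */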
static int
nv50_crtc_set_color_vibrance(struct nouveau_crtc *nv_crtc, bool update)
{
	struct drm_device *dev = nv_crtc->base.dev;
	struct nouveau_channel *evo = nv50_display(dev)->master;
	int ret;
	int adj;
	u32 hue, vib;

	NV_DEBUG_KMS(dev, "vibrance = %i, hue = %i\n",
		     nv_crtc->color_vibrance, nv_crtc->vibrant_hue);

	ret = RING_SPACE(evo, 2 + (update ? 2 : 0));
	if (ret) {
		NV_ERROR(dev, "no space while setting color vibrance\n");
		return ret;
	}

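	/* Scale both property values onto the 12-bit COLOR_CTRL fields
	 * (value * 2047 / 100, i.e. they are treated as percentage-style
	 * settings); 'adj' rounds positive vibrance to the nearest step.
	 */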
	adj = (nv_crtc->color_vibrance > 0) ? 50 : 0;
	vib = ((nv_crtc->color_vibrance * 2047 + adj) / 100) & 0xfff;

	hue = ((nv_crtc->vibrant_hue * 2047) / 100) & 0xfff;

	BEGIN_RING(evo, 0, NV50_EVO_CRTC(nv_crtc->index, COLOR_CTRL), 1);
	OUT_RING  (evo, (hue << 20) | (vib << 8));

	if (update) {
		BEGIN_RING(evo, 0, NV50_EVO_UPDATE, 1);
		OUT_RING  (evo, 0);
		FIRE_RING (evo);
	}

	return 0;
}

struct nouveau_connector *
nouveau_crtc_connector_get(struct nouveau_crtc *nv_crtc)
{
	struct drm_device *dev = nv_crtc->base.dev;
	struct drm_connector *connector;
	struct drm_crtc *crtc = to_drm_crtc(nv_crtc);

	/* The safest approach is to find an encoder with the right crtc, that
	 * is also linked to a connector. */
	list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
		if (connector->encoder)
			if (connector->encoder->crtc == crtc)
				return nouveau_connector(connector);
	}

	return NULL;
}

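/* Program the scaler for this CRTC: pick the output mode (the native
 * panel mode unless scaling is disabled), apply underscan borders where
 * requested, then resolve CENTER/ASPECT scaling against the user mode.
 */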
static int
nv50_crtc_set_scale(struct nouveau_crtc *nv_crtc, bool update)
{
	struct nouveau_connector *nv_connector;
	struct drm_crtc *crtc = &nv_crtc->base;
	struct drm_device *dev = crtc->dev;
	struct nouveau_channel *evo = nv50_display(dev)->master;
	struct drm_display_mode *umode = &crtc->mode;
	struct drm_display_mode *omode;
	int scaling_mode, ret;
	u32 ctrl = 0, oX, oY;

	NV_DEBUG_KMS(dev, "\n");

	nv_connector = nouveau_crtc_connector_get(nv_crtc);
	if (!nv_connector || !nv_connector->native_mode) {
		NV_ERROR(dev, "no native mode, forcing panel scaling\n");
		scaling_mode = DRM_MODE_SCALE_NONE;
	} else {
		scaling_mode = nv_connector->scaling_mode;
	}

	/* start off at the resolution we programmed the crtc for, this
	 * effectively handles NONE/FULL scaling
	 */
	if (scaling_mode != DRM_MODE_SCALE_NONE)
		omode = nv_connector->native_mode;
	else
		omode = umode;

	oX = omode->hdisplay;
	oY = omode->vdisplay;
	if (omode->flags & DRM_MODE_FLAG_DBLSCAN)
		oY *= 2;

	/* add overscan compensation if necessary, will keep the aspect
	 * ratio the same as the backend mode unless overridden by the
	 * user setting both hborder and vborder properties.
	 */
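	/* note: 'aspect' below is oY/oX kept as a 0.19 fixed-point value
	 * (scaled by 1 << 19), hence the "+ (aspect / 2)" rounding and the
	 * final ">> 19" when deriving a height from a width.
	 */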
	if (nv_connector && (nv_connector->underscan == UNDERSCAN_ON ||
			    (nv_connector->underscan == UNDERSCAN_AUTO &&
			     nv_connector->edid &&
			     drm_detect_hdmi_monitor(nv_connector->edid)))) {
		u32 bX = nv_connector->underscan_hborder;
		u32 bY = nv_connector->underscan_vborder;
		u32 aspect = (oY << 19) / oX;

		if (bX) {
			oX -= (bX * 2);
			if (bY) oY -= (bY * 2);
			else    oY  = ((oX * aspect) + (aspect / 2)) >> 19;
		} else {
			oX -= (oX >> 4) + 32;
			if (bY) oY -= (bY * 2);
			else    oY  = ((oX * aspect) + (aspect / 2)) >> 19;
		}
	}

	/* handle CENTER/ASPECT scaling, taking into account the areas
	 * removed already for overscan compensation
	 */
	switch (scaling_mode) {
	case DRM_MODE_SCALE_CENTER:
		oX = min((u32)umode->hdisplay, oX);
		oY = min((u32)umode->vdisplay, oY);
		/* fall-through */
	case DRM_MODE_SCALE_ASPECT:
		if (oY < oX) {
			u32 aspect = (umode->hdisplay << 19) / umode->vdisplay;
			oX = ((oY * aspect) + (aspect / 2)) >> 19;
		} else {
			u32 aspect = (umode->vdisplay << 19) / umode->hdisplay;
			oY = ((oX * aspect) + (aspect / 2)) >> 19;
		}
		break;
	default:
		break;
	}

	if (umode->hdisplay != oX || umode->vdisplay != oY ||
	    umode->flags & DRM_MODE_FLAG_INTERLACE ||
	    umode->flags & DRM_MODE_FLAG_DBLSCAN)
		ctrl |= NV50_EVO_CRTC_SCALE_CTRL_ACTIVE;

	ret = RING_SPACE(evo, 5);
	if (ret)
		return ret;

	BEGIN_RING(evo, 0, NV50_EVO_CRTC(nv_crtc->index, SCALE_CTRL), 1);
	OUT_RING  (evo, ctrl);
	BEGIN_RING(evo, 0, NV50_EVO_CRTC(nv_crtc->index, SCALE_RES1), 2);
	OUT_RING  (evo, oY << 16 | oX);
	OUT_RING  (evo, oY << 16 | oX);

	if (update) {
		nv50_display_flip_stop(crtc);
		nv50_display_sync(dev);
		nv50_display_flip_next(crtc, crtc->fb, NULL);
	}

	return 0;
}

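/* Program the video PLL (VPLL) for the given head to the requested pixel
 * clock.  Three register layouts are handled: a two-stage PLL (when the
 * limits report a second VCO), the single-stage PLL with a fractional
 * feedback divider used up to NV_C0, and the NV_C0+ layout.
 */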
int
nv50_crtc_set_clock(struct drm_device *dev, int head, int pclk)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct pll_lims pll;
	uint32_t reg1, reg2;
	int ret, N1, M1, N2, M2, P;

	ret = get_pll_limits(dev, PLL_VPLL0 + head, &pll);
	if (ret)
		return ret;

	if (pll.vco2.maxfreq) {
		ret = nv50_calc_pll(dev, &pll, pclk, &N1, &M1, &N2, &M2, &P);
		if (ret <= 0)
			return 0;

		NV_DEBUG(dev, "pclk %d out %d NM1 %d %d NM2 %d %d P %d\n",
			 pclk, ret, N1, M1, N2, M2, P);

		reg1 = nv_rd32(dev, pll.reg + 4) & 0xff00ff00;
		reg2 = nv_rd32(dev, pll.reg + 8) & 0x8000ff00;
		nv_wr32(dev, pll.reg + 0, 0x10000611);
		nv_wr32(dev, pll.reg + 4, reg1 | (M1 << 16) | N1);
		nv_wr32(dev, pll.reg + 8, reg2 | (P << 28) | (M2 << 16) | N2);
	} else
	if (dev_priv->chipset < NV_C0) {
		ret = nva3_calc_pll(dev, &pll, pclk, &N1, &N2, &M1, &P);
		if (ret <= 0)
			return 0;

		NV_DEBUG(dev, "pclk %d out %d N %d fN 0x%04x M %d P %d\n",
			 pclk, ret, N1, N2, M1, P);

		reg1 = nv_rd32(dev, pll.reg + 4) & 0xffc00000;
		nv_wr32(dev, pll.reg + 0, 0x50000610);
		nv_wr32(dev, pll.reg + 4, reg1 | (P << 16) | (M1 << 8) | N1);
		nv_wr32(dev, pll.reg + 8, N2);
	} else {
		ret = nva3_calc_pll(dev, &pll, pclk, &N1, &N2, &M1, &P);
		if (ret <= 0)
			return 0;

		NV_DEBUG(dev, "pclk %d out %d N %d fN 0x%04x M %d P %d\n",
			 pclk, ret, N1, N2, M1, P);

		nv_mask(dev, pll.reg + 0x0c, 0x00000000, 0x00000100);
		nv_wr32(dev, pll.reg + 0x04, (P << 16) | (N1 << 8) | M1);
		nv_wr32(dev, pll.reg + 0x10, N2 << 16);
	}

	return 0;
}

static void
nv50_crtc_destroy(struct drm_crtc *crtc)
{
	struct drm_device *dev;
	struct nouveau_crtc *nv_crtc;

	if (!crtc)
		return;

	dev = crtc->dev;
	nv_crtc = nouveau_crtc(crtc);

	NV_DEBUG_KMS(dev, "\n");

	drm_crtc_cleanup(&nv_crtc->base);

	nouveau_bo_unmap(nv_crtc->lut.nvbo);
	nouveau_bo_ref(NULL, &nv_crtc->lut.nvbo);
	nouveau_bo_unmap(nv_crtc->cursor.nvbo);
	nouveau_bo_ref(NULL, &nv_crtc->cursor.nvbo);
	kfree(nv_crtc);
}

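/* Set (or clear) the hardware cursor image.  A zero buffer handle hides
 * the cursor; otherwise the 64x64 image (one 32-bit word per pixel) is
 * copied from the userspace GEM object into the CRTC's own cursor
 * buffer, which is then made the active cursor surface.
 */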
int
nv50_crtc_cursor_set(struct drm_crtc *crtc, struct drm_file *file_priv,
		     uint32_t buffer_handle, uint32_t width, uint32_t height)
{
	struct drm_device *dev = crtc->dev;
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct nouveau_bo *cursor = NULL;
	struct drm_gem_object *gem;
	int ret = 0, i;

	if (!buffer_handle) {
		nv_crtc->cursor.hide(nv_crtc, true);
		return 0;
	}

	if (width != 64 || height != 64)
		return -EINVAL;

	gem = drm_gem_object_lookup(dev, file_priv, buffer_handle);
	if (!gem)
		return -ENOENT;
	cursor = nouveau_gem_object(gem);

	ret = nouveau_bo_map(cursor);
	if (ret)
		goto out;

	/* The simple approach will do for now. */
	for (i = 0; i < 64 * 64; i++)
		nouveau_bo_wr32(nv_crtc->cursor.nvbo, i, nouveau_bo_rd32(cursor, i));

	nouveau_bo_unmap(cursor);

	nv_crtc->cursor.set_offset(nv_crtc, nv_crtc->cursor.nvbo->bo.offset);
	nv_crtc->cursor.show(nv_crtc, true);

out:
	drm_gem_object_unreference_unlocked(gem);
	return ret;
}

int
nv50_crtc_cursor_move(struct drm_crtc *crtc, int x, int y)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);

	nv_crtc->cursor.set_pos(nv_crtc, x, y);
	return 0;
}

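/* DRM gamma_set hook: cache the requested ramp in the CRTC's software
 * LUT and upload it immediately, unless no framebuffer is bound yet, in
 * which case the upload is deferred to the next mode_set_base().
 */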
static void
nv50_crtc_gamma_set(struct drm_crtc *crtc, u16 *r, u16 *g, u16 *b,
		    uint32_t start, uint32_t size)
{
	int end = (start + size > 256) ? 256 : start + size, i;
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);

	for (i = start; i < end; i++) {
		nv_crtc->lut.r[i] = r[i];
		nv_crtc->lut.g[i] = g[i];
		nv_crtc->lut.b[i] = b[i];
	}

	/* We need to know the depth before we upload, but it's possible to
	 * get called before a framebuffer is bound. If this is the case,
	 * mark the lut values as dirty by setting depth==0, and it'll be
	 * uploaded on the first mode_set_base()
	 */
	if (!nv_crtc->base.fb) {
		nv_crtc->lut.depth = 0;
		return;
	}

	nv50_crtc_lut_load(crtc);
}

static void
nv50_crtc_save(struct drm_crtc *crtc)
{
	NV_ERROR(crtc->dev, "!!\n");
}

static void
nv50_crtc_restore(struct drm_crtc *crtc)
{
	NV_ERROR(crtc->dev, "!!\n");
}

static const struct drm_crtc_funcs nv50_crtc_funcs = {
	.save = nv50_crtc_save,
	.restore = nv50_crtc_restore,
	.cursor_set = nv50_crtc_cursor_set,
	.cursor_move = nv50_crtc_cursor_move,
	.gamma_set = nv50_crtc_gamma_set,
	.set_config = drm_crtc_helper_set_config,
	.page_flip = nouveau_crtc_page_flip,
	.destroy = nv50_crtc_destroy,
};

static void
nv50_crtc_dpms(struct drm_crtc *crtc, int mode)
{
}

static void
nv50_crtc_prepare(struct drm_crtc *crtc)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct drm_device *dev = crtc->dev;

	NV_DEBUG_KMS(dev, "index %d\n", nv_crtc->index);

	nv50_display_flip_stop(crtc);
	drm_vblank_pre_modeset(dev, nv_crtc->index);
	nv50_crtc_blank(nv_crtc, true);
}

static void
nv50_crtc_commit(struct drm_crtc *crtc)
{
	struct drm_device *dev = crtc->dev;
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);

	NV_DEBUG_KMS(dev, "index %d\n", nv_crtc->index);

	nv50_crtc_blank(nv_crtc, false);
	drm_vblank_post_modeset(dev, nv_crtc->index);
	nv50_display_sync(dev);
	nv50_display_flip_next(crtc, crtc->fb, NULL);
}

static bool
nv50_crtc_mode_fixup(struct drm_crtc *crtc, struct drm_display_mode *mode,
		     struct drm_display_mode *adjusted_mode)
{
	return true;
}

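/* Point the CRTC at a framebuffer and push its layout (offset, pitch,
 * format, DMA object, pan position) to the hardware.  In the atomic
 * path the buffer is assumed to be pinned already; otherwise the new
 * framebuffer is pinned and the previous one unpinned.
 */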
static int
nv50_crtc_do_mode_set_base(struct drm_crtc *crtc,
			   struct drm_framebuffer *passed_fb,
			   int x, int y, bool atomic)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct drm_device *dev = nv_crtc->base.dev;
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nouveau_channel *evo = nv50_display(dev)->master;
	struct drm_framebuffer *drm_fb;
	struct nouveau_framebuffer *fb;
	int ret;

	NV_DEBUG_KMS(dev, "index %d\n", nv_crtc->index);

	/* no fb bound */
	if (!atomic && !crtc->fb) {
		NV_DEBUG_KMS(dev, "No FB bound\n");
		return 0;
	}

	/* If atomic, we want to switch to the fb we were passed, so
	 * now we update pointers to do that. (We don't pin; just
	 * assume we're already pinned and update the base address.)
	 */
	if (atomic) {
		drm_fb = passed_fb;
		fb = nouveau_framebuffer(passed_fb);
	} else {
		drm_fb = crtc->fb;
		fb = nouveau_framebuffer(crtc->fb);
		/* If not atomic, we can go ahead and pin, and unpin the
		 * old fb we were passed.
		 */
		ret = nouveau_bo_pin(fb->nvbo, TTM_PL_FLAG_VRAM);
		if (ret)
			return ret;

		if (passed_fb) {
			struct nouveau_framebuffer *ofb = nouveau_framebuffer(passed_fb);
			nouveau_bo_unpin(ofb->nvbo);
		}
	}

	nv_crtc->fb.offset = fb->nvbo->bo.offset;
	nv_crtc->fb.tile_flags = nouveau_bo_tile_layout(fb->nvbo);
	nv_crtc->fb.cpp = drm_fb->bits_per_pixel / 8;
	if (!nv_crtc->fb.blanked && dev_priv->chipset != 0x50) {
		ret = RING_SPACE(evo, 2);
		if (ret)
			return ret;

		BEGIN_RING(evo, 0, NV50_EVO_CRTC(nv_crtc->index, FB_DMA), 1);
		OUT_RING  (evo, fb->r_dma);
	}

	ret = RING_SPACE(evo, 12);
	if (ret)
		return ret;

	BEGIN_RING(evo, 0, NV50_EVO_CRTC(nv_crtc->index, FB_OFFSET), 5);
	OUT_RING  (evo, nv_crtc->fb.offset >> 8);
	OUT_RING  (evo, 0);
	OUT_RING  (evo, (drm_fb->height << 16) | drm_fb->width);
	OUT_RING  (evo, fb->r_pitch);
	OUT_RING  (evo, fb->r_format);

	BEGIN_RING(evo, 0, NV50_EVO_CRTC(nv_crtc->index, CLUT_MODE), 1);
	OUT_RING  (evo, fb->base.depth == 8 ?
		   NV50_EVO_CRTC_CLUT_MODE_OFF : NV50_EVO_CRTC_CLUT_MODE_ON);

	BEGIN_RING(evo, 0, NV50_EVO_CRTC(nv_crtc->index, FB_POS), 1);
	OUT_RING  (evo, (y << 16) | x);

	if (nv_crtc->lut.depth != fb->base.depth) {
		nv_crtc->lut.depth = fb->base.depth;
		nv50_crtc_lut_load(crtc);
	}

	return 0;
}

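/* Full mode set: convert the DRM mode timings into the hardware's
 * sync/blank representation, program them through the EVO channel, then
 * refresh dithering, scaling and colour vibrance before pointing the
 * CRTC at the framebuffer.
 */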
static int
nv50_crtc_mode_set(struct drm_crtc *crtc, struct drm_display_mode *umode,
		   struct drm_display_mode *mode, int x, int y,
		   struct drm_framebuffer *old_fb)
{
	struct drm_device *dev = crtc->dev;
	struct nouveau_channel *evo = nv50_display(dev)->master;
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	u32 head = nv_crtc->index * 0x400;
	u32 ilace = (mode->flags & DRM_MODE_FLAG_INTERLACE) ? 2 : 1;
	u32 vscan = (mode->flags & DRM_MODE_FLAG_DBLSCAN) ? 2 : 1;
	u32 hactive, hsynce, hbackp, hfrontp, hblanke, hblanks;
	u32 vactive, vsynce, vbackp, vfrontp, vblanke, vblanks;
	u32 vblan2e = 0, vblan2s = 1;
	int ret;

	/* hw timing description looks like this:
	 *
	 * <sync> <back porch> <---------display---------> <front porch>
	 *  ______
	 *        |____________|---------------------------|____________|
	 *
	 *       ^ synce      ^ blanke                    ^ blanks      ^ active
	 *
	 * interlaced modes also have 2 additional values pointing at the end
	 * and start of the next field's blanking period.
	 */

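	/* Illustrative example, derived from the formulas below: for a
	 * 1920x1080@60 CEA mode (htotal 2200, hsync 2008-2052, vtotal 1125,
	 * vsync 1084-1089) this gives
	 *   hactive 2200, hsynce 43, hblanke 191,  hblanks 2111
	 *   vactive 1125, vsynce 4,  vblanke 40,   vblanks 1120
	 */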
	hactive = mode->htotal;
	hsynce  = mode->hsync_end - mode->hsync_start - 1;
	hbackp  = mode->htotal - mode->hsync_end;
	hblanke = hsynce + hbackp;
	hfrontp = mode->hsync_start - mode->hdisplay;
	hblanks = mode->htotal - hfrontp - 1;

	vactive = mode->vtotal * vscan / ilace;
	vsynce  = ((mode->vsync_end - mode->vsync_start) * vscan / ilace) - 1;
	vbackp  = (mode->vtotal - mode->vsync_end) * vscan / ilace;
	vblanke = vsynce + vbackp;
	vfrontp = (mode->vsync_start - mode->vdisplay) * vscan / ilace;
	vblanks = vactive - vfrontp - 1;
	if (mode->flags & DRM_MODE_FLAG_INTERLACE) {
		vblan2e = vactive + vsynce + vbackp;
		vblan2s = vblan2e + (mode->vdisplay * vscan / ilace);
		vactive = (vactive * 2) + 1;
	}

	ret = RING_SPACE(evo, 18);
	if (ret == 0) {
		BEGIN_RING(evo, 0, 0x0804 + head, 2);
		OUT_RING  (evo, 0x00800000 | mode->clock);
		OUT_RING  (evo, (ilace == 2) ? 2 : 0);
		BEGIN_RING(evo, 0, 0x0810 + head, 6);
		OUT_RING  (evo, 0x00000000); /* border colour */
		OUT_RING  (evo, (vactive << 16) | hactive);
		OUT_RING  (evo, ( vsynce << 16) | hsynce);
		OUT_RING  (evo, (vblanke << 16) | hblanke);
		OUT_RING  (evo, (vblanks << 16) | hblanks);
		OUT_RING  (evo, (vblan2e << 16) | vblan2s);
		BEGIN_RING(evo, 0, 0x082c + head, 1);
		OUT_RING  (evo, 0x00000000);
		BEGIN_RING(evo, 0, 0x0900 + head, 1);
		OUT_RING  (evo, 0x00000311); /* makes sync channel work */
		BEGIN_RING(evo, 0, 0x08c8 + head, 1);
		OUT_RING  (evo, (umode->vdisplay << 16) | umode->hdisplay);
		BEGIN_RING(evo, 0, 0x08d4 + head, 1);
		OUT_RING  (evo, 0x00000000); /* screen position */
	}

	nv_crtc->set_dither(nv_crtc, false);
	nv_crtc->set_scale(nv_crtc, false);
	nv_crtc->set_color_vibrance(nv_crtc, false);

	return nv50_crtc_do_mode_set_base(crtc, old_fb, x, y, false);
}

static int
nv50_crtc_mode_set_base(struct drm_crtc *crtc, int x, int y,
			struct drm_framebuffer *old_fb)
{
	int ret;

	nv50_display_flip_stop(crtc);
	ret = nv50_crtc_do_mode_set_base(crtc, old_fb, x, y, false);
	if (ret)
		return ret;

	ret = nv50_display_sync(crtc->dev);
	if (ret)
		return ret;

	return nv50_display_flip_next(crtc, crtc->fb, NULL);
}

static int
nv50_crtc_mode_set_base_atomic(struct drm_crtc *crtc,
			       struct drm_framebuffer *fb,
			       int x, int y, enum mode_set_atomic state)
{
	int ret;

	nv50_display_flip_stop(crtc);
	ret = nv50_crtc_do_mode_set_base(crtc, fb, x, y, true);
	if (ret)
		return ret;

	return nv50_display_sync(crtc->dev);
}

static const struct drm_crtc_helper_funcs nv50_crtc_helper_funcs = {
	.dpms = nv50_crtc_dpms,
	.prepare = nv50_crtc_prepare,
	.commit = nv50_crtc_commit,
	.mode_fixup = nv50_crtc_mode_fixup,
	.mode_set = nv50_crtc_mode_set,
	.mode_set_base = nv50_crtc_mode_set_base,
	.mode_set_base_atomic = nv50_crtc_mode_set_base_atomic,
	.load_lut = nv50_crtc_lut_load,
};

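/* Allocate and register one CRTC: set colour property defaults, build a
 * linear default gamma table, allocate and map the LUT and cursor buffer
 * objects in VRAM, hook up the per-CRTC function pointers and register
 * the CRTC with the DRM core.
 */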
int
nv50_crtc_create(struct drm_device *dev, int index)
{
	struct nouveau_crtc *nv_crtc = NULL;
	int ret, i;

	NV_DEBUG_KMS(dev, "\n");

	nv_crtc = kzalloc(sizeof(*nv_crtc), GFP_KERNEL);
	if (!nv_crtc)
		return -ENOMEM;

	nv_crtc->color_vibrance = 50;
	nv_crtc->vibrant_hue = 0;

	/* Default CLUT parameters, will be activated on the hw upon
	 * first mode set.
	 */
	for (i = 0; i < 256; i++) {
		nv_crtc->lut.r[i] = i << 8;
		nv_crtc->lut.g[i] = i << 8;
		nv_crtc->lut.b[i] = i << 8;
	}
	nv_crtc->lut.depth = 0;

	ret = nouveau_bo_new(dev, 4096, 0x100, TTM_PL_FLAG_VRAM,
			     0, 0x0000, &nv_crtc->lut.nvbo);
	if (!ret) {
		ret = nouveau_bo_pin(nv_crtc->lut.nvbo, TTM_PL_FLAG_VRAM);
		if (!ret)
			ret = nouveau_bo_map(nv_crtc->lut.nvbo);
		if (ret)
			nouveau_bo_ref(NULL, &nv_crtc->lut.nvbo);
	}

	if (ret) {
		kfree(nv_crtc);
		return ret;
	}

	nv_crtc->index = index;

	/* set function pointers */
	nv_crtc->set_dither = nv50_crtc_set_dither;
	nv_crtc->set_scale = nv50_crtc_set_scale;
	nv_crtc->set_color_vibrance = nv50_crtc_set_color_vibrance;

	drm_crtc_init(dev, &nv_crtc->base, &nv50_crtc_funcs);
	drm_crtc_helper_add(&nv_crtc->base, &nv50_crtc_helper_funcs);
	drm_mode_crtc_set_gamma_size(&nv_crtc->base, 256);

	ret = nouveau_bo_new(dev, 64*64*4, 0x100, TTM_PL_FLAG_VRAM,
			     0, 0x0000, &nv_crtc->cursor.nvbo);
	if (!ret) {
		ret = nouveau_bo_pin(nv_crtc->cursor.nvbo, TTM_PL_FLAG_VRAM);
		if (!ret)
			ret = nouveau_bo_map(nv_crtc->cursor.nvbo);
		if (ret)
			nouveau_bo_ref(NULL, &nv_crtc->cursor.nvbo);
	}

	nv50_cursor_init(nv_crtc);
	return 0;
}