 * Copyright 2011 Red Hat Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
25 #include <linux/dma-mapping.h>
28 #include <drm/drm_crtc_helper.h>
29 #include <drm/drm_dp_helper.h>
31 #include <nvif/class.h>
33 #include "nouveau_drm.h"
34 #include "nouveau_dma.h"
35 #include "nouveau_gem.h"
36 #include "nouveau_connector.h"
37 #include "nouveau_encoder.h"
38 #include "nouveau_crtc.h"
39 #include "nouveau_fence.h"
40 #include "nv50_display.h"
/* EVO channel indices: one master (core) channel, then per-head base (flip),
 * overlay, overlay-immediate and cursor channels. */
44 #define EVO_MASTER (0x00)
45 #define EVO_FLIP(c) (0x01 + (c))
46 #define EVO_OVLY(c) (0x05 + (c))
47 #define EVO_OIMM(c) (0x09 + (c))
48 #define EVO_CURS(c) (0x0d + (c))
50 /* offsets in shared sync bo of various structures */
/* Each channel gets a 0x100-byte slice of the shared sync buffer object. */
51 #define EVO_SYNC(c, o) ((c) * 0x0100 + (o))
/* Slot 0 holds the master-channel notifier; per-head flip semaphore pairs
 * start at slot 1 (hence the "(c) + 1"). */
52 #define EVO_MAST_NTFY EVO_SYNC( 0, 0x00)
53 #define EVO_FLIP_SEM0(c) EVO_SYNC((c) + 1, 0x00)
54 #define EVO_FLIP_SEM1(c) EVO_SYNC((c) + 1, 0x10)
/* Fixed object handles used when instantiating the core/display channels;
 * EVO_PUSH_HANDLE encodes the channel class byte and index into the handle. */
56 #define EVO_CORE_HANDLE (0xd1500000)
57 #define EVO_CHAN_HANDLE(t,i) (0xd15c0000 | (((t) & 0x00ff) << 8) | (i))
58 #define EVO_CHAN_OCLASS(t,c) (((c)->oclass & 0xff00) | ((t) & 0x00ff))
59 #define EVO_PUSH_HANDLE(t,i) (0xd15b0000 | (i) | \
60 (((NV50_DISP_##t##_CLASS) & 0x00ff) << 8))
62 /******************************************************************************
64 *****************************************************************************/
/* nv50_chan: thin wrapper holding the nvif object for one display channel. */
67 struct nvif_object user;
/* Create a display channel, trying each class in the 'oclass' list in turn
 * (NOTE(review): the retry loop head is elided from this extract) until
 * nvif_object_init() accepts one.  The object handle encodes class and head. */
71 nv50_chan_create(struct nvif_object *disp, const u32 *oclass, u8 head,
72 void *data, u32 size, struct nv50_chan *chan)
75 int ret = nvif_object_init(disp, NULL, (oclass[0] << 16) | head,
76 oclass[0], data, size,
/* Comma operator: advance to the next candidate class, then test the result
 * of the attempt just made — success exits the loop. */
78 if (oclass++, ret == 0)
/* Tear down a channel created by nv50_chan_create(). */
85 nv50_chan_destroy(struct nv50_chan *chan)
87 nvif_object_fini(&chan->user);
90 /******************************************************************************
92 *****************************************************************************/
/* nv50_pioc: a PIO (non-DMA) display channel; just an nv50_chan underneath. */
95 struct nv50_chan base;
/* Destroy a PIO channel — delegates straight to the base channel teardown. */
99 nv50_pioc_destroy(struct nv50_pioc *pioc)
101 nv50_chan_destroy(&pioc->base);
/* Create a PIO channel — delegates straight to the base channel creation. */
105 nv50_pioc_create(struct nvif_object *disp, const u32 *oclass, u8 head,
106 void *data, u32 size, struct nv50_pioc *pioc)
108 return nv50_chan_create(disp, oclass, head, data, size, &pioc->base);
111 /******************************************************************************
113 *****************************************************************************/
/* nv50_curs: per-head hardware cursor channel (PIO). */
116 struct nv50_pioc base;
/* Create the cursor channel for 'head'.  The class list is ordered newest
 * first so nv50_chan_create() picks the best class the hardware supports. */
120 nv50_curs_create(struct nvif_object *disp, int head, struct nv50_curs *curs)
122 struct nv50_display_curs_class args = {
125 static const u32 oclass[] = {
126 GM107_DISP_CURS_CLASS,
127 NVF0_DISP_CURS_CLASS,
128 NVE0_DISP_CURS_CLASS,
129 NVD0_DISP_CURS_CLASS,
130 NVA3_DISP_CURS_CLASS,
131 NV94_DISP_CURS_CLASS,
132 NVA0_DISP_CURS_CLASS,
133 NV84_DISP_CURS_CLASS,
134 NV50_DISP_CURS_CLASS,
138 return nv50_pioc_create(disp, oclass, head, &args, sizeof(args),
142 /******************************************************************************
144 *****************************************************************************/
/* nv50_oimm: per-head overlay-immediate channel (PIO). */
147 struct nv50_pioc base;
/* Create the overlay-immediate channel for 'head'; class list ordered
 * newest hardware first, same pattern as the cursor channel above. */
151 nv50_oimm_create(struct nvif_object *disp, int head, struct nv50_oimm *oimm)
153 struct nv50_display_oimm_class args = {
156 static const u32 oclass[] = {
157 GM107_DISP_OIMM_CLASS,
158 NVF0_DISP_OIMM_CLASS,
159 NVE0_DISP_OIMM_CLASS,
160 NVD0_DISP_OIMM_CLASS,
161 NVA3_DISP_OIMM_CLASS,
162 NV94_DISP_OIMM_CLASS,
163 NVA0_DISP_OIMM_CLASS,
164 NV84_DISP_OIMM_CLASS,
165 NV50_DISP_OIMM_CLASS,
169 return nv50_pioc_create(disp, oclass, head, &args, sizeof(args),
173 /******************************************************************************
175 *****************************************************************************/
/* nv50_dmac: a DMA display channel — an nv50_chan plus DMA objects for the
 * shared sync buffer and for all of VRAM, and a host-visible pushbuffer. */
178 struct nv50_chan base;
182 struct nvif_object sync;
183 struct nvif_object vram;
185 /* Protects against concurrent pushbuf access to this channel, lock is
186 * grabbed by evo_wait (if the pushbuf reservation is successful) and
187 * dropped again by evo_kick. */
/* Tear down in reverse order of creation: DMA objects, base channel, then
 * the coherent pushbuffer page allocated in nv50_dmac_create(). */
192 nv50_dmac_destroy(struct nv50_dmac *dmac, struct nvif_object *disp)
194 nvif_object_fini(&dmac->vram);
195 nvif_object_fini(&dmac->sync);
197 nv50_chan_destroy(&dmac->base);
200 struct pci_dev *pdev = nvkm_device(nvif_device(disp))->pdev;
201 pci_free_consistent(pdev, PAGE_SIZE, dmac->ptr, dmac->handle);
/* Create a DMA display channel:
 *  - allocate one coherent page as the pushbuffer (dmac->ptr / dmac->handle),
 *  - expose it to the GPU as a PCI DMA object ('pushbuf', handle from *data),
 *  - create the channel itself, then
 *  - create DMA objects 0xf0000000 (sync buffer window) and 0xf0000001
 *    (whole of VRAM, limit pfb->ram->size - 1) inside the channel.
 * NOTE(review): error-path lines are elided from this extract. */
206 nv50_dmac_create(struct nvif_object *disp, const u32 *oclass, u8 head,
207 void *data, u32 size, u64 syncbuf,
208 struct nv50_dmac *dmac)
210 struct nouveau_fb *pfb = nvkm_fb(nvif_device(disp));
211 struct nvif_object pushbuf;
212 u32 handle = *(u32 *)data;
215 mutex_init(&dmac->lock);
217 dmac->ptr = pci_alloc_consistent(nvkm_device(nvif_device(disp))->pdev,
218 PAGE_SIZE, &dmac->handle);
222 ret = nvif_object_init(nvif_object(nvif_device(disp)), NULL, handle,
224 &(struct nv_dma_v0) {
225 .target = NV_DMA_V0_TARGET_PCI_US,
226 .access = NV_DMA_V0_ACCESS_RD,
227 .start = dmac->handle + 0x0000,
228 .limit = dmac->handle + 0x0fff,
229 }, sizeof(struct nv_dma_v0), &pushbuf);
233 ret = nv50_chan_create(disp, oclass, head, data, size, &dmac->base);
/* The channel holds its own reference to the pushbuf ctxdma now. */
234 nvif_object_fini(&pushbuf);
238 ret = nvif_object_init(&dmac->base.user, NULL, 0xf0000000,
240 &(struct nv_dma_v0) {
241 .target = NV_DMA_V0_TARGET_VRAM,
242 .access = NV_DMA_V0_ACCESS_RDWR,
243 .start = syncbuf + 0x0000,
244 .limit = syncbuf + 0x0fff,
245 }, sizeof(struct nv_dma_v0),
250 ret = nvif_object_init(&dmac->base.user, NULL, 0xf0000001,
252 &(struct nv_dma_v0) {
253 .target = NV_DMA_V0_TARGET_VRAM,
254 .access = NV_DMA_V0_ACCESS_RDWR,
256 .limit = pfb->ram->size - 1,
257 }, sizeof(struct nv_dma_v0),
265 /******************************************************************************
267 *****************************************************************************/
/* nv50_mast: the core (master) EVO channel, a DMA channel. */
270 struct nv50_dmac base;
/* Create the core channel (head 0 only); class list newest-first. */
274 nv50_core_create(struct nvif_object *disp, u64 syncbuf, struct nv50_mast *core)
276 struct nv50_display_mast_class args = {
277 .pushbuf = EVO_PUSH_HANDLE(MAST, 0),
279 static const u32 oclass[] = {
280 GM107_DISP_MAST_CLASS,
281 NVF0_DISP_MAST_CLASS,
282 NVE0_DISP_MAST_CLASS,
283 NVD0_DISP_MAST_CLASS,
284 NVA3_DISP_MAST_CLASS,
285 NV94_DISP_MAST_CLASS,
286 NVA0_DISP_MAST_CLASS,
287 NV84_DISP_MAST_CLASS,
288 NV50_DISP_MAST_CLASS,
292 return nv50_dmac_create(disp, oclass, 0, &args, sizeof(args), syncbuf,
296 /******************************************************************************
298 *****************************************************************************/
/* nv50_sync: per-head base (page-flip) channel, a DMA channel. */
301 struct nv50_dmac base;
/* Create the base/sync channel for 'head'; class list newest-first. */
307 nv50_base_create(struct nvif_object *disp, int head, u64 syncbuf,
308 struct nv50_sync *base)
310 struct nv50_display_sync_class args = {
311 .pushbuf = EVO_PUSH_HANDLE(SYNC, head),
314 static const u32 oclass[] = {
315 GM107_DISP_SYNC_CLASS,
316 NVF0_DISP_SYNC_CLASS,
317 NVE0_DISP_SYNC_CLASS,
318 NVD0_DISP_SYNC_CLASS,
319 NVA3_DISP_SYNC_CLASS,
320 NV94_DISP_SYNC_CLASS,
321 NVA0_DISP_SYNC_CLASS,
322 NV84_DISP_SYNC_CLASS,
323 NV50_DISP_SYNC_CLASS,
327 return nv50_dmac_create(disp, oclass, head, &args, sizeof(args),
328 syncbuf, &base->base);
331 /******************************************************************************
333 *****************************************************************************/
/* nv50_ovly: per-head overlay channel, a DMA channel. */
336 struct nv50_dmac base;
/* Create the overlay channel for 'head'; class list newest-first. */
340 nv50_ovly_create(struct nvif_object *disp, int head, u64 syncbuf,
341 struct nv50_ovly *ovly)
343 struct nv50_display_ovly_class args = {
344 .pushbuf = EVO_PUSH_HANDLE(OVLY, head),
347 static const u32 oclass[] = {
348 GM107_DISP_OVLY_CLASS,
349 NVF0_DISP_OVLY_CLASS,
350 NVE0_DISP_OVLY_CLASS,
351 NVD0_DISP_OVLY_CLASS,
352 NVA3_DISP_OVLY_CLASS,
353 NV94_DISP_OVLY_CLASS,
354 NVA0_DISP_OVLY_CLASS,
355 NV84_DISP_OVLY_CLASS,
356 NV50_DISP_OVLY_CLASS,
360 return nv50_dmac_create(disp, oclass, head, &args, sizeof(args),
361 syncbuf, &ovly->base);
/* nv50_head: one CRTC plus all of its EVO channels and the currently
 * scanned-out framebuffer BO ('image', pinned while displayed). */
365 struct nouveau_crtc base;
366 struct nouveau_bo *image;
367 struct nv50_curs curs;
368 struct nv50_sync sync;
369 struct nv50_ovly ovly;
370 struct nv50_oimm oimm;
/* Downcast/accessor helpers from a drm_crtc to the head's channels. */
373 #define nv50_head(c) ((struct nv50_head *)nouveau_crtc(c))
374 #define nv50_curs(c) (&nv50_head(c)->curs)
375 #define nv50_sync(c) (&nv50_head(c)->sync)
376 #define nv50_ovly(c) (&nv50_head(c)->ovly)
377 #define nv50_oimm(c) (&nv50_head(c)->oimm)
/* nv50_chan() reaches the embedded nv50_chan; nv50_vers() yields the oclass
 * actually negotiated for a channel, used below for per-generation paths. */
378 #define nv50_chan(c) (&(c)->base.base)
379 #define nv50_vers(c) nv50_chan(c)->user.oclass
/* Per-device display state: the disp object, the core channel, a list of
 * framebuffer ctxdma objects, and the shared sync buffer object. */
382 struct list_head head;
383 struct nvif_object core;
384 struct nvif_object base[4];
388 struct nvif_object *disp;
389 struct nv50_mast mast;
391 struct list_head fbdma;
393 struct nouveau_bo *sync;
/* Fetch the nv50_disp stashed in the generic nouveau display's priv. */
396 static struct nv50_disp *
397 nv50_disp(struct drm_device *dev)
399 return nouveau_display(dev)->priv;
402 #define nv50_mast(d) (&nv50_disp(d)->mast)
/* CRTC currently driven by an encoder (may be NULL). */
404 static struct drm_crtc *
405 nv50_display_crtc_get(struct drm_encoder *encoder)
407 return nouveau_encoder(encoder)->crtc;
410 /******************************************************************************
411 * EVO channel helpers
412 *****************************************************************************/
/* Reserve space for 'nr' words in the channel pushbuffer.  Takes dmac->lock
 * on success (released by evo_kick).  If the request would run off the end
 * of the page, write a jump-to-start command, reset PUT to 0 and wait for
 * the hardware GET pointer to return to 0 before handing out space. */
414 evo_wait(void *evoc, int nr)
416 struct nv50_dmac *dmac = evoc;
417 u32 put = nvif_rd32(&dmac->base.user, 0x0000) / 4;
419 mutex_lock(&dmac->lock);
420 if (put + nr >= (PAGE_SIZE / 4) - 8) {
421 dmac->ptr[put] = 0x20000000;
423 nvif_wr32(&dmac->base.user, 0x0000, 0x00000000);
424 if (!nvkm_wait(&dmac->base.user, 0x0004, ~0, 0x00000000)) {
425 mutex_unlock(&dmac->lock);
426 nv_error(nvkm_object(&dmac->base.user), "channel stalled\n");
433 return dmac->ptr + put;
/* Submit commands built since evo_wait(): bump PUT (in bytes) and unlock. */
437 evo_kick(u32 *push, void *evoc)
439 struct nv50_dmac *dmac = evoc;
440 nvif_wr32(&dmac->base.user, 0x0000, (push - dmac->ptr) << 2);
441 mutex_unlock(&dmac->lock);
/* Append a method header / a data word to the pushbuffer cursor 'p'. */
444 #define evo_mthd(p,m,s) *((p)++) = (((s) << 18) | (m))
445 #define evo_data(p,d) *((p)++) = (d)
/* Poll callback: true once the master-channel notifier has been written
 * with something other than the 0 we seeded it with. */
448 evo_sync_wait(void *data)
450 if (nouveau_bo_rd32(data, EVO_MAST_NTFY) != 0x00000000)
/* Synchronise with the core channel: clear the notifier, queue a request
 * for the hardware to signal it (method 0x0084), kick, and busy-wait via
 * evo_sync_wait() until the notifier fires. */
457 evo_sync(struct drm_device *dev)
459 struct nvif_device *device = &nouveau_drm(dev)->device;
460 struct nv50_disp *disp = nv50_disp(dev);
461 struct nv50_mast *mast = nv50_mast(dev);
462 u32 *push = evo_wait(mast, 8);
464 nouveau_bo_wr32(disp->sync, EVO_MAST_NTFY, 0x00000000);
465 evo_mthd(push, 0x0084, 1);
466 evo_data(push, 0x80000000 | EVO_MAST_NTFY);
467 evo_mthd(push, 0x0080, 2);
468 evo_data(push, 0x00000000);
469 evo_data(push, 0x00000000);
470 evo_kick(push, mast);
471 if (nv_wait_cb(nvkm_device(device), evo_sync_wait, disp->sync))
478 /******************************************************************************
479 * Page flipping channel
480 *****************************************************************************/
/* Shared sync BO used for flip semaphores (same for every crtc). */
482 nv50_display_crtc_sema(struct drm_device *dev, int crtc)
484 return nv50_disp(dev)->sync;
487 struct nv50_display_flip {
488 struct nv50_disp *disp;
489 struct nv50_sync *chan;
/* Poll callback: true once the flip channel's semaphore word in the sync BO
 * matches the expected value (comparison tail elided in this extract). */
493 nv50_display_flip_wait(void *data)
495 struct nv50_display_flip *flip = data;
496 if (nouveau_bo_rd32(flip->disp->sync, flip->chan->addr / 4) ==
/* Cancel any pending flip on 'crtc': disable the semaphore-driven flip
 * state on the base channel and wait for the channel to acknowledge. */
504 nv50_display_flip_stop(struct drm_crtc *crtc)
506 struct nvif_device *device = &nouveau_drm(crtc->dev)->device;
507 struct nv50_display_flip flip = {
508 .disp = nv50_disp(crtc->dev),
509 .chan = nv50_sync(crtc),
513 push = evo_wait(flip.chan, 8);
515 evo_mthd(push, 0x0084, 1);
516 evo_data(push, 0x00000000);
517 evo_mthd(push, 0x0094, 1);
518 evo_data(push, 0x00000000);
519 evo_mthd(push, 0x00c0, 1);
520 evo_data(push, 0x00000000);
521 evo_mthd(push, 0x0080, 1);
522 evo_data(push, 0x00000000);
523 evo_kick(push, flip.chan);
526 nv_wait_cb(nvkm_device(device), nv50_display_flip_wait, &flip);
/* Queue a page flip of 'fb' on 'crtc'.  If a rendering channel is given,
 * emit semaphore release/acquire pairs into it so the flip waits for
 * rendering; the emission format depends on the channel's class
 * generation.  Then program the base (sync) channel with the semaphore
 * and framebuffer state and kick it. */
530 nv50_display_flip_next(struct drm_crtc *crtc, struct drm_framebuffer *fb,
531 struct nouveau_channel *chan, u32 swap_interval)
533 struct nouveau_framebuffer *nv_fb = nouveau_framebuffer(fb);
534 struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
535 struct nv50_head *head = nv50_head(crtc);
536 struct nv50_sync *sync = nv50_sync(crtc);
/* Bit 8 marks "no vblank wait" for the EVO state set below. */
541 if (swap_interval == 0)
542 swap_interval |= 0x100;
546 push = evo_wait(sync, 128);
547 if (unlikely(push == NULL))
/* Pre-G82 channels: 32-bit semaphores addressed via per-crtc DMA objects. */
550 if (chan && chan->object->oclass < G82_CHANNEL_GPFIFO) {
551 ret = RING_SPACE(chan, 8);
555 BEGIN_NV04(chan, 0, NV11_SUBCHAN_DMA_SEMAPHORE, 2);
556 OUT_RING (chan, NvEvoSema0 + nv_crtc->index);
557 OUT_RING (chan, sync->addr ^ 0x10);
558 BEGIN_NV04(chan, 0, NV11_SUBCHAN_SEMAPHORE_RELEASE, 1);
559 OUT_RING (chan, sync->data + 1);
560 BEGIN_NV04(chan, 0, NV11_SUBCHAN_SEMAPHORE_OFFSET, 2);
561 OUT_RING (chan, sync->addr);
562 OUT_RING (chan, sync->data);
/* G82..pre-Fermi: 64-bit virtual-address semaphores via the vram ctxdma.
 * addr^0x10 flips between the two semaphore slots (EVO_FLIP_SEM0/1). */
564 if (chan && chan->object->oclass < FERMI_CHANNEL_GPFIFO) {
565 u64 addr = nv84_fence_crtc(chan, nv_crtc->index) + sync->addr;
566 ret = RING_SPACE(chan, 12);
570 BEGIN_NV04(chan, 0, NV11_SUBCHAN_DMA_SEMAPHORE, 1);
571 OUT_RING (chan, chan->vram.handle);
572 BEGIN_NV04(chan, 0, NV84_SUBCHAN_SEMAPHORE_ADDRESS_HIGH, 4);
573 OUT_RING (chan, upper_32_bits(addr ^ 0x10));
574 OUT_RING (chan, lower_32_bits(addr ^ 0x10));
575 OUT_RING (chan, sync->data + 1);
576 OUT_RING (chan, NV84_SUBCHAN_SEMAPHORE_TRIGGER_WRITE_LONG);
577 BEGIN_NV04(chan, 0, NV84_SUBCHAN_SEMAPHORE_ADDRESS_HIGH, 4);
578 OUT_RING (chan, upper_32_bits(addr));
579 OUT_RING (chan, lower_32_bits(addr));
580 OUT_RING (chan, sync->data);
581 OUT_RING (chan, NV84_SUBCHAN_SEMAPHORE_TRIGGER_ACQUIRE_EQUAL);
/* Fermi and later: same scheme, NVC0-style methods with YIELD hints. */
584 u64 addr = nv84_fence_crtc(chan, nv_crtc->index) + sync->addr;
585 ret = RING_SPACE(chan, 10);
589 BEGIN_NVC0(chan, 0, NV84_SUBCHAN_SEMAPHORE_ADDRESS_HIGH, 4);
590 OUT_RING (chan, upper_32_bits(addr ^ 0x10));
591 OUT_RING (chan, lower_32_bits(addr ^ 0x10));
592 OUT_RING (chan, sync->data + 1);
593 OUT_RING (chan, NV84_SUBCHAN_SEMAPHORE_TRIGGER_WRITE_LONG |
594 NVC0_SUBCHAN_SEMAPHORE_TRIGGER_YIELD);
595 BEGIN_NVC0(chan, 0, NV84_SUBCHAN_SEMAPHORE_ADDRESS_HIGH, 4);
596 OUT_RING (chan, upper_32_bits(addr));
597 OUT_RING (chan, lower_32_bits(addr));
598 OUT_RING (chan, sync->data);
599 OUT_RING (chan, NV84_SUBCHAN_SEMAPHORE_TRIGGER_ACQUIRE_EQUAL |
600 NVC0_SUBCHAN_SEMAPHORE_TRIGGER_YIELD);
/* Program the base channel: semaphore acquire/release values, vblank
 * behaviour, framebuffer ctxdma and surface parameters, then kick. */
610 evo_mthd(push, 0x0100, 1);
611 evo_data(push, 0xfffe0000);
612 evo_mthd(push, 0x0084, 1);
613 evo_data(push, swap_interval);
614 if (!(swap_interval & 0x00000100)) {
615 evo_mthd(push, 0x00e0, 1);
616 evo_data(push, 0x40000000);
/* sync->data is post-incremented: release value is old+0, acquire old+1
 * pattern handled by the hardware semaphore pair written here. */
618 evo_mthd(push, 0x0088, 4);
619 evo_data(push, sync->addr);
620 evo_data(push, sync->data++);
621 evo_data(push, sync->data);
622 evo_data(push, sync->base.sync.handle);
623 evo_mthd(push, 0x00a0, 2);
624 evo_data(push, 0x00000000);
625 evo_data(push, 0x00000000);
626 evo_mthd(push, 0x00c0, 1);
627 evo_data(push, nv_fb->r_handle);
628 evo_mthd(push, 0x0110, 2);
629 evo_data(push, 0x00000000);
630 evo_data(push, 0x00000000);
/* Surface setup lives at different method offsets pre/post NVD0. */
631 if (nv50_vers(sync) < NVD0_DISP_SYNC_CLASS) {
632 evo_mthd(push, 0x0800, 5);
633 evo_data(push, nv_fb->nvbo->bo.offset >> 8);
635 evo_data(push, (fb->height << 16) | fb->width);
636 evo_data(push, nv_fb->r_pitch);
637 evo_data(push, nv_fb->r_format);
639 evo_mthd(push, 0x0400, 5);
640 evo_data(push, nv_fb->nvbo->bo.offset >> 8);
642 evo_data(push, (fb->height << 16) | fb->width);
643 evo_data(push, nv_fb->r_pitch);
644 evo_data(push, nv_fb->r_format);
646 evo_mthd(push, 0x0080, 1);
647 evo_data(push, 0x00000000);
648 evo_kick(push, sync);
/* Track the new scanout BO on the head (ref swap). */
650 nouveau_bo_ref(nv_fb->nvbo, &head->image);
654 /******************************************************************************
656 *****************************************************************************/
/* Program the dithering mode/depth for a CRTC from its connector properties.
 * AUTO mode enables 2x2 dynamic dithering when the fb depth exceeds what the
 * sink can display; AUTO depth picks 8bpc for sinks reporting >= 8 bpc.
 * 'update' requests an immediate EVO state commit (method 0x0080). */
658 nv50_crtc_set_dither(struct nouveau_crtc *nv_crtc, bool update)
660 struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev);
661 struct nouveau_connector *nv_connector;
662 struct drm_connector *connector;
663 u32 *push, mode = 0x00;
665 nv_connector = nouveau_crtc_connector_get(nv_crtc);
666 connector = &nv_connector->base;
667 if (nv_connector->dithering_mode == DITHERING_MODE_AUTO) {
668 if (nv_crtc->base.primary->fb->depth > connector->display_info.bpc * 3)
669 mode = DITHERING_MODE_DYNAMIC2X2;
671 mode = nv_connector->dithering_mode;
674 if (nv_connector->dithering_depth == DITHERING_DEPTH_AUTO) {
675 if (connector->display_info.bpc >= 8)
676 mode |= DITHERING_DEPTH_8BPC;
678 mode |= nv_connector->dithering_depth;
/* Dither method offset differs per display class generation. */
681 push = evo_wait(mast, 4);
683 if (nv50_vers(mast) < NVD0_DISP_MAST_CLASS) {
684 evo_mthd(push, 0x08a0 + (nv_crtc->index * 0x0400), 1);
685 evo_data(push, mode);
687 if (nv50_vers(mast) < NVE0_DISP_MAST_CLASS) {
688 evo_mthd(push, 0x0490 + (nv_crtc->index * 0x0300), 1);
689 evo_data(push, mode);
691 evo_mthd(push, 0x04a0 + (nv_crtc->index * 0x0300), 1);
692 evo_data(push, mode);
696 evo_mthd(push, 0x0080, 1);
697 evo_data(push, 0x00000000);
699 evo_kick(push, mast);
/* Program the scaler for a CRTC: derive the output size (oX/oY) from the
 * scaling mode, apply underscan borders, then write viewport state to the
 * core channel.  'update' additionally restarts page flipping. */
706 nv50_crtc_set_scale(struct nouveau_crtc *nv_crtc, bool update)
708 struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev);
709 struct drm_display_mode *omode, *umode = &nv_crtc->base.mode;
710 struct drm_crtc *crtc = &nv_crtc->base;
711 struct nouveau_connector *nv_connector;
712 int mode = DRM_MODE_SCALE_NONE;
715 /* start off at the resolution we programmed the crtc for, this
716 * effectively handles NONE/FULL scaling
718 nv_connector = nouveau_crtc_connector_get(nv_crtc);
719 if (nv_connector && nv_connector->native_mode)
720 mode = nv_connector->scaling_mode;
722 if (mode != DRM_MODE_SCALE_NONE)
723 omode = nv_connector->native_mode;
727 oX = omode->hdisplay;
728 oY = omode->vdisplay;
729 if (omode->flags & DRM_MODE_FLAG_DBLSCAN)
732 /* add overscan compensation if necessary, will keep the aspect
733 * ratio the same as the backend mode unless overridden by the
734 * user setting both hborder and vborder properties.
/* Underscan applies when forced ON, or AUTO on an HDMI sink (per EDID). */
736 if (nv_connector && ( nv_connector->underscan == UNDERSCAN_ON ||
737 (nv_connector->underscan == UNDERSCAN_AUTO &&
738 nv_connector->edid &&
739 drm_detect_hdmi_monitor(nv_connector->edid)))) {
740 u32 bX = nv_connector->underscan_hborder;
741 u32 bY = nv_connector->underscan_vborder;
/* 19-bit fixed-point aspect ratio; +aspect/2 rounds to nearest. */
742 u32 aspect = (oY << 19) / oX;
746 if (bY) oY -= (bY * 2);
747 else oY = ((oX * aspect) + (aspect / 2)) >> 19;
749 oX -= (oX >> 4) + 32;
750 if (bY) oY -= (bY * 2);
751 else oY = ((oX * aspect) + (aspect / 2)) >> 19;
755 /* handle CENTER/ASPECT scaling, taking into account the areas
756 * removed already for overscan compensation
759 case DRM_MODE_SCALE_CENTER:
760 oX = min((u32)umode->hdisplay, oX);
761 oY = min((u32)umode->vdisplay, oY);
763 case DRM_MODE_SCALE_ASPECT:
765 u32 aspect = (umode->hdisplay << 19) / umode->vdisplay;
766 oX = ((oY * aspect) + (aspect / 2)) >> 19;
768 u32 aspect = (umode->vdisplay << 19) / umode->hdisplay;
769 oY = ((oX * aspect) + (aspect / 2)) >> 19;
/* Write viewport output size and input size; offsets differ per class. */
776 push = evo_wait(mast, 8);
778 if (nv50_vers(mast) < NVD0_DISP_MAST_CLASS) {
779 /*XXX: SCALE_CTRL_ACTIVE??? */
780 evo_mthd(push, 0x08d8 + (nv_crtc->index * 0x400), 2);
781 evo_data(push, (oY << 16) | oX);
782 evo_data(push, (oY << 16) | oX);
783 evo_mthd(push, 0x08a4 + (nv_crtc->index * 0x400), 1);
784 evo_data(push, 0x00000000);
785 evo_mthd(push, 0x08c8 + (nv_crtc->index * 0x400), 1);
786 evo_data(push, umode->vdisplay << 16 | umode->hdisplay);
788 evo_mthd(push, 0x04c0 + (nv_crtc->index * 0x300), 3);
789 evo_data(push, (oY << 16) | oX);
790 evo_data(push, (oY << 16) | oX);
791 evo_data(push, (oY << 16) | oX);
792 evo_mthd(push, 0x0494 + (nv_crtc->index * 0x300), 1);
793 evo_data(push, 0x00000000);
794 evo_mthd(push, 0x04b8 + (nv_crtc->index * 0x300), 1);
795 evo_data(push, umode->vdisplay << 16 | umode->hdisplay);
798 evo_kick(push, mast);
/* Restart flipping so the new viewport takes effect on the current fb. */
801 nv50_display_flip_stop(crtc);
802 nv50_display_flip_next(crtc, crtc->primary->fb,
/* Program colour vibrance and hue.  Both properties are percentages,
 * scaled into 12-bit hardware fields; 'adj' rounds positive vibrance. */
811 nv50_crtc_set_color_vibrance(struct nouveau_crtc *nv_crtc, bool update)
813 struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev);
817 adj = (nv_crtc->color_vibrance > 0) ? 50 : 0;
818 vib = ((nv_crtc->color_vibrance * 2047 + adj) / 100) & 0xfff;
819 hue = ((nv_crtc->vibrant_hue * 2047) / 100) & 0xfff;
821 push = evo_wait(mast, 16);
823 if (nv50_vers(mast) < NVD0_DISP_MAST_CLASS) {
824 evo_mthd(push, 0x08a8 + (nv_crtc->index * 0x400), 1);
825 evo_data(push, (hue << 20) | (vib << 8));
827 evo_mthd(push, 0x0498 + (nv_crtc->index * 0x300), 1);
828 evo_data(push, (hue << 20) | (vib << 8));
832 evo_mthd(push, 0x0080, 1);
833 evo_data(push, 0x00000000);
835 evo_kick(push, mast);
/* Point the CRTC's core-channel surface at framebuffer 'fb', with panning
 * offset (x, y).  Method offsets and the ctxdma-handle placement differ
 * between pre-NVD0 and NVD0+ display classes. */
842 nv50_crtc_set_image(struct nouveau_crtc *nv_crtc, struct drm_framebuffer *fb,
843 int x, int y, bool update)
845 struct nouveau_framebuffer *nvfb = nouveau_framebuffer(fb);
846 struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev);
849 push = evo_wait(mast, 16);
851 if (nv50_vers(mast) < NVD0_DISP_MAST_CLASS) {
852 evo_mthd(push, 0x0860 + (nv_crtc->index * 0x400), 1);
853 evo_data(push, nvfb->nvbo->bo.offset >> 8);
854 evo_mthd(push, 0x0868 + (nv_crtc->index * 0x400), 3);
855 evo_data(push, (fb->height << 16) | fb->width);
856 evo_data(push, nvfb->r_pitch);
857 evo_data(push, nvfb->r_format);
858 evo_mthd(push, 0x08c0 + (nv_crtc->index * 0x400), 1);
859 evo_data(push, (y << 16) | x);
/* Original NV50 has no per-surface ctxdma method; G84+ do. */
860 if (nv50_vers(mast) > NV50_DISP_MAST_CLASS) {
861 evo_mthd(push, 0x0874 + (nv_crtc->index * 0x400), 1);
862 evo_data(push, nvfb->r_handle);
865 evo_mthd(push, 0x0460 + (nv_crtc->index * 0x300), 1);
866 evo_data(push, nvfb->nvbo->bo.offset >> 8);
867 evo_mthd(push, 0x0468 + (nv_crtc->index * 0x300), 4);
868 evo_data(push, (fb->height << 16) | fb->width);
869 evo_data(push, nvfb->r_pitch);
870 evo_data(push, nvfb->r_format);
871 evo_data(push, nvfb->r_handle);
872 evo_mthd(push, 0x04b0 + (nv_crtc->index * 0x300), 1);
873 evo_data(push, (y << 16) | x);
877 evo_mthd(push, 0x0080, 1);
878 evo_data(push, 0x00000000);
880 evo_kick(push, mast);
/* Remember the ctxdma handle for nv50_crtc_commit() to re-attach. */
883 nv_crtc->fb.handle = nvfb->r_handle;
/* Enable the hardware cursor for a CRTC (0x85000000 = cursor-enable state),
 * pointing it at the cursor BO; newer classes also need a VRAM ctxdma. */
888 nv50_crtc_cursor_show(struct nouveau_crtc *nv_crtc)
890 struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev);
891 u32 *push = evo_wait(mast, 16);
893 if (nv50_vers(mast) < NV84_DISP_MAST_CLASS) {
894 evo_mthd(push, 0x0880 + (nv_crtc->index * 0x400), 2);
895 evo_data(push, 0x85000000);
896 evo_data(push, nv_crtc->cursor.nvbo->bo.offset >> 8);
898 if (nv50_vers(mast) < NVD0_DISP_MAST_CLASS) {
899 evo_mthd(push, 0x0880 + (nv_crtc->index * 0x400), 2);
900 evo_data(push, 0x85000000);
901 evo_data(push, nv_crtc->cursor.nvbo->bo.offset >> 8);
902 evo_mthd(push, 0x089c + (nv_crtc->index * 0x400), 1);
903 evo_data(push, mast->base.vram.handle);
905 evo_mthd(push, 0x0480 + (nv_crtc->index * 0x300), 2);
906 evo_data(push, 0x85000000);
907 evo_data(push, nv_crtc->cursor.nvbo->bo.offset >> 8);
908 evo_mthd(push, 0x048c + (nv_crtc->index * 0x300), 1);
909 evo_data(push, mast->base.vram.handle);
911 evo_kick(push, mast);
/* Disable the hardware cursor (0x05000000 = cursor-disable state) and,
 * where applicable, detach its ctxdma.  Mirror of nv50_crtc_cursor_show(). */
916 nv50_crtc_cursor_hide(struct nouveau_crtc *nv_crtc)
918 struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev);
919 u32 *push = evo_wait(mast, 16);
921 if (nv50_vers(mast) < NV84_DISP_MAST_CLASS) {
922 evo_mthd(push, 0x0880 + (nv_crtc->index * 0x400), 1);
923 evo_data(push, 0x05000000);
925 if (nv50_vers(mast) < NVD0_DISP_MAST_CLASS) {
926 evo_mthd(push, 0x0880 + (nv_crtc->index * 0x400), 1);
927 evo_data(push, 0x05000000);
928 evo_mthd(push, 0x089c + (nv_crtc->index * 0x400), 1);
929 evo_data(push, 0x00000000);
931 evo_mthd(push, 0x0480 + (nv_crtc->index * 0x300), 1);
932 evo_data(push, 0x05000000);
933 evo_mthd(push, 0x048c + (nv_crtc->index * 0x300), 1);
934 evo_data(push, 0x00000000);
936 evo_kick(push, mast);
/* Show or hide the cursor; with 'update' set, also push an EVO commit
 * (method 0x0080) so the change takes effect immediately. */
941 nv50_crtc_cursor_show_hide(struct nouveau_crtc *nv_crtc, bool show, bool update)
943 struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev);
946 nv50_crtc_cursor_show(nv_crtc);
948 nv50_crtc_cursor_hide(nv_crtc);
951 u32 *push = evo_wait(mast, 2);
953 evo_mthd(push, 0x0080, 1);
954 evo_data(push, 0x00000000);
955 evo_kick(push, mast);
/* DPMS hook — intentionally a no-op on this hardware (body elided/empty). */
961 nv50_crtc_dpms(struct drm_crtc *crtc, int mode)
/* Prepare for a modeset: stop page flips, blank the head (detach its fb
 * ctxdma / set the "blanked" state word) and hide the cursor. */
966 nv50_crtc_prepare(struct drm_crtc *crtc)
968 struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
969 struct nv50_mast *mast = nv50_mast(crtc->dev);
972 nv50_display_flip_stop(crtc);
974 push = evo_wait(mast, 6);
976 if (nv50_vers(mast) < NV84_DISP_MAST_CLASS) {
977 evo_mthd(push, 0x0874 + (nv_crtc->index * 0x400), 1);
978 evo_data(push, 0x00000000);
979 evo_mthd(push, 0x0840 + (nv_crtc->index * 0x400), 1);
980 evo_data(push, 0x40000000);
982 if (nv50_vers(mast) < NVD0_DISP_MAST_CLASS) {
983 evo_mthd(push, 0x0874 + (nv_crtc->index * 0x400), 1);
984 evo_data(push, 0x00000000);
985 evo_mthd(push, 0x0840 + (nv_crtc->index * 0x400), 1);
986 evo_data(push, 0x40000000);
987 evo_mthd(push, 0x085c + (nv_crtc->index * 0x400), 1);
988 evo_data(push, 0x00000000);
990 evo_mthd(push, 0x0474 + (nv_crtc->index * 0x300), 1);
991 evo_data(push, 0x00000000);
992 evo_mthd(push, 0x0440 + (nv_crtc->index * 0x300), 1);
993 evo_data(push, 0x03000000);
994 evo_mthd(push, 0x045c + (nv_crtc->index * 0x300), 1);
995 evo_data(push, 0x00000000);
998 evo_kick(push, mast);
1001 nv50_crtc_cursor_show_hide(nv_crtc, false, false);
/* Finish a modeset: re-attach the fb ctxdma saved by nv50_crtc_set_image(),
 * enable scanout with the LUT, restore cursor visibility and restart the
 * page-flip machinery. */
1005 nv50_crtc_commit(struct drm_crtc *crtc)
1007 struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
1008 struct nv50_mast *mast = nv50_mast(crtc->dev);
1011 push = evo_wait(mast, 32);
1013 if (nv50_vers(mast) < NV84_DISP_MAST_CLASS) {
1014 evo_mthd(push, 0x0874 + (nv_crtc->index * 0x400), 1);
1015 evo_data(push, nv_crtc->fb.handle);
1016 evo_mthd(push, 0x0840 + (nv_crtc->index * 0x400), 2);
1017 evo_data(push, 0xc0000000);
1018 evo_data(push, nv_crtc->lut.nvbo->bo.offset >> 8);
1020 if (nv50_vers(mast) < NVD0_DISP_MAST_CLASS) {
1021 evo_mthd(push, 0x0874 + (nv_crtc->index * 0x400), 1);
1022 evo_data(push, nv_crtc->fb.handle);
1023 evo_mthd(push, 0x0840 + (nv_crtc->index * 0x400), 2);
1024 evo_data(push, 0xc0000000);
1025 evo_data(push, nv_crtc->lut.nvbo->bo.offset >> 8);
1026 evo_mthd(push, 0x085c + (nv_crtc->index * 0x400), 1);
1027 evo_data(push, mast->base.vram.handle);
1029 evo_mthd(push, 0x0474 + (nv_crtc->index * 0x300), 1);
1030 evo_data(push, nv_crtc->fb.handle);
1031 evo_mthd(push, 0x0440 + (nv_crtc->index * 0x300), 4);
1032 evo_data(push, 0x83000000);
1033 evo_data(push, nv_crtc->lut.nvbo->bo.offset >> 8);
1034 evo_data(push, 0x00000000);
1035 evo_data(push, 0x00000000);
1036 evo_mthd(push, 0x045c + (nv_crtc->index * 0x300), 1);
1037 evo_data(push, mast->base.vram.handle);
1038 evo_mthd(push, 0x0430 + (nv_crtc->index * 0x300), 1);
1039 evo_data(push, 0xffffff00);
1042 evo_kick(push, mast);
1045 nv50_crtc_cursor_show_hide(nv_crtc, nv_crtc->cursor.visible, true);
1046 nv50_display_flip_next(crtc, crtc->primary->fb, NULL, 1);
/* Mode fixup: just derive CRTC timing info, halving vertical timings for
 * interlaced modes. */
1050 nv50_crtc_mode_fixup(struct drm_crtc *crtc, const struct drm_display_mode *mode,
1051 struct drm_display_mode *adjusted_mode)
1053 drm_mode_set_crtcinfo(adjusted_mode, CRTC_INTERLACE_HALVE_V);
/* Pin the new primary fb's BO in VRAM and swap it into head->image,
 * unpinning the previously tracked BO. */
1058 nv50_crtc_swap_fbs(struct drm_crtc *crtc, struct drm_framebuffer *old_fb)
1060 struct nouveau_framebuffer *nvfb = nouveau_framebuffer(crtc->primary->fb);
1061 struct nv50_head *head = nv50_head(crtc);
1064 ret = nouveau_bo_pin(nvfb->nvbo, TTM_PL_FLAG_VRAM);
1067 nouveau_bo_unpin(head->image);
1068 nouveau_bo_ref(nvfb->nvbo, &head->image);
/* Full modeset for a CRTC: compute raster timings (blanking/sync windows,
 * with interlace and doublescan scaling), program them into the core
 * channel, then reapply dither/scale/vibrance and the scanout surface. */
1075 nv50_crtc_mode_set(struct drm_crtc *crtc, struct drm_display_mode *umode,
1076 struct drm_display_mode *mode, int x, int y,
1077 struct drm_framebuffer *old_fb)
1079 struct nv50_mast *mast = nv50_mast(crtc->dev);
1080 struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
1081 struct nouveau_connector *nv_connector;
1082 u32 ilace = (mode->flags & DRM_MODE_FLAG_INTERLACE) ? 2 : 1;
1083 u32 vscan = (mode->flags & DRM_MODE_FLAG_DBLSCAN) ? 2 : 1;
1084 u32 hactive, hsynce, hbackp, hfrontp, hblanke, hblanks;
1085 u32 vactive, vsynce, vbackp, vfrontp, vblanke, vblanks;
1086 u32 vblan2e = 0, vblan2s = 1;
/* Horizontal timings, measured from the start of sync (hence the
 * blank-end = sync + backporch arithmetic). */
1090 hactive = mode->htotal;
1091 hsynce = mode->hsync_end - mode->hsync_start - 1;
1092 hbackp = mode->htotal - mode->hsync_end;
1093 hblanke = hsynce + hbackp;
1094 hfrontp = mode->hsync_start - mode->hdisplay;
1095 hblanks = mode->htotal - hfrontp - 1;
/* Vertical timings, scaled by doublescan/interlace factors. */
1097 vactive = mode->vtotal * vscan / ilace;
1098 vsynce = ((mode->vsync_end - mode->vsync_start) * vscan / ilace) - 1;
1099 vbackp = (mode->vtotal - mode->vsync_end) * vscan / ilace;
1100 vblanke = vsynce + vbackp;
1101 vfrontp = (mode->vsync_start - mode->vdisplay) * vscan / ilace;
1102 vblanks = vactive - vfrontp - 1;
/* Interlace needs a second blanking window for the odd field. */
1103 if (mode->flags & DRM_MODE_FLAG_INTERLACE) {
1104 vblan2e = vactive + vsynce + vbackp;
1105 vblan2s = vblan2e + (mode->vdisplay * vscan / ilace);
1106 vactive = (vactive * 2) + 1;
1109 ret = nv50_crtc_swap_fbs(crtc, old_fb);
1113 push = evo_wait(mast, 64);
1115 if (nv50_vers(mast) < NVD0_DISP_MAST_CLASS) {
1116 evo_mthd(push, 0x0804 + (nv_crtc->index * 0x400), 2);
1117 evo_data(push, 0x00800000 | mode->clock);
1118 evo_data(push, (ilace == 2) ? 2 : 0);
1119 evo_mthd(push, 0x0810 + (nv_crtc->index * 0x400), 6);
1120 evo_data(push, 0x00000000);
1121 evo_data(push, (vactive << 16) | hactive);
1122 evo_data(push, ( vsynce << 16) | hsynce);
1123 evo_data(push, (vblanke << 16) | hblanke);
1124 evo_data(push, (vblanks << 16) | hblanks);
1125 evo_data(push, (vblan2e << 16) | vblan2s);
1126 evo_mthd(push, 0x082c + (nv_crtc->index * 0x400), 1);
1127 evo_data(push, 0x00000000);
1128 evo_mthd(push, 0x0900 + (nv_crtc->index * 0x400), 2);
1129 evo_data(push, 0x00000311);
1130 evo_data(push, 0x00000100);
1132 evo_mthd(push, 0x0410 + (nv_crtc->index * 0x300), 6);
1133 evo_data(push, 0x00000000);
1134 evo_data(push, (vactive << 16) | hactive);
1135 evo_data(push, ( vsynce << 16) | hsynce);
1136 evo_data(push, (vblanke << 16) | hblanke);
1137 evo_data(push, (vblanks << 16) | hblanks);
1138 evo_data(push, (vblan2e << 16) | vblan2s);
1139 evo_mthd(push, 0x042c + (nv_crtc->index * 0x300), 1);
1140 evo_data(push, 0x00000000); /* ??? */
/* NVD0+ programs the pixel clock in Hz (mode->clock is in kHz). */
1141 evo_mthd(push, 0x0450 + (nv_crtc->index * 0x300), 3);
1142 evo_data(push, mode->clock * 1000);
1143 evo_data(push, 0x00200000); /* ??? */
1144 evo_data(push, mode->clock * 1000);
1145 evo_mthd(push, 0x04d0 + (nv_crtc->index * 0x300), 2);
1146 evo_data(push, 0x00000311);
1147 evo_data(push, 0x00000100);
1150 evo_kick(push, mast);
/* Reapply derived state without committing each step individually. */
1153 nv_connector = nouveau_crtc_connector_get(nv_crtc);
1154 nv50_crtc_set_dither(nv_crtc, false);
1155 nv50_crtc_set_scale(nv_crtc, false);
1156 nv50_crtc_set_color_vibrance(nv_crtc, false);
1157 nv50_crtc_set_image(nv_crtc, crtc->primary->fb, x, y, false);
/* Change the scanout framebuffer/panning without a full modeset. */
1162 nv50_crtc_mode_set_base(struct drm_crtc *crtc, int x, int y,
1163 struct drm_framebuffer *old_fb)
1165 struct nouveau_drm *drm = nouveau_drm(crtc->dev);
1166 struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
1169 if (!crtc->primary->fb) {
1170 NV_DEBUG(drm, "No FB bound\n");
1174 ret = nv50_crtc_swap_fbs(crtc, old_fb);
/* Stop any in-flight flip, set the new surface, then restart flipping. */
1178 nv50_display_flip_stop(crtc);
1179 nv50_crtc_set_image(nv_crtc, crtc->primary->fb, x, y, true);
1180 nv50_display_flip_next(crtc, crtc->primary->fb, NULL, 1);
/* Atomic (kgdb/panic-safe) variant: sets the surface without swapping or
 * re-arming page flips afterwards. */
1185 nv50_crtc_mode_set_base_atomic(struct drm_crtc *crtc,
1186 struct drm_framebuffer *fb, int x, int y,
1187 enum mode_set_atomic state)
1189 struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
1190 nv50_display_flip_stop(crtc);
1191 nv50_crtc_set_image(nv_crtc, fb, x, y, true);
/* Upload the 256-entry gamma LUT into the CRTC's LUT buffer object.
 * 16-bit property values are truncated to the hardware's 14-bit range;
 * entry stride and bias differ between pre-NVD0 (8 bytes) and NVD0+
 * (32 bytes, +0x6000 offset) layouts. */
1196 nv50_crtc_lut_load(struct drm_crtc *crtc)
1198 struct nv50_disp *disp = nv50_disp(crtc->dev);
1199 struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
1200 void __iomem *lut = nvbo_kmap_obj_iovirtual(nv_crtc->lut.nvbo);
1203 for (i = 0; i < 256; i++) {
1204 u16 r = nv_crtc->lut.r[i] >> 2;
1205 u16 g = nv_crtc->lut.g[i] >> 2;
1206 u16 b = nv_crtc->lut.b[i] >> 2;
1208 if (disp->disp->oclass < NVD0_DISP_CLASS) {
1209 writew(r + 0x0000, lut + (i * 0x08) + 0);
1210 writew(g + 0x0000, lut + (i * 0x08) + 2);
1211 writew(b + 0x0000, lut + (i * 0x08) + 4);
1213 writew(r + 0x6000, lut + (i * 0x20) + 0);
1214 writew(g + 0x6000, lut + (i * 0x20) + 2);
1215 writew(b + 0x6000, lut + (i * 0x20) + 4);
/*
 * Disable a CRTC: wait for the evo channel to go idle, then unpin and
 * drop the reference on the head's current scanout image.
 */
1221 nv50_crtc_disable(struct drm_crtc *crtc)
1223 struct nv50_head *head = nv50_head(crtc);
1224 evo_sync(crtc->dev);
1226 nouveau_bo_unpin(head->image);
1227 nouveau_bo_ref(NULL, &head->image);
/*
 * Set (or clear, when handle == 0) the hardware cursor image. Only a
 * 64x64 cursor is accepted. The user's GEM buffer is mapped and its
 * 32-bit pixels copied word-by-word into the CRTC's private cursor bo,
 * then visibility is toggled if it changed.
 * NOTE(review): error-handling lines are elided in this extract.
 */
1231 nv50_crtc_cursor_set(struct drm_crtc *crtc, struct drm_file *file_priv,
1232 uint32_t handle, uint32_t width, uint32_t height)
1234 struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
1235 struct drm_device *dev = crtc->dev;
1236 struct drm_gem_object *gem;
1237 struct nouveau_bo *nvbo;
/* handle == 0 means "hide the cursor". */
1238 bool visible = (handle != 0);
/* Hardware cursor is fixed at 64x64; reject anything else. */
1242 if (width != 64 || height != 64)
1245 gem = drm_gem_object_lookup(dev, file_priv, handle);
1248 nvbo = nouveau_gem_object(gem);
1250 ret = nouveau_bo_map(nvbo);
/* Copy the 64x64 32bpp image into our own cursor buffer. */
1252 for (i = 0; i < 64 * 64; i++) {
1253 u32 v = nouveau_bo_rd32(nvbo, i);
1254 nouveau_bo_wr32(nv_crtc->cursor.nvbo, i, v);
1256 nouveau_bo_unmap(nvbo);
/* Done with the user's object; drop the lookup reference. */
1259 drm_gem_object_unreference_unlocked(gem);
1262 if (visible != nv_crtc->cursor.visible) {
1263 nv50_crtc_cursor_show_hide(nv_crtc, visible, true);
1264 nv_crtc->cursor.visible = visible;
/*
 * Move the hardware cursor: write the packed (y << 16 | x) position to
 * the cursor channel, then write 0x0080 to latch the update.
 */
1271 nv50_crtc_cursor_move(struct drm_crtc *crtc, int x, int y)
1273 struct nv50_curs *curs = nv50_curs(crtc);
1274 struct nv50_chan *chan = nv50_chan(curs);
1275 nvif_wr32(&chan->user, 0x0084, (y << 16) | (x & 0xffff));
1276 nvif_wr32(&chan->user, 0x0080, 0x00000000);
/*
 * Update the software gamma tables for [start, start+size) — clamped to
 * the 256-entry table — then push the whole LUT to the hardware.
 */
1281 nv50_crtc_gamma_set(struct drm_crtc *crtc, u16 *r, u16 *g, u16 *b,
1282 uint32_t start, uint32_t size)
1284 struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
/* Never write past the 256-entry LUT, whatever size userspace passed. */
1285 u32 end = min_t(u32, start + size, 256);
1288 for (i = start; i < end; i++) {
1289 nv_crtc->lut.r[i] = r[i];
1290 nv_crtc->lut.g[i] = g[i];
1291 nv_crtc->lut.b[i] = b[i];
1294 nv50_crtc_lut_load(crtc);
/*
 * Tear down a CRTC: release this head's slot in every fbdma object,
 * destroy the per-head evo/pio channels, drop the scanout image, and
 * free the cursor and LUT buffer objects before the core cleanup.
 */
1298 nv50_crtc_destroy(struct drm_crtc *crtc)
1300 struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
1301 struct nv50_disp *disp = nv50_disp(crtc->dev);
1302 struct nv50_head *head = nv50_head(crtc);
1303 struct nv50_fbdma *fbdma;
/* Each fbdma carries one object per head; drop ours from all of them. */
1305 list_for_each_entry(fbdma, &disp->fbdma, head) {
1306 nvif_object_fini(&fbdma->base[nv_crtc->index]);
1309 nv50_dmac_destroy(&head->ovly.base, disp->disp);
1310 nv50_pioc_destroy(&head->oimm.base);
1311 nv50_dmac_destroy(&head->sync.base, disp->disp);
1312 nv50_pioc_destroy(&head->curs.base);
1314 /*XXX: this shouldn't be necessary, but the core doesn't call
1315 * disconnect() during the cleanup paths
1318 nouveau_bo_unpin(head->image);
1319 nouveau_bo_ref(NULL, &head->image);
1321 nouveau_bo_unmap(nv_crtc->cursor.nvbo);
1322 if (nv_crtc->cursor.nvbo)
1323 nouveau_bo_unpin(nv_crtc->cursor.nvbo);
1324 nouveau_bo_ref(NULL, &nv_crtc->cursor.nvbo);
1326 nouveau_bo_unmap(nv_crtc->lut.nvbo);
1327 if (nv_crtc->lut.nvbo)
1328 nouveau_bo_unpin(nv_crtc->lut.nvbo);
1329 nouveau_bo_ref(NULL, &nv_crtc->lut.nvbo);
1331 drm_crtc_cleanup(crtc);
/* CRTC helper callbacks (modeset/dpms plumbing used by the DRM core). */
1335 static const struct drm_crtc_helper_funcs nv50_crtc_hfunc = {
1336 .dpms = nv50_crtc_dpms,
1337 .prepare = nv50_crtc_prepare,
1338 .commit = nv50_crtc_commit,
1339 .mode_fixup = nv50_crtc_mode_fixup,
1340 .mode_set = nv50_crtc_mode_set,
1341 .mode_set_base = nv50_crtc_mode_set_base,
1342 .mode_set_base_atomic = nv50_crtc_mode_set_base_atomic,
1343 .load_lut = nv50_crtc_lut_load,
1344 .disable = nv50_crtc_disable,
/* CRTC entry points exposed to userspace (cursor, gamma, flips, ...). */
1347 static const struct drm_crtc_funcs nv50_crtc_func = {
1348 .cursor_set = nv50_crtc_cursor_set,
1349 .cursor_move = nv50_crtc_cursor_move,
1350 .gamma_set = nv50_crtc_gamma_set,
1351 .set_config = nouveau_crtc_set_config,
1352 .destroy = nv50_crtc_destroy,
1353 .page_flip = nouveau_crtc_page_flip,
/* Per-head cursor-position hook; body elided in this extract. */
1357 nv50_cursor_set_pos(struct nouveau_crtc *nv_crtc, int x, int y)
/* Per-head cursor-offset hook; body elided in this extract. */
1362 nv50_cursor_set_offset(struct nouveau_crtc *nv_crtc, uint32_t offset)
/*
 * Create one CRTC ("head"): allocate the nv50_head wrapper, register it
 * with DRM, then allocate the LUT bo, cursor channel + cursor bo, the
 * page-flip/sync channel and the overlay channels. On failure it falls
 * through to nv50_crtc_destroy() for cleanup.
 * NOTE(review): error-check lines between steps are elided here.
 */
1367 nv50_crtc_create(struct drm_device *dev, int index)
1369 struct nv50_disp *disp = nv50_disp(dev);
1370 struct nv50_head *head;
1371 struct drm_crtc *crtc;
1374 head = kzalloc(sizeof(*head), GFP_KERNEL);
1378 head->base.index = index;
1379 head->base.set_dither = nv50_crtc_set_dither;
1380 head->base.set_scale = nv50_crtc_set_scale;
1381 head->base.set_color_vibrance = nv50_crtc_set_color_vibrance;
1382 head->base.color_vibrance = 50;
1383 head->base.vibrant_hue = 0;
1384 head->base.cursor.set_offset = nv50_cursor_set_offset;
1385 head->base.cursor.set_pos = nv50_cursor_set_pos;
/* Identity gamma ramp: 8-bit index widened to the 16-bit table. */
1386 for (i = 0; i < 256; i++) {
1387 head->base.lut.r[i] = i << 8;
1388 head->base.lut.g[i] = i << 8;
1389 head->base.lut.b[i] = i << 8;
1392 crtc = &head->base.base;
1393 drm_crtc_init(dev, crtc, &nv50_crtc_func);
1394 drm_crtc_helper_add(crtc, &nv50_crtc_hfunc);
1395 drm_mode_crtc_set_gamma_size(crtc, 256);
/* VRAM bo holding the hardware gamma LUT. */
1397 ret = nouveau_bo_new(dev, 8192, 0x100, TTM_PL_FLAG_VRAM,
1398 0, 0x0000, NULL, &head->base.lut.nvbo);
1400 ret = nouveau_bo_pin(head->base.lut.nvbo, TTM_PL_FLAG_VRAM);
1402 ret = nouveau_bo_map(head->base.lut.nvbo);
1404 nouveau_bo_unpin(head->base.lut.nvbo);
1407 nouveau_bo_ref(NULL, &head->base.lut.nvbo);
1413 nv50_crtc_lut_load(crtc);
1415 /* allocate cursor resources */
1416 ret = nv50_curs_create(disp->disp, index, &head->curs);
/* 64x64 @ 32bpp cursor image buffer. */
1420 ret = nouveau_bo_new(dev, 64 * 64 * 4, 0x100, TTM_PL_FLAG_VRAM,
1421 0, 0x0000, NULL, &head->base.cursor.nvbo);
1423 ret = nouveau_bo_pin(head->base.cursor.nvbo, TTM_PL_FLAG_VRAM);
1425 ret = nouveau_bo_map(head->base.cursor.nvbo);
/* NOTE(review): unpins the LUT bo on the cursor map failure path —
 * looks like it should be the cursor bo; confirm against upstream. */
1427 nouveau_bo_unpin(head->base.lut.nvbo);
1430 nouveau_bo_ref(NULL, &head->base.cursor.nvbo);
1436 /* allocate page flip / sync resources */
1437 ret = nv50_base_create(disp->disp, index, disp->sync->bo.offset,
1442 head->sync.addr = EVO_FLIP_SEM0(index);
1443 head->sync.data = 0x00000000;
1445 /* allocate overlay resources */
1446 ret = nv50_oimm_create(disp->disp, index, &head->oimm);
1450 ret = nv50_ovly_create(disp->disp, index, disp->sync->bo.offset,
1457 nv50_crtc_destroy(crtc);
1461 /******************************************************************************
1463 *****************************************************************************/
/*
 * DAC DPMS: issue a DAC_PWR method to the display core. hsync/vsync
 * power bits are derived from the requested DPMS level (SUSPEND drops
 * vsync, STANDBY drops hsync, OFF drops both).
 */
1465 nv50_dac_dpms(struct drm_encoder *encoder, int mode)
1467 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1468 struct nv50_disp *disp = nv50_disp(encoder->dev);
1470 struct nv50_disp_mthd_v1 base;
1471 struct nv50_disp_dac_pwr_v0 pwr;
1474 .base.method = NV50_DISP_MTHD_V1_DAC_PWR,
1475 .base.hasht = nv_encoder->dcb->hasht,
1476 .base.hashm = nv_encoder->dcb->hashm,
1479 .pwr.vsync = (mode != DRM_MODE_DPMS_SUSPEND &&
1480 mode != DRM_MODE_DPMS_OFF),
1481 .pwr.hsync = (mode != DRM_MODE_DPMS_STANDBY &&
1482 mode != DRM_MODE_DPMS_OFF),
1485 nvif_mthd(disp->disp, 0, &args, sizeof(args));
/*
 * DAC mode fixup: when the connector has a native mode and scaling is
 * enabled, substitute the native mode (keeping the original mode id) so
 * the head drives the panel's native timings.
 */
1489 nv50_dac_mode_fixup(struct drm_encoder *encoder,
1490 const struct drm_display_mode *mode,
1491 struct drm_display_mode *adjusted_mode)
1493 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1494 struct nouveau_connector *nv_connector;
1496 nv_connector = nouveau_encoder_connector_get(nv_encoder);
1497 if (nv_connector && nv_connector->native_mode) {
1498 if (nv_connector->scaling_mode != DRM_MODE_SCALE_NONE) {
/* Preserve the mode object's identity across the copy. */
1499 int id = adjusted_mode->base.id;
1500 *adjusted_mode = *nv_connector->native_mode;
1501 adjusted_mode->base.id = id;
/* DAC commit hook; body elided in this extract (appears to be empty). */
1509 nv50_dac_commit(struct drm_encoder *encoder)
/*
 * Program a DAC for the given mode: power it on, then push the owner
 * mask and sync polarity via evo methods. Pre-NVD0 and NVD0+ cores use
 * different method layouts (0x0400/or vs 0x0404/head + 0x0180/or).
 */
1514 nv50_dac_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode,
1515 struct drm_display_mode *adjusted_mode)
1517 struct nv50_mast *mast = nv50_mast(encoder->dev);
1518 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1519 struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
1522 nv50_dac_dpms(encoder, DRM_MODE_DPMS_ON);
1524 push = evo_wait(mast, 8);
1526 if (nv50_vers(mast) < NVD0_DISP_MAST_CLASS) {
/* Pre-NVD0: sync-polarity bits 0/1 for negative h/v sync. */
1527 u32 syncs = 0x00000000;
1529 if (mode->flags & DRM_MODE_FLAG_NHSYNC)
1530 syncs |= 0x00000001;
1531 if (mode->flags & DRM_MODE_FLAG_NVSYNC)
1532 syncs |= 0x00000002;
1534 evo_mthd(push, 0x0400 + (nv_encoder->or * 0x080), 2);
1535 evo_data(push, 1 << nv_crtc->index);
1536 evo_data(push, syncs);
/* NVD0+: head-relative method with a "magic" routing word. */
1538 u32 magic = 0x31ec6000 | (nv_crtc->index << 25);
1539 u32 syncs = 0x00000001;
1541 if (mode->flags & DRM_MODE_FLAG_NHSYNC)
1542 syncs |= 0x00000008;
1543 if (mode->flags & DRM_MODE_FLAG_NVSYNC)
1544 syncs |= 0x00000010;
1546 if (mode->flags & DRM_MODE_FLAG_INTERLACE)
1547 magic |= 0x00000001;
1549 evo_mthd(push, 0x0404 + (nv_crtc->index * 0x300), 2);
1550 evo_data(push, syncs);
1551 evo_data(push, magic);
1552 evo_mthd(push, 0x0180 + (nv_encoder->or * 0x020), 1);
1553 evo_data(push, 1 << nv_crtc->index);
1556 evo_kick(push, mast);
/* Remember which CRTC drives this encoder for later disconnect. */
1559 nv_encoder->crtc = encoder->crtc;
/*
 * Detach a DAC from its CRTC: if bound, prepare the CRTC and write a
 * zero owner mask to the output's evo control method, then clear the
 * cached crtc pointer.
 */
1563 nv50_dac_disconnect(struct drm_encoder *encoder)
1565 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1566 struct nv50_mast *mast = nv50_mast(encoder->dev);
1567 const int or = nv_encoder->or;
1570 if (nv_encoder->crtc) {
1571 nv50_crtc_prepare(nv_encoder->crtc);
1573 push = evo_wait(mast, 4);
1575 if (nv50_vers(mast) < NVD0_DISP_MAST_CLASS) {
1576 evo_mthd(push, 0x0400 + (or * 0x080), 1);
1577 evo_data(push, 0x00000000);
1579 evo_mthd(push, 0x0180 + (or * 0x020), 1);
1580 evo_data(push, 0x00000000);
1582 evo_kick(push, mast);
1586 nv_encoder->crtc = NULL;
/*
 * DAC load detection: ask the display core to perform a load test,
 * using the VBIOS-provided test value (falling back to 340 when the
 * VBIOS supplies none). Connected iff the core reports a load.
 */
1589 static enum drm_connector_status
1590 nv50_dac_detect(struct drm_encoder *encoder, struct drm_connector *connector)
1592 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1593 struct nv50_disp *disp = nv50_disp(encoder->dev);
1595 struct nv50_disp_mthd_v1 base;
1596 struct nv50_disp_dac_load_v0 load;
1599 .base.method = NV50_DISP_MTHD_V1_DAC_LOAD,
1600 .base.hasht = nv_encoder->dcb->hasht,
1601 .base.hashm = nv_encoder->dcb->hashm,
1605 args.load.data = nouveau_drm(encoder->dev)->vbios.dactestval;
1606 if (args.load.data == 0)
1607 args.load.data = 340;
1609 ret = nvif_mthd(disp->disp, 0, &args, sizeof(args));
1610 if (ret || !args.load.load)
1611 return connector_status_disconnected;
1613 return connector_status_connected;
/* Free a DAC encoder (core cleanup; kfree presumably elided here). */
1617 nv50_dac_destroy(struct drm_encoder *encoder)
1619 drm_encoder_cleanup(encoder);
/* DAC encoder helper callbacks; disconnect doubles as prepare/disable. */
1623 static const struct drm_encoder_helper_funcs nv50_dac_hfunc = {
1624 .dpms = nv50_dac_dpms,
1625 .mode_fixup = nv50_dac_mode_fixup,
1626 .prepare = nv50_dac_disconnect,
1627 .commit = nv50_dac_commit,
1628 .mode_set = nv50_dac_mode_set,
1629 .disable = nv50_dac_disconnect,
1630 .get_crtc = nv50_display_crtc_get,
1631 .detect = nv50_dac_detect
/* DAC encoder core callbacks. */
1634 static const struct drm_encoder_funcs nv50_dac_func = {
1635 .destroy = nv50_dac_destroy,
/*
 * Create a DAC encoder from its DCB entry and attach it to the given
 * connector. The OR index comes from the DCB 'or' bitmask; the i2c
 * port is looked up by DCB i2c index.
 */
1639 nv50_dac_create(struct drm_connector *connector, struct dcb_output *dcbe)
1641 struct nouveau_drm *drm = nouveau_drm(connector->dev);
1642 struct nouveau_i2c *i2c = nvkm_i2c(&drm->device);
1643 struct nouveau_encoder *nv_encoder;
1644 struct drm_encoder *encoder;
1645 int type = DRM_MODE_ENCODER_DAC;
1647 nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
1650 nv_encoder->dcb = dcbe;
/* ffs()-1 converts the one-hot OR mask into an index. */
1651 nv_encoder->or = ffs(dcbe->or) - 1;
1652 nv_encoder->i2c = i2c->find(i2c, dcbe->i2c_index);
1654 encoder = to_drm_encoder(nv_encoder);
1655 encoder->possible_crtcs = dcbe->heads;
1656 encoder->possible_clones = 0;
1657 drm_encoder_init(connector->dev, encoder, &nv50_dac_func, type);
1658 drm_encoder_helper_add(encoder, &nv50_dac_hfunc);
1660 drm_mode_connector_attach_encoder(connector, encoder);
1664 /******************************************************************************
1666 *****************************************************************************/
/*
 * Enable HDA ELD audio on a SOR: if the monitor advertises audio,
 * translate the EDID into an ELD and hand it to the display core via
 * the SOR_HDA_ELD method.
 */
1668 nv50_audio_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode)
1670 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1671 struct nouveau_connector *nv_connector;
1672 struct nv50_disp *disp = nv50_disp(encoder->dev);
1674 struct nv50_disp_mthd_v1 base;
1675 struct nv50_disp_sor_hda_eld_v0 eld;
1676 u8 data[sizeof(nv_connector->base.eld)];
1679 .base.method = NV50_DISP_MTHD_V1_SOR_HDA_ELD,
1680 .base.hasht = nv_encoder->dcb->hasht,
1681 .base.hashm = nv_encoder->dcb->hashm,
/* No audio-capable monitor -> nothing to program. */
1684 nv_connector = nouveau_encoder_connector_get(nv_encoder);
1685 if (!drm_detect_monitor_audio(nv_connector->edid))
1688 drm_edid_to_eld(&nv_connector->base, nv_connector->edid);
1689 memcpy(args.data, nv_connector->base.eld, sizeof(args.data));
1691 nvif_mthd(disp->disp, 0, &args, sizeof(args));
/*
 * Disable HDA audio on a SOR by issuing the SOR_HDA_ELD method with no
 * ELD payload.
 */
1695 nv50_audio_disconnect(struct drm_encoder *encoder)
1697 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1698 struct nv50_disp *disp = nv50_disp(encoder->dev);
1700 struct nv50_disp_mthd_v1 base;
1701 struct nv50_disp_sor_hda_eld_v0 eld;
1704 .base.method = NV50_DISP_MTHD_V1_SOR_HDA_ELD,
1705 .base.hasht = nv_encoder->dcb->hasht,
1706 .base.hashm = nv_encoder->dcb->hashm,
1709 nvif_mthd(disp->disp, 0, &args, sizeof(args));
1712 /******************************************************************************
1714 *****************************************************************************/
/*
 * Enable HDMI infoframe/audio support for the mode: compute the
 * max_ac_packet size from the horizontal blanking budget (minus rekey
 * and a constant 18, per the comments borrowed from tegra), power up
 * HDMI via SOR_HDMI_PWR, then enable audio.
 */
1716 nv50_hdmi_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode)
1718 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1719 struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
1720 struct nv50_disp *disp = nv50_disp(encoder->dev);
1722 struct nv50_disp_mthd_v1 base;
1723 struct nv50_disp_sor_hdmi_pwr_v0 pwr;
1726 .base.method = NV50_DISP_MTHD_V1_SOR_HDMI_PWR,
1727 .base.hasht = nv_encoder->dcb->hasht,
/* hashm encodes the head in bits 8+; mask keeps the rest of the DCB hash. */
1728 .base.hashm = (0xf0ff & nv_encoder->dcb->hashm) |
1729 (0x0100 << nv_crtc->index),
1731 .pwr.rekey = 56, /* binary driver, and tegra, constant */
1733 struct nouveau_connector *nv_connector;
/* Only meaningful for HDMI sinks; DVI monitors skip all of this. */
1736 nv_connector = nouveau_encoder_connector_get(nv_encoder);
1737 if (!drm_detect_hdmi_monitor(nv_connector->edid))
1740 max_ac_packet = mode->htotal - mode->hdisplay;
1741 max_ac_packet -= args.pwr.rekey;
1742 max_ac_packet -= 18; /* constant from tegra */
1743 args.pwr.max_ac_packet = max_ac_packet / 32;
1745 nvif_mthd(disp->disp, 0, &args, sizeof(args));
1746 nv50_audio_mode_set(encoder, mode);
/*
 * Disable HDMI on the SOR/head pair: first tear down audio, then issue
 * SOR_HDMI_PWR with no power payload.
 */
1750 nv50_hdmi_disconnect(struct drm_encoder *encoder, struct nouveau_crtc *nv_crtc)
1752 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1753 struct nv50_disp *disp = nv50_disp(encoder->dev);
1755 struct nv50_disp_mthd_v1 base;
1756 struct nv50_disp_sor_hdmi_pwr_v0 pwr;
1759 .base.method = NV50_DISP_MTHD_V1_SOR_HDMI_PWR,
1760 .base.hasht = nv_encoder->dcb->hasht,
1761 .base.hashm = (0xf0ff & nv_encoder->dcb->hashm) |
1762 (0x0100 << nv_crtc->index),
1765 nv50_audio_disconnect(encoder);
1767 nvif_mthd(disp->disp, 0, &args, sizeof(args));
1770 /******************************************************************************
1772 *****************************************************************************/
/*
 * SOR DPMS: record the new level, scan sibling TMDS encoders sharing
 * the same OR (their DPMS state influences whether we may power down),
 * then issue SOR_PWR — plus a DP link method for DP outputs.
 * NOTE(review): several control-flow lines (continue/returns and the
 * 'link' args initializer) are elided in this extract.
 */
1774 nv50_sor_dpms(struct drm_encoder *encoder, int mode)
1776 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1777 struct nv50_disp *disp = nv50_disp(encoder->dev);
1779 struct nv50_disp_mthd_v1 base;
1780 struct nv50_disp_sor_pwr_v0 pwr;
1783 .base.method = NV50_DISP_MTHD_V1_SOR_PWR,
1784 .base.hasht = nv_encoder->dcb->hasht,
1785 .base.hashm = nv_encoder->dcb->hashm,
1786 .pwr.state = mode == DRM_MODE_DPMS_ON,
1789 struct nv50_disp_mthd_v1 base;
1790 struct nv50_disp_sor_dp_pwr_v0 pwr;
1793 .base.method = NV50_DISP_MTHD_V1_SOR_DP_PWR,
1794 .base.hasht = nv_encoder->dcb->hasht,
1795 .base.hashm = nv_encoder->dcb->hashm,
1796 .pwr.state = mode == DRM_MODE_DPMS_ON,
1798 struct drm_device *dev = encoder->dev;
1799 struct drm_encoder *partner;
1801 nv_encoder->last_dpms = mode;
/* An OR can be shared; check what the partner encoder is doing. */
1803 list_for_each_entry(partner, &dev->mode_config.encoder_list, head) {
1804 struct nouveau_encoder *nv_partner = nouveau_encoder(partner);
1806 if (partner->encoder_type != DRM_MODE_ENCODER_TMDS)
1809 if (nv_partner != nv_encoder &&
1810 nv_partner->dcb->or == nv_encoder->dcb->or) {
1811 if (nv_partner->last_dpms == DRM_MODE_DPMS_ON)
1817 if (nv_encoder->dcb->type == DCB_OUTPUT_DP) {
1819 nvif_mthd(disp->disp, 0, &args, sizeof(args));
1820 nvif_mthd(disp->disp, 0, &link, sizeof(link));
1822 nvif_mthd(disp->disp, 0, &args, sizeof(args));
/*
 * SOR mode fixup: same native-mode substitution as the DAC variant —
 * when scaling is active, replace the adjusted mode with the panel's
 * native mode while preserving the mode id.
 */
1827 nv50_sor_mode_fixup(struct drm_encoder *encoder,
1828 const struct drm_display_mode *mode,
1829 struct drm_display_mode *adjusted_mode)
1831 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1832 struct nouveau_connector *nv_connector;
1834 nv_connector = nouveau_encoder_connector_get(nv_encoder);
1835 if (nv_connector && nv_connector->native_mode) {
1836 if (nv_connector->scaling_mode != DRM_MODE_SCALE_NONE) {
1837 int id = adjusted_mode->base.id;
1838 *adjusted_mode = *nv_connector->native_mode;
1839 adjusted_mode->base.id = id;
/*
 * Read-modify-write the cached SOR control word and, if the value
 * actually changed, push it through the evo channel (method offset
 * differs between pre-NVD0 and NVD0+ cores).
 */
1847 nv50_sor_ctrl(struct nouveau_encoder *nv_encoder, u32 mask, u32 data)
1849 struct nv50_mast *mast = nv50_mast(nv_encoder->base.base.dev);
1850 u32 temp = (nv_encoder->ctrl & ~mask) | (data & mask), *push;
/* Skip the hardware write entirely when nothing changes. */
1851 if (temp != nv_encoder->ctrl && (push = evo_wait(mast, 2))) {
1852 if (nv50_vers(mast) < NVD0_DISP_MAST_CLASS) {
1853 evo_mthd(push, 0x0600 + (nv_encoder->or * 0x40), 1);
1854 evo_data(push, (nv_encoder->ctrl = temp));
1856 evo_mthd(push, 0x0200 + (nv_encoder->or * 0x20), 1);
1857 evo_data(push, (nv_encoder->ctrl = temp));
1859 evo_kick(push, mast);
/*
 * Detach a SOR from its CRTC: mark it off, clear its head bit in the
 * control word, and tear down any HDMI state on that head.
 */
1864 nv50_sor_disconnect(struct drm_encoder *encoder)
1866 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1867 struct nouveau_crtc *nv_crtc = nouveau_crtc(nv_encoder->crtc);
1869 nv_encoder->last_dpms = DRM_MODE_DPMS_OFF;
1870 nv_encoder->crtc = NULL;
1873 nv50_crtc_prepare(&nv_crtc->base);
1874 nv50_sor_ctrl(nv_encoder, 1 << nv_crtc->index, 0);
1875 nv50_hdmi_disconnect(&nv_encoder->base.base, nv_crtc);
/* SOR commit hook; body elided in this extract (appears to be empty). */
1880 nv50_sor_commit(struct drm_encoder *encoder)
/*
 * Program a SOR for the given mode. Per-output-type setup: TMDS picks
 * the link/proto (and enables HDMI when the sink supports it), LVDS
 * builds a VBIOS script selector from dual-link/bpp heuristics, DP
 * derives the data rate from clock and bpc. Afterwards the SOR is
 * powered on, NVD0+ heads get their sync/magic words, and the control
 * word (depth/proto/sync flags/owner) is pushed via nv50_sor_ctrl().
 * NOTE(review): many branch/else/assignment lines are elided in this
 * extract; comments describe only the visible statements.
 */
1885 nv50_sor_mode_set(struct drm_encoder *encoder, struct drm_display_mode *umode,
1886 struct drm_display_mode *mode)
1888 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1889 struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
1891 struct nv50_disp_mthd_v1 base;
1892 struct nv50_disp_sor_lvds_script_v0 lvds;
1895 .base.method = NV50_DISP_MTHD_V1_SOR_LVDS_SCRIPT,
1896 .base.hasht = nv_encoder->dcb->hasht,
1897 .base.hashm = nv_encoder->dcb->hashm,
1899 struct nv50_disp *disp = nv50_disp(encoder->dev);
1900 struct nv50_mast *mast = nv50_mast(encoder->dev);
1901 struct drm_device *dev = encoder->dev;
1902 struct nouveau_drm *drm = nouveau_drm(dev);
1903 struct nouveau_connector *nv_connector;
1904 struct nvbios *bios = &drm->vbios;
/* One-hot head bit used as the owner mask in the control word. */
1906 u8 owner = 1 << nv_crtc->index;
1910 nv_connector = nouveau_encoder_connector_get(nv_encoder);
1911 nv_encoder->crtc = encoder->crtc;
1913 switch (nv_encoder->dcb->type) {
1914 case DCB_OUTPUT_TMDS:
/* Single-link TMDS caps out at 165MHz pixel clock. */
1915 if (nv_encoder->dcb->sorconf.link & 1) {
1916 if (mode->clock < 165000)
1924 nv50_hdmi_mode_set(&nv_encoder->base.base, mode);
1926 case DCB_OUTPUT_LVDS:
/* LVDS: bit 0x0100 = dual link, 0x0200 = 24-bit panel. */
1929 if (bios->fp_no_ddc) {
1930 if (bios->fp.dual_link)
1931 lvds.lvds.script |= 0x0100;
1932 if (bios->fp.if_is_24bit)
1933 lvds.lvds.script |= 0x0200;
/* SPWG panels encode dual-link in EDID byte 121. */
1935 if (nv_connector->type == DCB_CONNECTOR_LVDS_SPWG) {
1936 if (((u8 *)nv_connector->edid)[121] == 2)
1937 lvds.lvds.script |= 0x0100;
1939 if (mode->clock >= bios->fp.duallink_transition_clk) {
1940 lvds.lvds.script |= 0x0100;
1943 if (lvds.lvds.script & 0x0100) {
1944 if (bios->fp.strapless_is_24bit & 2)
1945 lvds.lvds.script |= 0x0200;
1947 if (bios->fp.strapless_is_24bit & 1)
1948 lvds.lvds.script |= 0x0200;
1951 if (nv_connector->base.display_info.bpc == 8)
1952 lvds.lvds.script |= 0x0200;
1955 nvif_mthd(disp->disp, 0, &lvds, sizeof(lvds));
/* DP: datarate = pixel clock * bits-per-pixel / 8. */
1958 if (nv_connector->base.display_info.bpc == 6) {
1959 nv_encoder->dp.datarate = mode->clock * 18 / 8;
1962 if (nv_connector->base.display_info.bpc == 8) {
1963 nv_encoder->dp.datarate = mode->clock * 24 / 8;
1966 nv_encoder->dp.datarate = mode->clock * 30 / 8;
1970 if (nv_encoder->dcb->sorconf.link & 1)
1980 nv50_sor_dpms(&nv_encoder->base.base, DRM_MODE_DPMS_ON);
/* NVD0+ cores also need per-head sync polarity + routing words. */
1982 if (nv50_vers(mast) >= NVD0_DISP_CLASS) {
1983 u32 *push = evo_wait(mast, 3);
1985 u32 magic = 0x31ec6000 | (nv_crtc->index << 25);
1986 u32 syncs = 0x00000001;
1988 if (mode->flags & DRM_MODE_FLAG_NHSYNC)
1989 syncs |= 0x00000008;
1990 if (mode->flags & DRM_MODE_FLAG_NVSYNC)
1991 syncs |= 0x00000010;
1993 if (mode->flags & DRM_MODE_FLAG_INTERLACE)
1994 magic |= 0x00000001;
1996 evo_mthd(push, 0x0404 + (nv_crtc->index * 0x300), 2);
1997 evo_data(push, syncs | (depth << 6));
1998 evo_data(push, magic);
1999 evo_kick(push, mast);
2005 ctrl = (depth << 16) | (proto << 8);
2006 if (mode->flags & DRM_MODE_FLAG_NHSYNC)
2008 if (mode->flags & DRM_MODE_FLAG_NVSYNC)
2013 nv50_sor_ctrl(nv_encoder, mask | owner, ctrl | owner);
/* Free a SOR encoder (core cleanup; kfree presumably elided here). */
2017 nv50_sor_destroy(struct drm_encoder *encoder)
2019 drm_encoder_cleanup(encoder);
/* SOR encoder helper callbacks; disconnect doubles as prepare/disable. */
2023 static const struct drm_encoder_helper_funcs nv50_sor_hfunc = {
2024 .dpms = nv50_sor_dpms,
2025 .mode_fixup = nv50_sor_mode_fixup,
2026 .prepare = nv50_sor_disconnect,
2027 .commit = nv50_sor_commit,
2028 .mode_set = nv50_sor_mode_set,
2029 .disable = nv50_sor_disconnect,
2030 .get_crtc = nv50_display_crtc_get,
/* SOR encoder core callbacks. */
2033 static const struct drm_encoder_funcs nv50_sor_func = {
2034 .destroy = nv50_sor_destroy,
/*
 * Create a SOR encoder (LVDS/TMDS/DP per the DCB type) and attach it
 * to the connector; mirrors nv50_dac_create() otherwise.
 * NOTE(review): some switch cases/returns are elided in this extract.
 */
2038 nv50_sor_create(struct drm_connector *connector, struct dcb_output *dcbe)
2040 struct nouveau_drm *drm = nouveau_drm(connector->dev);
2041 struct nouveau_i2c *i2c = nvkm_i2c(&drm->device);
2042 struct nouveau_encoder *nv_encoder;
2043 struct drm_encoder *encoder;
2046 switch (dcbe->type) {
2047 case DCB_OUTPUT_LVDS: type = DRM_MODE_ENCODER_LVDS; break;
2048 case DCB_OUTPUT_TMDS:
2051 type = DRM_MODE_ENCODER_TMDS;
2055 nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
2058 nv_encoder->dcb = dcbe;
2059 nv_encoder->or = ffs(dcbe->or) - 1;
2060 nv_encoder->i2c = i2c->find(i2c, dcbe->i2c_index);
2061 nv_encoder->last_dpms = DRM_MODE_DPMS_OFF;
2063 encoder = to_drm_encoder(nv_encoder);
2064 encoder->possible_crtcs = dcbe->heads;
2065 encoder->possible_clones = 0;
2066 drm_encoder_init(connector->dev, encoder, &nv50_sor_func, type);
2067 drm_encoder_helper_add(encoder, &nv50_sor_hfunc);
2069 drm_mode_connector_attach_encoder(connector, encoder);
2073 /******************************************************************************
2075 *****************************************************************************/
/*
 * PIOR (external encoder) DPMS: issue PIOR_PWR with the desired state
 * and the DCB output type.
 */
2078 nv50_pior_dpms(struct drm_encoder *encoder, int mode)
2080 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
2081 struct nv50_disp *disp = nv50_disp(encoder->dev);
2083 struct nv50_disp_mthd_v1 base;
2084 struct nv50_disp_pior_pwr_v0 pwr;
2087 .base.method = NV50_DISP_MTHD_V1_PIOR_PWR,
2088 .base.hasht = nv_encoder->dcb->hasht,
2089 .base.hashm = nv_encoder->dcb->hashm,
2090 .pwr.state = mode == DRM_MODE_DPMS_ON,
2091 .pwr.type = nv_encoder->dcb->type,
2094 nvif_mthd(disp->disp, 0, &args, sizeof(args));
/*
 * PIOR mode fixup: native-mode substitution as for DAC/SOR, plus a
 * doubled adjusted clock (external TMDS encoders run the link at twice
 * the pixel clock — presumably; confirm against hardware docs).
 */
2098 nv50_pior_mode_fixup(struct drm_encoder *encoder,
2099 const struct drm_display_mode *mode,
2100 struct drm_display_mode *adjusted_mode)
2102 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
2103 struct nouveau_connector *nv_connector;
2105 nv_connector = nouveau_encoder_connector_get(nv_encoder);
2106 if (nv_connector && nv_connector->native_mode) {
2107 if (nv_connector->scaling_mode != DRM_MODE_SCALE_NONE) {
2108 int id = adjusted_mode->base.id;
2109 *adjusted_mode = *nv_connector->native_mode;
2110 adjusted_mode->base.id = id;
2114 adjusted_mode->clock *= 2;
/* PIOR commit hook; body elided in this extract (appears to be empty). */
2119 nv50_pior_commit(struct drm_encoder *encoder)
/*
 * Program a PIOR: derive the depth field from the connector's bpc,
 * pick the protocol from the DCB type, power on, and push the control
 * word (depth/proto/owner plus sync-polarity bits) through evo.
 * NOTE(review): some case bodies and else branches are elided here.
 */
2124 nv50_pior_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode,
2125 struct drm_display_mode *adjusted_mode)
2127 struct nv50_mast *mast = nv50_mast(encoder->dev);
2128 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
2129 struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
2130 struct nouveau_connector *nv_connector;
2131 u8 owner = 1 << nv_crtc->index;
2135 nv_connector = nouveau_encoder_connector_get(nv_encoder);
/* Map bits-per-component onto the hardware depth field. */
2136 switch (nv_connector->base.display_info.bpc) {
2137 case 10: depth = 0x6; break;
2138 case 8: depth = 0x5; break;
2139 case 6: depth = 0x2; break;
2140 default: depth = 0x0; break;
2143 switch (nv_encoder->dcb->type) {
2144 case DCB_OUTPUT_TMDS:
2153 nv50_pior_dpms(encoder, DRM_MODE_DPMS_ON);
2155 push = evo_wait(mast, 8);
2157 if (nv50_vers(mast) < NVD0_DISP_MAST_CLASS) {
2158 u32 ctrl = (depth << 16) | (proto << 8) | owner;
2159 if (mode->flags & DRM_MODE_FLAG_NHSYNC)
2161 if (mode->flags & DRM_MODE_FLAG_NVSYNC)
2163 evo_mthd(push, 0x0700 + (nv_encoder->or * 0x040), 1);
2164 evo_data(push, ctrl);
2167 evo_kick(push, mast);
2170 nv_encoder->crtc = encoder->crtc;
/*
 * Detach a PIOR from its CRTC: prepare the CRTC and zero the output's
 * evo control method (pre-NVD0 layout only is visible here), then
 * clear the cached crtc pointer.
 */
2174 nv50_pior_disconnect(struct drm_encoder *encoder)
2176 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
2177 struct nv50_mast *mast = nv50_mast(encoder->dev);
2178 const int or = nv_encoder->or;
2181 if (nv_encoder->crtc) {
2182 nv50_crtc_prepare(nv_encoder->crtc);
2184 push = evo_wait(mast, 4);
2186 if (nv50_vers(mast) < NVD0_DISP_MAST_CLASS) {
2187 evo_mthd(push, 0x0700 + (or * 0x040), 1);
2188 evo_data(push, 0x00000000);
2190 evo_kick(push, mast);
2194 nv_encoder->crtc = NULL;
/* Free a PIOR encoder (core cleanup; kfree presumably elided here). */
2198 nv50_pior_destroy(struct drm_encoder *encoder)
2200 drm_encoder_cleanup(encoder);
/* PIOR encoder helper callbacks; disconnect doubles as prepare/disable. */
2204 static const struct drm_encoder_helper_funcs nv50_pior_hfunc = {
2205 .dpms = nv50_pior_dpms,
2206 .mode_fixup = nv50_pior_mode_fixup,
2207 .prepare = nv50_pior_disconnect,
2208 .commit = nv50_pior_commit,
2209 .mode_set = nv50_pior_mode_set,
2210 .disable = nv50_pior_disconnect,
2211 .get_crtc = nv50_display_crtc_get,
/* PIOR encoder core callbacks. */
2214 static const struct drm_encoder_funcs nv50_pior_func = {
2215 .destroy = nv50_pior_destroy,
/*
 * Create a PIOR (external TMDS/DP encoder) and attach it to the
 * connector. The DDC/AUX port is resolved from the DCB external-device
 * id; both visible cases register as DRM_MODE_ENCODER_TMDS.
 */
2219 nv50_pior_create(struct drm_connector *connector, struct dcb_output *dcbe)
2221 struct nouveau_drm *drm = nouveau_drm(connector->dev);
2222 struct nouveau_i2c *i2c = nvkm_i2c(&drm->device);
2223 struct nouveau_i2c_port *ddc = NULL;
2224 struct nouveau_encoder *nv_encoder;
2225 struct drm_encoder *encoder;
2228 switch (dcbe->type) {
2229 case DCB_OUTPUT_TMDS:
2230 ddc = i2c->find_type(i2c, NV_I2C_TYPE_EXTDDC(dcbe->extdev));
2231 type = DRM_MODE_ENCODER_TMDS;
2234 ddc = i2c->find_type(i2c, NV_I2C_TYPE_EXTAUX(dcbe->extdev));
2235 type = DRM_MODE_ENCODER_TMDS;
2241 nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
2244 nv_encoder->dcb = dcbe;
2245 nv_encoder->or = ffs(dcbe->or) - 1;
2246 nv_encoder->i2c = ddc;
2248 encoder = to_drm_encoder(nv_encoder);
2249 encoder->possible_crtcs = dcbe->heads;
2250 encoder->possible_clones = 0;
2251 drm_encoder_init(connector->dev, encoder, &nv50_pior_func, type);
2252 drm_encoder_helper_add(encoder, &nv50_pior_hfunc);
2254 drm_mode_connector_attach_encoder(connector, encoder);
2258 /******************************************************************************
2260 *****************************************************************************/
/*
 * Tear down a framebuffer DMA object: finalize the per-head objects,
 * then the core object, and unlink it from the display's fbdma list
 * (kfree presumably elided in this extract).
 */
2263 nv50_fbdma_fini(struct nv50_fbdma *fbdma)
2266 for (i = 0; i < ARRAY_SIZE(fbdma->base); i++)
2267 nvif_object_fini(&fbdma->base[i]);
2268 nvif_object_fini(&fbdma->core);
2269 list_del(&fbdma->head);
/*
 * Create (or reuse, when the handle already exists) a DMA object
 * describing a VRAM range for scanout, with chipset-specific extra
 * fields (nv50 / gf100 / gf110 layouts). One object is instantiated
 * per head's sync channel plus one for the core channel; any failure
 * rolls back via nv50_fbdma_fini().
 * NOTE(review): return statements and some braces are elided here.
 */
2274 nv50_fbdma_init(struct drm_device *dev, u32 name, u64 offset, u64 length, u8 kind)
2276 struct nouveau_drm *drm = nouveau_drm(dev);
2277 struct nv50_disp *disp = nv50_disp(dev);
2278 struct nv50_mast *mast = nv50_mast(dev);
2279 struct __attribute__ ((packed)) {
2280 struct nv_dma_v0 base;
2282 struct nv50_dma_v0 nv50;
2283 struct gf100_dma_v0 gf100;
2284 struct gf110_dma_v0 gf110;
2287 struct nv50_fbdma *fbdma;
2288 struct drm_crtc *crtc;
/* Size grows below as chipset-specific args are appended. */
2289 u32 size = sizeof(args.base);
/* Handle already registered: nothing to do. */
2292 list_for_each_entry(fbdma, &disp->fbdma, head) {
2293 if (fbdma->core.handle == name)
2297 fbdma = kzalloc(sizeof(*fbdma), GFP_KERNEL);
2300 list_add(&fbdma->head, &disp->fbdma);
2302 args.base.target = NV_DMA_V0_TARGET_VRAM;
2303 args.base.access = NV_DMA_V0_ACCESS_RDWR;
2304 args.base.start = offset;
2305 args.base.limit = offset + length - 1;
/* Pick the argument layout matching the chipset generation. */
2307 if (drm->device.info.chipset < 0x80) {
2308 args.nv50.part = NV50_DMA_V0_PART_256;
2309 size += sizeof(args.nv50);
2311 if (drm->device.info.chipset < 0xc0) {
2312 args.nv50.part = NV50_DMA_V0_PART_256;
2313 args.nv50.kind = kind;
2314 size += sizeof(args.nv50);
2316 if (drm->device.info.chipset < 0xd0) {
2317 args.gf100.kind = kind;
2318 size += sizeof(args.gf100);
2320 args.gf110.page = GF110_DMA_V0_PAGE_LP;
2321 args.gf110.kind = kind;
2322 size += sizeof(args.gf110);
/* One DMA object per head, bound to that head's sync channel. */
2325 list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
2326 struct nv50_head *head = nv50_head(crtc);
2327 int ret = nvif_object_init(&head->sync.base.base.user, NULL,
2328 name, NV_DMA_IN_MEMORY, &args, size,
2329 &fbdma->base[head->base.index]);
2331 nv50_fbdma_fini(fbdma);
/* And one for the core (master) channel. */
2336 ret = nvif_object_init(&mast->base.base.user, NULL, name,
2337 NV_DMA_IN_MEMORY, &args, size,
2340 nv50_fbdma_fini(fbdma);
/* Framebuffer destructor hook; body elided in this extract. */
2348 nv50_fb_dtor(struct drm_framebuffer *fb)
/*
 * Framebuffer constructor: validate the backing bo (must be contiguous
 * VRAM), translate depth into the evo format word, compute the pitch
 * word (tiled vs linear, per display class), assign a kind-based
 * handle, then ensure a matching fbdma object covers all of VRAM.
 */
2353 nv50_fb_ctor(struct drm_framebuffer *fb)
2355 struct nouveau_framebuffer *nv_fb = nouveau_framebuffer(fb);
2356 struct nouveau_drm *drm = nouveau_drm(fb->dev);
2357 struct nouveau_bo *nvbo = nv_fb->nvbo;
2358 struct nv50_disp *disp = nv50_disp(fb->dev);
2359 struct nouveau_fb *pfb = nvkm_fb(&drm->device);
/* Tile layout's high byte is the memory "kind" used below. */
2360 u8 kind = nouveau_bo_tile_layout(nvbo) >> 8;
2361 u8 tile = nvbo->tile_mode;
2363 if (nvbo->tile_flags & NOUVEAU_GEM_TILE_NONCONTIG) {
2364 NV_ERROR(drm, "framebuffer requires contiguous bo\n");
2368 if (drm->device.info.chipset >= 0xc0)
2369 tile >>= 4; /* yep.. */
2371 switch (fb->depth) {
2372 case 8: nv_fb->r_format = 0x1e00; break;
2373 case 15: nv_fb->r_format = 0xe900; break;
2374 case 16: nv_fb->r_format = 0xe800; break;
2376 case 32: nv_fb->r_format = 0xcf00; break;
2377 case 30: nv_fb->r_format = 0xd100; break;
2379 NV_ERROR(drm, "unknown depth %d\n", fb->depth);
/* Pitch encoding differs per display class; linear fbs set a flag bit. */
2383 if (disp->disp->oclass < NV84_DISP_CLASS) {
2384 nv_fb->r_pitch = kind ? (((fb->pitches[0] / 4) << 4) | tile) :
2385 (fb->pitches[0] | 0x00100000);
2386 nv_fb->r_format |= kind << 16;
2388 if (disp->disp->oclass < NVD0_DISP_CLASS) {
2389 nv_fb->r_pitch = kind ? (((fb->pitches[0] / 4) << 4) | tile) :
2390 (fb->pitches[0] | 0x00100000);
2392 nv_fb->r_pitch = kind ? (((fb->pitches[0] / 4) << 4) | tile) :
2393 (fb->pitches[0] | 0x01000000);
/* Handle namespace keyed by kind so equal-kind fbs share one fbdma. */
2395 nv_fb->r_handle = 0xffff0000 | kind;
2397 return nv50_fbdma_init(fb->dev, nv_fb->r_handle, 0, pfb->ram->size, kind);
2400 /******************************************************************************
2402 *****************************************************************************/
/* Display shutdown hook; body elided in this extract. */
2405 nv50_display_fini(struct drm_device *dev)
/*
 * (Re)initialize the display: seed each CRTC's flip semaphore in the
 * shared sync bo, then point the core channel at its sync dma handle
 * via method 0x0088.
 */
2410 nv50_display_init(struct drm_device *dev)
2412 struct nv50_disp *disp = nv50_disp(dev);
2413 struct drm_crtc *crtc;
2416 push = evo_wait(nv50_mast(dev), 32);
2420 list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
2421 struct nv50_sync *sync = nv50_sync(crtc);
2422 nouveau_bo_wr32(disp->sync, sync->addr / 4, sync->data);
2425 evo_mthd(push, 0x0088, 1);
2426 evo_data(push, nv50_mast(dev)->base.sync.handle);
2427 evo_kick(push, nv50_mast(dev));
/*
 * Final teardown: release every fbdma object, destroy the master evo
 * channel, free the shared sync bo, and detach the private pointer
 * (kfree presumably elided in this extract).
 */
2432 nv50_display_destroy(struct drm_device *dev)
2434 struct nv50_disp *disp = nv50_disp(dev);
2435 struct nv50_fbdma *fbdma, *fbtmp;
/* _safe variant: nv50_fbdma_fini() unlinks entries as we iterate. */
2437 list_for_each_entry_safe(fbdma, fbtmp, &disp->fbdma, head) {
2438 nv50_fbdma_fini(fbdma);
2441 nv50_dmac_destroy(&disp->mast.base, disp->disp);
2443 nouveau_bo_unmap(disp->sync);
2445 nouveau_bo_unpin(disp->sync);
2446 nouveau_bo_ref(NULL, &disp->sync);
2448 nouveau_display(dev)->priv = NULL;
2453 nv50_display_create(struct drm_device *dev)
2455 struct nvif_device *device = &nouveau_drm(dev)->device;
2456 struct nouveau_drm *drm = nouveau_drm(dev);
2457 struct dcb_table *dcb = &drm->vbios.dcb;
2458 struct drm_connector *connector, *tmp;
2459 struct nv50_disp *disp;
2460 struct dcb_output *dcbe;
2463 disp = kzalloc(sizeof(*disp), GFP_KERNEL);
2466 INIT_LIST_HEAD(&disp->fbdma);
2468 nouveau_display(dev)->priv = disp;
2469 nouveau_display(dev)->dtor = nv50_display_destroy;
2470 nouveau_display(dev)->init = nv50_display_init;
2471 nouveau_display(dev)->fini = nv50_display_fini;
2472 nouveau_display(dev)->fb_ctor = nv50_fb_ctor;
2473 nouveau_display(dev)->fb_dtor = nv50_fb_dtor;
2474 disp->disp = &nouveau_display(dev)->disp;
2476 /* small shared memory area we use for notifiers and semaphores */
2477 ret = nouveau_bo_new(dev, 4096, 0x1000, TTM_PL_FLAG_VRAM,
2478 0, 0x0000, NULL, &disp->sync);
2480 ret = nouveau_bo_pin(disp->sync, TTM_PL_FLAG_VRAM);
2482 ret = nouveau_bo_map(disp->sync);
2484 nouveau_bo_unpin(disp->sync);
2487 nouveau_bo_ref(NULL, &disp->sync);
2493 /* allocate master evo channel */
2494 ret = nv50_core_create(disp->disp, disp->sync->bo.offset,
2499 /* create crtc objects to represent the hw heads */
2500 if (disp->disp->oclass >= NVD0_DISP_CLASS)
2501 crtcs = nvif_rd32(device, 0x022448);
2505 for (i = 0; i < crtcs; i++) {
2506 ret = nv50_crtc_create(dev, i);
2511 /* create encoder/connector objects based on VBIOS DCB table */
2512 for (i = 0, dcbe = &dcb->entry[0]; i < dcb->entries; i++, dcbe++) {
2513 connector = nouveau_connector_create(dev, dcbe->connector);
2514 if (IS_ERR(connector))
2517 if (dcbe->location == DCB_LOC_ON_CHIP) {
2518 switch (dcbe->type) {
2519 case DCB_OUTPUT_TMDS:
2520 case DCB_OUTPUT_LVDS:
2522 ret = nv50_sor_create(connector, dcbe);
2524 case DCB_OUTPUT_ANALOG:
2525 ret = nv50_dac_create(connector, dcbe);
2532 ret = nv50_pior_create(connector, dcbe);
2536 NV_WARN(drm, "failed to create encoder %d/%d/%d: %d\n",
2537 dcbe->location, dcbe->type,
2538 ffs(dcbe->or) - 1, ret);
2543 /* cull any connectors we created that don't have an encoder */
2544 list_for_each_entry_safe(connector, tmp, &dev->mode_config.connector_list, head) {
2545 if (connector->encoder_ids[0])
2548 NV_WARN(drm, "%s has no encoders, removing\n",
2550 connector->funcs->destroy(connector);
2555 nv50_display_destroy(dev);