drivers/gpu/drm/nouveau/dispnv50/disp.c
1 /*
2  * Copyright 2011 Red Hat Inc.
3  *
4  * Permission is hereby granted, free of charge, to any person obtaining a
5  * copy of this software and associated documentation files (the "Software"),
6  * to deal in the Software without restriction, including without limitation
7  * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8  * and/or sell copies of the Software, and to permit persons to whom the
9  * Software is furnished to do so, subject to the following conditions:
10  *
11  * The above copyright notice and this permission notice shall be included in
12  * all copies or substantial portions of the Software.
13  *
14  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
17  * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
18  * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19  * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20  * OTHER DEALINGS IN THE SOFTWARE.
21  *
22  * Authors: Ben Skeggs
23  */
24 #include "disp.h"
25 #include "atom.h"
26 #include "core.h"
27 #include "head.h"
28 #include "wndw.h"
29 #include "handles.h"
30
31 #include <linux/dma-mapping.h>
32 #include <linux/hdmi.h>
33 #include <linux/component.h>
34 #include <linux/iopoll.h>
35
36 #include <drm/display/drm_dp_helper.h>
37 #include <drm/display/drm_scdc_helper.h>
38 #include <drm/drm_atomic.h>
39 #include <drm/drm_atomic_helper.h>
40 #include <drm/drm_edid.h>
41 #include <drm/drm_eld.h>
42 #include <drm/drm_fb_helper.h>
43 #include <drm/drm_probe_helper.h>
44 #include <drm/drm_vblank.h>
45
46 #include <nvif/push507c.h>
47
48 #include <nvif/class.h>
49 #include <nvif/cl0002.h>
50 #include <nvif/event.h>
51 #include <nvif/if0012.h>
52 #include <nvif/if0014.h>
53 #include <nvif/timer.h>
54
55 #include <nvhw/class/cl507c.h>
56 #include <nvhw/class/cl507d.h>
57 #include <nvhw/class/cl837d.h>
58 #include <nvhw/class/cl887d.h>
59 #include <nvhw/class/cl907d.h>
60 #include <nvhw/class/cl917d.h>
61
62 #include "nouveau_drv.h"
63 #include "nouveau_dma.h"
64 #include "nouveau_gem.h"
65 #include "nouveau_connector.h"
66 #include "nouveau_encoder.h"
67 #include "nouveau_fence.h"
68 #include "nv50_display.h"
69
70 /******************************************************************************
71  * EVO channel
72  *****************************************************************************/
73
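/* Instantiate a display (EVO/NVD) channel object.  The display engine
 * advertises the channel classes it supports; walk the driver's preference
 * list (oclass) in order, construct the first class the hardware also
 * reports, and map its user registers.
 */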
74 static int
75 nv50_chan_create(struct nvif_device *device, struct nvif_object *disp,
76                  const s32 *oclass, u8 head, void *data, u32 size,
77                  struct nv50_chan *chan)
78 {
79         struct nvif_sclass *sclass;
80         int ret, i, n;
81
82         chan->device = device;
83
84         ret = n = nvif_object_sclass_get(disp, &sclass);
85         if (ret < 0)
86                 return ret;
87
88         while (oclass[0]) {
89                 for (i = 0; i < n; i++) {
90                         if (sclass[i].oclass == oclass[0]) {
91                                 ret = nvif_object_ctor(disp, "kmsChan", 0,
92                                                        oclass[0], data, size,
93                                                        &chan->user);
94                                 if (ret == 0)
95                                         nvif_object_map(&chan->user, NULL, 0);
96                                 nvif_object_sclass_put(&sclass);
97                                 return ret;
98                         }
99                 }
100                 oclass++;
101         }
102
103         nvif_object_sclass_put(&sclass);
104         return -ENOSYS;
105 }
106
107 static void
108 nv50_chan_destroy(struct nv50_chan *chan)
109 {
110         nvif_object_dtor(&chan->user);
111 }
112
113 /******************************************************************************
114  * DMA EVO channel
115  *****************************************************************************/
116
117 void
118 nv50_dmac_destroy(struct nv50_dmac *dmac)
119 {
120         nvif_object_dtor(&dmac->vram);
121         nvif_object_dtor(&dmac->sync);
122
123         nv50_chan_destroy(&dmac->base);
124
125         nvif_mem_dtor(&dmac->_push.mem);
126 }
127
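/* Submit buffered methods to the hardware: recompute the channel's current
 * offset from the push buffer cursor and, if new methods were written,
 * publish it through the channel's PUT register.  For VRAM-backed push
 * buffers a flush is issued first (see the inline comment below).
 */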
128 static void
129 nv50_dmac_kick(struct nvif_push *push)
130 {
131         struct nv50_dmac *dmac = container_of(push, typeof(*dmac), _push);
132
133         dmac->cur = push->cur - (u32 __iomem *)dmac->_push.mem.object.map.ptr;
134         if (dmac->put != dmac->cur) {
135                 /* Push buffer fetches are not coherent with BAR1, so we need to ensure
136                  * writes have been flushed right through to VRAM before writing PUT.
137                  */
138                 if (dmac->push->mem.type & NVIF_MEM_VRAM) {
139                         struct nvif_device *device = dmac->base.device;
140                         nvif_wr32(&device->object, 0x070000, 0x00000001);
141                         nvif_msec(device, 2000,
142                                 if (!(nvif_rd32(&device->object, 0x070000) & 0x00000002))
143                                         break;
144                         );
145                 }
146
147                 NVIF_WV32(&dmac->base.user, NV507C, PUT, PTR, dmac->cur);
148                 dmac->put = dmac->cur;
149         }
150
151         push->bgn = push->cur;
152 }
153
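/* Push buffer ring management: nv50_dmac_free() reports how much space is
 * left before the hardware GET pointer, nv50_dmac_wind() wraps back to the
 * start of the buffer once the end is reached, and nv50_dmac_wait() blocks
 * (with a timeout) until enough space is available for the requested number
 * of dwords.
 */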
154 static int
155 nv50_dmac_free(struct nv50_dmac *dmac)
156 {
157         u32 get = NVIF_RV32(&dmac->base.user, NV507C, GET, PTR);
158         if (get > dmac->cur) /* NVIDIA stays 5 away from GET, do the same. */
159                 return get - dmac->cur - 5;
160         return dmac->max - dmac->cur;
161 }
162
163 static int
164 nv50_dmac_wind(struct nv50_dmac *dmac)
165 {
166         /* Wait for GET to depart from the beginning of the push buffer to
167          * prevent writing PUT == GET, which would be ignored by HW.
168          */
169         u32 get = NVIF_RV32(&dmac->base.user, NV507C, GET, PTR);
170         if (get == 0) {
171                 /* Corner case: HW idle, but non-committed work pending. */
172                 if (dmac->put == 0)
173                         nv50_dmac_kick(dmac->push);
174
175                 if (nvif_msec(dmac->base.device, 2000,
176                         if (NVIF_TV32(&dmac->base.user, NV507C, GET, PTR, >, 0))
177                                 break;
178                 ) < 0)
179                         return -ETIMEDOUT;
180         }
181
182         PUSH_RSVD(dmac->push, PUSH_JUMP(dmac->push, 0));
183         dmac->cur = 0;
184         return 0;
185 }
186
187 static int
188 nv50_dmac_wait(struct nvif_push *push, u32 size)
189 {
190         struct nv50_dmac *dmac = container_of(push, typeof(*dmac), _push);
191         int free;
192
193         if (WARN_ON(size > dmac->max))
194                 return -EINVAL;
195
196         dmac->cur = push->cur - (u32 __iomem *)dmac->_push.mem.object.map.ptr;
197         if (dmac->cur + size >= dmac->max) {
198                 int ret = nv50_dmac_wind(dmac);
199                 if (ret)
200                         return ret;
201
202                 push->cur = dmac->_push.mem.object.map.ptr;
203                 push->cur = push->cur + dmac->cur;
204                 nv50_dmac_kick(push);
205         }
206
207         if (nvif_msec(dmac->base.device, 2000,
208                 if ((free = nv50_dmac_free(dmac)) >= size)
209                         break;
210         ) < 0) {
211                 WARN_ON(1);
212                 return -ETIMEDOUT;
213         }
214
215         push->bgn = dmac->_push.mem.object.map.ptr;
216         push->bgn = push->bgn + dmac->cur;
217         push->cur = push->bgn;
218         push->end = push->cur + free;
219         return 0;
220 }
221
222 MODULE_PARM_DESC(kms_vram_pushbuf, "Place EVO/NVD push buffers in VRAM (default: auto)");
223 static int nv50_dmac_vram_pushbuf = -1;
224 module_param_named(kms_vram_pushbuf, nv50_dmac_vram_pushbuf, int, 0400);
225
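/* Create a DMA display channel: allocate a 4KiB push buffer (in VRAM on
 * Pascal, or when forced by the kms_vram_pushbuf parameter), create the
 * channel object itself, and, unless syncbuf is negative, wrap the sync
 * area and all of VRAM in DMA context objects for the channel to reference.
 */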
226 int
227 nv50_dmac_create(struct nvif_device *device, struct nvif_object *disp,
228                  const s32 *oclass, u8 head, void *data, u32 size, s64 syncbuf,
229                  struct nv50_dmac *dmac)
230 {
231         struct nouveau_cli *cli = (void *)device->object.client;
232         struct nvif_disp_chan_v0 *args = data;
233         u8 type = NVIF_MEM_COHERENT;
234         int ret;
235
236         mutex_init(&dmac->lock);
237
238         /* Pascal added support for 47-bit physical addresses, but some
239          * parts of EVO still only accept 40-bit PAs.
240          *
241          * To avoid issues on systems with large amounts of RAM, and on
242          * systems where an IOMMU maps pages at a high address, we need
243          * to allocate push buffers in VRAM instead.
244          *
245          * This appears to match NVIDIA's behaviour on Pascal.
246          */
247         if ((nv50_dmac_vram_pushbuf > 0) ||
248             (nv50_dmac_vram_pushbuf < 0 && device->info.family == NV_DEVICE_INFO_V0_PASCAL))
249                 type |= NVIF_MEM_VRAM;
250
251         ret = nvif_mem_ctor_map(&cli->mmu, "kmsChanPush", type, 0x1000,
252                                 &dmac->_push.mem);
253         if (ret)
254                 return ret;
255
256         dmac->ptr = dmac->_push.mem.object.map.ptr;
257         dmac->_push.wait = nv50_dmac_wait;
258         dmac->_push.kick = nv50_dmac_kick;
259         dmac->push = &dmac->_push;
260         dmac->push->bgn = dmac->_push.mem.object.map.ptr;
261         dmac->push->cur = dmac->push->bgn;
262         dmac->push->end = dmac->push->bgn;
263         dmac->max = 0x1000/4 - 1;
264
265         /* EVO channels are affected by a HW bug where the last 12 DWORDs
266          * of the push buffer cannot be used safely.
267          */
268         if (disp->oclass < GV100_DISP)
269                 dmac->max -= 12;
270
271         args->pushbuf = nvif_handle(&dmac->_push.mem.object);
272
273         ret = nv50_chan_create(device, disp, oclass, head, data, size,
274                                &dmac->base);
275         if (ret)
276                 return ret;
277
278         if (syncbuf < 0)
279                 return 0;
280
281         ret = nvif_object_ctor(&dmac->base.user, "kmsSyncCtxDma", NV50_DISP_HANDLE_SYNCBUF,
282                                NV_DMA_IN_MEMORY,
283                                &(struct nv_dma_v0) {
284                                         .target = NV_DMA_V0_TARGET_VRAM,
285                                         .access = NV_DMA_V0_ACCESS_RDWR,
286                                         .start = syncbuf + 0x0000,
287                                         .limit = syncbuf + 0x0fff,
288                                }, sizeof(struct nv_dma_v0),
289                                &dmac->sync);
290         if (ret)
291                 return ret;
292
293         ret = nvif_object_ctor(&dmac->base.user, "kmsVramCtxDma", NV50_DISP_HANDLE_VRAM,
294                                NV_DMA_IN_MEMORY,
295                                &(struct nv_dma_v0) {
296                                         .target = NV_DMA_V0_TARGET_VRAM,
297                                         .access = NV_DMA_V0_ACCESS_RDWR,
298                                         .start = 0,
299                                         .limit = device->info.ram_user - 1,
300                                }, sizeof(struct nv_dma_v0),
301                                &dmac->vram);
302         if (ret)
303                 return ret;
304
305         return ret;
306 }
307
308 /******************************************************************************
309  * Output path helpers
310  *****************************************************************************/
311 static void
312 nv50_outp_dump_caps(struct nouveau_drm *drm,
313                     struct nouveau_encoder *outp)
314 {
315         NV_DEBUG(drm, "%s caps: dp_interlace=%d\n",
316                  outp->base.base.name, outp->caps.dp_interlace);
317 }
318
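/* Common encoder atomic_check helper: decide whether the panel scaler is
 * needed by comparing the requested mode against the connector's native
 * mode.  If scaling is in effect the adjusted mode is replaced with the
 * native mode, and a full modeset is flagged whenever that changes it.
 */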
319 static int
320 nv50_outp_atomic_check_view(struct drm_encoder *encoder,
321                             struct drm_crtc_state *crtc_state,
322                             struct drm_connector_state *conn_state,
323                             struct drm_display_mode *native_mode)
324 {
325         struct drm_display_mode *adjusted_mode = &crtc_state->adjusted_mode;
326         struct drm_display_mode *mode = &crtc_state->mode;
327         struct drm_connector *connector = conn_state->connector;
328         struct nouveau_conn_atom *asyc = nouveau_conn_atom(conn_state);
329         struct nouveau_drm *drm = nouveau_drm(encoder->dev);
330
331         NV_ATOMIC(drm, "%s atomic_check\n", encoder->name);
332         asyc->scaler.full = false;
333         if (!native_mode)
334                 return 0;
335
336         if (asyc->scaler.mode == DRM_MODE_SCALE_NONE) {
337                 switch (connector->connector_type) {
338                 case DRM_MODE_CONNECTOR_LVDS:
339                 case DRM_MODE_CONNECTOR_eDP:
340                         /* Don't force the scaler for EDID modes with the
341                          * same size as the native one (e.g. a different
342                          * refresh rate).
343                          */
344                         if (mode->hdisplay == native_mode->hdisplay &&
345                             mode->vdisplay == native_mode->vdisplay &&
346                             mode->type & DRM_MODE_TYPE_DRIVER)
347                                 break;
348                         mode = native_mode;
349                         asyc->scaler.full = true;
350                         break;
351                 default:
352                         break;
353                 }
354         } else {
355                 mode = native_mode;
356         }
357
358         if (!drm_mode_equal(adjusted_mode, mode)) {
359                 drm_mode_copy(adjusted_mode, mode);
360                 crtc_state->mode_changed = true;
361         }
362
363         return 0;
364 }
365
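/* For DP outputs, make sure the chosen bpc actually fits on the link:
 * starting from a 10bpc ceiling, step down in units of 2 bpc (to a floor
 * of 6) until the mode's data rate no longer exceeds the link capacity
 * (link_nr * link_bw).
 */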
366 static void
367 nv50_outp_atomic_fix_depth(struct drm_encoder *encoder, struct drm_crtc_state *crtc_state)
368 {
369         struct nv50_head_atom *asyh = nv50_head_atom(crtc_state);
370         struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
371         struct drm_display_mode *mode = &asyh->state.adjusted_mode;
372         unsigned int max_rate, mode_rate;
373
374         switch (nv_encoder->dcb->type) {
375         case DCB_OUTPUT_DP:
376                 max_rate = nv_encoder->dp.link_nr * nv_encoder->dp.link_bw;
377
378                 /* we don't support more than 10 anyway */
379                 asyh->or.bpc = min_t(u8, asyh->or.bpc, 10);
380
381                 /* reduce the bpc until it works out */
382                 while (asyh->or.bpc > 6) {
383                         mode_rate = DIV_ROUND_UP(mode->clock * asyh->or.bpc * 3, 8);
384                         if (mode_rate <= max_rate)
385                                 break;
386
387                         asyh->or.bpc -= 2;
388                 }
389                 break;
390         default:
391                 break;
392         }
393 }
394
395 static int
396 nv50_outp_atomic_check(struct drm_encoder *encoder,
397                        struct drm_crtc_state *crtc_state,
398                        struct drm_connector_state *conn_state)
399 {
400         struct drm_connector *connector = conn_state->connector;
401         struct nouveau_connector *nv_connector = nouveau_connector(connector);
402         struct nv50_head_atom *asyh = nv50_head_atom(crtc_state);
403         int ret;
404
405         ret = nv50_outp_atomic_check_view(encoder, crtc_state, conn_state,
406                                           nv_connector->native_mode);
407         if (ret)
408                 return ret;
409
410         if (crtc_state->mode_changed || crtc_state->connectors_changed)
411                 asyh->or.bpc = connector->display_info.bpc;
412
413         /* We might have to reduce the bpc to make the mode fit on the link */
414         nv50_outp_atomic_fix_depth(encoder, crtc_state);
415
416         return 0;
417 }
418
419 struct nouveau_connector *
420 nv50_outp_get_new_connector(struct drm_atomic_state *state, struct nouveau_encoder *outp)
421 {
422         struct drm_connector *connector;
423         struct drm_connector_state *connector_state;
424         struct drm_encoder *encoder = to_drm_encoder(outp);
425         int i;
426
427         for_each_new_connector_in_state(state, connector, connector_state, i) {
428                 if (connector_state->best_encoder == encoder)
429                         return nouveau_connector(connector);
430         }
431
432         return NULL;
433 }
434
435 struct nouveau_connector *
436 nv50_outp_get_old_connector(struct drm_atomic_state *state, struct nouveau_encoder *outp)
437 {
438         struct drm_connector *connector;
439         struct drm_connector_state *connector_state;
440         struct drm_encoder *encoder = to_drm_encoder(outp);
441         int i;
442
443         for_each_old_connector_in_state(state, connector, connector_state, i) {
444                 if (connector_state->best_encoder == encoder)
445                         return nouveau_connector(connector);
446         }
447
448         return NULL;
449 }
450
451 static struct nouveau_crtc *
452 nv50_outp_get_new_crtc(const struct drm_atomic_state *state, const struct nouveau_encoder *outp)
453 {
454         struct drm_crtc *crtc;
455         struct drm_crtc_state *crtc_state;
456         const u32 mask = drm_encoder_mask(&outp->base.base);
457         int i;
458
459         for_each_new_crtc_in_state(state, crtc, crtc_state, i) {
460                 if (crtc_state->encoder_mask & mask)
461                         return nouveau_crtc(crtc);
462         }
463
464         return NULL;
465 }
466
467 /******************************************************************************
468  * DAC
469  *****************************************************************************/
470 static void
471 nv50_dac_atomic_disable(struct drm_encoder *encoder, struct drm_atomic_state *state)
472 {
473         struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
474         struct nv50_core *core = nv50_disp(encoder->dev)->core;
475         const u32 ctrl = NVDEF(NV507D, DAC_SET_CONTROL, OWNER, NONE);
476
477         core->func->dac->ctrl(core, nv_encoder->outp.or.id, ctrl, NULL);
478         nv_encoder->crtc = NULL;
479 }
480
481 static void
482 nv50_dac_atomic_enable(struct drm_encoder *encoder, struct drm_atomic_state *state)
483 {
484         struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
485         struct nouveau_crtc *nv_crtc = nv50_outp_get_new_crtc(state, nv_encoder);
486         struct nv50_head_atom *asyh =
487                 nv50_head_atom(drm_atomic_get_new_crtc_state(state, &nv_crtc->base));
488         struct nv50_core *core = nv50_disp(encoder->dev)->core;
489         u32 ctrl = 0;
490
491         switch (nv_crtc->index) {
492         case 0: ctrl |= NVDEF(NV507D, DAC_SET_CONTROL, OWNER, HEAD0); break;
493         case 1: ctrl |= NVDEF(NV507D, DAC_SET_CONTROL, OWNER, HEAD1); break;
494         case 2: ctrl |= NVDEF(NV907D, DAC_SET_CONTROL, OWNER_MASK, HEAD2); break;
495         case 3: ctrl |= NVDEF(NV907D, DAC_SET_CONTROL, OWNER_MASK, HEAD3); break;
496         default:
497                 WARN_ON(1);
498                 break;
499         }
500
501         ctrl |= NVDEF(NV507D, DAC_SET_CONTROL, PROTOCOL, RGB_CRT);
502
503         if (!nvif_outp_acquired(&nv_encoder->outp))
504                 nvif_outp_acquire_dac(&nv_encoder->outp);
505
506         core->func->dac->ctrl(core, nv_encoder->outp.or.id, ctrl, asyh);
507         asyh->or.depth = 0;
508
509         nv_encoder->crtc = &nv_crtc->base;
510 }
511
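/* Analog load detection: drive the DAC with the test value from the VBIOS
 * (falling back to 340 when the VBIOS provides none) and report a monitor
 * as connected if the hardware senses a load.
 */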
512 static enum drm_connector_status
513 nv50_dac_detect(struct drm_encoder *encoder, struct drm_connector *connector)
514 {
515         struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
516         u32 loadval;
517         int ret;
518
519         loadval = nouveau_drm(encoder->dev)->vbios.dactestval;
520         if (loadval == 0)
521                 loadval = 340;
522
523         ret = nvif_outp_load_detect(&nv_encoder->outp, loadval);
524         if (ret <= 0)
525                 return connector_status_disconnected;
526
527         return connector_status_connected;
528 }
529
530 static const struct drm_encoder_helper_funcs
531 nv50_dac_help = {
532         .atomic_check = nv50_outp_atomic_check,
533         .atomic_enable = nv50_dac_atomic_enable,
534         .atomic_disable = nv50_dac_atomic_disable,
535         .detect = nv50_dac_detect
536 };
537
538 static void
539 nv50_dac_destroy(struct drm_encoder *encoder)
540 {
541         struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
542
543         nvif_outp_dtor(&nv_encoder->outp);
544
545         drm_encoder_cleanup(encoder);
546         kfree(encoder);
547 }
548
549 static const struct drm_encoder_funcs
550 nv50_dac_func = {
551         .destroy = nv50_dac_destroy,
552 };
553
554 static int
555 nv50_dac_create(struct nouveau_encoder *nv_encoder)
556 {
557         struct drm_connector *connector = &nv_encoder->conn->base;
558         struct nouveau_drm *drm = nouveau_drm(connector->dev);
559         struct nvkm_i2c *i2c = nvxx_i2c(&drm->client.device);
560         struct nvkm_i2c_bus *bus;
561         struct drm_encoder *encoder;
562         struct dcb_output *dcbe = nv_encoder->dcb;
563         int type = DRM_MODE_ENCODER_DAC;
564
565         bus = nvkm_i2c_bus_find(i2c, dcbe->i2c_index);
566         if (bus)
567                 nv_encoder->i2c = &bus->i2c;
568
569         encoder = to_drm_encoder(nv_encoder);
570         drm_encoder_init(connector->dev, encoder, &nv50_dac_func, type,
571                          "dac-%04x-%04x", dcbe->hasht, dcbe->hashm);
572         drm_encoder_helper_add(encoder, &nv50_dac_help);
573
574         drm_connector_attach_encoder(connector, encoder);
575         return 0;
576 }
577
578 /*
579  * audio component binding for ELD notification
580  */
581 static void
582 nv50_audio_component_eld_notify(struct drm_audio_component *acomp, int port,
583                                 int dev_id)
584 {
585         if (acomp && acomp->audio_ops && acomp->audio_ops->pin_eld_notify)
586                 acomp->audio_ops->pin_eld_notify(acomp->audio_ops->audio_ptr,
587                                                  port, dev_id);
588 }
589
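/* get_eld() callback for the audio component interface: find the encoder
 * currently driving the requested (port, dev_id) pair and, if audio is
 * enabled on it, copy the connector's ELD into the buffer supplied by the
 * audio driver.  MST encoders are skipped for now (see the TODO).
 */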
590 static int
591 nv50_audio_component_get_eld(struct device *kdev, int port, int dev_id,
592                              bool *enabled, unsigned char *buf, int max_bytes)
593 {
594         struct drm_device *drm_dev = dev_get_drvdata(kdev);
595         struct nouveau_drm *drm = nouveau_drm(drm_dev);
596         struct drm_encoder *encoder;
597         struct nouveau_encoder *nv_encoder;
598         struct nouveau_crtc *nv_crtc;
599         int ret = 0;
600
601         *enabled = false;
602
603         mutex_lock(&drm->audio.lock);
604
605         drm_for_each_encoder(encoder, drm->dev) {
606                 struct nouveau_connector *nv_connector = NULL;
607
608                 if (encoder->encoder_type == DRM_MODE_ENCODER_DPMST)
609                         continue; /* TODO */
610
611                 nv_encoder = nouveau_encoder(encoder);
612                 nv_connector = nv_encoder->conn;
613                 nv_crtc = nouveau_crtc(nv_encoder->crtc);
614
615                 if (!nv_crtc || nv_encoder->outp.or.id != port || nv_crtc->index != dev_id)
616                         continue;
617
618                 *enabled = nv_encoder->audio.enabled;
619                 if (*enabled) {
620                         ret = drm_eld_size(nv_connector->base.eld);
621                         memcpy(buf, nv_connector->base.eld,
622                                min(max_bytes, ret));
623                 }
624                 break;
625         }
626
627         mutex_unlock(&drm->audio.lock);
628
629         return ret;
630 }
631
632 static const struct drm_audio_component_ops nv50_audio_component_ops = {
633         .get_eld = nv50_audio_component_get_eld,
634 };
635
636 static int
637 nv50_audio_component_bind(struct device *kdev, struct device *hda_kdev,
638                           void *data)
639 {
640         struct drm_device *drm_dev = dev_get_drvdata(kdev);
641         struct nouveau_drm *drm = nouveau_drm(drm_dev);
642         struct drm_audio_component *acomp = data;
643
644         if (WARN_ON(!device_link_add(hda_kdev, kdev, DL_FLAG_STATELESS)))
645                 return -ENOMEM;
646
647         drm_modeset_lock_all(drm_dev);
648         acomp->ops = &nv50_audio_component_ops;
649         acomp->dev = kdev;
650         drm->audio.component = acomp;
651         drm_modeset_unlock_all(drm_dev);
652         return 0;
653 }
654
655 static void
656 nv50_audio_component_unbind(struct device *kdev, struct device *hda_kdev,
657                             void *data)
658 {
659         struct drm_device *drm_dev = dev_get_drvdata(kdev);
660         struct nouveau_drm *drm = nouveau_drm(drm_dev);
661         struct drm_audio_component *acomp = data;
662
663         drm_modeset_lock_all(drm_dev);
664         drm->audio.component = NULL;
665         acomp->ops = NULL;
666         acomp->dev = NULL;
667         drm_modeset_unlock_all(drm_dev);
668 }
669
670 static const struct component_ops nv50_audio_component_bind_ops = {
671         .bind   = nv50_audio_component_bind,
672         .unbind = nv50_audio_component_unbind,
673 };
674
675 static void
676 nv50_audio_component_init(struct nouveau_drm *drm)
677 {
678         if (component_add(drm->dev->dev, &nv50_audio_component_bind_ops))
679                 return;
680
681         drm->audio.component_registered = true;
682         mutex_init(&drm->audio.lock);
683 }
684
685 static void
686 nv50_audio_component_fini(struct nouveau_drm *drm)
687 {
688         if (!drm->audio.component_registered)
689                 return;
690
691         component_del(drm->dev->dev, &nv50_audio_component_bind_ops);
692         drm->audio.component_registered = false;
693         mutex_destroy(&drm->audio.lock);
694 }
695
696 /******************************************************************************
697  * Audio
698  *****************************************************************************/
699 static bool
700 nv50_audio_supported(struct drm_encoder *encoder)
701 {
702         struct nv50_disp *disp = nv50_disp(encoder->dev);
703
704         if (disp->disp->object.oclass <= GT200_DISP ||
705             disp->disp->object.oclass == GT206_DISP)
706                 return false;
707
708         if (encoder->encoder_type != DRM_MODE_ENCODER_DPMST) {
709                 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
710
711                 switch (nv_encoder->dcb->type) {
712                 case DCB_OUTPUT_TMDS:
713                 case DCB_OUTPUT_DP:
714                         break;
715                 default:
716                         return false;
717                 }
718         }
719
720         return true;
721 }
722
723 static void
724 nv50_audio_disable(struct drm_encoder *encoder, struct nouveau_crtc *nv_crtc)
725 {
726         struct nouveau_drm *drm = nouveau_drm(encoder->dev);
727         struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
728         struct nvif_outp *outp = &nv_encoder->outp;
729
730         if (!nv50_audio_supported(encoder))
731                 return;
732
733         mutex_lock(&drm->audio.lock);
734         if (nv_encoder->audio.enabled) {
735                 nv_encoder->audio.enabled = false;
736                 nvif_outp_hda_eld(&nv_encoder->outp, nv_crtc->index, NULL, 0);
737         }
738         mutex_unlock(&drm->audio.lock);
739
740         nv50_audio_component_eld_notify(drm->audio.component, outp->or.id, nv_crtc->index);
741 }
742
743 static void
744 nv50_audio_enable(struct drm_encoder *encoder, struct nouveau_crtc *nv_crtc,
745                   struct nouveau_connector *nv_connector, struct drm_atomic_state *state,
746                   struct drm_display_mode *mode)
747 {
748         struct nouveau_drm *drm = nouveau_drm(encoder->dev);
749         struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
750         struct nvif_outp *outp = &nv_encoder->outp;
751
752         if (!nv50_audio_supported(encoder) || !drm_detect_monitor_audio(nv_connector->edid))
753                 return;
754
755         mutex_lock(&drm->audio.lock);
756
757         nvif_outp_hda_eld(&nv_encoder->outp, nv_crtc->index, nv_connector->base.eld,
758                           drm_eld_size(nv_connector->base.eld));
759         nv_encoder->audio.enabled = true;
760
761         mutex_unlock(&drm->audio.lock);
762
763         nv50_audio_component_eld_notify(drm->audio.component, outp->or.id, nv_crtc->index);
764 }
765
766 /******************************************************************************
767  * HDMI
768  *****************************************************************************/
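/* Enable HDMI on an SOR: derive max_ac_packet from the horizontal blanking
 * period, negotiate SCDC scrambling and the 1/40 TMDS clock ratio for modes
 * above 340MHz where supported, and program the AVI and Vendor infoframes.
 */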
769 static void
770 nv50_hdmi_enable(struct drm_encoder *encoder, struct nouveau_crtc *nv_crtc,
771                  struct nouveau_connector *nv_connector, struct drm_atomic_state *state,
772                  struct drm_display_mode *mode, bool hda)
773 {
774         struct nouveau_drm *drm = nouveau_drm(encoder->dev);
775         struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
776         struct drm_hdmi_info *hdmi = &nv_connector->base.display_info.hdmi;
777         union hdmi_infoframe infoframe = { 0 };
778         const u8 rekey = 56; /* constant used by both the binary driver and tegra */
779         u32 max_ac_packet;
780         struct {
781                 struct nvif_outp_infoframe_v0 infoframe;
782                 u8 data[17];
783         } args = { 0 };
784         int ret, size;
785
786         max_ac_packet  = mode->htotal - mode->hdisplay;
787         max_ac_packet -= rekey;
788         max_ac_packet -= 18; /* constant from tegra */
789         max_ac_packet /= 32;
790
791         if (nv_encoder->i2c && hdmi->scdc.scrambling.supported) {
792                 const bool high_tmds_clock_ratio = mode->clock > 340000;
793                 u8 scdc;
794
795                 ret = drm_scdc_readb(nv_encoder->i2c, SCDC_TMDS_CONFIG, &scdc);
796                 if (ret < 0) {
797                         NV_ERROR(drm, "Failure to read SCDC_TMDS_CONFIG: %d\n", ret);
798                         return;
799                 }
800
801                 scdc &= ~(SCDC_TMDS_BIT_CLOCK_RATIO_BY_40 | SCDC_SCRAMBLING_ENABLE);
802                 if (high_tmds_clock_ratio || hdmi->scdc.scrambling.low_rates)
803                         scdc |= SCDC_SCRAMBLING_ENABLE;
804                 if (high_tmds_clock_ratio)
805                         scdc |= SCDC_TMDS_BIT_CLOCK_RATIO_BY_40;
806
807                 ret = drm_scdc_writeb(nv_encoder->i2c, SCDC_TMDS_CONFIG, scdc);
808                 if (ret < 0)
809                         NV_ERROR(drm, "Failure to write SCDC_TMDS_CONFIG = 0x%02x: %d\n",
810                                  scdc, ret);
811         }
812
813         ret = nvif_outp_hdmi(&nv_encoder->outp, nv_crtc->index, true, max_ac_packet, rekey,
814                              mode->clock, hdmi->scdc.supported, hdmi->scdc.scrambling.supported,
815                              hdmi->scdc.scrambling.low_rates);
816         if (ret)
817                 return;
818
819         /* AVI InfoFrame. */
820         args.infoframe.version = 0;
821         args.infoframe.head = nv_crtc->index;
822
823         if (!drm_hdmi_avi_infoframe_from_display_mode(&infoframe.avi, &nv_connector->base, mode)) {
824                 drm_hdmi_avi_infoframe_quant_range(&infoframe.avi, &nv_connector->base, mode,
825                                                    HDMI_QUANTIZATION_RANGE_FULL);
826
827                 size = hdmi_infoframe_pack(&infoframe, args.data, ARRAY_SIZE(args.data));
828         } else {
829                 size = 0;
830         }
831
832         nvif_outp_infoframe(&nv_encoder->outp, NVIF_OUTP_INFOFRAME_V0_AVI, &args.infoframe, size);
833
834         /* Vendor InfoFrame. */
835         memset(&args.data, 0, sizeof(args.data));
836         if (!drm_hdmi_vendor_infoframe_from_display_mode(&infoframe.vendor.hdmi,
837                                                          &nv_connector->base, mode))
838                 size = hdmi_infoframe_pack(&infoframe, args.data, ARRAY_SIZE(args.data));
839         else
840                 size = 0;
841
842         nvif_outp_infoframe(&nv_encoder->outp, NVIF_OUTP_INFOFRAME_V0_VSI, &args.infoframe, size);
843
844         nv_encoder->hdmi.enabled = true;
845 }
846
847 /******************************************************************************
848  * MST
849  *****************************************************************************/
850 #define nv50_mstm(p) container_of((p), struct nv50_mstm, mgr)
851 #define nv50_mstc(p) container_of((p), struct nv50_mstc, connector)
852 #define nv50_msto(p) container_of((p), struct nv50_msto, encoder)
853
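/* A single physical DP connector (nv50_mstm, which wraps the topology
 * manager) fans out into per-port software connectors (nv50_mstc) and
 * per-head streaming encoders (nv50_msto) once MST is enabled.
 */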
854 struct nv50_mstc {
855         struct nv50_mstm *mstm;
856         struct drm_dp_mst_port *port;
857         struct drm_connector connector;
858
859         struct drm_display_mode *native;
860         struct edid *edid;
861 };
862
863 struct nv50_msto {
864         struct drm_encoder encoder;
865
866         /* head is statically assigned on msto creation */
867         struct nv50_head *head;
868         struct nv50_mstc *mstc;
869         bool disabled;
870         bool enabled;
871
872         u32 display_id;
873 };
874
875 struct nouveau_encoder *nv50_real_outp(struct drm_encoder *encoder)
876 {
877         struct nv50_msto *msto;
878
879         if (encoder->encoder_type != DRM_MODE_ENCODER_DPMST)
880                 return nouveau_encoder(encoder);
881
882         msto = nv50_msto(encoder);
883         if (!msto->mstc)
884                 return NULL;
885         return msto->mstc->mstm->outp;
886 }
887
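/* MST payload updates are done in two passes around the main flush:
 * nv50_msto_prepare() (pre-flush) removes payloads for disabled streams,
 * adds part 1 of new payloads and programs the VCPI in hardware, while
 * nv50_msto_cleanup() (post-flush) completes payload removal/addition
 * (part 2) once the new configuration has taken effect.
 */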
888 static void
889 nv50_msto_cleanup(struct drm_atomic_state *state,
890                   struct drm_dp_mst_topology_state *new_mst_state,
891                   struct drm_dp_mst_topology_mgr *mgr,
892                   struct nv50_msto *msto)
893 {
894         struct nouveau_drm *drm = nouveau_drm(msto->encoder.dev);
895         struct drm_dp_mst_atomic_payload *new_payload =
896                 drm_atomic_get_mst_payload_state(new_mst_state, msto->mstc->port);
897         struct drm_dp_mst_topology_state *old_mst_state =
898                 drm_atomic_get_old_mst_topology_state(state, mgr);
899         const struct drm_dp_mst_atomic_payload *old_payload =
900                 drm_atomic_get_mst_payload_state(old_mst_state, msto->mstc->port);
901         struct nv50_mstc *mstc = msto->mstc;
902         struct nv50_mstm *mstm = mstc->mstm;
903
904         NV_ATOMIC(drm, "%s: msto cleanup\n", msto->encoder.name);
905
906         if (msto->disabled) {
907                 if (msto->head->func->display_id) {
908                         nvif_outp_dp_mst_id_put(&mstm->outp->outp, msto->display_id);
909                         msto->display_id = 0;
910                 }
911
912                 msto->mstc = NULL;
913                 msto->disabled = false;
914                 drm_dp_remove_payload_part2(mgr, new_mst_state, old_payload, new_payload);
915         } else if (msto->enabled) {
916                 drm_dp_add_payload_part2(mgr, state, new_payload);
917                 msto->enabled = false;
918         }
919 }
920
921 static void
922 nv50_msto_prepare(struct drm_atomic_state *state,
923                   struct drm_dp_mst_topology_state *mst_state,
924                   struct drm_dp_mst_topology_mgr *mgr,
925                   struct nv50_msto *msto)
926 {
927         struct nouveau_drm *drm = nouveau_drm(msto->encoder.dev);
928         struct nv50_mstc *mstc = msto->mstc;
929         struct nv50_mstm *mstm = mstc->mstm;
930         struct drm_dp_mst_atomic_payload *payload;
931         int ret = 0;
932
933         NV_ATOMIC(drm, "%s: msto prepare\n", msto->encoder.name);
934
935         payload = drm_atomic_get_mst_payload_state(mst_state, mstc->port);
936
937         if (msto->disabled) {
938                 drm_dp_remove_payload_part1(mgr, mst_state, payload);
939                 nvif_outp_dp_mst_vcpi(&mstm->outp->outp, msto->head->base.index, 0, 0, 0, 0);
940                 ret = 1;
941         } else {
942                 if (msto->enabled)
943                         ret = drm_dp_add_payload_part1(mgr, mst_state, payload);
944         }
945
946         if (ret == 0) {
947                 nvif_outp_dp_mst_vcpi(&mstm->outp->outp, msto->head->base.index,
948                                       payload->vc_start_slot, payload->time_slots,
949                                       payload->pbn, payload->time_slots * mst_state->pbn_div);
950         } else {
951                 nvif_outp_dp_mst_vcpi(&mstm->outp->outp, msto->head->base.index, 0, 0, 0, 0);
952         }
953 }
954
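/* MST encoder atomic_check: on a modeset, recalculate the stream's PBN from
 * the adjusted mode and the connector's bpc (unless restoring a duplicated
 * state), then ask the topology manager for the number of time slots to
 * allocate on the link.
 */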
955 static int
956 nv50_msto_atomic_check(struct drm_encoder *encoder,
957                        struct drm_crtc_state *crtc_state,
958                        struct drm_connector_state *conn_state)
959 {
960         struct drm_atomic_state *state = crtc_state->state;
961         struct drm_connector *connector = conn_state->connector;
962         struct drm_dp_mst_topology_state *mst_state;
963         struct nv50_mstc *mstc = nv50_mstc(connector);
964         struct nv50_mstm *mstm = mstc->mstm;
965         struct nv50_head_atom *asyh = nv50_head_atom(crtc_state);
966         int slots;
967         int ret;
968
969         ret = nv50_outp_atomic_check_view(encoder, crtc_state, conn_state,
970                                           mstc->native);
971         if (ret)
972                 return ret;
973
974         if (!drm_atomic_crtc_needs_modeset(crtc_state))
975                 return 0;
976
977         /*
978          * When restoring duplicated states, we need to make sure that the bw
979          * remains the same and avoid recalculating it, as the connector's bpc
980          * may have changed after the state was duplicated
981          */
982         if (!state->duplicated) {
983                 const int clock = crtc_state->adjusted_mode.clock;
984
985                 asyh->or.bpc = connector->display_info.bpc;
986                 asyh->dp.pbn = drm_dp_calc_pbn_mode(clock, asyh->or.bpc * 3,
987                                                     false);
988         }
989
990         mst_state = drm_atomic_get_mst_topology_state(state, &mstm->mgr);
991         if (IS_ERR(mst_state))
992                 return PTR_ERR(mst_state);
993
994         if (!mst_state->pbn_div) {
995                 struct nouveau_encoder *outp = mstc->mstm->outp;
996
997                 mst_state->pbn_div = drm_dp_get_vc_payload_bw(&mstm->mgr,
998                                                               outp->dp.link_bw, outp->dp.link_nr);
999         }
1000
1001         slots = drm_dp_atomic_find_time_slots(state, &mstm->mgr, mstc->port, asyh->dp.pbn);
1002         if (slots < 0)
1003                 return slots;
1004
1005         asyh->dp.tu = slots;
1006
1007         return 0;
1008 }
1009
1010 static u8
1011 nv50_dp_bpc_to_depth(unsigned int bpc)
1012 {
1013         switch (bpc) {
1014         case  6: return NV837D_SOR_SET_CONTROL_PIXEL_DEPTH_BPP_18_444;
1015         case  8: return NV837D_SOR_SET_CONTROL_PIXEL_DEPTH_BPP_24_444;
1016         case 10:
1017         default: return NV837D_SOR_SET_CONTROL_PIXEL_DEPTH_BPP_30_444;
1018         }
1019 }
1020
1021 static void
1022 nv50_msto_atomic_enable(struct drm_encoder *encoder, struct drm_atomic_state *state)
1023 {
1024         struct nv50_msto *msto = nv50_msto(encoder);
1025         struct nv50_head *head = msto->head;
1026         struct nv50_head_atom *asyh =
1027                 nv50_head_atom(drm_atomic_get_new_crtc_state(state, &head->base.base));
1028         struct nv50_mstc *mstc = NULL;
1029         struct nv50_mstm *mstm = NULL;
1030         struct drm_connector *connector;
1031         struct drm_connector_list_iter conn_iter;
1032         u8 proto;
1033
1034         drm_connector_list_iter_begin(encoder->dev, &conn_iter);
1035         drm_for_each_connector_iter(connector, &conn_iter) {
1036                 if (connector->state->best_encoder == &msto->encoder) {
1037                         mstc = nv50_mstc(connector);
1038                         mstm = mstc->mstm;
1039                         break;
1040                 }
1041         }
1042         drm_connector_list_iter_end(&conn_iter);
1043
1044         if (WARN_ON(!mstc))
1045                 return;
1046
1047         if (!mstm->links++) {
1048                 nvif_outp_acquire_sor(&mstm->outp->outp, false /*TODO: MST audio... */);
1049                 nouveau_dp_train(mstm->outp, true, 0, 0);
1050         }
1051
1052         if (head->func->display_id) {
1053                 if (!WARN_ON(nvif_outp_dp_mst_id_get(&mstm->outp->outp, &msto->display_id)))
1054                         head->func->display_id(head, msto->display_id);
1055         }
1056
1057         if (mstm->outp->outp.or.link & 1)
1058                 proto = NV917D_SOR_SET_CONTROL_PROTOCOL_DP_A;
1059         else
1060                 proto = NV917D_SOR_SET_CONTROL_PROTOCOL_DP_B;
1061
1062         mstm->outp->update(mstm->outp, head->base.index, asyh, proto,
1063                            nv50_dp_bpc_to_depth(asyh->or.bpc));
1064
1065         msto->mstc = mstc;
1066         msto->enabled = true;
1067         mstm->modified = true;
1068 }
1069
1070 static void
1071 nv50_msto_atomic_disable(struct drm_encoder *encoder, struct drm_atomic_state *state)
1072 {
1073         struct nv50_msto *msto = nv50_msto(encoder);
1074         struct nv50_mstc *mstc = msto->mstc;
1075         struct nv50_mstm *mstm = mstc->mstm;
1076
1077         if (msto->head->func->display_id)
1078                 msto->head->func->display_id(msto->head, 0);
1079
1080         mstm->outp->update(mstm->outp, msto->head->base.index, NULL, 0, 0);
1081         mstm->modified = true;
1082         if (!--mstm->links)
1083                 mstm->disabled = true;
1084         msto->disabled = true;
1085 }
1086
1087 static const struct drm_encoder_helper_funcs
1088 nv50_msto_help = {
1089         .atomic_disable = nv50_msto_atomic_disable,
1090         .atomic_enable = nv50_msto_atomic_enable,
1091         .atomic_check = nv50_msto_atomic_check,
1092 };
1093
1094 static void
1095 nv50_msto_destroy(struct drm_encoder *encoder)
1096 {
1097         struct nv50_msto *msto = nv50_msto(encoder);
1098         drm_encoder_cleanup(&msto->encoder);
1099         kfree(msto);
1100 }
1101
1102 static const struct drm_encoder_funcs
1103 nv50_msto = {
1104         .destroy = nv50_msto_destroy,
1105 };
1106
1107 static struct nv50_msto *
1108 nv50_msto_new(struct drm_device *dev, struct nv50_head *head, int id)
1109 {
1110         struct nv50_msto *msto;
1111         int ret;
1112
1113         msto = kzalloc(sizeof(*msto), GFP_KERNEL);
1114         if (!msto)
1115                 return ERR_PTR(-ENOMEM);
1116
1117         ret = drm_encoder_init(dev, &msto->encoder, &nv50_msto,
1118                                DRM_MODE_ENCODER_DPMST, "mst-%d", id);
1119         if (ret) {
1120                 kfree(msto);
1121                 return ERR_PTR(ret);
1122         }
1123
1124         drm_encoder_helper_add(&msto->encoder, &nv50_msto_help);
1125         msto->encoder.possible_crtcs = drm_crtc_mask(&head->base.base);
1126         msto->head = head;
1127         return msto;
1128 }
1129
1130 static struct drm_encoder *
1131 nv50_mstc_atomic_best_encoder(struct drm_connector *connector,
1132                               struct drm_atomic_state *state)
1133 {
1134         struct drm_connector_state *connector_state = drm_atomic_get_new_connector_state(state,
1135                                                                                          connector);
1136         struct nv50_mstc *mstc = nv50_mstc(connector);
1137         struct drm_crtc *crtc = connector_state->crtc;
1138
1139         if (!(mstc->mstm->outp->dcb->heads & drm_crtc_mask(crtc)))
1140                 return NULL;
1141
1142         return &nv50_head(crtc)->msto->encoder;
1143 }
1144
1145 static enum drm_mode_status
1146 nv50_mstc_mode_valid(struct drm_connector *connector,
1147                      struct drm_display_mode *mode)
1148 {
1149         struct nv50_mstc *mstc = nv50_mstc(connector);
1150         struct nouveau_encoder *outp = mstc->mstm->outp;
1151
1152         /* TODO: calculate the PBN from the dotclock and validate against the
1153          * MSTB's max possible PBN
1154          */
1155
1156         return nv50_dp_mode_valid(outp, mode, NULL);
1157 }
1158
1159 static int
1160 nv50_mstc_get_modes(struct drm_connector *connector)
1161 {
1162         struct nv50_mstc *mstc = nv50_mstc(connector);
1163         int ret = 0;
1164
1165         mstc->edid = drm_dp_mst_get_edid(&mstc->connector, mstc->port->mgr, mstc->port);
1166         drm_connector_update_edid_property(&mstc->connector, mstc->edid);
1167         if (mstc->edid)
1168                 ret = drm_add_edid_modes(&mstc->connector, mstc->edid);
1169
1170         /*
1171          * XXX: Since we don't use HDR in userspace quite yet, limit the bpc
1172          * to 8 to save bandwidth on the topology. In the future, we'll want
1173          * to properly fix this by dynamically selecting the highest possible
1174          * bpc that would fit in the topology
1175          */
1176         if (connector->display_info.bpc)
1177                 connector->display_info.bpc =
1178                         clamp(connector->display_info.bpc, 6U, 8U);
1179         else
1180                 connector->display_info.bpc = 8;
1181
1182         if (mstc->native)
1183                 drm_mode_destroy(mstc->connector.dev, mstc->native);
1184         mstc->native = nouveau_conn_native_mode(&mstc->connector);
1185         return ret;
1186 }
1187
1188 static int
1189 nv50_mstc_atomic_check(struct drm_connector *connector,
1190                        struct drm_atomic_state *state)
1191 {
1192         struct nv50_mstc *mstc = nv50_mstc(connector);
1193         struct drm_dp_mst_topology_mgr *mgr = &mstc->mstm->mgr;
1194
1195         return drm_dp_atomic_release_time_slots(state, mgr, mstc->port);
1196 }
1197
1198 static int
1199 nv50_mstc_detect(struct drm_connector *connector,
1200                  struct drm_modeset_acquire_ctx *ctx, bool force)
1201 {
1202         struct nv50_mstc *mstc = nv50_mstc(connector);
1203         int ret;
1204
1205         if (drm_connector_is_unregistered(connector))
1206                 return connector_status_disconnected;
1207
1208         ret = pm_runtime_get_sync(connector->dev->dev);
1209         if (ret < 0 && ret != -EACCES) {
1210                 pm_runtime_put_autosuspend(connector->dev->dev);
1211                 return connector_status_disconnected;
1212         }
1213
1214         ret = drm_dp_mst_detect_port(connector, ctx, mstc->port->mgr,
1215                                      mstc->port);
1216         if (ret != connector_status_connected)
1217                 goto out;
1218
1219 out:
1220         pm_runtime_mark_last_busy(connector->dev->dev);
1221         pm_runtime_put_autosuspend(connector->dev->dev);
1222         return ret;
1223 }
1224
1225 static const struct drm_connector_helper_funcs
1226 nv50_mstc_help = {
1227         .get_modes = nv50_mstc_get_modes,
1228         .mode_valid = nv50_mstc_mode_valid,
1229         .atomic_best_encoder = nv50_mstc_atomic_best_encoder,
1230         .atomic_check = nv50_mstc_atomic_check,
1231         .detect_ctx = nv50_mstc_detect,
1232 };
1233
1234 static void
1235 nv50_mstc_destroy(struct drm_connector *connector)
1236 {
1237         struct nv50_mstc *mstc = nv50_mstc(connector);
1238
1239         drm_connector_cleanup(&mstc->connector);
1240         drm_dp_mst_put_port_malloc(mstc->port);
1241
1242         kfree(mstc);
1243 }
1244
1245 static const struct drm_connector_funcs
1246 nv50_mstc = {
1247         .reset = nouveau_conn_reset,
1248         .fill_modes = drm_helper_probe_single_connector_modes,
1249         .destroy = nv50_mstc_destroy,
1250         .atomic_duplicate_state = nouveau_conn_atomic_duplicate_state,
1251         .atomic_destroy_state = nouveau_conn_atomic_destroy_state,
1252         .atomic_set_property = nouveau_conn_atomic_set_property,
1253         .atomic_get_property = nouveau_conn_atomic_get_property,
1254 };
1255
1256 static int
1257 nv50_mstc_new(struct nv50_mstm *mstm, struct drm_dp_mst_port *port,
1258               const char *path, struct nv50_mstc **pmstc)
1259 {
1260         struct drm_device *dev = mstm->outp->base.base.dev;
1261         struct drm_crtc *crtc;
1262         struct nv50_mstc *mstc;
1263         int ret;
1264
1265         if (!(mstc = *pmstc = kzalloc(sizeof(*mstc), GFP_KERNEL)))
1266                 return -ENOMEM;
1267         mstc->mstm = mstm;
1268         mstc->port = port;
1269
1270         ret = drm_connector_init(dev, &mstc->connector, &nv50_mstc,
1271                                  DRM_MODE_CONNECTOR_DisplayPort);
1272         if (ret) {
1273                 kfree(*pmstc);
1274                 *pmstc = NULL;
1275                 return ret;
1276         }
1277
1278         drm_connector_helper_add(&mstc->connector, &nv50_mstc_help);
1279
1280         mstc->connector.funcs->reset(&mstc->connector);
1281         nouveau_conn_attach_properties(&mstc->connector);
1282
1283         drm_for_each_crtc(crtc, dev) {
1284                 if (!(mstm->outp->dcb->heads & drm_crtc_mask(crtc)))
1285                         continue;
1286
1287                 drm_connector_attach_encoder(&mstc->connector,
1288                                              &nv50_head(crtc)->msto->encoder);
1289         }
1290
1291         drm_object_attach_property(&mstc->connector.base, dev->mode_config.path_property, 0);
1292         drm_object_attach_property(&mstc->connector.base, dev->mode_config.tile_property, 0);
1293         drm_connector_set_path_property(&mstc->connector, path);
1294         drm_dp_mst_get_port_malloc(port);
1295         return 0;
1296 }
1297
1298 static void
1299 nv50_mstm_cleanup(struct drm_atomic_state *state,
1300                   struct drm_dp_mst_topology_state *mst_state,
1301                   struct nv50_mstm *mstm)
1302 {
1303         struct nouveau_drm *drm = nouveau_drm(mstm->outp->base.base.dev);
1304         struct drm_encoder *encoder;
1305
1306         NV_ATOMIC(drm, "%s: mstm cleanup\n", mstm->outp->base.base.name);
1307         drm_dp_check_act_status(&mstm->mgr);
1308
1309         drm_for_each_encoder(encoder, mstm->outp->base.base.dev) {
1310                 if (encoder->encoder_type == DRM_MODE_ENCODER_DPMST) {
1311                         struct nv50_msto *msto = nv50_msto(encoder);
1312                         struct nv50_mstc *mstc = msto->mstc;
1313                         if (mstc && mstc->mstm == mstm)
1314                                 nv50_msto_cleanup(state, mst_state, &mstm->mgr, msto);
1315                 }
1316         }
1317
1318         if (mstm->disabled) {
1319                 nouveau_dp_power_down(mstm->outp);
1320                 nvif_outp_release(&mstm->outp->outp);
1321                 mstm->disabled = false;
1322         }
1323
1324         mstm->modified = false;
1325 }
1326
1327 static void
1328 nv50_mstm_prepare(struct drm_atomic_state *state,
1329                   struct drm_dp_mst_topology_state *mst_state,
1330                   struct nv50_mstm *mstm)
1331 {
1332         struct nouveau_drm *drm = nouveau_drm(mstm->outp->base.base.dev);
1333         struct drm_encoder *encoder;
1334
1335         NV_ATOMIC(drm, "%s: mstm prepare\n", mstm->outp->base.base.name);
1336
1337         /* Disable payloads first */
1338         drm_for_each_encoder(encoder, mstm->outp->base.base.dev) {
1339                 if (encoder->encoder_type == DRM_MODE_ENCODER_DPMST) {
1340                         struct nv50_msto *msto = nv50_msto(encoder);
1341                         struct nv50_mstc *mstc = msto->mstc;
1342                         if (mstc && mstc->mstm == mstm && msto->disabled)
1343                                 nv50_msto_prepare(state, mst_state, &mstm->mgr, msto);
1344                 }
1345         }
1346
1347         /* Add payloads for new heads, while also updating the start slots of any unmodified (but
1348          * active) heads that may have had their VC slots shifted left after the previous step
1349          */
1350         drm_for_each_encoder(encoder, mstm->outp->base.base.dev) {
1351                 if (encoder->encoder_type == DRM_MODE_ENCODER_DPMST) {
1352                         struct nv50_msto *msto = nv50_msto(encoder);
1353                         struct nv50_mstc *mstc = msto->mstc;
1354                         if (mstc && mstc->mstm == mstm && !msto->disabled)
1355                                 nv50_msto_prepare(state, mst_state, &mstm->mgr, msto);
1356                 }
1357         }
1358 }
1359
1360 static struct drm_connector *
1361 nv50_mstm_add_connector(struct drm_dp_mst_topology_mgr *mgr,
1362                         struct drm_dp_mst_port *port, const char *path)
1363 {
1364         struct nv50_mstm *mstm = nv50_mstm(mgr);
1365         struct nv50_mstc *mstc;
1366         int ret;
1367
1368         ret = nv50_mstc_new(mstm, port, path, &mstc);
1369         if (ret)
1370                 return NULL;
1371
1372         return &mstc->connector;
1373 }
1374
1375 static const struct drm_dp_mst_topology_cbs
1376 nv50_mstm = {
1377         .add_connector = nv50_mstm_add_connector,
1378 };
1379
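/* Service pass for an MST sideband/hotplug IRQ: keep reading the ESI
 * registers, let the topology manager handle and acknowledge any pending
 * events, and loop until nothing is left to handle.  Returns false when
 * DPCD access fails.
 */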
1380 bool
1381 nv50_mstm_service(struct nouveau_drm *drm,
1382                   struct nouveau_connector *nv_connector,
1383                   struct nv50_mstm *mstm)
1384 {
1385         struct drm_dp_aux *aux = &nv_connector->aux;
1386         bool handled = true, ret = true;
1387         int rc;
1388         u8 esi[8] = {};
1389
1390         while (handled) {
1391                 u8 ack[8] = {};
1392
1393                 rc = drm_dp_dpcd_read(aux, DP_SINK_COUNT_ESI, esi, 8);
1394                 if (rc != 8) {
1395                         ret = false;
1396                         break;
1397                 }
1398
1399                 drm_dp_mst_hpd_irq_handle_event(&mstm->mgr, esi, ack, &handled);
1400                 if (!handled)
1401                         break;
1402
1403                 rc = drm_dp_dpcd_writeb(aux, DP_SINK_COUNT_ESI + 1, ack[1]);
1404
1405                 if (rc != 1) {
1406                         ret = false;
1407                         break;
1408                 }
1409
1410                 drm_dp_mst_hpd_irq_send_new_request(&mstm->mgr);
1411         }
1412
1413         if (!ret)
1414                 NV_DEBUG(drm, "Failed to handle ESI on %s: %d\n",
1415                          nv_connector->base.name, rc);
1416
1417         return ret;
1418 }
1419
1420 void
1421 nv50_mstm_remove(struct nv50_mstm *mstm)
1422 {
1423         mstm->is_mst = false;
1424         drm_dp_mst_topology_mgr_set_mst(&mstm->mgr, false);
1425 }
1426
1427 int
1428 nv50_mstm_detect(struct nouveau_encoder *outp)
1429 {
1430         struct nv50_mstm *mstm = outp->dp.mstm;
1431         struct drm_dp_aux *aux;
1432         int ret;
1433
1434         if (!mstm || !mstm->can_mst)
1435                 return 0;
1436
1437         aux = mstm->mgr.aux;
1438
1439         /* Clear any leftover MST state we didn't set ourselves by first
1440          * disabling MST if it was already enabled
1441          */
1442         ret = drm_dp_dpcd_writeb(aux, DP_MSTM_CTRL, 0);
1443         if (ret < 0)
1444                 return ret;
1445
1446         /* And start enabling */
1447         ret = drm_dp_mst_topology_mgr_set_mst(&mstm->mgr, true);
1448         if (ret)
1449                 return ret;
1450
1451         mstm->is_mst = true;
1452         return 1;
1453 }
1454
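/* Suspend/resume handling for MST: fini marks the topology as suspended
 * (under hpd_irq_lock) and suspends the manager, while init resumes it and,
 * if the topology can no longer be found on resume, tears MST down and
 * sends a hotplug event so userspace re-probes.
 */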
1455 static void
1456 nv50_mstm_fini(struct nouveau_encoder *outp)
1457 {
1458         struct nv50_mstm *mstm = outp->dp.mstm;
1459
1460         if (!mstm)
1461                 return;
1462
1463         /* Don't change the MST state of this connector until we've finished
1464          * resuming, since we can't safely grab hpd_irq_lock in our resume
1465          * path to protect mstm->is_mst without potentially deadlocking
1466          */
1467         mutex_lock(&outp->dp.hpd_irq_lock);
1468         mstm->suspended = true;
1469         mutex_unlock(&outp->dp.hpd_irq_lock);
1470
1471         if (mstm->is_mst)
1472                 drm_dp_mst_topology_mgr_suspend(&mstm->mgr);
1473 }
1474
1475 static void
1476 nv50_mstm_init(struct nouveau_encoder *outp, bool runtime)
1477 {
1478         struct nv50_mstm *mstm = outp->dp.mstm;
1479         int ret = 0;
1480
1481         if (!mstm)
1482                 return;
1483
1484         if (mstm->is_mst) {
1485                 ret = drm_dp_mst_topology_mgr_resume(&mstm->mgr, !runtime);
1486                 if (ret == -1)
1487                         nv50_mstm_remove(mstm);
1488         }
1489
1490         mutex_lock(&outp->dp.hpd_irq_lock);
1491         mstm->suspended = false;
1492         mutex_unlock(&outp->dp.hpd_irq_lock);
1493
1494         if (ret == -1)
1495                 drm_kms_helper_hotplug_event(mstm->mgr.dev);
1496 }
1497
1498 static void
1499 nv50_mstm_del(struct nv50_mstm **pmstm)
1500 {
1501         struct nv50_mstm *mstm = *pmstm;
1502         if (mstm) {
1503                 drm_dp_mst_topology_mgr_destroy(&mstm->mgr);
1504                 kfree(*pmstm);
1505                 *pmstm = NULL;
1506         }
1507 }
1508
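/* Allocate and initialise the MST topology manager for this encoder.  The payload
 * count is limited to the number of heads this OR can drive according to the DCB.
 */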
1509 static int
1510 nv50_mstm_new(struct nouveau_encoder *outp, struct drm_dp_aux *aux, int aux_max,
1511               int conn_base_id, struct nv50_mstm **pmstm)
1512 {
1513         const int max_payloads = hweight8(outp->dcb->heads);
1514         struct drm_device *dev = outp->base.base.dev;
1515         struct nv50_mstm *mstm;
1516         int ret;
1517
1518         if (!(mstm = *pmstm = kzalloc(sizeof(*mstm), GFP_KERNEL)))
1519                 return -ENOMEM;
1520         mstm->outp = outp;
1521         mstm->mgr.cbs = &nv50_mstm;
1522
1523         ret = drm_dp_mst_topology_mgr_init(&mstm->mgr, dev, aux, aux_max,
1524                                            max_payloads, conn_base_id);
1525         if (ret)
1526                 return ret;
1527
1528         return 0;
1529 }
1530
1531 /******************************************************************************
1532  * SOR
1533  *****************************************************************************/
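/* Update the core channel's SOR_SET_CONTROL state for one head: attach the head
 * with the given protocol/depth when asyh is non-NULL, or detach it (and drop the
 * control word entirely once no head owns this OR) when asyh is NULL.
 */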
1534 static void
1535 nv50_sor_update(struct nouveau_encoder *nv_encoder, u8 head,
1536                 struct nv50_head_atom *asyh, u8 proto, u8 depth)
1537 {
1538         struct nv50_disp *disp = nv50_disp(nv_encoder->base.base.dev);
1539         struct nv50_core *core = disp->core;
1540
1541         if (!asyh) {
1542                 nv_encoder->ctrl &= ~BIT(head);
1543                 if (NVDEF_TEST(nv_encoder->ctrl, NV507D, SOR_SET_CONTROL, OWNER, ==, NONE))
1544                         nv_encoder->ctrl = 0;
1545         } else {
1546                 nv_encoder->ctrl |= NVVAL(NV507D, SOR_SET_CONTROL, PROTOCOL, proto);
1547                 nv_encoder->ctrl |= BIT(head);
1548                 asyh->or.depth = depth;
1549         }
1550
1551         core->func->sor->ctrl(core, nv_encoder->outp.or.id, nv_encoder->ctrl, asyh);
1552 }
1553
1554 /* TODO: Should we extend this to PWM-only backlights?
1555  * Should we also add a DRM helper for waiting for the panel to acknowledge that
1556  * its backlight has been shut off? Intel doesn't seem to do this, and uses a
1557  * fixed time delay from the VBIOS instead.
1558  */
1559 static void
1560 nv50_sor_atomic_disable(struct drm_encoder *encoder, struct drm_atomic_state *state)
1561 {
1562         struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1563         struct nv50_head *head = nv50_head(nv_encoder->crtc);
1564 #ifdef CONFIG_DRM_NOUVEAU_BACKLIGHT
1565         struct nouveau_connector *nv_connector = nv50_outp_get_old_connector(state, nv_encoder);
1566         struct nouveau_drm *drm = nouveau_drm(nv_encoder->base.base.dev);
1567         struct nouveau_backlight *backlight = nv_connector->backlight;
1568         struct drm_dp_aux *aux = &nv_connector->aux;
1569         int ret;
1570
1571         if (backlight && backlight->uses_dpcd) {
1572                 ret = drm_edp_backlight_disable(aux, &backlight->edp_info);
1573                 if (ret < 0)
1574                         NV_ERROR(drm, "Failed to disable backlight on [CONNECTOR:%d:%s]: %d\n",
1575                                  nv_connector->base.base.id, nv_connector->base.name, ret);
1576         }
1577 #endif
1578
1579         if (nv_encoder->dcb->type == DCB_OUTPUT_TMDS && nv_encoder->hdmi.enabled) {
1580                 nvif_outp_hdmi(&nv_encoder->outp, head->base.index,
1581                                false, 0, 0, 0, false, false, false);
1582                 nv_encoder->hdmi.enabled = false;
1583         }
1584
1585         if (nv_encoder->dcb->type == DCB_OUTPUT_DP)
1586                 nouveau_dp_power_down(nv_encoder);
1587
1588         if (head->func->display_id)
1589                 head->func->display_id(head, 0);
1590
1591         nv_encoder->update(nv_encoder, head->base.index, NULL, 0, 0);
1592         nv50_audio_disable(encoder, &head->base);
1593         nv_encoder->crtc = NULL;
1594 }
1595
1596 // common/inc/displayport/displayport.h
1597 #define DP_CONFIG_WATERMARK_ADJUST                   2
1598 #define DP_CONFIG_WATERMARK_LIMIT                   20
1599 #define DP_CONFIG_INCREASED_WATERMARK_ADJUST         8
1600 #define DP_CONFIG_INCREASED_WATERMARK_LIMIT         22
1601
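/* Compute the DP SST watermark and hblank/vblank symbol counts for this head/OR
 * combination and program them via nvif_outp_dp_sst().  Returns false early when
 * the mode cannot be made to fit the current link rate/lane configuration.
 */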
1602 static bool
1603 nv50_sor_dp_watermark_sst(struct nouveau_encoder *outp,
1604                           struct nv50_head *head, struct nv50_head_atom *asyh)
1605 {
1606         bool enhancedFraming = outp->dp.dpcd[DP_MAX_LANE_COUNT] & DP_ENHANCED_FRAME_CAP;
1607         u64 minRate = outp->dp.link_bw * 1000;
1608         unsigned tuSize = 64;
1609         unsigned waterMark;
1610         unsigned hBlankSym;
1611         unsigned vBlankSym;
1612         unsigned watermarkAdjust = DP_CONFIG_WATERMARK_ADJUST;
1613         unsigned watermarkMinimum = DP_CONFIG_WATERMARK_LIMIT;
1614         // depth is multiplied by 16 when DSC is enabled
1615         s32 hblank_symbols;
1616         // number of link clocks per line.
1617         int vblank_symbols        = 0;
1618         bool bEnableDsc = false;
1619         unsigned surfaceWidth = asyh->mode.h.blanks - asyh->mode.h.blanke;
1620         unsigned rasterWidth = asyh->mode.h.active;
1621         unsigned depth = asyh->or.bpc * 3;
1622         unsigned DSC_FACTOR = bEnableDsc ? 16 : 1;
1623         u64 pixelClockHz = asyh->mode.clock * 1000;
1624         u64 PrecisionFactor = 100000, ratioF, watermarkF;
1625         u32 numLanesPerLink = outp->dp.link_nr;
1626         u32 numSymbolsPerLine;
1627         u32 BlankingBits;
1628         u32 surfaceWidthPerLink;
1629         u32 PixelSteeringBits;
1630         u64 NumBlankingLinkClocks;
1631         u32 MinHBlank;
1632
1633         if (outp->outp.info.dp.increased_wm) {
1634                 watermarkAdjust = DP_CONFIG_INCREASED_WATERMARK_ADJUST;
1635                 watermarkMinimum = DP_CONFIG_INCREASED_WATERMARK_LIMIT;
1636         }
1637
1638         if ((pixelClockHz * depth) >= (8 * minRate * outp->dp.link_nr * DSC_FACTOR))
1639         {
1640                 return false;
1641         }
1642
1643         //
1644         // For DSC, if (pclk * bpp) < (1/64 * orclk * 8 * lanes) then some TU may end up with
1645         // 0 active symbols. This may cause HW hang. Bug 200379426
1646         //
1647         if ((bEnableDsc) &&
1648             ((pixelClockHz * depth) < div_u64(8 * minRate * outp->dp.link_nr * DSC_FACTOR, 64)))
1649         {
1650                 return false;
1651         }
1652
1653         //
1654         //  Perform the SST calculation.
1655         //      For auto mode the watermark calculation does not need to track accumulated error;
1656         //      the formulas for manual mode will not work, so the calculation below was extracted from the DTB.
1657         //
1658         ratioF = div_u64((u64)pixelClockHz * depth * PrecisionFactor, DSC_FACTOR);
1659
1660         ratioF = div_u64(ratioF, 8 * (u64) minRate * outp->dp.link_nr);
1661
1662         if (PrecisionFactor < ratioF) // Bail out if we would end up with a negative number below
1663                 return false;
1664
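        /*
         * With the PrecisionFactor (100000) fixed-point scaling removed, the
         * watermark computed below is:
         *   ratio     = (pixelClockHz * depth) / (8 * minRate * numLanesPerLink * DSC_FACTOR)
         *   waterMark = watermarkAdjust
         *             + 2 * depth / (8 * numLanesPerLink * DSC_FACTOR)
         *             + tuSize * ratio * (1 - ratio)
         */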
1665         watermarkF = div_u64(ratioF * tuSize * (PrecisionFactor - ratioF), PrecisionFactor);
1666         waterMark = (unsigned)(watermarkAdjust + (div_u64(2 * div_u64(depth * PrecisionFactor, 8 * numLanesPerLink * DSC_FACTOR) + watermarkF, PrecisionFactor)));
1667
1668         //
1669         //  Bounds check the watermark
1670         //
1671         numSymbolsPerLine = div_u64(surfaceWidth * depth, 8 * outp->dp.link_nr * DSC_FACTOR);
1672
1673         if (WARN_ON(waterMark > 39 || waterMark > numSymbolsPerLine))
1674                 return false;
1675
1676         //
1677         //  Clamp the low side
1678         //
1679         if (waterMark < watermarkMinimum)
1680                 waterMark = watermarkMinimum;
1681
1682         // Bits to send BS/BE/extra symbols due to pixel padding.
1683         // Also accounts for enhanced framing.
1684         BlankingBits = 3*8*numLanesPerLink + (enhancedFraming ? 3*8*numLanesPerLink : 0);
1685
1686         // VBID/MVID/MAUD sent 4 times all the time
1687         BlankingBits += 3*8*4;
1688
1689         surfaceWidthPerLink = surfaceWidth;
1690
1691         // Extra bits sent due to pixel steering
1692         u32 remain;
1693         div_u64_rem(surfaceWidthPerLink, numLanesPerLink, &remain);
1694         PixelSteeringBits = remain ? div_u64((numLanesPerLink - remain) * depth, DSC_FACTOR) : 0;
1695
1696         BlankingBits += PixelSteeringBits;
1697         NumBlankingLinkClocks = div_u64((u64)BlankingBits * PrecisionFactor, (8 * numLanesPerLink));
1698         MinHBlank = (u32)(div_u64(div_u64(NumBlankingLinkClocks * pixelClockHz, minRate), PrecisionFactor));
1699         MinHBlank += 12;
1700
1701         if (WARN_ON(MinHBlank > rasterWidth - surfaceWidth))
1702                 return false;
1703
1704         // Bug 702290 - Active Width should be greater than 60
1705         if (WARN_ON(surfaceWidth <= 60))
1706                 return false;
1707
1708
1709         hblank_symbols = (s32)(div_u64((u64)(rasterWidth - surfaceWidth - MinHBlank) * minRate, pixelClockHz));
1710
1711         // Reduce HBlank symbols to account for the secondary data packet.
1712         hblank_symbols -= 1; // Stuffer latency to send BS
1713         hblank_symbols -= 3; // SPKT latency to send data to stuffer
1714
1715         hblank_symbols -= numLanesPerLink == 1 ? 9  : numLanesPerLink == 2 ? 6 : 3;
1716
1717         hBlankSym = (hblank_symbols < 0) ? 0 : hblank_symbols;
1718
1719         // Refer to dev_disp.ref for more information.
1720         // # symbols/vblank = ((SetRasterBlankEnd.X + SetRasterSize.Width - SetRasterBlankStart.X - 40) * link_clk / pclk) - Y - 1;
1721         // where Y = (# lanes == 4) ? 12 : (# lanes == 2) ? 21 : 39
1722         if (surfaceWidth < 40)
1723         {
1724                 vblank_symbols = 0;
1725         }
1726         else
1727         {
1728                 vblank_symbols = (s32)((div_u64((u64)(surfaceWidth - 40) * minRate, pixelClockHz))) - 1;
1729
1730                 vblank_symbols -= numLanesPerLink == 1 ? 39  : numLanesPerLink == 2 ? 21 : 12;
1731         }
1732
1733         vBlankSym = (vblank_symbols < 0) ? 0 : vblank_symbols;
1734
1735         return nvif_outp_dp_sst(&outp->outp, head->base.index, waterMark, hBlankSym, vBlankSym);
1736 }
1737
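/* Full modeset path for an SOR: acquire the OR, do the output-type specific setup
 * (HDMI enablement, LVDS link configuration, DP link training and SST watermarks,
 * eDP backlight), then pick the SOR protocol/pixel depth and attach the OR to the
 * new head via nv_encoder->update().
 */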
1738 static void
1739 nv50_sor_atomic_enable(struct drm_encoder *encoder, struct drm_atomic_state *state)
1740 {
1741         struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1742         struct nouveau_crtc *nv_crtc = nv50_outp_get_new_crtc(state, nv_encoder);
1743         struct nv50_head_atom *asyh =
1744                 nv50_head_atom(drm_atomic_get_new_crtc_state(state, &nv_crtc->base));
1745         struct drm_display_mode *mode = &asyh->state.adjusted_mode;
1746         struct nv50_disp *disp = nv50_disp(encoder->dev);
1747         struct nv50_head *head = nv50_head(&nv_crtc->base);
1748         struct nvif_outp *outp = &nv_encoder->outp;
1749         struct drm_device *dev = encoder->dev;
1750         struct nouveau_drm *drm = nouveau_drm(dev);
1751         struct nouveau_connector *nv_connector;
1752 #ifdef CONFIG_DRM_NOUVEAU_BACKLIGHT
1753         struct nouveau_backlight *backlight;
1754 #endif
1755         struct nvbios *bios = &drm->vbios;
1756         bool lvds_dual = false, lvds_8bpc = false, hda = false;
1757         u8 proto = NV507D_SOR_SET_CONTROL_PROTOCOL_CUSTOM;
1758         u8 depth = NV837D_SOR_SET_CONTROL_PIXEL_DEPTH_DEFAULT;
1759
1760         nv_connector = nv50_outp_get_new_connector(state, nv_encoder);
1761         nv_encoder->crtc = &nv_crtc->base;
1762
1763         if ((disp->disp->object.oclass == GT214_DISP ||
1764              disp->disp->object.oclass >= GF110_DISP) &&
1765             nv_encoder->dcb->type != DCB_OUTPUT_LVDS &&
1766             drm_detect_monitor_audio(nv_connector->edid))
1767                 hda = true;
1768
1769         if (!nvif_outp_acquired(outp))
1770                 nvif_outp_acquire_sor(outp, hda);
1771
1772         switch (nv_encoder->dcb->type) {
1773         case DCB_OUTPUT_TMDS:
1774                 if (disp->disp->object.oclass != NV50_DISP &&
1775                     drm_detect_hdmi_monitor(nv_connector->edid))
1776                         nv50_hdmi_enable(encoder, nv_crtc, nv_connector, state, mode, hda);
1777
1778                 if (nv_encoder->outp.or.link & 1) {
1779                         proto = NV507D_SOR_SET_CONTROL_PROTOCOL_SINGLE_TMDS_A;
1780                         /* Only enable dual-link if:
1781                          *  - Need to (i.e. rate > 165MHz)
1782                          *  - DCB says we can
1783                          *  - Not an HDMI monitor, since there's no dual-link
1784                          *    on HDMI.
1785                          */
1786                         if (mode->clock >= 165000 &&
1787                             nv_encoder->dcb->duallink_possible &&
1788                             !drm_detect_hdmi_monitor(nv_connector->edid))
1789                                 proto = NV507D_SOR_SET_CONTROL_PROTOCOL_DUAL_TMDS;
1790                 } else {
1791                         proto = NV507D_SOR_SET_CONTROL_PROTOCOL_SINGLE_TMDS_B;
1792                 }
1793                 break;
1794         case DCB_OUTPUT_LVDS:
1795                 proto = NV507D_SOR_SET_CONTROL_PROTOCOL_LVDS_CUSTOM;
1796
1797                 if (bios->fp_no_ddc) {
1798                         lvds_dual = bios->fp.dual_link;
1799                         lvds_8bpc = bios->fp.if_is_24bit;
1800                 } else {
1801                         if (nv_connector->type == DCB_CONNECTOR_LVDS_SPWG) {
1802                                 if (((u8 *)nv_connector->edid)[121] == 2)
1803                                         lvds_dual = true;
1804                         } else
1805                         if (mode->clock >= bios->fp.duallink_transition_clk) {
1806                                 lvds_dual = true;
1807                         }
1808
1809                         if (lvds_dual) {
1810                                 if (bios->fp.strapless_is_24bit & 2)
1811                                         lvds_8bpc = true;
1812                         } else {
1813                                 if (bios->fp.strapless_is_24bit & 1)
1814                                         lvds_8bpc = true;
1815                         }
1816
1817                         if (asyh->or.bpc == 8)
1818                                 lvds_8bpc = true;
1819                 }
1820
1821                 nvif_outp_lvds(&nv_encoder->outp, lvds_dual, lvds_8bpc);
1822                 break;
1823         case DCB_OUTPUT_DP:
1824                 nouveau_dp_train(nv_encoder, false, mode->clock, asyh->or.bpc);
1825                 nv50_sor_dp_watermark_sst(nv_encoder, head, asyh);
1826                 depth = nv50_dp_bpc_to_depth(asyh->or.bpc);
1827
1828                 if (nv_encoder->outp.or.link & 1)
1829                         proto = NV887D_SOR_SET_CONTROL_PROTOCOL_DP_A;
1830                 else
1831                         proto = NV887D_SOR_SET_CONTROL_PROTOCOL_DP_B;
1832
1833 #ifdef CONFIG_DRM_NOUVEAU_BACKLIGHT
1834                 backlight = nv_connector->backlight;
1835                 if (backlight && backlight->uses_dpcd)
1836                         drm_edp_backlight_enable(&nv_connector->aux, &backlight->edp_info,
1837                                                  (u16)backlight->dev->props.brightness);
1838 #endif
1839
1840                 break;
1841         default:
1842                 BUG();
1843                 break;
1844         }
1845
1846         if (head->func->display_id)
1847                 head->func->display_id(head, BIT(nv_encoder->outp.id));
1848
1849         nv_encoder->update(nv_encoder, nv_crtc->index, asyh, proto, depth);
1850 }
1851
1852 static const struct drm_encoder_helper_funcs
1853 nv50_sor_help = {
1854         .atomic_check = nv50_outp_atomic_check,
1855         .atomic_enable = nv50_sor_atomic_enable,
1856         .atomic_disable = nv50_sor_atomic_disable,
1857 };
1858
1859 static void
1860 nv50_sor_destroy(struct drm_encoder *encoder)
1861 {
1862         struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1863
1864         nv50_mstm_del(&nv_encoder->dp.mstm);
1865         drm_encoder_cleanup(encoder);
1866
1867         if (nv_encoder->dcb->type == DCB_OUTPUT_DP)
1868                 mutex_destroy(&nv_encoder->dp.hpd_irq_lock);
1869
1870         nvif_outp_dtor(&nv_encoder->outp);
1871         kfree(encoder);
1872 }
1873
1874 static const struct drm_encoder_funcs
1875 nv50_sor_func = {
1876         .destroy = nv50_sor_destroy,
1877 };
1878
1879 static int
1880 nv50_sor_create(struct nouveau_encoder *nv_encoder)
1881 {
1882         struct drm_connector *connector = &nv_encoder->conn->base;
1883         struct nouveau_connector *nv_connector = nouveau_connector(connector);
1884         struct nouveau_drm *drm = nouveau_drm(connector->dev);
1885         struct nvkm_i2c *i2c = nvxx_i2c(&drm->client.device);
1886         struct drm_encoder *encoder;
1887         struct dcb_output *dcbe = nv_encoder->dcb;
1888         struct nv50_disp *disp = nv50_disp(connector->dev);
1889         int type, ret;
1890
1891         switch (dcbe->type) {
1892         case DCB_OUTPUT_LVDS: type = DRM_MODE_ENCODER_LVDS; break;
1893         case DCB_OUTPUT_TMDS:
1894         case DCB_OUTPUT_DP:
1895         default:
1896                 type = DRM_MODE_ENCODER_TMDS;
1897                 break;
1898         }
1899
1900         nv_encoder->update = nv50_sor_update;
1901
1902         encoder = to_drm_encoder(nv_encoder);
1903         drm_encoder_init(connector->dev, encoder, &nv50_sor_func, type,
1904                          "sor-%04x-%04x", dcbe->hasht, dcbe->hashm);
1905         drm_encoder_helper_add(encoder, &nv50_sor_help);
1906
1907         drm_connector_attach_encoder(connector, encoder);
1908
1909         disp->core->func->sor->get_caps(disp, nv_encoder, ffs(dcbe->or) - 1);
1910         nv50_outp_dump_caps(drm, nv_encoder);
1911
1912         if (dcbe->type == DCB_OUTPUT_DP) {
1913                 mutex_init(&nv_encoder->dp.hpd_irq_lock);
1914
1915                 if (disp->disp->object.oclass < GF110_DISP) {
1916                         /* HW has no support for address-only
1917                          * transactions, so we're required to
1918                          * use custom I2C-over-AUX code.
1919                          */
1920                         struct nvkm_i2c_aux *aux;
1921
1922                         aux = nvkm_i2c_aux_find(i2c, dcbe->i2c_index);
1923                         if (!aux)
1924                                 return -EINVAL;
1925
1926                         nv_encoder->i2c = &aux->i2c;
1927                 } else {
1928                         nv_encoder->i2c = &nv_connector->aux.ddc;
1929                 }
1930
1931                 if (nv_connector->type != DCB_CONNECTOR_eDP && nv_encoder->outp.info.dp.mst) {
1932                         ret = nv50_mstm_new(nv_encoder, &nv_connector->aux,
1933                                             16, nv_connector->base.base.id,
1934                                             &nv_encoder->dp.mstm);
1935                         if (ret)
1936                                 return ret;
1937                 }
1938         } else
1939         if (nv_encoder->outp.info.ddc != NVIF_OUTP_DDC_INVALID) {
1940                 struct nvkm_i2c_bus *bus =
1941                         nvkm_i2c_bus_find(i2c, dcbe->i2c_index);
1942                 if (bus)
1943                         nv_encoder->i2c = &bus->i2c;
1944         }
1945
1946         return 0;
1947 }
1948
1949 /******************************************************************************
1950  * PIOR
1951  *****************************************************************************/
1952 static int
1953 nv50_pior_atomic_check(struct drm_encoder *encoder,
1954                        struct drm_crtc_state *crtc_state,
1955                        struct drm_connector_state *conn_state)
1956 {
1957         int ret = nv50_outp_atomic_check(encoder, crtc_state, conn_state);
1958         if (ret)
1959                 return ret;
1960         crtc_state->adjusted_mode.clock *= 2;
1961         return 0;
1962 }
1963
1964 static void
1965 nv50_pior_atomic_disable(struct drm_encoder *encoder, struct drm_atomic_state *state)
1966 {
1967         struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1968         struct nv50_core *core = nv50_disp(encoder->dev)->core;
1969         const u32 ctrl = NVDEF(NV507D, PIOR_SET_CONTROL, OWNER, NONE);
1970
1971         core->func->pior->ctrl(core, nv_encoder->outp.or.id, ctrl, NULL);
1972         nv_encoder->crtc = NULL;
1973 }
1974
1975 static void
1976 nv50_pior_atomic_enable(struct drm_encoder *encoder, struct drm_atomic_state *state)
1977 {
1978         struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1979         struct nouveau_crtc *nv_crtc = nv50_outp_get_new_crtc(state, nv_encoder);
1980         struct nv50_head_atom *asyh =
1981                 nv50_head_atom(drm_atomic_get_new_crtc_state(state, &nv_crtc->base));
1982         struct nv50_core *core = nv50_disp(encoder->dev)->core;
1983         u32 ctrl = 0;
1984
1985         switch (nv_crtc->index) {
1986         case 0: ctrl |= NVDEF(NV507D, PIOR_SET_CONTROL, OWNER, HEAD0); break;
1987         case 1: ctrl |= NVDEF(NV507D, PIOR_SET_CONTROL, OWNER, HEAD1); break;
1988         default:
1989                 WARN_ON(1);
1990                 break;
1991         }
1992
1993         switch (asyh->or.bpc) {
1994         case 10: asyh->or.depth = NV837D_PIOR_SET_CONTROL_PIXEL_DEPTH_BPP_30_444; break;
1995         case  8: asyh->or.depth = NV837D_PIOR_SET_CONTROL_PIXEL_DEPTH_BPP_24_444; break;
1996         case  6: asyh->or.depth = NV837D_PIOR_SET_CONTROL_PIXEL_DEPTH_BPP_18_444; break;
1997         default: asyh->or.depth = NV837D_PIOR_SET_CONTROL_PIXEL_DEPTH_DEFAULT; break;
1998         }
1999
2000         if (!nvif_outp_acquired(&nv_encoder->outp))
2001                 nvif_outp_acquire_pior(&nv_encoder->outp);
2002
2003         switch (nv_encoder->dcb->type) {
2004         case DCB_OUTPUT_TMDS:
2005                 ctrl |= NVDEF(NV507D, PIOR_SET_CONTROL, PROTOCOL, EXT_TMDS_ENC);
2006                 break;
2007         case DCB_OUTPUT_DP:
2008                 ctrl |= NVDEF(NV507D, PIOR_SET_CONTROL, PROTOCOL, EXT_TMDS_ENC);
2009                 nouveau_dp_train(nv_encoder, false, asyh->state.adjusted_mode.clock, 6);
2010                 break;
2011         default:
2012                 BUG();
2013                 break;
2014         }
2015
2016         core->func->pior->ctrl(core, nv_encoder->outp.or.id, ctrl, asyh);
2017         nv_encoder->crtc = &nv_crtc->base;
2018 }
2019
2020 static const struct drm_encoder_helper_funcs
2021 nv50_pior_help = {
2022         .atomic_check = nv50_pior_atomic_check,
2023         .atomic_enable = nv50_pior_atomic_enable,
2024         .atomic_disable = nv50_pior_atomic_disable,
2025 };
2026
2027 static void
2028 nv50_pior_destroy(struct drm_encoder *encoder)
2029 {
2030         struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
2031
2032         nvif_outp_dtor(&nv_encoder->outp);
2033
2034         drm_encoder_cleanup(encoder);
2035
2036         mutex_destroy(&nv_encoder->dp.hpd_irq_lock);
2037         kfree(encoder);
2038 }
2039
2040 static const struct drm_encoder_funcs
2041 nv50_pior_func = {
2042         .destroy = nv50_pior_destroy,
2043 };
2044
2045 static int
2046 nv50_pior_create(struct nouveau_encoder *nv_encoder)
2047 {
2048         struct drm_connector *connector = &nv_encoder->conn->base;
2049         struct drm_device *dev = connector->dev;
2050         struct nouveau_drm *drm = nouveau_drm(dev);
2051         struct nv50_disp *disp = nv50_disp(dev);
2052         struct nvkm_i2c *i2c = nvxx_i2c(&drm->client.device);
2053         struct nvkm_i2c_bus *bus = NULL;
2054         struct nvkm_i2c_aux *aux = NULL;
2055         struct i2c_adapter *ddc;
2056         struct drm_encoder *encoder;
2057         struct dcb_output *dcbe = nv_encoder->dcb;
2058         int type;
2059
2060         switch (dcbe->type) {
2061         case DCB_OUTPUT_TMDS:
2062                 bus  = nvkm_i2c_bus_find(i2c, nv_encoder->outp.info.ddc);
2063                 ddc  = bus ? &bus->i2c : NULL;
2064                 type = DRM_MODE_ENCODER_TMDS;
2065                 break;
2066         case DCB_OUTPUT_DP:
2067                 aux  = nvkm_i2c_aux_find(i2c, nv_encoder->outp.info.dp.aux);
2068                 ddc  = aux ? &aux->i2c : NULL;
2069                 type = DRM_MODE_ENCODER_TMDS;
2070                 break;
2071         default:
2072                 return -ENODEV;
2073         }
2074
2075         nv_encoder->i2c = ddc;
2076
2077         mutex_init(&nv_encoder->dp.hpd_irq_lock);
2078
2079         encoder = to_drm_encoder(nv_encoder);
2080         drm_encoder_init(connector->dev, encoder, &nv50_pior_func, type,
2081                          "pior-%04x-%04x", dcbe->hasht, dcbe->hashm);
2082         drm_encoder_helper_add(encoder, &nv50_pior_help);
2083
2084         drm_connector_attach_encoder(connector, encoder);
2085
2086         disp->core->func->pior->get_caps(disp, nv_encoder, ffs(dcbe->or) - 1);
2087         nv50_outp_dump_caps(drm, nv_encoder);
2088
2089         return 0;
2090 }
2091
2092 /******************************************************************************
2093  * Atomic
2094  *****************************************************************************/
2095
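/* Kick the core channel: push any pending MST payload allocations, submit the
 * interlocked core update and wait on the core notifier, then finish MST payload
 * cleanup, re-enable audio on newly-enabled outputs and release the ORs of
 * disabled ones.
 */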
2096 static void
2097 nv50_disp_atomic_commit_core(struct drm_atomic_state *state, u32 *interlock)
2098 {
2099         struct drm_dp_mst_topology_mgr *mgr;
2100         struct drm_dp_mst_topology_state *mst_state;
2101         struct nouveau_drm *drm = nouveau_drm(state->dev);
2102         struct nv50_disp *disp = nv50_disp(drm->dev);
2103         struct nv50_atom *atom = nv50_atom(state);
2104         struct nv50_core *core = disp->core;
2105         struct nv50_outp_atom *outp;
2106         struct nv50_mstm *mstm;
2107         int i;
2108
2109         NV_ATOMIC(drm, "commit core %08x\n", interlock[NV50_DISP_INTERLOCK_BASE]);
2110
2111         for_each_new_mst_mgr_in_state(state, mgr, mst_state, i) {
2112                 mstm = nv50_mstm(mgr);
2113                 if (mstm->modified)
2114                         nv50_mstm_prepare(state, mst_state, mstm);
2115         }
2116
2117         core->func->ntfy_init(disp->sync, NV50_DISP_CORE_NTFY);
2118         core->func->update(core, interlock, true);
2119         if (core->func->ntfy_wait_done(disp->sync, NV50_DISP_CORE_NTFY,
2120                                        disp->core->chan.base.device))
2121                 NV_ERROR(drm, "core notifier timeout\n");
2122
2123         for_each_new_mst_mgr_in_state(state, mgr, mst_state, i) {
2124                 mstm = nv50_mstm(mgr);
2125                 if (mstm->modified)
2126                         nv50_mstm_cleanup(state, mst_state, mstm);
2127         }
2128
2129         list_for_each_entry(outp, &atom->outp, head) {
2130                 if (outp->encoder->encoder_type != DRM_MODE_ENCODER_DPMST) {
2131                         struct nouveau_encoder *nv_encoder = nouveau_encoder(outp->encoder);
2132
2133                         if (outp->enabled) {
2134                                 nv50_audio_enable(outp->encoder, nouveau_crtc(nv_encoder->crtc),
2135                                                   nv_encoder->conn, NULL, NULL);
2136                                 outp->enabled = outp->disabled = false;
2137                         } else {
2138                                 if (outp->disabled) {
2139                                         nvif_outp_release(&nv_encoder->outp);
2140                                         outp->disabled = false;
2141                                 }
2142                         }
2143                 }
2144         }
2145 }
2146
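/* Kick every window channel whose interlock bit is set so its pending state is
 * latched by hardware together with this update.
 */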
2147 static void
2148 nv50_disp_atomic_commit_wndw(struct drm_atomic_state *state, u32 *interlock)
2149 {
2150         struct drm_plane_state *new_plane_state;
2151         struct drm_plane *plane;
2152         int i;
2153
2154         for_each_new_plane_in_state(state, plane, new_plane_state, i) {
2155                 struct nv50_wndw *wndw = nv50_wndw(plane);
2156                 if (interlock[wndw->interlock.type] & wndw->interlock.data) {
2157                         if (wndw->func->update)
2158                                 wndw->func->update(wndw, interlock);
2159                 }
2160         }
2161 }
2162
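/* The commit tail runs the whole programming sequence: disable heads, planes and
 * output paths that are going away, flush the disables if required, then enable
 * the new output paths, heads and planes, flush the update, and finally wait for
 * completion and deliver vblank/flip events.
 */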
2163 static void
2164 nv50_disp_atomic_commit_tail(struct drm_atomic_state *state)
2165 {
2166         struct drm_device *dev = state->dev;
2167         struct drm_crtc_state *new_crtc_state, *old_crtc_state;
2168         struct drm_crtc *crtc;
2169         struct drm_plane_state *new_plane_state;
2170         struct drm_plane *plane;
2171         struct nouveau_drm *drm = nouveau_drm(dev);
2172         struct nv50_disp *disp = nv50_disp(dev);
2173         struct nv50_atom *atom = nv50_atom(state);
2174         struct nv50_core *core = disp->core;
2175         struct nv50_outp_atom *outp, *outt;
2176         u32 interlock[NV50_DISP_INTERLOCK__SIZE] = {};
2177         int i;
2178         bool flushed = false;
2179
2180         NV_ATOMIC(drm, "commit %d %d\n", atom->lock_core, atom->flush_disable);
2181         nv50_crc_atomic_stop_reporting(state);
2182         drm_atomic_helper_wait_for_fences(dev, state, false);
2183         drm_atomic_helper_wait_for_dependencies(state);
2184         drm_dp_mst_atomic_wait_for_dependencies(state);
2185         drm_atomic_helper_update_legacy_modeset_state(dev, state);
2186         drm_atomic_helper_calc_timestamping_constants(state);
2187
2188         if (atom->lock_core)
2189                 mutex_lock(&disp->mutex);
2190
2191         /* Disable head(s). */
2192         for_each_oldnew_crtc_in_state(state, crtc, old_crtc_state, new_crtc_state, i) {
2193                 struct nv50_head_atom *asyh = nv50_head_atom(new_crtc_state);
2194                 struct nv50_head *head = nv50_head(crtc);
2195
2196                 NV_ATOMIC(drm, "%s: clr %04x (set %04x)\n", crtc->name,
2197                           asyh->clr.mask, asyh->set.mask);
2198
2199                 if (old_crtc_state->active && !new_crtc_state->active) {
2200                         pm_runtime_put_noidle(dev->dev);
2201                         drm_crtc_vblank_off(crtc);
2202                 }
2203
2204                 if (asyh->clr.mask) {
2205                         nv50_head_flush_clr(head, asyh, atom->flush_disable);
2206                         interlock[NV50_DISP_INTERLOCK_CORE] |= 1;
2207                 }
2208         }
2209
2210         /* Disable plane(s). */
2211         for_each_new_plane_in_state(state, plane, new_plane_state, i) {
2212                 struct nv50_wndw_atom *asyw = nv50_wndw_atom(new_plane_state);
2213                 struct nv50_wndw *wndw = nv50_wndw(plane);
2214
2215                 NV_ATOMIC(drm, "%s: clr %02x (set %02x)\n", plane->name,
2216                           asyw->clr.mask, asyw->set.mask);
2217                 if (!asyw->clr.mask)
2218                         continue;
2219
2220                 nv50_wndw_flush_clr(wndw, interlock, atom->flush_disable, asyw);
2221         }
2222
2223         /* Disable output path(s). */
2224         list_for_each_entry(outp, &atom->outp, head) {
2225                 const struct drm_encoder_helper_funcs *help;
2226                 struct drm_encoder *encoder;
2227
2228                 encoder = outp->encoder;
2229                 help = encoder->helper_private;
2230
2231                 NV_ATOMIC(drm, "%s: clr %02x (set %02x)\n", encoder->name,
2232                           outp->clr.mask, outp->set.mask);
2233
2234                 if (outp->clr.mask) {
2235                         help->atomic_disable(encoder, state);
2236                         outp->disabled = true;
2237                         interlock[NV50_DISP_INTERLOCK_CORE] |= 1;
2238                 }
2239         }
2240
2241         /* Flush disable. */
2242         if (interlock[NV50_DISP_INTERLOCK_CORE]) {
2243                 if (atom->flush_disable) {
2244                         nv50_disp_atomic_commit_wndw(state, interlock);
2245                         nv50_disp_atomic_commit_core(state, interlock);
2246                         memset(interlock, 0x00, sizeof(interlock));
2247
2248                         flushed = true;
2249                 }
2250         }
2251
2252         if (flushed)
2253                 nv50_crc_atomic_release_notifier_contexts(state);
2254         nv50_crc_atomic_init_notifier_contexts(state);
2255
2256         /* Update output path(s). */
2257         list_for_each_entry(outp, &atom->outp, head) {
2258                 const struct drm_encoder_helper_funcs *help;
2259                 struct drm_encoder *encoder;
2260
2261                 encoder = outp->encoder;
2262                 help = encoder->helper_private;
2263
2264                 NV_ATOMIC(drm, "%s: set %02x (clr %02x)\n", encoder->name,
2265                           outp->set.mask, outp->clr.mask);
2266
2267                 if (outp->set.mask) {
2268                         help->atomic_enable(encoder, state);
2269                         outp->enabled = true;
2270                         interlock[NV50_DISP_INTERLOCK_CORE] = 1;
2271                 }
2272         }
2273
2274         /* Update head(s). */
2275         for_each_oldnew_crtc_in_state(state, crtc, old_crtc_state, new_crtc_state, i) {
2276                 struct nv50_head_atom *asyh = nv50_head_atom(new_crtc_state);
2277                 struct nv50_head *head = nv50_head(crtc);
2278
2279                 NV_ATOMIC(drm, "%s: set %04x (clr %04x)\n", crtc->name,
2280                           asyh->set.mask, asyh->clr.mask);
2281
2282                 if (asyh->set.mask) {
2283                         nv50_head_flush_set(head, asyh);
2284                         interlock[NV50_DISP_INTERLOCK_CORE] = 1;
2285                 }
2286
2287                 if (new_crtc_state->active) {
2288                         if (!old_crtc_state->active) {
2289                                 drm_crtc_vblank_on(crtc);
2290                                 pm_runtime_get_noresume(dev->dev);
2291                         }
2292                         if (new_crtc_state->event)
2293                                 drm_crtc_vblank_get(crtc);
2294                 }
2295         }
2296
2297         /* Update window->head assignment.
2298          *
2299          * This has to happen in an update that's not interlocked with
2300          * any window channels to avoid hitting HW error checks.
2301          *
2302          * TODO: Proper handling of window ownership (Turing apparently
2303          *       supports non-fixed mappings).
2304          */
2305         if (core->assign_windows) {
2306                 core->func->wndw.owner(core);
2307                 nv50_disp_atomic_commit_core(state, interlock);
2308                 core->assign_windows = false;
2309                 interlock[NV50_DISP_INTERLOCK_CORE] = 0;
2310         }
2311
2312         /* Finish updating head(s)...
2313          *
2314          * NVD is rather picky about both where window assignments can change,
2315          * *and* about certain core and window channel states matching.
2316          *
2317          * The EFI GOP driver on newer GPUs configures window channels with a
2318          * different output format to what we do, and the core channel update
2319          * in the assign_windows case above would result in a state mismatch.
2320          *
2321          * Delay some of the head update until after that point to work around
2322          * the issue.  This only affects the initial modeset.
2323          *
2324          * TODO: handle this better when adding flexible window mapping
2325          */
2326         for_each_oldnew_crtc_in_state(state, crtc, old_crtc_state, new_crtc_state, i) {
2327                 struct nv50_head_atom *asyh = nv50_head_atom(new_crtc_state);
2328                 struct nv50_head *head = nv50_head(crtc);
2329
2330                 NV_ATOMIC(drm, "%s: set %04x (clr %04x)\n", crtc->name,
2331                           asyh->set.mask, asyh->clr.mask);
2332
2333                 if (asyh->set.mask) {
2334                         nv50_head_flush_set_wndw(head, asyh);
2335                         interlock[NV50_DISP_INTERLOCK_CORE] = 1;
2336                 }
2337         }
2338
2339         /* Update plane(s). */
2340         for_each_new_plane_in_state(state, plane, new_plane_state, i) {
2341                 struct nv50_wndw_atom *asyw = nv50_wndw_atom(new_plane_state);
2342                 struct nv50_wndw *wndw = nv50_wndw(plane);
2343
2344                 NV_ATOMIC(drm, "%s: set %02x (clr %02x)\n", plane->name,
2345                           asyw->set.mask, asyw->clr.mask);
2346                 if ( !asyw->set.mask &&
2347                     (!asyw->clr.mask || atom->flush_disable))
2348                         continue;
2349
2350                 nv50_wndw_flush_set(wndw, interlock, asyw);
2351         }
2352
2353         /* Flush update. */
2354         nv50_disp_atomic_commit_wndw(state, interlock);
2355
2356         if (interlock[NV50_DISP_INTERLOCK_CORE]) {
2357                 if (interlock[NV50_DISP_INTERLOCK_BASE] ||
2358                     interlock[NV50_DISP_INTERLOCK_OVLY] ||
2359                     interlock[NV50_DISP_INTERLOCK_WNDW] ||
2360                     !atom->state.legacy_cursor_update)
2361                         nv50_disp_atomic_commit_core(state, interlock);
2362                 else
2363                         disp->core->func->update(disp->core, interlock, false);
2364         }
2365
2366         if (atom->lock_core)
2367                 mutex_unlock(&disp->mutex);
2368
2369         list_for_each_entry_safe(outp, outt, &atom->outp, head) {
2370                 list_del(&outp->head);
2371                 kfree(outp);
2372         }
2373
2374         /* Wait for HW to signal completion. */
2375         for_each_new_plane_in_state(state, plane, new_plane_state, i) {
2376                 struct nv50_wndw_atom *asyw = nv50_wndw_atom(new_plane_state);
2377                 struct nv50_wndw *wndw = nv50_wndw(plane);
2378                 int ret = nv50_wndw_wait_armed(wndw, asyw);
2379                 if (ret)
2380                         NV_ERROR(drm, "%s: timeout\n", plane->name);
2381         }
2382
2383         for_each_new_crtc_in_state(state, crtc, new_crtc_state, i) {
2384                 if (new_crtc_state->event) {
2385                         unsigned long flags;
2386                         /* Get correct count/ts if racing with vblank irq */
2387                         if (new_crtc_state->active)
2388                                 drm_crtc_accurate_vblank_count(crtc);
2389                         spin_lock_irqsave(&crtc->dev->event_lock, flags);
2390                         drm_crtc_send_vblank_event(crtc, new_crtc_state->event);
2391                         spin_unlock_irqrestore(&crtc->dev->event_lock, flags);
2392
2393                         new_crtc_state->event = NULL;
2394                         if (new_crtc_state->active)
2395                                 drm_crtc_vblank_put(crtc);
2396                 }
2397         }
2398
2399         nv50_crc_atomic_start_reporting(state);
2400         if (!flushed)
2401                 nv50_crc_atomic_release_notifier_contexts(state);
2402
2403         drm_atomic_helper_commit_hw_done(state);
2404         drm_atomic_helper_cleanup_planes(dev, state);
2405         drm_atomic_helper_commit_cleanup_done(state);
2406         drm_atomic_state_put(state);
2407
2408         /* Drop the RPM ref we got from nv50_disp_atomic_commit() */
2409         pm_runtime_mark_last_busy(dev->dev);
2410         pm_runtime_put_autosuspend(dev->dev);
2411 }
2412
2413 static void
2414 nv50_disp_atomic_commit_work(struct work_struct *work)
2415 {
2416         struct drm_atomic_state *state =
2417                 container_of(work, typeof(*state), commit_work);
2418         nv50_disp_atomic_commit_tail(state);
2419 }
2420
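/* Atomic commit entry point.  Takes a runtime PM reference for the duration of
 * the commit (plus one more that the commit tail releases), swaps in the new
 * state, and either runs the tail directly or queues it on system_unbound_wq for
 * nonblocking commits.
 */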
2421 static int
2422 nv50_disp_atomic_commit(struct drm_device *dev,
2423                         struct drm_atomic_state *state, bool nonblock)
2424 {
2425         struct drm_plane_state *new_plane_state;
2426         struct drm_plane *plane;
2427         int ret, i;
2428
2429         ret = pm_runtime_get_sync(dev->dev);
2430         if (ret < 0 && ret != -EACCES) {
2431                 pm_runtime_put_autosuspend(dev->dev);
2432                 return ret;
2433         }
2434
2435         ret = drm_atomic_helper_setup_commit(state, nonblock);
2436         if (ret)
2437                 goto done;
2438
2439         INIT_WORK(&state->commit_work, nv50_disp_atomic_commit_work);
2440
2441         ret = drm_atomic_helper_prepare_planes(dev, state);
2442         if (ret)
2443                 goto done;
2444
2445         if (!nonblock) {
2446                 ret = drm_atomic_helper_wait_for_fences(dev, state, true);
2447                 if (ret)
2448                         goto err_cleanup;
2449         }
2450
2451         ret = drm_atomic_helper_swap_state(state, true);
2452         if (ret)
2453                 goto err_cleanup;
2454
2455         for_each_new_plane_in_state(state, plane, new_plane_state, i) {
2456                 struct nv50_wndw_atom *asyw = nv50_wndw_atom(new_plane_state);
2457                 struct nv50_wndw *wndw = nv50_wndw(plane);
2458
2459                 if (asyw->set.image)
2460                         nv50_wndw_ntfy_enable(wndw, asyw);
2461         }
2462
2463         drm_atomic_state_get(state);
2464
2465         /*
2466          * Grab another RPM ref for the commit tail, which will release the
2467          * ref when it's finished
2468          */
2469         pm_runtime_get_noresume(dev->dev);
2470
2471         if (nonblock)
2472                 queue_work(system_unbound_wq, &state->commit_work);
2473         else
2474                 nv50_disp_atomic_commit_tail(state);
2475
2476 err_cleanup:
2477         if (ret)
2478                 drm_atomic_helper_cleanup_planes(dev, state);
2479 done:
2480         pm_runtime_put_autosuspend(dev->dev);
2481         return ret;
2482 }
2483
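/* Look up (or allocate and track) the per-encoder atomic bookkeeping used to
 * order output path enables/disables during the commit.
 */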
2484 static struct nv50_outp_atom *
2485 nv50_disp_outp_atomic_add(struct nv50_atom *atom, struct drm_encoder *encoder)
2486 {
2487         struct nv50_outp_atom *outp;
2488
2489         list_for_each_entry(outp, &atom->outp, head) {
2490                 if (outp->encoder == encoder)
2491                         return outp;
2492         }
2493
2494         outp = kzalloc(sizeof(*outp), GFP_KERNEL);
2495         if (!outp)
2496                 return ERR_PTR(-ENOMEM);
2497
2498         list_add(&outp->head, &atom->outp);
2499         outp->encoder = encoder;
2500         return outp;
2501 }
2502
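/* If this connector's old CRTC was active and the new state needs a modeset, queue
 * the encoder for an explicit disable.  DP and MST encoders additionally force a
 * flush of the disable before anything new is programmed.
 */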
2503 static int
2504 nv50_disp_outp_atomic_check_clr(struct nv50_atom *atom,
2505                                 struct drm_connector_state *old_connector_state)
2506 {
2507         struct drm_encoder *encoder = old_connector_state->best_encoder;
2508         struct drm_crtc_state *old_crtc_state, *new_crtc_state;
2509         struct drm_crtc *crtc;
2510         struct nv50_outp_atom *outp;
2511
2512         if (!(crtc = old_connector_state->crtc))
2513                 return 0;
2514
2515         old_crtc_state = drm_atomic_get_old_crtc_state(&atom->state, crtc);
2516         new_crtc_state = drm_atomic_get_new_crtc_state(&atom->state, crtc);
2517         if (old_crtc_state->active && drm_atomic_crtc_needs_modeset(new_crtc_state)) {
2518                 outp = nv50_disp_outp_atomic_add(atom, encoder);
2519                 if (IS_ERR(outp))
2520                         return PTR_ERR(outp);
2521
2522                 if (outp->encoder->encoder_type == DRM_MODE_ENCODER_DPMST ||
2523                     nouveau_encoder(outp->encoder)->dcb->type == DCB_OUTPUT_DP)
2524                         atom->flush_disable = true;
2525                 outp->clr.ctrl = true;
2526                 atom->lock_core = true;
2527         }
2528
2529         return 0;
2530 }
2531
2532 static int
2533 nv50_disp_outp_atomic_check_set(struct nv50_atom *atom,
2534                                 struct drm_connector_state *connector_state)
2535 {
2536         struct drm_encoder *encoder = connector_state->best_encoder;
2537         struct drm_crtc_state *new_crtc_state;
2538         struct drm_crtc *crtc;
2539         struct nv50_outp_atom *outp;
2540
2541         if (!(crtc = connector_state->crtc))
2542                 return 0;
2543
2544         new_crtc_state = drm_atomic_get_new_crtc_state(&atom->state, crtc);
2545         if (new_crtc_state->active && drm_atomic_crtc_needs_modeset(new_crtc_state)) {
2546                 outp = nv50_disp_outp_atomic_add(atom, encoder);
2547                 if (IS_ERR(outp))
2548                         return PTR_ERR(outp);
2549
2550                 outp->set.ctrl = true;
2551                 atom->lock_core = true;
2552         }
2553
2554         return 0;
2555 }
2556
2557 static int
2558 nv50_disp_atomic_check(struct drm_device *dev, struct drm_atomic_state *state)
2559 {
2560         struct nv50_atom *atom = nv50_atom(state);
2561         struct nv50_core *core = nv50_disp(dev)->core;
2562         struct drm_connector_state *old_connector_state, *new_connector_state;
2563         struct drm_connector *connector;
2564         struct drm_crtc_state *new_crtc_state;
2565         struct drm_crtc *crtc;
2566         struct nv50_head *head;
2567         struct nv50_head_atom *asyh;
2568         int ret, i;
2569
2570         if (core->assign_windows && core->func->head->static_wndw_map) {
2571                 drm_for_each_crtc(crtc, dev) {
2572                         new_crtc_state = drm_atomic_get_crtc_state(state,
2573                                                                    crtc);
2574                         if (IS_ERR(new_crtc_state))
2575                                 return PTR_ERR(new_crtc_state);
2576
2577                         head = nv50_head(crtc);
2578                         asyh = nv50_head_atom(new_crtc_state);
2579                         core->func->head->static_wndw_map(head, asyh);
2580                 }
2581         }
2582
2583         /* We need to handle colour management on a per-plane basis. */
2584         for_each_new_crtc_in_state(state, crtc, new_crtc_state, i) {
2585                 if (new_crtc_state->color_mgmt_changed) {
2586                         ret = drm_atomic_add_affected_planes(state, crtc);
2587                         if (ret)
2588                                 return ret;
2589                 }
2590         }
2591
2592         ret = drm_atomic_helper_check(dev, state);
2593         if (ret)
2594                 return ret;
2595
2596         for_each_oldnew_connector_in_state(state, connector, old_connector_state, new_connector_state, i) {
2597                 ret = nv50_disp_outp_atomic_check_clr(atom, old_connector_state);
2598                 if (ret)
2599                         return ret;
2600
2601                 ret = nv50_disp_outp_atomic_check_set(atom, new_connector_state);
2602                 if (ret)
2603                         return ret;
2604         }
2605
2606         ret = drm_dp_mst_atomic_check(state);
2607         if (ret)
2608                 return ret;
2609
2610         nv50_crc_atomic_check_outp(atom);
2611
2612         return 0;
2613 }
2614
2615 static void
2616 nv50_disp_atomic_state_clear(struct drm_atomic_state *state)
2617 {
2618         struct nv50_atom *atom = nv50_atom(state);
2619         struct nv50_outp_atom *outp, *outt;
2620
2621         list_for_each_entry_safe(outp, outt, &atom->outp, head) {
2622                 list_del(&outp->head);
2623                 kfree(outp);
2624         }
2625
2626         drm_atomic_state_default_clear(state);
2627 }
2628
2629 static void
2630 nv50_disp_atomic_state_free(struct drm_atomic_state *state)
2631 {
2632         struct nv50_atom *atom = nv50_atom(state);
2633         drm_atomic_state_default_release(&atom->state);
2634         kfree(atom);
2635 }
2636
2637 static struct drm_atomic_state *
2638 nv50_disp_atomic_state_alloc(struct drm_device *dev)
2639 {
2640         struct nv50_atom *atom;
2641         if (!(atom = kzalloc(sizeof(*atom), GFP_KERNEL)) ||
2642             drm_atomic_state_init(dev, &atom->state) < 0) {
2643                 kfree(atom);
2644                 return NULL;
2645         }
2646         INIT_LIST_HEAD(&atom->outp);
2647         return &atom->state;
2648 }
2649
2650 static const struct drm_mode_config_funcs
2651 nv50_disp_func = {
2652         .fb_create = nouveau_user_framebuffer_create,
2653         .output_poll_changed = drm_fb_helper_output_poll_changed,
2654         .atomic_check = nv50_disp_atomic_check,
2655         .atomic_commit = nv50_disp_atomic_commit,
2656         .atomic_state_alloc = nv50_disp_atomic_state_alloc,
2657         .atomic_state_clear = nv50_disp_atomic_state_clear,
2658         .atomic_state_free = nv50_disp_atomic_state_free,
2659 };
2660
2661 static const struct drm_mode_config_helper_funcs
2662 nv50_disp_helper_func = {
2663         .atomic_commit_setup = drm_dp_mst_atomic_setup_commit,
2664 };
2665
2666 /******************************************************************************
2667  * Init
2668  *****************************************************************************/
2669
2670 static void
2671 nv50_display_fini(struct drm_device *dev, bool runtime, bool suspend)
2672 {
2673         struct nouveau_drm *drm = nouveau_drm(dev);
2674         struct drm_encoder *encoder;
2675
2676         list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
2677                 if (encoder->encoder_type != DRM_MODE_ENCODER_DPMST)
2678                         nv50_mstm_fini(nouveau_encoder(encoder));
2679         }
2680
2681         if (!runtime)
2682                 cancel_work_sync(&drm->hpd_work);
2683 }
2684
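/* Reconstruct the DRM state for an output path that was left enabled by whatever
 * programmed the display before us (e.g. the EFI GOP or VBIOS), so the first
 * atomic commit starts from what the hardware is actually scanning out.
 */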
2685 static inline void
2686 nv50_display_read_hw_or_state(struct drm_device *dev, struct nv50_disp *disp,
2687                               struct nouveau_encoder *outp)
2688 {
2689         struct drm_crtc *crtc;
2690         struct drm_connector_list_iter conn_iter;
2691         struct drm_connector *conn;
2692         struct nv50_head_atom *armh;
2693         const u32 encoder_mask = drm_encoder_mask(&outp->base.base);
2694         bool found_conn = false, found_head = false;
2695         u8 proto;
2696         int head_idx;
2697         int ret;
2698
2699         switch (outp->dcb->type) {
2700         case DCB_OUTPUT_TMDS:
2701                 ret = nvif_outp_inherit_tmds(&outp->outp, &proto);
2702                 break;
2703         case DCB_OUTPUT_DP:
2704                 ret = nvif_outp_inherit_dp(&outp->outp, &proto);
2705                 break;
2706         case DCB_OUTPUT_LVDS:
2707                 ret = nvif_outp_inherit_lvds(&outp->outp, &proto);
2708                 break;
2709         case DCB_OUTPUT_ANALOG:
2710                 ret = nvif_outp_inherit_rgb_crt(&outp->outp, &proto);
2711                 break;
2712         default:
2713                 drm_dbg_kms(dev, "Readback for %s not implemented yet, skipping\n",
2714                             outp->base.base.name);
2715                 drm_WARN_ON(dev, true);
2716                 return;
2717         }
2718
2719         if (ret < 0)
2720                 return;
2721
2722         head_idx = ret;
2723
2724         drm_for_each_crtc(crtc, dev) {
2725                 if (crtc->index != head_idx)
2726                         continue;
2727
2728                 armh = nv50_head_atom(crtc->state);
2729                 found_head = true;
2730                 break;
2731         }
2732         if (drm_WARN_ON(dev, !found_head))
2733                 return;
2734
2735         /* Figure out which connector is being used by this encoder */
2736         drm_connector_list_iter_begin(dev, &conn_iter);
2737         nouveau_for_each_non_mst_connector_iter(conn, &conn_iter) {
2738                 if (nouveau_connector(conn)->index == outp->dcb->connector) {
2739                         found_conn = true;
2740                         break;
2741                 }
2742         }
2743         drm_connector_list_iter_end(&conn_iter);
2744         if (drm_WARN_ON(dev, !found_conn))
2745                 return;
2746
2747         armh->state.encoder_mask = encoder_mask;
2748         armh->state.connector_mask = drm_connector_mask(conn);
2749         armh->state.active = true;
2750         armh->state.enable = true;
2751         pm_runtime_get_noresume(dev->dev);
2752
2753         outp->crtc = crtc;
2754         outp->ctrl = NVVAL(NV507D, SOR_SET_CONTROL, PROTOCOL, proto) | BIT(crtc->index);
2755
2756         drm_connector_get(conn);
2757         conn->state->crtc = crtc;
2758         conn->state->best_encoder = &outp->base.base;
2759 }
2760
2761 /* Read back the currently programmed display state */
2762 static void
2763 nv50_display_read_hw_state(struct nouveau_drm *drm)
2764 {
2765         struct drm_device *dev = drm->dev;
2766         struct drm_encoder *encoder;
2767         struct drm_modeset_acquire_ctx ctx;
2768         struct nv50_disp *disp = nv50_disp(dev);
2769         int ret;
2770
2771         DRM_MODESET_LOCK_ALL_BEGIN(dev, ctx, 0, ret);
2772
2773         drm_for_each_encoder(encoder, dev) {
2774                 if (encoder->encoder_type == DRM_MODE_ENCODER_DPMST)
2775                         continue;
2776
2777                 nv50_display_read_hw_or_state(dev, disp, nouveau_encoder(encoder));
2778         }
2779
2780         DRM_MODESET_LOCK_ALL_END(dev, ctx, ret);
2781 }
2782
2783 static int
2784 nv50_display_init(struct drm_device *dev, bool resume, bool runtime)
2785 {
2786         struct nv50_core *core = nv50_disp(dev)->core;
2787         struct drm_encoder *encoder;
2788
2789         if (resume || runtime)
2790                 core->func->init(core);
2791
2792         list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
2793                 if (encoder->encoder_type != DRM_MODE_ENCODER_DPMST) {
2794                         struct nouveau_encoder *nv_encoder =
2795                                 nouveau_encoder(encoder);
2796                         nv50_mstm_init(nv_encoder, runtime);
2797                 }
2798         }
2799
2800         if (!resume)
2801                 nv50_display_read_hw_state(nouveau_drm(dev));
2802
2803         return 0;
2804 }
2805
2806 static void
2807 nv50_display_destroy(struct drm_device *dev)
2808 {
2809         struct nv50_disp *disp = nv50_disp(dev);
2810
2811         nv50_audio_component_fini(nouveau_drm(dev));
2812
2813         nvif_object_unmap(&disp->caps);
2814         nvif_object_dtor(&disp->caps);
2815         nv50_core_del(&disp->core);
2816
2817         nouveau_bo_unmap(disp->sync);
2818         if (disp->sync)
2819                 nouveau_bo_unpin(disp->sync);
2820         nouveau_bo_ref(NULL, &disp->sync);
2821
2822         nouveau_display(dev)->priv = NULL;
2823         kfree(disp);
2824 }
2825
2826 int
2827 nv50_display_create(struct drm_device *dev)
2828 {
2829         struct nouveau_drm *drm = nouveau_drm(dev);
2830         struct drm_connector *connector, *tmp;
2831         struct nv50_disp *disp;
2832         int ret, i;
2833         bool has_mst = false;
2834
2835         disp = kzalloc(sizeof(*disp), GFP_KERNEL);
2836         if (!disp)
2837                 return -ENOMEM;
2838
2839         mutex_init(&disp->mutex);
2840
2841         nouveau_display(dev)->priv = disp;
2842         nouveau_display(dev)->dtor = nv50_display_destroy;
2843         nouveau_display(dev)->init = nv50_display_init;
2844         nouveau_display(dev)->fini = nv50_display_fini;
2845         disp->disp = &nouveau_display(dev)->disp;
2846         dev->mode_config.funcs = &nv50_disp_func;
2847         dev->mode_config.helper_private = &nv50_disp_helper_func;
2848         dev->mode_config.quirk_addfb_prefer_xbgr_30bpp = true;
2849         dev->mode_config.normalize_zpos = true;
2850
2851         /* small shared memory area we use for notifiers and semaphores */
2852         ret = nouveau_bo_new(&drm->client, 4096, 0x1000,
2853                              NOUVEAU_GEM_DOMAIN_VRAM,
2854                              0, 0x0000, NULL, NULL, &disp->sync);
2855         if (!ret) {
2856                 ret = nouveau_bo_pin(disp->sync, NOUVEAU_GEM_DOMAIN_VRAM, true);
2857                 if (!ret) {
2858                         ret = nouveau_bo_map(disp->sync);
2859                         if (ret)
2860                                 nouveau_bo_unpin(disp->sync);
2861                 }
2862                 if (ret)
2863                         nouveau_bo_ref(NULL, &disp->sync);
2864         }
2865
2866         if (ret)
2867                 goto out;
2868
2869         /* allocate master evo channel */
2870         ret = nv50_core_new(drm, &disp->core);
2871         if (ret)
2872                 goto out;
2873
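             /* Bring up the core channel before querying display capabilities below. */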
2874         disp->core->func->init(disp->core);
2875         if (disp->core->func->caps_init) {
2876                 ret = disp->core->func->caps_init(drm, disp);
2877                 if (ret)
2878                         goto out;
2879         }
2880
2881         /* Assign the correct format modifiers */
2882         if (disp->disp->object.oclass >= TU102_DISP)
2883                 nouveau_display(dev)->format_modifiers = wndwc57e_modifiers;
2884         else
2885         if (drm->client.device.info.family >= NV_DEVICE_INFO_V0_FERMI)
2886                 nouveau_display(dev)->format_modifiers = disp90xx_modifiers;
2887         else
2888                 nouveau_display(dev)->format_modifiers = disp50xx_modifiers;
2889
2890         /* FIXME: Kepler does support 256x256 cursors, but unlike Maxwell and later
2891          * generations it requires that cursor scanout surfaces use small (4K) pages. The
2892          * proper fix is to teach nouveau to migrate fbs used for the cursor plane to
2893          * small-page allocations in prepare_fb(). Once that is implemented, we should also
2894          * force large (128K) pages for ovly fbs in order to fix Kepler ovlys.
2895          * Until then, just limit cursors to 128x128 - small enough to never need
2896          * large pages.
2897          */
2898         if (disp->disp->object.oclass >= GM107_DISP) {
2899                 dev->mode_config.cursor_width = 256;
2900                 dev->mode_config.cursor_height = 256;
2901         } else if (disp->disp->object.oclass >= GK104_DISP) {
2902                 dev->mode_config.cursor_width = 128;
2903                 dev->mode_config.cursor_height = 128;
2904         } else {
2905                 dev->mode_config.cursor_width = 64;
2906                 dev->mode_config.cursor_height = 64;
2907         }
2908
2909         /* create encoder/connector objects based on VBIOS DCB table */
2910         for_each_set_bit(i, &disp->disp->outp_mask, sizeof(disp->disp->outp_mask) * 8) {
2911                 struct nouveau_encoder *outp;
2912
2913                 outp = kzalloc(sizeof(*outp), GFP_KERNEL);
2914                 if (!outp)
2915                         break;
2916
2917                 ret = nvif_outp_ctor(disp->disp, "kmsOutp", i, &outp->outp);
2918                 if (ret) {
2919                         kfree(outp);
2920                         continue;
2921                 }
2922
2923                 connector = nouveau_connector_create(dev, outp->outp.info.conn);
2924                 if (IS_ERR(connector)) {
2925                         nvif_outp_dtor(&outp->outp);
2926                         kfree(outp);
2927                         continue;
2928                 }
2929
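                     /* outp.info.heads is a bitmask of heads able to drive this output. */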
2930                 outp->base.base.possible_crtcs = outp->outp.info.heads;
2931                 outp->base.base.possible_clones = 0;
2932                 outp->conn = nouveau_connector(connector);
2933
2934                 outp->dcb = kzalloc(sizeof(*outp->dcb), GFP_KERNEL);
2935                 if (!outp->dcb)
2936                         break;
2937
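                     /*
                      * Synthesize a legacy DCB entry from the output info reported
                      * by NVKM; code elsewhere (e.g. the hardware state readback
                      * above) still keys off these fields.
                      */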
2938                 switch (outp->outp.info.proto) {
2939                 case NVIF_OUTP_RGB_CRT:
2940                         outp->dcb->type = DCB_OUTPUT_ANALOG;
2941                         outp->dcb->crtconf.maxfreq = outp->outp.info.rgb_crt.freq_max;
2942                         break;
2943                 case NVIF_OUTP_TMDS:
2944                         outp->dcb->type = DCB_OUTPUT_TMDS;
2945                         outp->dcb->duallink_possible = outp->outp.info.tmds.dual;
2946                         break;
2947                 case NVIF_OUTP_LVDS:
2948                         outp->dcb->type = DCB_OUTPUT_LVDS;
2949                         outp->dcb->lvdsconf.use_acpi_for_edid = outp->outp.info.lvds.acpi_edid;
2950                         break;
2951                 case NVIF_OUTP_DP:
2952                         outp->dcb->type = DCB_OUTPUT_DP;
2953                         outp->dcb->dpconf.link_nr = outp->outp.info.dp.link_nr;
2954                         outp->dcb->dpconf.link_bw = outp->outp.info.dp.link_bw;
2955                         if (outp->outp.info.dp.mst)
2956                                 has_mst = true;
2957                         break;
2958                 default:
2959                         WARN_ON(1);
2960                         continue;
2961                 }
2962
2963                 outp->dcb->heads = outp->outp.info.heads;
2964                 outp->dcb->connector = outp->outp.info.conn;
2965                 outp->dcb->i2c_index = outp->outp.info.ddc;
2966
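                     /* Create the encoder object matching the output type reported by NVKM. */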
2967                 switch (outp->outp.info.type) {
2968                 case NVIF_OUTP_DAC : ret = nv50_dac_create(outp); break;
2969                 case NVIF_OUTP_SOR : ret = nv50_sor_create(outp); break;
2970                 case NVIF_OUTP_PIOR: ret = nv50_pior_create(outp); break;
2971                 default:
2972                         WARN_ON(1);
2973                         continue;
2974                 }
2975
2976                 if (ret) {
2977                         NV_WARN(drm, "failed to create encoder %d/%d/%d: %d\n",
2978                                 i, outp->outp.info.type, outp->outp.info.proto, ret);
2979                 }
2980         }
2981
2982         /* cull any connectors we created that don't have an encoder */
2983         list_for_each_entry_safe(connector, tmp, &dev->mode_config.connector_list, head) {
2984                 if (connector->possible_encoders)
2985                         continue;
2986
2987                 NV_WARN(drm, "%s has no encoders, removing\n",
2988                         connector->name);
2989                 connector->funcs->destroy(connector);
2990         }
2991
2992         /* create crtc objects to represent the hw heads */
2993         for_each_set_bit(i, &disp->disp->head_mask, sizeof(disp->disp->head_mask) * 8) {
2994                 struct nv50_head *head;
2995
2996                 head = nv50_head_create(dev, i);
2997                 if (IS_ERR(head)) {
2998                         ret = PTR_ERR(head);
2999                         goto out;
3000                 }
3001
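                     /* If any DP output supports MST, give each head its own MST stream encoder. */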
3002                 if (has_mst) {
3003                         head->msto = nv50_msto_new(dev, head, i);
3004                         if (IS_ERR(head->msto)) {
3005                                 ret = PTR_ERR(head->msto);
3006                                 head->msto = NULL;
3007                                 goto out;
3008                         }
3009
3010                         /*
3011                          * FIXME: This is a hack to work around the following
3012                          * issues:
3013                          *
3014                          * https://gitlab.gnome.org/GNOME/mutter/issues/759
3015                          * https://gitlab.freedesktop.org/xorg/xserver/merge_requests/277
3016                          *
3017                          * Once these issues are closed, this should be
3018                          * removed
3019                          */
3020                         head->msto->encoder.possible_crtcs = disp->disp->head_mask;
3021                 }
3022         }
3023
3024         /* Disable vblank irqs aggressively for power-saving, safe on nv50+ */
3025         dev->vblank_disable_immediate = true;
3026
3027         nv50_audio_component_init(drm);
3028
3029 out:
3030         if (ret)
3031                 nv50_display_destroy(dev);
3032         return ret;
3033 }
3034
3035 /******************************************************************************
3036  * Format modifiers
3037  *****************************************************************************/
3038
3039 /****************************************************************
3040  *            Log2(block height) ----------------------------+  *
3041  *            Page Kind ----------------------------------+  |  *
3042  *            Gob Height/Page Kind Generation ------+     |  |  *
3043  *                          Sector layout -------+  |     |  |  *
3044  *                          Compression ------+  |  |     |  |  */
3045 const u64 disp50xx_modifiers[] = { /*         |  |  |     |  |  */
3046         DRM_FORMAT_MOD_NVIDIA_BLOCK_LINEAR_2D(0, 1, 1, 0x7a, 0),
3047         DRM_FORMAT_MOD_NVIDIA_BLOCK_LINEAR_2D(0, 1, 1, 0x7a, 1),
3048         DRM_FORMAT_MOD_NVIDIA_BLOCK_LINEAR_2D(0, 1, 1, 0x7a, 2),
3049         DRM_FORMAT_MOD_NVIDIA_BLOCK_LINEAR_2D(0, 1, 1, 0x7a, 3),
3050         DRM_FORMAT_MOD_NVIDIA_BLOCK_LINEAR_2D(0, 1, 1, 0x7a, 4),
3051         DRM_FORMAT_MOD_NVIDIA_BLOCK_LINEAR_2D(0, 1, 1, 0x7a, 5),
3052         DRM_FORMAT_MOD_NVIDIA_BLOCK_LINEAR_2D(0, 1, 1, 0x78, 0),
3053         DRM_FORMAT_MOD_NVIDIA_BLOCK_LINEAR_2D(0, 1, 1, 0x78, 1),
3054         DRM_FORMAT_MOD_NVIDIA_BLOCK_LINEAR_2D(0, 1, 1, 0x78, 2),
3055         DRM_FORMAT_MOD_NVIDIA_BLOCK_LINEAR_2D(0, 1, 1, 0x78, 3),
3056         DRM_FORMAT_MOD_NVIDIA_BLOCK_LINEAR_2D(0, 1, 1, 0x78, 4),
3057         DRM_FORMAT_MOD_NVIDIA_BLOCK_LINEAR_2D(0, 1, 1, 0x78, 5),
3058         DRM_FORMAT_MOD_NVIDIA_BLOCK_LINEAR_2D(0, 1, 1, 0x70, 0),
3059         DRM_FORMAT_MOD_NVIDIA_BLOCK_LINEAR_2D(0, 1, 1, 0x70, 1),
3060         DRM_FORMAT_MOD_NVIDIA_BLOCK_LINEAR_2D(0, 1, 1, 0x70, 2),
3061         DRM_FORMAT_MOD_NVIDIA_BLOCK_LINEAR_2D(0, 1, 1, 0x70, 3),
3062         DRM_FORMAT_MOD_NVIDIA_BLOCK_LINEAR_2D(0, 1, 1, 0x70, 4),
3063         DRM_FORMAT_MOD_NVIDIA_BLOCK_LINEAR_2D(0, 1, 1, 0x70, 5),
3064         DRM_FORMAT_MOD_LINEAR,
3065         DRM_FORMAT_MOD_INVALID
3066 };
3067
3068 /****************************************************************
3069  *            Log2(block height) ----------------------------+  *
3070  *            Page Kind ----------------------------------+  |  *
3071  *            Gob Height/Page Kind Generation ------+     |  |  *
3072  *                          Sector layout -------+  |     |  |  *
3073  *                          Compression ------+  |  |     |  |  */
3074 const u64 disp90xx_modifiers[] = { /*         |  |  |     |  |  */
3075         DRM_FORMAT_MOD_NVIDIA_BLOCK_LINEAR_2D(0, 1, 0, 0xfe, 0),
3076         DRM_FORMAT_MOD_NVIDIA_BLOCK_LINEAR_2D(0, 1, 0, 0xfe, 1),
3077         DRM_FORMAT_MOD_NVIDIA_BLOCK_LINEAR_2D(0, 1, 0, 0xfe, 2),
3078         DRM_FORMAT_MOD_NVIDIA_BLOCK_LINEAR_2D(0, 1, 0, 0xfe, 3),
3079         DRM_FORMAT_MOD_NVIDIA_BLOCK_LINEAR_2D(0, 1, 0, 0xfe, 4),
3080         DRM_FORMAT_MOD_NVIDIA_BLOCK_LINEAR_2D(0, 1, 0, 0xfe, 5),
3081         DRM_FORMAT_MOD_LINEAR,
3082         DRM_FORMAT_MOD_INVALID
3083 };