drivers/gpu/drm/nouveau/dispnv50/disp.c
1 /*
2  * Copyright 2011 Red Hat Inc.
3  *
4  * Permission is hereby granted, free of charge, to any person obtaining a
5  * copy of this software and associated documentation files (the "Software"),
6  * to deal in the Software without restriction, including without limitation
7  * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8  * and/or sell copies of the Software, and to permit persons to whom the
9  * Software is furnished to do so, subject to the following conditions:
10  *
11  * The above copyright notice and this permission notice shall be included in
12  * all copies or substantial portions of the Software.
13  *
14  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
17  * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
18  * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19  * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20  * OTHER DEALINGS IN THE SOFTWARE.
21  *
22  * Authors: Ben Skeggs
23  */
24 #include "disp.h"
25 #include "atom.h"
26 #include "core.h"
27 #include "head.h"
28 #include "wndw.h"
29 #include "handles.h"
30
31 #include <linux/dma-mapping.h>
32 #include <linux/hdmi.h>
33 #include <linux/component.h>
34 #include <linux/iopoll.h>
35
36 #include <drm/display/drm_dp_helper.h>
37 #include <drm/display/drm_scdc_helper.h>
38 #include <drm/drm_atomic.h>
39 #include <drm/drm_atomic_helper.h>
40 #include <drm/drm_edid.h>
41 #include <drm/drm_fb_helper.h>
42 #include <drm/drm_probe_helper.h>
43 #include <drm/drm_vblank.h>
44
45 #include <nvif/push507c.h>
46
47 #include <nvif/class.h>
48 #include <nvif/cl0002.h>
49 #include <nvif/event.h>
50 #include <nvif/if0012.h>
51 #include <nvif/if0014.h>
52 #include <nvif/timer.h>
53
54 #include <nvhw/class/cl507c.h>
55 #include <nvhw/class/cl507d.h>
56 #include <nvhw/class/cl837d.h>
57 #include <nvhw/class/cl887d.h>
58 #include <nvhw/class/cl907d.h>
59 #include <nvhw/class/cl917d.h>
60
61 #include "nouveau_drv.h"
62 #include "nouveau_dma.h"
63 #include "nouveau_gem.h"
64 #include "nouveau_connector.h"
65 #include "nouveau_encoder.h"
66 #include "nouveau_fence.h"
67 #include "nv50_display.h"
68
69 /******************************************************************************
70  * EVO channel
71  *****************************************************************************/
72
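/* Create a display channel object from the first class in the caller's 'oclass'
 * list that the display object actually supports, and map its user area.
 */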
73 static int
74 nv50_chan_create(struct nvif_device *device, struct nvif_object *disp,
75                  const s32 *oclass, u8 head, void *data, u32 size,
76                  struct nv50_chan *chan)
77 {
78         struct nvif_sclass *sclass;
79         int ret, i, n;
80
81         chan->device = device;
82
83         ret = n = nvif_object_sclass_get(disp, &sclass);
84         if (ret < 0)
85                 return ret;
86
87         while (oclass[0]) {
88                 for (i = 0; i < n; i++) {
89                         if (sclass[i].oclass == oclass[0]) {
90                                 ret = nvif_object_ctor(disp, "kmsChan", 0,
91                                                        oclass[0], data, size,
92                                                        &chan->user);
93                                 if (ret == 0)
94                                         nvif_object_map(&chan->user, NULL, 0);
95                                 nvif_object_sclass_put(&sclass);
96                                 return ret;
97                         }
98                 }
99                 oclass++;
100         }
101
102         nvif_object_sclass_put(&sclass);
103         return -ENOSYS;
104 }
105
106 static void
107 nv50_chan_destroy(struct nv50_chan *chan)
108 {
109         nvif_object_dtor(&chan->user);
110 }
111
112 /******************************************************************************
113  * DMA EVO channel
114  *****************************************************************************/
115
116 void
117 nv50_dmac_destroy(struct nv50_dmac *dmac)
118 {
119         nvif_object_dtor(&dmac->vram);
120         nvif_object_dtor(&dmac->sync);
121
122         nv50_chan_destroy(&dmac->base);
123
124         nvif_mem_dtor(&dmac->_push.mem);
125 }
126
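/* Submit buffered commands: flush pending writes (when the push buffer lives in
 * VRAM) and advance the channel's PUT pointer to the current write position.
 */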
127 static void
128 nv50_dmac_kick(struct nvif_push *push)
129 {
130         struct nv50_dmac *dmac = container_of(push, typeof(*dmac), _push);
131
132         dmac->cur = push->cur - (u32 __iomem *)dmac->_push.mem.object.map.ptr;
133         if (dmac->put != dmac->cur) {
134                 /* Push buffer fetches are not coherent with BAR1; we need to ensure
135                  * writes have been flushed right through to VRAM before writing PUT.
136                  */
137                 if (dmac->push->mem.type & NVIF_MEM_VRAM) {
138                         struct nvif_device *device = dmac->base.device;
139                         nvif_wr32(&device->object, 0x070000, 0x00000001);
140                         nvif_msec(device, 2000,
141                                 if (!(nvif_rd32(&device->object, 0x070000) & 0x00000002))
142                                         break;
143                         );
144                 }
145
146                 NVIF_WV32(&dmac->base.user, NV507C, PUT, PTR, dmac->cur);
147                 dmac->put = dmac->cur;
148         }
149
150         push->bgn = push->cur;
151 }
152
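/* Return how many dwords can safely be written before catching up with GET
 * (or hitting the end of the push buffer).
 */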
153 static int
154 nv50_dmac_free(struct nv50_dmac *dmac)
155 {
156         u32 get = NVIF_RV32(&dmac->base.user, NV507C, GET, PTR);
157         if (get > dmac->cur) /* NVIDIA stays 5 away from GET; do the same. */
158                 return get - dmac->cur - 5;
159         return dmac->max - dmac->cur;
160 }
161
162 static int
163 nv50_dmac_wind(struct nv50_dmac *dmac)
164 {
165         /* Wait for GET to depart from the beginning of the push buffer to
166          * prevent writing PUT == GET, which would be ignored by HW.
167          */
168         u32 get = NVIF_RV32(&dmac->base.user, NV507C, GET, PTR);
169         if (get == 0) {
170                 /* Corner case: HW is idle, but non-committed work is pending. */
171                 if (dmac->put == 0)
172                         nv50_dmac_kick(dmac->push);
173
174                 if (nvif_msec(dmac->base.device, 2000,
175                         if (NVIF_TV32(&dmac->base.user, NV507C, GET, PTR, >, 0))
176                                 break;
177                 ) < 0)
178                         return -ETIMEDOUT;
179         }
180
181         PUSH_RSVD(dmac->push, PUSH_JUMP(dmac->push, 0));
182         dmac->cur = 0;
183         return 0;
184 }
185
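/* Ensure at least 'size' dwords of push buffer space are available, wrapping
 * back to the start of the buffer and/or waiting for GET to advance as needed.
 */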
186 static int
187 nv50_dmac_wait(struct nvif_push *push, u32 size)
188 {
189         struct nv50_dmac *dmac = container_of(push, typeof(*dmac), _push);
190         int free;
191
192         if (WARN_ON(size > dmac->max))
193                 return -EINVAL;
194
195         dmac->cur = push->cur - (u32 __iomem *)dmac->_push.mem.object.map.ptr;
196         if (dmac->cur + size >= dmac->max) {
197                 int ret = nv50_dmac_wind(dmac);
198                 if (ret)
199                         return ret;
200
201                 push->cur = dmac->_push.mem.object.map.ptr;
202                 push->cur = push->cur + dmac->cur;
203                 nv50_dmac_kick(push);
204         }
205
206         if (nvif_msec(dmac->base.device, 2000,
207                 if ((free = nv50_dmac_free(dmac)) >= size)
208                         break;
209         ) < 0) {
210                 WARN_ON(1);
211                 return -ETIMEDOUT;
212         }
213
214         push->bgn = dmac->_push.mem.object.map.ptr;
215         push->bgn = push->bgn + dmac->cur;
216         push->cur = push->bgn;
217         push->end = push->cur + free;
218         return 0;
219 }
220
221 MODULE_PARM_DESC(kms_vram_pushbuf, "Place EVO/NVD push buffers in VRAM (default: auto)");
222 static int nv50_dmac_vram_pushbuf = -1;
223 module_param_named(kms_vram_pushbuf, nv50_dmac_vram_pushbuf, int, 0400);
224
225 int
226 nv50_dmac_create(struct nvif_device *device, struct nvif_object *disp,
227                  const s32 *oclass, u8 head, void *data, u32 size, s64 syncbuf,
228                  struct nv50_dmac *dmac)
229 {
230         struct nouveau_cli *cli = (void *)device->object.client;
231         struct nvif_disp_chan_v0 *args = data;
232         u8 type = NVIF_MEM_COHERENT;
233         int ret;
234
235         mutex_init(&dmac->lock);
236
237         /* Pascal added support for 47-bit physical addresses, but some
238          * parts of EVO still only accept 40-bit PAs.
239          *
240          * To avoid issues on systems with large amounts of RAM, and on
241          * systems where an IOMMU maps pages at a high address, we need
242          * to allocate push buffers in VRAM instead.
243          *
244          * This appears to match NVIDIA's behaviour on Pascal.
245          */
246         if ((nv50_dmac_vram_pushbuf > 0) ||
247             (nv50_dmac_vram_pushbuf < 0 && device->info.family == NV_DEVICE_INFO_V0_PASCAL))
248                 type |= NVIF_MEM_VRAM;
249
250         ret = nvif_mem_ctor_map(&cli->mmu, "kmsChanPush", type, 0x1000,
251                                 &dmac->_push.mem);
252         if (ret)
253                 return ret;
254
255         dmac->ptr = dmac->_push.mem.object.map.ptr;
256         dmac->_push.wait = nv50_dmac_wait;
257         dmac->_push.kick = nv50_dmac_kick;
258         dmac->push = &dmac->_push;
259         dmac->push->bgn = dmac->_push.mem.object.map.ptr;
260         dmac->push->cur = dmac->push->bgn;
261         dmac->push->end = dmac->push->bgn;
262         dmac->max = 0x1000/4 - 1;
263
264         /* EVO channels are affected by a HW bug where the last 12 DWORDs
265          * of the push buffer can't be used safely.
266          */
267         if (disp->oclass < GV100_DISP)
268                 dmac->max -= 12;
269
270         args->pushbuf = nvif_handle(&dmac->_push.mem.object);
271
272         ret = nv50_chan_create(device, disp, oclass, head, data, size,
273                                &dmac->base);
274         if (ret)
275                 return ret;
276
277         if (syncbuf < 0)
278                 return 0;
279
280         ret = nvif_object_ctor(&dmac->base.user, "kmsSyncCtxDma", NV50_DISP_HANDLE_SYNCBUF,
281                                NV_DMA_IN_MEMORY,
282                                &(struct nv_dma_v0) {
283                                         .target = NV_DMA_V0_TARGET_VRAM,
284                                         .access = NV_DMA_V0_ACCESS_RDWR,
285                                         .start = syncbuf + 0x0000,
286                                         .limit = syncbuf + 0x0fff,
287                                }, sizeof(struct nv_dma_v0),
288                                &dmac->sync);
289         if (ret)
290                 return ret;
291
292         ret = nvif_object_ctor(&dmac->base.user, "kmsVramCtxDma", NV50_DISP_HANDLE_VRAM,
293                                NV_DMA_IN_MEMORY,
294                                &(struct nv_dma_v0) {
295                                         .target = NV_DMA_V0_TARGET_VRAM,
296                                         .access = NV_DMA_V0_ACCESS_RDWR,
297                                         .start = 0,
298                                         .limit = device->info.ram_user - 1,
299                                }, sizeof(struct nv_dma_v0),
300                                &dmac->vram);
301         if (ret)
302                 return ret;
303
304         return ret;
305 }
306
307 /******************************************************************************
308  * Output path helpers
309  *****************************************************************************/
310 static void
311 nv50_outp_dump_caps(struct nouveau_drm *drm,
312                     struct nouveau_encoder *outp)
313 {
314         NV_DEBUG(drm, "%s caps: dp_interlace=%d\n",
315                  outp->base.base.name, outp->caps.dp_interlace);
316 }
317
318 static int
319 nv50_outp_atomic_check_view(struct drm_encoder *encoder,
320                             struct drm_crtc_state *crtc_state,
321                             struct drm_connector_state *conn_state,
322                             struct drm_display_mode *native_mode)
323 {
324         struct drm_display_mode *adjusted_mode = &crtc_state->adjusted_mode;
325         struct drm_display_mode *mode = &crtc_state->mode;
326         struct drm_connector *connector = conn_state->connector;
327         struct nouveau_conn_atom *asyc = nouveau_conn_atom(conn_state);
328         struct nouveau_drm *drm = nouveau_drm(encoder->dev);
329
330         NV_ATOMIC(drm, "%s atomic_check\n", encoder->name);
331         asyc->scaler.full = false;
332         if (!native_mode)
333                 return 0;
334
335         if (asyc->scaler.mode == DRM_MODE_SCALE_NONE) {
336                 switch (connector->connector_type) {
337                 case DRM_MODE_CONNECTOR_LVDS:
338                 case DRM_MODE_CONNECTOR_eDP:
339                         /* Don't force the scaler for EDID modes with the
340                          * same size as the native one (e.g. a different
341                          * refresh rate).
342                          */
343                         if (mode->hdisplay == native_mode->hdisplay &&
344                             mode->vdisplay == native_mode->vdisplay &&
345                             mode->type & DRM_MODE_TYPE_DRIVER)
346                                 break;
347                         mode = native_mode;
348                         asyc->scaler.full = true;
349                         break;
350                 default:
351                         break;
352                 }
353         } else {
354                 mode = native_mode;
355         }
356
357         if (!drm_mode_equal(adjusted_mode, mode)) {
358                 drm_mode_copy(adjusted_mode, mode);
359                 crtc_state->mode_changed = true;
360         }
361
362         return 0;
363 }
364
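/* For DP outputs, clamp the head's bpc (10 at most) and then lower it until the
 * mode's data rate fits within the configured link bandwidth.
 */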
365 static void
366 nv50_outp_atomic_fix_depth(struct drm_encoder *encoder, struct drm_crtc_state *crtc_state)
367 {
368         struct nv50_head_atom *asyh = nv50_head_atom(crtc_state);
369         struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
370         struct drm_display_mode *mode = &asyh->state.adjusted_mode;
371         unsigned int max_rate, mode_rate;
372
373         switch (nv_encoder->dcb->type) {
374         case DCB_OUTPUT_DP:
375                 max_rate = nv_encoder->dp.link_nr * nv_encoder->dp.link_bw;
376
377                 /* we don't support more than 10 anyway */
378                 asyh->or.bpc = min_t(u8, asyh->or.bpc, 10);
379
380                 /* reduce the bpc until the mode rate fits within the link bandwidth */
381                 while (asyh->or.bpc > 6) {
382                         mode_rate = DIV_ROUND_UP(mode->clock * asyh->or.bpc * 3, 8);
383                         if (mode_rate <= max_rate)
384                                 break;
385
386                         asyh->or.bpc -= 2;
387                 }
388                 break;
389         default:
390                 break;
391         }
392 }
393
394 static int
395 nv50_outp_atomic_check(struct drm_encoder *encoder,
396                        struct drm_crtc_state *crtc_state,
397                        struct drm_connector_state *conn_state)
398 {
399         struct drm_connector *connector = conn_state->connector;
400         struct nouveau_connector *nv_connector = nouveau_connector(connector);
401         struct nv50_head_atom *asyh = nv50_head_atom(crtc_state);
402         int ret;
403
404         ret = nv50_outp_atomic_check_view(encoder, crtc_state, conn_state,
405                                           nv_connector->native_mode);
406         if (ret)
407                 return ret;
408
409         if (crtc_state->mode_changed || crtc_state->connectors_changed)
410                 asyh->or.bpc = connector->display_info.bpc;
411
412         /* We might have to reduce the bpc */
413         nv50_outp_atomic_fix_depth(encoder, crtc_state);
414
415         return 0;
416 }
417
418 struct nouveau_connector *
419 nv50_outp_get_new_connector(struct drm_atomic_state *state, struct nouveau_encoder *outp)
420 {
421         struct drm_connector *connector;
422         struct drm_connector_state *connector_state;
423         struct drm_encoder *encoder = to_drm_encoder(outp);
424         int i;
425
426         for_each_new_connector_in_state(state, connector, connector_state, i) {
427                 if (connector_state->best_encoder == encoder)
428                         return nouveau_connector(connector);
429         }
430
431         return NULL;
432 }
433
434 struct nouveau_connector *
435 nv50_outp_get_old_connector(struct drm_atomic_state *state, struct nouveau_encoder *outp)
436 {
437         struct drm_connector *connector;
438         struct drm_connector_state *connector_state;
439         struct drm_encoder *encoder = to_drm_encoder(outp);
440         int i;
441
442         for_each_old_connector_in_state(state, connector, connector_state, i) {
443                 if (connector_state->best_encoder == encoder)
444                         return nouveau_connector(connector);
445         }
446
447         return NULL;
448 }
449
450 static struct nouveau_crtc *
451 nv50_outp_get_new_crtc(const struct drm_atomic_state *state, const struct nouveau_encoder *outp)
452 {
453         struct drm_crtc *crtc;
454         struct drm_crtc_state *crtc_state;
455         const u32 mask = drm_encoder_mask(&outp->base.base);
456         int i;
457
458         for_each_new_crtc_in_state(state, crtc, crtc_state, i) {
459                 if (crtc_state->encoder_mask & mask)
460                         return nouveau_crtc(crtc);
461         }
462
463         return NULL;
464 }
465
466 /******************************************************************************
467  * DAC
468  *****************************************************************************/
469 static void
470 nv50_dac_atomic_disable(struct drm_encoder *encoder, struct drm_atomic_state *state)
471 {
472         struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
473         struct nv50_core *core = nv50_disp(encoder->dev)->core;
474         const u32 ctrl = NVDEF(NV507D, DAC_SET_CONTROL, OWNER, NONE);
475
476         core->func->dac->ctrl(core, nv_encoder->outp.or.id, ctrl, NULL);
477         nv_encoder->crtc = NULL;
478 }
479
480 static void
481 nv50_dac_atomic_enable(struct drm_encoder *encoder, struct drm_atomic_state *state)
482 {
483         struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
484         struct nouveau_crtc *nv_crtc = nv50_outp_get_new_crtc(state, nv_encoder);
485         struct nv50_head_atom *asyh =
486                 nv50_head_atom(drm_atomic_get_new_crtc_state(state, &nv_crtc->base));
487         struct nv50_core *core = nv50_disp(encoder->dev)->core;
488         u32 ctrl = 0;
489
490         switch (nv_crtc->index) {
491         case 0: ctrl |= NVDEF(NV507D, DAC_SET_CONTROL, OWNER, HEAD0); break;
492         case 1: ctrl |= NVDEF(NV507D, DAC_SET_CONTROL, OWNER, HEAD1); break;
493         case 2: ctrl |= NVDEF(NV907D, DAC_SET_CONTROL, OWNER_MASK, HEAD2); break;
494         case 3: ctrl |= NVDEF(NV907D, DAC_SET_CONTROL, OWNER_MASK, HEAD3); break;
495         default:
496                 WARN_ON(1);
497                 break;
498         }
499
500         ctrl |= NVDEF(NV507D, DAC_SET_CONTROL, PROTOCOL, RGB_CRT);
501
502         if (!nvif_outp_acquired(&nv_encoder->outp))
503                 nvif_outp_acquire_dac(&nv_encoder->outp);
504
505         core->func->dac->ctrl(core, nv_encoder->outp.or.id, ctrl, asyh);
506         asyh->or.depth = 0;
507
508         nv_encoder->crtc = &nv_crtc->base;
509 }
510
511 static enum drm_connector_status
512 nv50_dac_detect(struct drm_encoder *encoder, struct drm_connector *connector)
513 {
514         struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
515         u32 loadval;
516         int ret;
517
518         loadval = nouveau_drm(encoder->dev)->vbios.dactestval;
519         if (loadval == 0)
520                 loadval = 340;
521
522         ret = nvif_outp_load_detect(&nv_encoder->outp, loadval);
523         if (ret <= 0)
524                 return connector_status_disconnected;
525
526         return connector_status_connected;
527 }
528
529 static const struct drm_encoder_helper_funcs
530 nv50_dac_help = {
531         .atomic_check = nv50_outp_atomic_check,
532         .atomic_enable = nv50_dac_atomic_enable,
533         .atomic_disable = nv50_dac_atomic_disable,
534         .detect = nv50_dac_detect
535 };
536
537 static void
538 nv50_dac_destroy(struct drm_encoder *encoder)
539 {
540         struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
541
542         nvif_outp_dtor(&nv_encoder->outp);
543
544         drm_encoder_cleanup(encoder);
545         kfree(encoder);
546 }
547
548 static const struct drm_encoder_funcs
549 nv50_dac_func = {
550         .destroy = nv50_dac_destroy,
551 };
552
553 static int
554 nv50_dac_create(struct nouveau_encoder *nv_encoder)
555 {
556         struct drm_connector *connector = &nv_encoder->conn->base;
557         struct nouveau_drm *drm = nouveau_drm(connector->dev);
558         struct nvkm_i2c *i2c = nvxx_i2c(&drm->client.device);
559         struct nvkm_i2c_bus *bus;
560         struct drm_encoder *encoder;
561         struct dcb_output *dcbe = nv_encoder->dcb;
562         int type = DRM_MODE_ENCODER_DAC;
563
564         bus = nvkm_i2c_bus_find(i2c, dcbe->i2c_index);
565         if (bus)
566                 nv_encoder->i2c = &bus->i2c;
567
568         encoder = to_drm_encoder(nv_encoder);
569         drm_encoder_init(connector->dev, encoder, &nv50_dac_func, type,
570                          "dac-%04x-%04x", dcbe->hasht, dcbe->hashm);
571         drm_encoder_helper_add(encoder, &nv50_dac_help);
572
573         drm_connector_attach_encoder(connector, encoder);
574         return 0;
575 }
576
577 /*
578  * audio component binding for ELD notification
579  */
580 static void
581 nv50_audio_component_eld_notify(struct drm_audio_component *acomp, int port,
582                                 int dev_id)
583 {
584         if (acomp && acomp->audio_ops && acomp->audio_ops->pin_eld_notify)
585                 acomp->audio_ops->pin_eld_notify(acomp->audio_ops->audio_ptr,
586                                                  port, dev_id);
587 }
588
589 static int
590 nv50_audio_component_get_eld(struct device *kdev, int port, int dev_id,
591                              bool *enabled, unsigned char *buf, int max_bytes)
592 {
593         struct drm_device *drm_dev = dev_get_drvdata(kdev);
594         struct nouveau_drm *drm = nouveau_drm(drm_dev);
595         struct drm_encoder *encoder;
596         struct nouveau_encoder *nv_encoder;
597         struct nouveau_crtc *nv_crtc;
598         int ret = 0;
599
600         *enabled = false;
601
602         mutex_lock(&drm->audio.lock);
603
604         drm_for_each_encoder(encoder, drm->dev) {
605                 struct nouveau_connector *nv_connector = NULL;
606
607                 if (encoder->encoder_type == DRM_MODE_ENCODER_DPMST)
608                         continue; /* TODO */
609
610                 nv_encoder = nouveau_encoder(encoder);
611                 nv_connector = nv_encoder->conn;
612                 nv_crtc = nouveau_crtc(nv_encoder->crtc);
613
614                 if (!nv_crtc || nv_encoder->outp.or.id != port || nv_crtc->index != dev_id)
615                         continue;
616
617                 *enabled = nv_encoder->audio.enabled;
618                 if (*enabled) {
619                         ret = drm_eld_size(nv_connector->base.eld);
620                         memcpy(buf, nv_connector->base.eld,
621                                min(max_bytes, ret));
622                 }
623                 break;
624         }
625
626         mutex_unlock(&drm->audio.lock);
627
628         return ret;
629 }
630
631 static const struct drm_audio_component_ops nv50_audio_component_ops = {
632         .get_eld = nv50_audio_component_get_eld,
633 };
634
635 static int
636 nv50_audio_component_bind(struct device *kdev, struct device *hda_kdev,
637                           void *data)
638 {
639         struct drm_device *drm_dev = dev_get_drvdata(kdev);
640         struct nouveau_drm *drm = nouveau_drm(drm_dev);
641         struct drm_audio_component *acomp = data;
642
643         if (WARN_ON(!device_link_add(hda_kdev, kdev, DL_FLAG_STATELESS)))
644                 return -ENOMEM;
645
646         drm_modeset_lock_all(drm_dev);
647         acomp->ops = &nv50_audio_component_ops;
648         acomp->dev = kdev;
649         drm->audio.component = acomp;
650         drm_modeset_unlock_all(drm_dev);
651         return 0;
652 }
653
654 static void
655 nv50_audio_component_unbind(struct device *kdev, struct device *hda_kdev,
656                             void *data)
657 {
658         struct drm_device *drm_dev = dev_get_drvdata(kdev);
659         struct nouveau_drm *drm = nouveau_drm(drm_dev);
660         struct drm_audio_component *acomp = data;
661
662         drm_modeset_lock_all(drm_dev);
663         drm->audio.component = NULL;
664         acomp->ops = NULL;
665         acomp->dev = NULL;
666         drm_modeset_unlock_all(drm_dev);
667 }
668
669 static const struct component_ops nv50_audio_component_bind_ops = {
670         .bind   = nv50_audio_component_bind,
671         .unbind = nv50_audio_component_unbind,
672 };
673
674 static void
675 nv50_audio_component_init(struct nouveau_drm *drm)
676 {
677         if (component_add(drm->dev->dev, &nv50_audio_component_bind_ops))
678                 return;
679
680         drm->audio.component_registered = true;
681         mutex_init(&drm->audio.lock);
682 }
683
684 static void
685 nv50_audio_component_fini(struct nouveau_drm *drm)
686 {
687         if (!drm->audio.component_registered)
688                 return;
689
690         component_del(drm->dev->dev, &nv50_audio_component_bind_ops);
691         drm->audio.component_registered = false;
692         mutex_destroy(&drm->audio.lock);
693 }
694
695 /******************************************************************************
696  * Audio
697  *****************************************************************************/
698 static bool
699 nv50_audio_supported(struct drm_encoder *encoder)
700 {
701         struct nv50_disp *disp = nv50_disp(encoder->dev);
702
703         if (disp->disp->object.oclass <= GT200_DISP ||
704             disp->disp->object.oclass == GT206_DISP)
705                 return false;
706
707         if (encoder->encoder_type != DRM_MODE_ENCODER_DPMST) {
708                 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
709
710                 switch (nv_encoder->dcb->type) {
711                 case DCB_OUTPUT_TMDS:
712                 case DCB_OUTPUT_DP:
713                         break;
714                 default:
715                         return false;
716                 }
717         }
718
719         return true;
720 }
721
722 static void
723 nv50_audio_disable(struct drm_encoder *encoder, struct nouveau_crtc *nv_crtc)
724 {
725         struct nouveau_drm *drm = nouveau_drm(encoder->dev);
726         struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
727         struct nvif_outp *outp = &nv_encoder->outp;
728
729         if (!nv50_audio_supported(encoder))
730                 return;
731
732         mutex_lock(&drm->audio.lock);
733         if (nv_encoder->audio.enabled) {
734                 nv_encoder->audio.enabled = false;
735                 nvif_outp_hda_eld(&nv_encoder->outp, nv_crtc->index, NULL, 0);
736         }
737         mutex_unlock(&drm->audio.lock);
738
739         nv50_audio_component_eld_notify(drm->audio.component, outp->or.id, nv_crtc->index);
740 }
741
742 static void
743 nv50_audio_enable(struct drm_encoder *encoder, struct nouveau_crtc *nv_crtc,
744                   struct nouveau_connector *nv_connector, struct drm_atomic_state *state,
745                   struct drm_display_mode *mode)
746 {
747         struct nouveau_drm *drm = nouveau_drm(encoder->dev);
748         struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
749         struct nvif_outp *outp = &nv_encoder->outp;
750
751         if (!nv50_audio_supported(encoder) || !drm_detect_monitor_audio(nv_connector->edid))
752                 return;
753
754         mutex_lock(&drm->audio.lock);
755
756         nvif_outp_hda_eld(&nv_encoder->outp, nv_crtc->index, nv_connector->base.eld,
757                           drm_eld_size(nv_connector->base.eld));
758         nv_encoder->audio.enabled = true;
759
760         mutex_unlock(&drm->audio.lock);
761
762         nv50_audio_component_eld_notify(drm->audio.component, outp->or.id, nv_crtc->index);
763 }
764
765 /******************************************************************************
766  * HDMI
767  *****************************************************************************/
768 static void
769 nv50_hdmi_enable(struct drm_encoder *encoder, struct nouveau_crtc *nv_crtc,
770                  struct nouveau_connector *nv_connector, struct drm_atomic_state *state,
771                  struct drm_display_mode *mode, bool hda)
772 {
773         struct nouveau_drm *drm = nouveau_drm(encoder->dev);
774         struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
775         struct drm_hdmi_info *hdmi = &nv_connector->base.display_info.hdmi;
776         union hdmi_infoframe infoframe = { 0 };
777         const u8 rekey = 56; /* constant used by both the binary driver and tegra */
778         u32 max_ac_packet;
779         struct {
780                 struct nvif_outp_infoframe_v0 infoframe;
781                 u8 data[17];
782         } args = { 0 };
783         int ret, size;
784
785         max_ac_packet  = mode->htotal - mode->hdisplay;
786         max_ac_packet -= rekey;
787         max_ac_packet -= 18; /* constant from tegra */
788         max_ac_packet /= 32;
789
790         if (nv_encoder->i2c && hdmi->scdc.scrambling.supported) {
791                 const bool high_tmds_clock_ratio = mode->clock > 340000;
792                 u8 scdc;
793
794                 ret = drm_scdc_readb(nv_encoder->i2c, SCDC_TMDS_CONFIG, &scdc);
795                 if (ret < 0) {
796                         NV_ERROR(drm, "Failure to read SCDC_TMDS_CONFIG: %d\n", ret);
797                         return;
798                 }
799
800                 scdc &= ~(SCDC_TMDS_BIT_CLOCK_RATIO_BY_40 | SCDC_SCRAMBLING_ENABLE);
801                 if (high_tmds_clock_ratio || hdmi->scdc.scrambling.low_rates)
802                         scdc |= SCDC_SCRAMBLING_ENABLE;
803                 if (high_tmds_clock_ratio)
804                         scdc |= SCDC_TMDS_BIT_CLOCK_RATIO_BY_40;
805
806                 ret = drm_scdc_writeb(nv_encoder->i2c, SCDC_TMDS_CONFIG, scdc);
807                 if (ret < 0)
808                         NV_ERROR(drm, "Failure to write SCDC_TMDS_CONFIG = 0x%02x: %d\n",
809                                  scdc, ret);
810         }
811
812         ret = nvif_outp_hdmi(&nv_encoder->outp, nv_crtc->index, true, max_ac_packet, rekey,
813                              mode->clock, hdmi->scdc.supported, hdmi->scdc.scrambling.supported,
814                              hdmi->scdc.scrambling.low_rates);
815         if (ret)
816                 return;
817
818         /* AVI InfoFrame. */
819         args.infoframe.version = 0;
820         args.infoframe.head = nv_crtc->index;
821
822         if (!drm_hdmi_avi_infoframe_from_display_mode(&infoframe.avi, &nv_connector->base, mode)) {
823                 drm_hdmi_avi_infoframe_quant_range(&infoframe.avi, &nv_connector->base, mode,
824                                                    HDMI_QUANTIZATION_RANGE_FULL);
825
826                 size = hdmi_infoframe_pack(&infoframe, args.data, ARRAY_SIZE(args.data));
827         } else {
828                 size = 0;
829         }
830
831         nvif_outp_infoframe(&nv_encoder->outp, NVIF_OUTP_INFOFRAME_V0_AVI, &args.infoframe, size);
832
833         /* Vendor InfoFrame. */
834         memset(&args.data, 0, sizeof(args.data));
835         if (!drm_hdmi_vendor_infoframe_from_display_mode(&infoframe.vendor.hdmi,
836                                                          &nv_connector->base, mode))
837                 size = hdmi_infoframe_pack(&infoframe, args.data, ARRAY_SIZE(args.data));
838         else
839                 size = 0;
840
841         nvif_outp_infoframe(&nv_encoder->outp, NVIF_OUTP_INFOFRAME_V0_VSI, &args.infoframe, size);
842
843         nv_encoder->hdmi.enabled = true;
844 }
845
846 /******************************************************************************
847  * MST
848  *****************************************************************************/
849 #define nv50_mstm(p) container_of((p), struct nv50_mstm, mgr)
850 #define nv50_mstc(p) container_of((p), struct nv50_mstc, connector)
851 #define nv50_msto(p) container_of((p), struct nv50_msto, encoder)
852
853 struct nv50_mstc {
854         struct nv50_mstm *mstm;
855         struct drm_dp_mst_port *port;
856         struct drm_connector connector;
857
858         struct drm_display_mode *native;
859         struct edid *edid;
860 };
861
862 struct nv50_msto {
863         struct drm_encoder encoder;
864
865         /* head is statically assigned on msto creation */
866         struct nv50_head *head;
867         struct nv50_mstc *mstc;
868         bool disabled;
869         bool enabled;
870
871         u32 display_id;
872 };
873
874 struct nouveau_encoder *nv50_real_outp(struct drm_encoder *encoder)
875 {
876         struct nv50_msto *msto;
877
878         if (encoder->encoder_type != DRM_MODE_ENCODER_DPMST)
879                 return nouveau_encoder(encoder);
880
881         msto = nv50_msto(encoder);
882         if (!msto->mstc)
883                 return NULL;
884         return msto->mstc->mstm->outp;
885 }
886
887 static void
888 nv50_msto_cleanup(struct drm_atomic_state *state,
889                   struct drm_dp_mst_topology_state *new_mst_state,
890                   struct drm_dp_mst_topology_mgr *mgr,
891                   struct nv50_msto *msto)
892 {
893         struct nouveau_drm *drm = nouveau_drm(msto->encoder.dev);
894         struct drm_dp_mst_atomic_payload *new_payload =
895                 drm_atomic_get_mst_payload_state(new_mst_state, msto->mstc->port);
896         struct drm_dp_mst_topology_state *old_mst_state =
897                 drm_atomic_get_old_mst_topology_state(state, mgr);
898         const struct drm_dp_mst_atomic_payload *old_payload =
899                 drm_atomic_get_mst_payload_state(old_mst_state, msto->mstc->port);
900         struct nv50_mstc *mstc = msto->mstc;
901         struct nv50_mstm *mstm = mstc->mstm;
902
903         NV_ATOMIC(drm, "%s: msto cleanup\n", msto->encoder.name);
904
905         if (msto->disabled) {
906                 if (msto->head->func->display_id) {
907                         nvif_outp_dp_mst_id_put(&mstm->outp->outp, msto->display_id);
908                         msto->display_id = 0;
909                 }
910
911                 msto->mstc = NULL;
912                 msto->disabled = false;
913                 drm_dp_remove_payload_part2(mgr, new_mst_state, old_payload, new_payload);
914         } else if (msto->enabled) {
915                 drm_dp_add_payload_part2(mgr, state, new_payload);
916                 msto->enabled = false;
917         }
918 }
919
920 static void
921 nv50_msto_prepare(struct drm_atomic_state *state,
922                   struct drm_dp_mst_topology_state *mst_state,
923                   struct drm_dp_mst_topology_mgr *mgr,
924                   struct nv50_msto *msto)
925 {
926         struct nouveau_drm *drm = nouveau_drm(msto->encoder.dev);
927         struct nv50_mstc *mstc = msto->mstc;
928         struct nv50_mstm *mstm = mstc->mstm;
929         struct drm_dp_mst_atomic_payload *payload;
930         int ret = 0;
931
932         NV_ATOMIC(drm, "%s: msto prepare\n", msto->encoder.name);
933
934         payload = drm_atomic_get_mst_payload_state(mst_state, mstc->port);
935
936         if (msto->disabled) {
937                 drm_dp_remove_payload_part1(mgr, mst_state, payload);
938                 nvif_outp_dp_mst_vcpi(&mstm->outp->outp, msto->head->base.index, 0, 0, 0, 0);
939                 ret = 1;
940         } else {
941                 if (msto->enabled)
942                         ret = drm_dp_add_payload_part1(mgr, mst_state, payload);
943         }
944
945         if (ret == 0) {
946                 nvif_outp_dp_mst_vcpi(&mstm->outp->outp, msto->head->base.index,
947                                       payload->vc_start_slot, payload->time_slots,
948                                       payload->pbn, payload->time_slots * mst_state->pbn_div);
949         } else {
950                 nvif_outp_dp_mst_vcpi(&mstm->outp->outp, msto->head->base.index, 0, 0, 0, 0);
951         }
952 }
953
954 static int
955 nv50_msto_atomic_check(struct drm_encoder *encoder,
956                        struct drm_crtc_state *crtc_state,
957                        struct drm_connector_state *conn_state)
958 {
959         struct drm_atomic_state *state = crtc_state->state;
960         struct drm_connector *connector = conn_state->connector;
961         struct drm_dp_mst_topology_state *mst_state;
962         struct nv50_mstc *mstc = nv50_mstc(connector);
963         struct nv50_mstm *mstm = mstc->mstm;
964         struct nv50_head_atom *asyh = nv50_head_atom(crtc_state);
965         int slots;
966         int ret;
967
968         ret = nv50_outp_atomic_check_view(encoder, crtc_state, conn_state,
969                                           mstc->native);
970         if (ret)
971                 return ret;
972
973         if (!drm_atomic_crtc_needs_modeset(crtc_state))
974                 return 0;
975
976         /*
977          * When restoring duplicated states, we need to make sure that the bw
978          * remains the same and avoid recalculating it, as the connector's bpc
979          * may have changed after the state was duplicated
980          */
981         if (!state->duplicated) {
982                 const int clock = crtc_state->adjusted_mode.clock;
983
984                 asyh->or.bpc = connector->display_info.bpc;
985                 asyh->dp.pbn = drm_dp_calc_pbn_mode(clock, asyh->or.bpc * 3,
986                                                     false);
987         }
988
989         mst_state = drm_atomic_get_mst_topology_state(state, &mstm->mgr);
990         if (IS_ERR(mst_state))
991                 return PTR_ERR(mst_state);
992
993         if (!mst_state->pbn_div) {
994                 struct nouveau_encoder *outp = mstc->mstm->outp;
995
996                 mst_state->pbn_div = drm_dp_get_vc_payload_bw(&mstm->mgr,
997                                                               outp->dp.link_bw, outp->dp.link_nr);
998         }
999
1000         slots = drm_dp_atomic_find_time_slots(state, &mstm->mgr, mstc->port, asyh->dp.pbn);
1001         if (slots < 0)
1002                 return slots;
1003
1004         asyh->dp.tu = slots;
1005
1006         return 0;
1007 }
1008
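/* Translate a bpc value into the SOR_SET_CONTROL pixel depth enum; anything
 * above 8 bpc is programmed as 30bpp (10 bpc).
 */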
1009 static u8
1010 nv50_dp_bpc_to_depth(unsigned int bpc)
1011 {
1012         switch (bpc) {
1013         case  6: return NV837D_SOR_SET_CONTROL_PIXEL_DEPTH_BPP_18_444;
1014         case  8: return NV837D_SOR_SET_CONTROL_PIXEL_DEPTH_BPP_24_444;
1015         case 10:
1016         default: return NV837D_SOR_SET_CONTROL_PIXEL_DEPTH_BPP_30_444;
1017         }
1018 }
1019
1020 static void
1021 nv50_msto_atomic_enable(struct drm_encoder *encoder, struct drm_atomic_state *state)
1022 {
1023         struct nv50_msto *msto = nv50_msto(encoder);
1024         struct nv50_head *head = msto->head;
1025         struct nv50_head_atom *asyh =
1026                 nv50_head_atom(drm_atomic_get_new_crtc_state(state, &head->base.base));
1027         struct nv50_mstc *mstc = NULL;
1028         struct nv50_mstm *mstm = NULL;
1029         struct drm_connector *connector;
1030         struct drm_connector_list_iter conn_iter;
1031         u8 proto;
1032
1033         drm_connector_list_iter_begin(encoder->dev, &conn_iter);
1034         drm_for_each_connector_iter(connector, &conn_iter) {
1035                 if (connector->state->best_encoder == &msto->encoder) {
1036                         mstc = nv50_mstc(connector);
1037                         mstm = mstc->mstm;
1038                         break;
1039                 }
1040         }
1041         drm_connector_list_iter_end(&conn_iter);
1042
1043         if (WARN_ON(!mstc))
1044                 return;
1045
1046         if (!mstm->links++) {
1047                 nvif_outp_acquire_sor(&mstm->outp->outp, false /*TODO: MST audio... */);
1048                 nouveau_dp_train(mstm->outp, true, 0, 0);
1049         }
1050
1051         if (head->func->display_id) {
1052                 if (!WARN_ON(nvif_outp_dp_mst_id_get(&mstm->outp->outp, &msto->display_id)))
1053                         head->func->display_id(head, msto->display_id);
1054         }
1055
1056         if (mstm->outp->outp.or.link & 1)
1057                 proto = NV917D_SOR_SET_CONTROL_PROTOCOL_DP_A;
1058         else
1059                 proto = NV917D_SOR_SET_CONTROL_PROTOCOL_DP_B;
1060
1061         mstm->outp->update(mstm->outp, head->base.index, asyh, proto,
1062                            nv50_dp_bpc_to_depth(asyh->or.bpc));
1063
1064         msto->mstc = mstc;
1065         msto->enabled = true;
1066         mstm->modified = true;
1067 }
1068
1069 static void
1070 nv50_msto_atomic_disable(struct drm_encoder *encoder, struct drm_atomic_state *state)
1071 {
1072         struct nv50_msto *msto = nv50_msto(encoder);
1073         struct nv50_mstc *mstc = msto->mstc;
1074         struct nv50_mstm *mstm = mstc->mstm;
1075
1076         if (msto->head->func->display_id)
1077                 msto->head->func->display_id(msto->head, 0);
1078
1079         mstm->outp->update(mstm->outp, msto->head->base.index, NULL, 0, 0);
1080         mstm->modified = true;
1081         if (!--mstm->links)
1082                 mstm->disabled = true;
1083         msto->disabled = true;
1084 }
1085
1086 static const struct drm_encoder_helper_funcs
1087 nv50_msto_help = {
1088         .atomic_disable = nv50_msto_atomic_disable,
1089         .atomic_enable = nv50_msto_atomic_enable,
1090         .atomic_check = nv50_msto_atomic_check,
1091 };
1092
1093 static void
1094 nv50_msto_destroy(struct drm_encoder *encoder)
1095 {
1096         struct nv50_msto *msto = nv50_msto(encoder);
1097         drm_encoder_cleanup(&msto->encoder);
1098         kfree(msto);
1099 }
1100
1101 static const struct drm_encoder_funcs
1102 nv50_msto = {
1103         .destroy = nv50_msto_destroy,
1104 };
1105
1106 static struct nv50_msto *
1107 nv50_msto_new(struct drm_device *dev, struct nv50_head *head, int id)
1108 {
1109         struct nv50_msto *msto;
1110         int ret;
1111
1112         msto = kzalloc(sizeof(*msto), GFP_KERNEL);
1113         if (!msto)
1114                 return ERR_PTR(-ENOMEM);
1115
1116         ret = drm_encoder_init(dev, &msto->encoder, &nv50_msto,
1117                                DRM_MODE_ENCODER_DPMST, "mst-%d", id);
1118         if (ret) {
1119                 kfree(msto);
1120                 return ERR_PTR(ret);
1121         }
1122
1123         drm_encoder_helper_add(&msto->encoder, &nv50_msto_help);
1124         msto->encoder.possible_crtcs = drm_crtc_mask(&head->base.base);
1125         msto->head = head;
1126         return msto;
1127 }
1128
1129 static struct drm_encoder *
1130 nv50_mstc_atomic_best_encoder(struct drm_connector *connector,
1131                               struct drm_atomic_state *state)
1132 {
1133         struct drm_connector_state *connector_state = drm_atomic_get_new_connector_state(state,
1134                                                                                          connector);
1135         struct nv50_mstc *mstc = nv50_mstc(connector);
1136         struct drm_crtc *crtc = connector_state->crtc;
1137
1138         if (!(mstc->mstm->outp->dcb->heads & drm_crtc_mask(crtc)))
1139                 return NULL;
1140
1141         return &nv50_head(crtc)->msto->encoder;
1142 }
1143
1144 static enum drm_mode_status
1145 nv50_mstc_mode_valid(struct drm_connector *connector,
1146                      struct drm_display_mode *mode)
1147 {
1148         struct nv50_mstc *mstc = nv50_mstc(connector);
1149         struct nouveau_encoder *outp = mstc->mstm->outp;
1150
1151         /* TODO: calculate the PBN from the dotclock and validate against the
1152          * MSTB's max possible PBN
1153          */
1154
1155         return nv50_dp_mode_valid(outp, mode, NULL);
1156 }
1157
1158 static int
1159 nv50_mstc_get_modes(struct drm_connector *connector)
1160 {
1161         struct nv50_mstc *mstc = nv50_mstc(connector);
1162         int ret = 0;
1163
1164         mstc->edid = drm_dp_mst_get_edid(&mstc->connector, mstc->port->mgr, mstc->port);
1165         drm_connector_update_edid_property(&mstc->connector, mstc->edid);
1166         if (mstc->edid)
1167                 ret = drm_add_edid_modes(&mstc->connector, mstc->edid);
1168
1169         /*
1170          * XXX: Since we don't use HDR in userspace quite yet, limit the bpc
1171          * to 8 to save bandwidth on the topology. In the future, we'll want
1172          * to properly fix this by dynamically selecting the highest possible
1173          * bpc that would fit in the topology
1174          */
1175         if (connector->display_info.bpc)
1176                 connector->display_info.bpc =
1177                         clamp(connector->display_info.bpc, 6U, 8U);
1178         else
1179                 connector->display_info.bpc = 8;
1180
1181         if (mstc->native)
1182                 drm_mode_destroy(mstc->connector.dev, mstc->native);
1183         mstc->native = nouveau_conn_native_mode(&mstc->connector);
1184         return ret;
1185 }
1186
1187 static int
1188 nv50_mstc_atomic_check(struct drm_connector *connector,
1189                        struct drm_atomic_state *state)
1190 {
1191         struct nv50_mstc *mstc = nv50_mstc(connector);
1192         struct drm_dp_mst_topology_mgr *mgr = &mstc->mstm->mgr;
1193
1194         return drm_dp_atomic_release_time_slots(state, mgr, mstc->port);
1195 }
1196
1197 static int
1198 nv50_mstc_detect(struct drm_connector *connector,
1199                  struct drm_modeset_acquire_ctx *ctx, bool force)
1200 {
1201         struct nv50_mstc *mstc = nv50_mstc(connector);
1202         int ret;
1203
1204         if (drm_connector_is_unregistered(connector))
1205                 return connector_status_disconnected;
1206
1207         ret = pm_runtime_get_sync(connector->dev->dev);
1208         if (ret < 0 && ret != -EACCES) {
1209                 pm_runtime_put_autosuspend(connector->dev->dev);
1210                 return connector_status_disconnected;
1211         }
1212
1213         ret = drm_dp_mst_detect_port(connector, ctx, mstc->port->mgr,
1214                                      mstc->port);
1215         if (ret != connector_status_connected)
1216                 goto out;
1217
1218 out:
1219         pm_runtime_mark_last_busy(connector->dev->dev);
1220         pm_runtime_put_autosuspend(connector->dev->dev);
1221         return ret;
1222 }
1223
1224 static const struct drm_connector_helper_funcs
1225 nv50_mstc_help = {
1226         .get_modes = nv50_mstc_get_modes,
1227         .mode_valid = nv50_mstc_mode_valid,
1228         .atomic_best_encoder = nv50_mstc_atomic_best_encoder,
1229         .atomic_check = nv50_mstc_atomic_check,
1230         .detect_ctx = nv50_mstc_detect,
1231 };
1232
1233 static void
1234 nv50_mstc_destroy(struct drm_connector *connector)
1235 {
1236         struct nv50_mstc *mstc = nv50_mstc(connector);
1237
1238         drm_connector_cleanup(&mstc->connector);
1239         drm_dp_mst_put_port_malloc(mstc->port);
1240
1241         kfree(mstc);
1242 }
1243
1244 static const struct drm_connector_funcs
1245 nv50_mstc = {
1246         .reset = nouveau_conn_reset,
1247         .fill_modes = drm_helper_probe_single_connector_modes,
1248         .destroy = nv50_mstc_destroy,
1249         .atomic_duplicate_state = nouveau_conn_atomic_duplicate_state,
1250         .atomic_destroy_state = nouveau_conn_atomic_destroy_state,
1251         .atomic_set_property = nouveau_conn_atomic_set_property,
1252         .atomic_get_property = nouveau_conn_atomic_get_property,
1253 };
1254
1255 static int
1256 nv50_mstc_new(struct nv50_mstm *mstm, struct drm_dp_mst_port *port,
1257               const char *path, struct nv50_mstc **pmstc)
1258 {
1259         struct drm_device *dev = mstm->outp->base.base.dev;
1260         struct drm_crtc *crtc;
1261         struct nv50_mstc *mstc;
1262         int ret;
1263
1264         if (!(mstc = *pmstc = kzalloc(sizeof(*mstc), GFP_KERNEL)))
1265                 return -ENOMEM;
1266         mstc->mstm = mstm;
1267         mstc->port = port;
1268
1269         ret = drm_connector_init(dev, &mstc->connector, &nv50_mstc,
1270                                  DRM_MODE_CONNECTOR_DisplayPort);
1271         if (ret) {
1272                 kfree(*pmstc);
1273                 *pmstc = NULL;
1274                 return ret;
1275         }
1276
1277         drm_connector_helper_add(&mstc->connector, &nv50_mstc_help);
1278
1279         mstc->connector.funcs->reset(&mstc->connector);
1280         nouveau_conn_attach_properties(&mstc->connector);
1281
1282         drm_for_each_crtc(crtc, dev) {
1283                 if (!(mstm->outp->dcb->heads & drm_crtc_mask(crtc)))
1284                         continue;
1285
1286                 drm_connector_attach_encoder(&mstc->connector,
1287                                              &nv50_head(crtc)->msto->encoder);
1288         }
1289
1290         drm_object_attach_property(&mstc->connector.base, dev->mode_config.path_property, 0);
1291         drm_object_attach_property(&mstc->connector.base, dev->mode_config.tile_property, 0);
1292         drm_connector_set_path_property(&mstc->connector, path);
1293         drm_dp_mst_get_port_malloc(port);
1294         return 0;
1295 }
1296
1297 static void
1298 nv50_mstm_cleanup(struct drm_atomic_state *state,
1299                   struct drm_dp_mst_topology_state *mst_state,
1300                   struct nv50_mstm *mstm)
1301 {
1302         struct nouveau_drm *drm = nouveau_drm(mstm->outp->base.base.dev);
1303         struct drm_encoder *encoder;
1304
1305         NV_ATOMIC(drm, "%s: mstm cleanup\n", mstm->outp->base.base.name);
1306         drm_dp_check_act_status(&mstm->mgr);
1307
1308         drm_for_each_encoder(encoder, mstm->outp->base.base.dev) {
1309                 if (encoder->encoder_type == DRM_MODE_ENCODER_DPMST) {
1310                         struct nv50_msto *msto = nv50_msto(encoder);
1311                         struct nv50_mstc *mstc = msto->mstc;
1312                         if (mstc && mstc->mstm == mstm)
1313                                 nv50_msto_cleanup(state, mst_state, &mstm->mgr, msto);
1314                 }
1315         }
1316
1317         if (mstm->disabled) {
1318                 nouveau_dp_power_down(mstm->outp);
1319                 nvif_outp_release(&mstm->outp->outp);
1320                 mstm->disabled = false;
1321         }
1322
1323         mstm->modified = false;
1324 }
1325
1326 static void
1327 nv50_mstm_prepare(struct drm_atomic_state *state,
1328                   struct drm_dp_mst_topology_state *mst_state,
1329                   struct nv50_mstm *mstm)
1330 {
1331         struct nouveau_drm *drm = nouveau_drm(mstm->outp->base.base.dev);
1332         struct drm_encoder *encoder;
1333
1334         NV_ATOMIC(drm, "%s: mstm prepare\n", mstm->outp->base.base.name);
1335
1336         /* Disable payloads first */
1337         drm_for_each_encoder(encoder, mstm->outp->base.base.dev) {
1338                 if (encoder->encoder_type == DRM_MODE_ENCODER_DPMST) {
1339                         struct nv50_msto *msto = nv50_msto(encoder);
1340                         struct nv50_mstc *mstc = msto->mstc;
1341                         if (mstc && mstc->mstm == mstm && msto->disabled)
1342                                 nv50_msto_prepare(state, mst_state, &mstm->mgr, msto);
1343                 }
1344         }
1345
1346         /* Add payloads for new heads, while also updating the start slots of any unmodified (but
1347          * active) heads that may have had their VC slots shifted left after the previous step
1348          */
1349         drm_for_each_encoder(encoder, mstm->outp->base.base.dev) {
1350                 if (encoder->encoder_type == DRM_MODE_ENCODER_DPMST) {
1351                         struct nv50_msto *msto = nv50_msto(encoder);
1352                         struct nv50_mstc *mstc = msto->mstc;
1353                         if (mstc && mstc->mstm == mstm && !msto->disabled)
1354                                 nv50_msto_prepare(state, mst_state, &mstm->mgr, msto);
1355                 }
1356         }
1357 }
1358
1359 static struct drm_connector *
1360 nv50_mstm_add_connector(struct drm_dp_mst_topology_mgr *mgr,
1361                         struct drm_dp_mst_port *port, const char *path)
1362 {
1363         struct nv50_mstm *mstm = nv50_mstm(mgr);
1364         struct nv50_mstc *mstc;
1365         int ret;
1366
1367         ret = nv50_mstc_new(mstm, port, path, &mstc);
1368         if (ret)
1369                 return NULL;
1370
1371         return &mstc->connector;
1372 }
1373
1374 static const struct drm_dp_mst_topology_cbs
1375 nv50_mstm = {
1376         .add_connector = nv50_mstm_add_connector,
1377 };
1378
1379 bool
1380 nv50_mstm_service(struct nouveau_drm *drm,
1381                   struct nouveau_connector *nv_connector,
1382                   struct nv50_mstm *mstm)
1383 {
1384         struct drm_dp_aux *aux = &nv_connector->aux;
1385         bool handled = true, ret = true;
1386         int rc;
1387         u8 esi[8] = {};
1388
1389         while (handled) {
1390                 u8 ack[8] = {};
1391
1392                 rc = drm_dp_dpcd_read(aux, DP_SINK_COUNT_ESI, esi, 8);
1393                 if (rc != 8) {
1394                         ret = false;
1395                         break;
1396                 }
1397
1398                 drm_dp_mst_hpd_irq_handle_event(&mstm->mgr, esi, ack, &handled);
1399                 if (!handled)
1400                         break;
1401
1402                 rc = drm_dp_dpcd_writeb(aux, DP_SINK_COUNT_ESI + 1, ack[1]);
1403
1404                 if (rc != 1) {
1405                         ret = false;
1406                         break;
1407                 }
1408
1409                 drm_dp_mst_hpd_irq_send_new_request(&mstm->mgr);
1410         }
1411
1412         if (!ret)
1413                 NV_DEBUG(drm, "Failed to handle ESI on %s: %d\n",
1414                          nv_connector->base.name, rc);
1415
1416         return ret;
1417 }
1418
1419 void
1420 nv50_mstm_remove(struct nv50_mstm *mstm)
1421 {
1422         mstm->is_mst = false;
1423         drm_dp_mst_topology_mgr_set_mst(&mstm->mgr, false);
1424 }
1425
1426 int
1427 nv50_mstm_detect(struct nouveau_encoder *outp)
1428 {
1429         struct nv50_mstm *mstm = outp->dp.mstm;
1430         struct drm_dp_aux *aux;
1431         int ret;
1432
1433         if (!mstm || !mstm->can_mst)
1434                 return 0;
1435
1436         aux = mstm->mgr.aux;
1437
1438         /* Clear any leftover MST state we didn't set ourselves by first
1439          * disabling MST if it was already enabled
1440          */
1441         ret = drm_dp_dpcd_writeb(aux, DP_MSTM_CTRL, 0);
1442         if (ret < 0)
1443                 return ret;
1444
1445         /* And start enabling */
1446         ret = drm_dp_mst_topology_mgr_set_mst(&mstm->mgr, true);
1447         if (ret)
1448                 return ret;
1449
1450         mstm->is_mst = true;
1451         return 1;
1452 }
1453
1454 static void
1455 nv50_mstm_fini(struct nouveau_encoder *outp)
1456 {
1457         struct nv50_mstm *mstm = outp->dp.mstm;
1458
1459         if (!mstm)
1460                 return;
1461
1462         /* Don't change the MST state of this connector until we've finished
1463          * resuming, since we can't safely grab hpd_irq_lock in our resume
1464          * path to protect mstm->is_mst without potentially deadlocking
1465          */
1466         mutex_lock(&outp->dp.hpd_irq_lock);
1467         mstm->suspended = true;
1468         mutex_unlock(&outp->dp.hpd_irq_lock);
1469
1470         if (mstm->is_mst)
1471                 drm_dp_mst_topology_mgr_suspend(&mstm->mgr);
1472 }
1473
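/* Resume-time counterpart to nv50_mstm_fini(): resume the topology manager,
 * clear the suspended flag, and if the MST device went away while we were
 * suspended, tear MST down and signal a hotplug event.
 */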
1474 static void
1475 nv50_mstm_init(struct nouveau_encoder *outp, bool runtime)
1476 {
1477         struct nv50_mstm *mstm = outp->dp.mstm;
1478         int ret = 0;
1479
1480         if (!mstm)
1481                 return;
1482
1483         if (mstm->is_mst) {
1484                 ret = drm_dp_mst_topology_mgr_resume(&mstm->mgr, !runtime);
1485                 if (ret == -1)
1486                         nv50_mstm_remove(mstm);
1487         }
1488
1489         mutex_lock(&outp->dp.hpd_irq_lock);
1490         mstm->suspended = false;
1491         mutex_unlock(&outp->dp.hpd_irq_lock);
1492
1493         if (ret == -1)
1494                 drm_kms_helper_hotplug_event(mstm->mgr.dev);
1495 }
1496
1497 static void
1498 nv50_mstm_del(struct nv50_mstm **pmstm)
1499 {
1500         struct nv50_mstm *mstm = *pmstm;
1501         if (mstm) {
1502                 drm_dp_mst_topology_mgr_destroy(&mstm->mgr);
1503                 kfree(*pmstm);
1504                 *pmstm = NULL;
1505         }
1506 }
1507
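/* Allocate the software state for an MST-capable SOR.  The payload count is
 * bounded by the number of heads this OR can drive according to the DCB.
 */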
1508 static int
1509 nv50_mstm_new(struct nouveau_encoder *outp, struct drm_dp_aux *aux, int aux_max,
1510               int conn_base_id, struct nv50_mstm **pmstm)
1511 {
1512         const int max_payloads = hweight8(outp->dcb->heads);
1513         struct drm_device *dev = outp->base.base.dev;
1514         struct nv50_mstm *mstm;
1515         int ret;
1516
1517         if (!(mstm = *pmstm = kzalloc(sizeof(*mstm), GFP_KERNEL)))
1518                 return -ENOMEM;
1519         mstm->outp = outp;
1520         mstm->mgr.cbs = &nv50_mstm;
1521
1522         ret = drm_dp_mst_topology_mgr_init(&mstm->mgr, dev, aux, aux_max,
1523                                            max_payloads, conn_base_id);
1524         if (ret)
1525                 return ret;
1526
1527         return 0;
1528 }
1529
1530 /******************************************************************************
1531  * SOR
1532  *****************************************************************************/
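/* Program SOR_SET_CONTROL for this encoder through the core channel.  A NULL
 * asyh detaches the given head; otherwise the head is added to the owner mask
 * along with the requested protocol, and the pixel depth is recorded.
 */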
1533 static void
1534 nv50_sor_update(struct nouveau_encoder *nv_encoder, u8 head,
1535                 struct nv50_head_atom *asyh, u8 proto, u8 depth)
1536 {
1537         struct nv50_disp *disp = nv50_disp(nv_encoder->base.base.dev);
1538         struct nv50_core *core = disp->core;
1539
1540         if (!asyh) {
1541                 nv_encoder->ctrl &= ~BIT(head);
1542                 if (NVDEF_TEST(nv_encoder->ctrl, NV507D, SOR_SET_CONTROL, OWNER, ==, NONE))
1543                         nv_encoder->ctrl = 0;
1544         } else {
1545                 nv_encoder->ctrl |= NVVAL(NV507D, SOR_SET_CONTROL, PROTOCOL, proto);
1546                 nv_encoder->ctrl |= BIT(head);
1547                 asyh->or.depth = depth;
1548         }
1549
1550         core->func->sor->ctrl(core, nv_encoder->outp.or.id, nv_encoder->ctrl, asyh);
1551 }
1552
1553 /* TODO: Should we extend this to PWM-only backlights?
1554  * Also, should we add a DRM helper for waiting for the backlight to acknowledge
1555  * that the panel backlight has been shut off? Intel doesn't seem to do this, and
1556  * uses a fixed time delay from the vbios...
1557  */
1558 static void
1559 nv50_sor_atomic_disable(struct drm_encoder *encoder, struct drm_atomic_state *state)
1560 {
1561         struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1562         struct nv50_head *head = nv50_head(nv_encoder->crtc);
1563 #ifdef CONFIG_DRM_NOUVEAU_BACKLIGHT
1564         struct nouveau_connector *nv_connector = nv50_outp_get_old_connector(state, nv_encoder);
1565         struct nouveau_drm *drm = nouveau_drm(nv_encoder->base.base.dev);
1566         struct nouveau_backlight *backlight = nv_connector->backlight;
1567         struct drm_dp_aux *aux = &nv_connector->aux;
1568         int ret;
1569
1570         if (backlight && backlight->uses_dpcd) {
1571                 ret = drm_edp_backlight_disable(aux, &backlight->edp_info);
1572                 if (ret < 0)
1573                         NV_ERROR(drm, "Failed to disable backlight on [CONNECTOR:%d:%s]: %d\n",
1574                                  nv_connector->base.base.id, nv_connector->base.name, ret);
1575         }
1576 #endif
1577
1578         if (nv_encoder->dcb->type == DCB_OUTPUT_TMDS && nv_encoder->hdmi.enabled) {
1579                 nvif_outp_hdmi(&nv_encoder->outp, head->base.index,
1580                                false, 0, 0, 0, false, false, false);
1581                 nv_encoder->hdmi.enabled = false;
1582         }
1583
1584         if (nv_encoder->dcb->type == DCB_OUTPUT_DP)
1585                 nouveau_dp_power_down(nv_encoder);
1586
1587         if (head->func->display_id)
1588                 head->func->display_id(head, 0);
1589
1590         nv_encoder->update(nv_encoder, head->base.index, NULL, 0, 0);
1591         nv50_audio_disable(encoder, &head->base);
1592         nv_encoder->crtc = NULL;
1593 }
1594
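/* Bring up the SOR on the new CRTC: acquire the OR (with HDA if the sink has
 * audio), pick the protocol and pixel depth for the DCB output type (TMDS,
 * LVDS or DP, including link training and eDP backlight handling), then
 * attach the head via nv_encoder->update(), i.e. nv50_sor_update().
 */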
1595 static void
1596 nv50_sor_atomic_enable(struct drm_encoder *encoder, struct drm_atomic_state *state)
1597 {
1598         struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1599         struct nouveau_crtc *nv_crtc = nv50_outp_get_new_crtc(state, nv_encoder);
1600         struct nv50_head_atom *asyh =
1601                 nv50_head_atom(drm_atomic_get_new_crtc_state(state, &nv_crtc->base));
1602         struct drm_display_mode *mode = &asyh->state.adjusted_mode;
1603         struct nv50_disp *disp = nv50_disp(encoder->dev);
1604         struct nv50_head *head = nv50_head(&nv_crtc->base);
1605         struct nvif_outp *outp = &nv_encoder->outp;
1606         struct drm_device *dev = encoder->dev;
1607         struct nouveau_drm *drm = nouveau_drm(dev);
1608         struct nouveau_connector *nv_connector;
1609 #ifdef CONFIG_DRM_NOUVEAU_BACKLIGHT
1610         struct nouveau_backlight *backlight;
1611 #endif
1612         struct nvbios *bios = &drm->vbios;
1613         bool lvds_dual = false, lvds_8bpc = false, hda = false;
1614         u8 proto = NV507D_SOR_SET_CONTROL_PROTOCOL_CUSTOM;
1615         u8 depth = NV837D_SOR_SET_CONTROL_PIXEL_DEPTH_DEFAULT;
1616
1617         nv_connector = nv50_outp_get_new_connector(state, nv_encoder);
1618         nv_encoder->crtc = &nv_crtc->base;
1619
1620         if ((disp->disp->object.oclass == GT214_DISP ||
1621              disp->disp->object.oclass >= GF110_DISP) &&
1622             nv_encoder->dcb->type != DCB_OUTPUT_LVDS &&
1623             drm_detect_monitor_audio(nv_connector->edid))
1624                 hda = true;
1625
1626         if (!nvif_outp_acquired(outp))
1627                 nvif_outp_acquire_sor(outp, hda);
1628
1629         switch (nv_encoder->dcb->type) {
1630         case DCB_OUTPUT_TMDS:
1631                 if (disp->disp->object.oclass != NV50_DISP &&
1632                     drm_detect_hdmi_monitor(nv_connector->edid))
1633                         nv50_hdmi_enable(encoder, nv_crtc, nv_connector, state, mode, hda);
1634
1635                 if (nv_encoder->outp.or.link & 1) {
1636                         proto = NV507D_SOR_SET_CONTROL_PROTOCOL_SINGLE_TMDS_A;
1637                         /* Only enable dual-link if:
1638                          *  - we need to (i.e. pixel clock >= 165MHz)
1639                          *  - the DCB says we can
1640                          *  - it's not an HDMI monitor, since there's no
1641                          *    dual-link on HDMI.
1642                          */
1643                         if (mode->clock >= 165000 &&
1644                             nv_encoder->dcb->duallink_possible &&
1645                             !drm_detect_hdmi_monitor(nv_connector->edid))
1646                                 proto = NV507D_SOR_SET_CONTROL_PROTOCOL_DUAL_TMDS;
1647                 } else {
1648                         proto = NV507D_SOR_SET_CONTROL_PROTOCOL_SINGLE_TMDS_B;
1649                 }
1650                 break;
1651         case DCB_OUTPUT_LVDS:
1652                 proto = NV507D_SOR_SET_CONTROL_PROTOCOL_LVDS_CUSTOM;
1653
1654                 if (bios->fp_no_ddc) {
1655                         lvds_dual = bios->fp.dual_link;
1656                         lvds_8bpc = bios->fp.if_is_24bit;
1657                 } else {
1658                         if (nv_connector->type == DCB_CONNECTOR_LVDS_SPWG) {
1659                                 if (((u8 *)nv_connector->edid)[121] == 2)
1660                                         lvds_dual = true;
1661                         } else
1662                         if (mode->clock >= bios->fp.duallink_transition_clk) {
1663                                 lvds_dual = true;
1664                         }
1665
1666                         if (lvds_dual) {
1667                                 if (bios->fp.strapless_is_24bit & 2)
1668                                         lvds_8bpc = true;
1669                         } else {
1670                                 if (bios->fp.strapless_is_24bit & 1)
1671                                         lvds_8bpc = true;
1672                         }
1673
1674                         if (asyh->or.bpc == 8)
1675                                 lvds_8bpc = true;
1676                 }
1677
1678                 nvif_outp_lvds(&nv_encoder->outp, lvds_dual, lvds_8bpc);
1679                 break;
1680         case DCB_OUTPUT_DP:
1681                 nouveau_dp_train(nv_encoder, false, mode->clock, asyh->or.bpc);
1682                 depth = nv50_dp_bpc_to_depth(asyh->or.bpc);
1683
1684                 if (nv_encoder->outp.or.link & 1)
1685                         proto = NV887D_SOR_SET_CONTROL_PROTOCOL_DP_A;
1686                 else
1687                         proto = NV887D_SOR_SET_CONTROL_PROTOCOL_DP_B;
1688
1689 #ifdef CONFIG_DRM_NOUVEAU_BACKLIGHT
1690                 backlight = nv_connector->backlight;
1691                 if (backlight && backlight->uses_dpcd)
1692                         drm_edp_backlight_enable(&nv_connector->aux, &backlight->edp_info,
1693                                                  (u16)backlight->dev->props.brightness);
1694 #endif
1695
1696                 break;
1697         default:
1698                 BUG();
1699                 break;
1700         }
1701
1702         if (head->func->display_id)
1703                 head->func->display_id(head, BIT(nv_encoder->outp.id));
1704
1705         nv_encoder->update(nv_encoder, nv_crtc->index, asyh, proto, depth);
1706 }
1707
1708 static const struct drm_encoder_helper_funcs
1709 nv50_sor_help = {
1710         .atomic_check = nv50_outp_atomic_check,
1711         .atomic_enable = nv50_sor_atomic_enable,
1712         .atomic_disable = nv50_sor_atomic_disable,
1713 };
1714
1715 static void
1716 nv50_sor_destroy(struct drm_encoder *encoder)
1717 {
1718         struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1719
1720         nv50_mstm_del(&nv_encoder->dp.mstm);
1721         drm_encoder_cleanup(encoder);
1722
1723         if (nv_encoder->dcb->type == DCB_OUTPUT_DP)
1724                 mutex_destroy(&nv_encoder->dp.hpd_irq_lock);
1725
1726         nvif_outp_dtor(&nv_encoder->outp);
1727         kfree(encoder);
1728 }
1729
1730 static const struct drm_encoder_funcs
1731 nv50_sor_func = {
1732         .destroy = nv50_sor_destroy,
1733 };
1734
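/* Create the DRM encoder for a SOR: pick the encoder type from the DCB, hook
 * up the right I2C/AUX channel for DDC, and instantiate an MST topology
 * manager for DP outputs that support it.
 */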
1735 static int
1736 nv50_sor_create(struct nouveau_encoder *nv_encoder)
1737 {
1738         struct drm_connector *connector = &nv_encoder->conn->base;
1739         struct nouveau_connector *nv_connector = nouveau_connector(connector);
1740         struct nouveau_drm *drm = nouveau_drm(connector->dev);
1741         struct nvkm_i2c *i2c = nvxx_i2c(&drm->client.device);
1742         struct drm_encoder *encoder;
1743         struct dcb_output *dcbe = nv_encoder->dcb;
1744         struct nv50_disp *disp = nv50_disp(connector->dev);
1745         int type, ret;
1746
1747         switch (dcbe->type) {
1748         case DCB_OUTPUT_LVDS: type = DRM_MODE_ENCODER_LVDS; break;
1749         case DCB_OUTPUT_TMDS:
1750         case DCB_OUTPUT_DP:
1751         default:
1752                 type = DRM_MODE_ENCODER_TMDS;
1753                 break;
1754         }
1755
1756         nv_encoder->update = nv50_sor_update;
1757
1758         encoder = to_drm_encoder(nv_encoder);
1759         drm_encoder_init(connector->dev, encoder, &nv50_sor_func, type,
1760                          "sor-%04x-%04x", dcbe->hasht, dcbe->hashm);
1761         drm_encoder_helper_add(encoder, &nv50_sor_help);
1762
1763         drm_connector_attach_encoder(connector, encoder);
1764
1765         disp->core->func->sor->get_caps(disp, nv_encoder, ffs(dcbe->or) - 1);
1766         nv50_outp_dump_caps(drm, nv_encoder);
1767
1768         if (dcbe->type == DCB_OUTPUT_DP) {
1769                 mutex_init(&nv_encoder->dp.hpd_irq_lock);
1770
1771                 if (disp->disp->object.oclass < GF110_DISP) {
1772                         /* HW has no support for address-only
1773                          * transactions, so we're required to
1774                          * use custom I2C-over-AUX code.
1775                          */
1776                         struct nvkm_i2c_aux *aux;
1777
1778                         aux = nvkm_i2c_aux_find(i2c, dcbe->i2c_index);
1779                         if (!aux)
1780                                 return -EINVAL;
1781
1782                         nv_encoder->i2c = &aux->i2c;
1783                 } else {
1784                         nv_encoder->i2c = &nv_connector->aux.ddc;
1785                 }
1786
1787                 if (nv_connector->type != DCB_CONNECTOR_eDP && nv_encoder->outp.info.dp.mst) {
1788                         ret = nv50_mstm_new(nv_encoder, &nv_connector->aux,
1789                                             16, nv_connector->base.base.id,
1790                                             &nv_encoder->dp.mstm);
1791                         if (ret)
1792                                 return ret;
1793                 }
1794         } else
1795         if (nv_encoder->outp.info.ddc != NVIF_OUTP_DDC_INVALID) {
1796                 struct nvkm_i2c_bus *bus =
1797                         nvkm_i2c_bus_find(i2c, dcbe->i2c_index);
1798                 if (bus)
1799                         nv_encoder->i2c = &bus->i2c;
1800         }
1801
1802         return 0;
1803 }
1804
1805 /******************************************************************************
1806  * PIOR
1807  *****************************************************************************/
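/* PIORs share the common output-path check, after which the adjusted mode
 * clock is doubled for the external encoder.
 */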
1808 static int
1809 nv50_pior_atomic_check(struct drm_encoder *encoder,
1810                        struct drm_crtc_state *crtc_state,
1811                        struct drm_connector_state *conn_state)
1812 {
1813         int ret = nv50_outp_atomic_check(encoder, crtc_state, conn_state);
1814         if (ret)
1815                 return ret;
1816         crtc_state->adjusted_mode.clock *= 2;
1817         return 0;
1818 }
1819
1820 static void
1821 nv50_pior_atomic_disable(struct drm_encoder *encoder, struct drm_atomic_state *state)
1822 {
1823         struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1824         struct nv50_core *core = nv50_disp(encoder->dev)->core;
1825         const u32 ctrl = NVDEF(NV507D, PIOR_SET_CONTROL, OWNER, NONE);
1826
1827         core->func->pior->ctrl(core, nv_encoder->outp.or.id, ctrl, NULL);
1828         nv_encoder->crtc = NULL;
1829 }
1830
1831 static void
1832 nv50_pior_atomic_enable(struct drm_encoder *encoder, struct drm_atomic_state *state)
1833 {
1834         struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1835         struct nouveau_crtc *nv_crtc = nv50_outp_get_new_crtc(state, nv_encoder);
1836         struct nv50_head_atom *asyh =
1837                 nv50_head_atom(drm_atomic_get_new_crtc_state(state, &nv_crtc->base));
1838         struct nv50_core *core = nv50_disp(encoder->dev)->core;
1839         u32 ctrl = 0;
1840
1841         switch (nv_crtc->index) {
1842         case 0: ctrl |= NVDEF(NV507D, PIOR_SET_CONTROL, OWNER, HEAD0); break;
1843         case 1: ctrl |= NVDEF(NV507D, PIOR_SET_CONTROL, OWNER, HEAD1); break;
1844         default:
1845                 WARN_ON(1);
1846                 break;
1847         }
1848
1849         switch (asyh->or.bpc) {
1850         case 10: asyh->or.depth = NV837D_PIOR_SET_CONTROL_PIXEL_DEPTH_BPP_30_444; break;
1851         case  8: asyh->or.depth = NV837D_PIOR_SET_CONTROL_PIXEL_DEPTH_BPP_24_444; break;
1852         case  6: asyh->or.depth = NV837D_PIOR_SET_CONTROL_PIXEL_DEPTH_BPP_18_444; break;
1853         default: asyh->or.depth = NV837D_PIOR_SET_CONTROL_PIXEL_DEPTH_DEFAULT; break;
1854         }
1855
1856         if (!nvif_outp_acquired(&nv_encoder->outp))
1857                 nvif_outp_acquire_pior(&nv_encoder->outp);
1858
1859         switch (nv_encoder->dcb->type) {
1860         case DCB_OUTPUT_TMDS:
1861                 ctrl |= NVDEF(NV507D, PIOR_SET_CONTROL, PROTOCOL, EXT_TMDS_ENC);
1862                 break;
1863         case DCB_OUTPUT_DP:
1864                 ctrl |= NVDEF(NV507D, PIOR_SET_CONTROL, PROTOCOL, EXT_TMDS_ENC);
1865                 nouveau_dp_train(nv_encoder, false, asyh->state.adjusted_mode.clock, 6);
1866                 break;
1867         default:
1868                 BUG();
1869                 break;
1870         }
1871
1872         core->func->pior->ctrl(core, nv_encoder->outp.or.id, ctrl, asyh);
1873         nv_encoder->crtc = &nv_crtc->base;
1874 }
1875
1876 static const struct drm_encoder_helper_funcs
1877 nv50_pior_help = {
1878         .atomic_check = nv50_pior_atomic_check,
1879         .atomic_enable = nv50_pior_atomic_enable,
1880         .atomic_disable = nv50_pior_atomic_disable,
1881 };
1882
1883 static void
1884 nv50_pior_destroy(struct drm_encoder *encoder)
1885 {
1886         struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1887
1888         nvif_outp_dtor(&nv_encoder->outp);
1889
1890         drm_encoder_cleanup(encoder);
1891
1892         mutex_destroy(&nv_encoder->dp.hpd_irq_lock);
1893         kfree(encoder);
1894 }
1895
1896 static const struct drm_encoder_funcs
1897 nv50_pior_func = {
1898         .destroy = nv50_pior_destroy,
1899 };
1900
1901 static int
1902 nv50_pior_create(struct nouveau_encoder *nv_encoder)
1903 {
1904         struct drm_connector *connector = &nv_encoder->conn->base;
1905         struct drm_device *dev = connector->dev;
1906         struct nouveau_drm *drm = nouveau_drm(dev);
1907         struct nv50_disp *disp = nv50_disp(dev);
1908         struct nvkm_i2c *i2c = nvxx_i2c(&drm->client.device);
1909         struct nvkm_i2c_bus *bus = NULL;
1910         struct nvkm_i2c_aux *aux = NULL;
1911         struct i2c_adapter *ddc;
1912         struct drm_encoder *encoder;
1913         struct dcb_output *dcbe = nv_encoder->dcb;
1914         int type;
1915
1916         switch (dcbe->type) {
1917         case DCB_OUTPUT_TMDS:
1918                 bus  = nvkm_i2c_bus_find(i2c, nv_encoder->outp.info.ddc);
1919                 ddc  = bus ? &bus->i2c : NULL;
1920                 type = DRM_MODE_ENCODER_TMDS;
1921                 break;
1922         case DCB_OUTPUT_DP:
1923                 aux  = nvkm_i2c_aux_find(i2c, nv_encoder->outp.info.dp.aux);
1924                 ddc  = aux ? &aux->i2c : NULL;
1925                 type = DRM_MODE_ENCODER_TMDS;
1926                 break;
1927         default:
1928                 return -ENODEV;
1929         }
1930
1931         nv_encoder->i2c = ddc;
1932
1933         mutex_init(&nv_encoder->dp.hpd_irq_lock);
1934
1935         encoder = to_drm_encoder(nv_encoder);
1936         drm_encoder_init(connector->dev, encoder, &nv50_pior_func, type,
1937                          "pior-%04x-%04x", dcbe->hasht, dcbe->hashm);
1938         drm_encoder_helper_add(encoder, &nv50_pior_help);
1939
1940         drm_connector_attach_encoder(connector, encoder);
1941
1942         disp->core->func->pior->get_caps(disp, nv_encoder, ffs(dcbe->or) - 1);
1943         nv50_outp_dump_caps(drm, nv_encoder);
1944
1945         return 0;
1946 }
1947
1948 /******************************************************************************
1949  * Atomic
1950  *****************************************************************************/
1951
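/* Push a core channel update: prepare payloads on any modified MST managers,
 * kick the core update and wait on its notifier, clean up MST state, then
 * enable audio on freshly enabled non-MST output paths and release freshly
 * disabled ones.
 */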
1952 static void
1953 nv50_disp_atomic_commit_core(struct drm_atomic_state *state, u32 *interlock)
1954 {
1955         struct drm_dp_mst_topology_mgr *mgr;
1956         struct drm_dp_mst_topology_state *mst_state;
1957         struct nouveau_drm *drm = nouveau_drm(state->dev);
1958         struct nv50_disp *disp = nv50_disp(drm->dev);
1959         struct nv50_atom *atom = nv50_atom(state);
1960         struct nv50_core *core = disp->core;
1961         struct nv50_outp_atom *outp;
1962         struct nv50_mstm *mstm;
1963         int i;
1964
1965         NV_ATOMIC(drm, "commit core %08x\n", interlock[NV50_DISP_INTERLOCK_BASE]);
1966
1967         for_each_new_mst_mgr_in_state(state, mgr, mst_state, i) {
1968                 mstm = nv50_mstm(mgr);
1969                 if (mstm->modified)
1970                         nv50_mstm_prepare(state, mst_state, mstm);
1971         }
1972
1973         core->func->ntfy_init(disp->sync, NV50_DISP_CORE_NTFY);
1974         core->func->update(core, interlock, true);
1975         if (core->func->ntfy_wait_done(disp->sync, NV50_DISP_CORE_NTFY,
1976                                        disp->core->chan.base.device))
1977                 NV_ERROR(drm, "core notifier timeout\n");
1978
1979         for_each_new_mst_mgr_in_state(state, mgr, mst_state, i) {
1980                 mstm = nv50_mstm(mgr);
1981                 if (mstm->modified)
1982                         nv50_mstm_cleanup(state, mst_state, mstm);
1983         }
1984
1985         list_for_each_entry(outp, &atom->outp, head) {
1986                 if (outp->encoder->encoder_type != DRM_MODE_ENCODER_DPMST) {
1987                         struct nouveau_encoder *nv_encoder = nouveau_encoder(outp->encoder);
1988
1989                         if (outp->enabled) {
1990                                 nv50_audio_enable(outp->encoder, nouveau_crtc(nv_encoder->crtc),
1991                                                   nv_encoder->conn, NULL, NULL);
1992                                 outp->enabled = outp->disabled = false;
1993                         } else {
1994                                 if (outp->disabled) {
1995                                         nvif_outp_release(&nv_encoder->outp);
1996                                         outp->disabled = false;
1997                                 }
1998                         }
1999                 }
2000         }
2001 }
2002
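/* Flush any window (plane) channels whose interlock bits were armed by the
 * preceding state changes.
 */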
2003 static void
2004 nv50_disp_atomic_commit_wndw(struct drm_atomic_state *state, u32 *interlock)
2005 {
2006         struct drm_plane_state *new_plane_state;
2007         struct drm_plane *plane;
2008         int i;
2009
2010         for_each_new_plane_in_state(state, plane, new_plane_state, i) {
2011                 struct nv50_wndw *wndw = nv50_wndw(plane);
2012                 if (interlock[wndw->interlock.type] & wndw->interlock.data) {
2013                         if (wndw->func->update)
2014                                 wndw->func->update(wndw, interlock);
2015                 }
2016         }
2017 }
2018
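/* Main commit sequence: disable heads, planes and output paths that are going
 * away (flushing immediately if required), then enable/update output paths,
 * heads and planes, flush the update, wait for the hardware to signal
 * completion, and finally deliver any pending vblank events.
 */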
2019 static void
2020 nv50_disp_atomic_commit_tail(struct drm_atomic_state *state)
2021 {
2022         struct drm_device *dev = state->dev;
2023         struct drm_crtc_state *new_crtc_state, *old_crtc_state;
2024         struct drm_crtc *crtc;
2025         struct drm_plane_state *new_plane_state;
2026         struct drm_plane *plane;
2027         struct nouveau_drm *drm = nouveau_drm(dev);
2028         struct nv50_disp *disp = nv50_disp(dev);
2029         struct nv50_atom *atom = nv50_atom(state);
2030         struct nv50_core *core = disp->core;
2031         struct nv50_outp_atom *outp, *outt;
2032         u32 interlock[NV50_DISP_INTERLOCK__SIZE] = {};
2033         int i;
2034         bool flushed = false;
2035
2036         NV_ATOMIC(drm, "commit %d %d\n", atom->lock_core, atom->flush_disable);
2037         nv50_crc_atomic_stop_reporting(state);
2038         drm_atomic_helper_wait_for_fences(dev, state, false);
2039         drm_atomic_helper_wait_for_dependencies(state);
2040         drm_dp_mst_atomic_wait_for_dependencies(state);
2041         drm_atomic_helper_update_legacy_modeset_state(dev, state);
2042         drm_atomic_helper_calc_timestamping_constants(state);
2043
2044         if (atom->lock_core)
2045                 mutex_lock(&disp->mutex);
2046
2047         /* Disable head(s). */
2048         for_each_oldnew_crtc_in_state(state, crtc, old_crtc_state, new_crtc_state, i) {
2049                 struct nv50_head_atom *asyh = nv50_head_atom(new_crtc_state);
2050                 struct nv50_head *head = nv50_head(crtc);
2051
2052                 NV_ATOMIC(drm, "%s: clr %04x (set %04x)\n", crtc->name,
2053                           asyh->clr.mask, asyh->set.mask);
2054
2055                 if (old_crtc_state->active && !new_crtc_state->active) {
2056                         pm_runtime_put_noidle(dev->dev);
2057                         drm_crtc_vblank_off(crtc);
2058                 }
2059
2060                 if (asyh->clr.mask) {
2061                         nv50_head_flush_clr(head, asyh, atom->flush_disable);
2062                         interlock[NV50_DISP_INTERLOCK_CORE] |= 1;
2063                 }
2064         }
2065
2066         /* Disable plane(s). */
2067         for_each_new_plane_in_state(state, plane, new_plane_state, i) {
2068                 struct nv50_wndw_atom *asyw = nv50_wndw_atom(new_plane_state);
2069                 struct nv50_wndw *wndw = nv50_wndw(plane);
2070
2071                 NV_ATOMIC(drm, "%s: clr %02x (set %02x)\n", plane->name,
2072                           asyw->clr.mask, asyw->set.mask);
2073                 if (!asyw->clr.mask)
2074                         continue;
2075
2076                 nv50_wndw_flush_clr(wndw, interlock, atom->flush_disable, asyw);
2077         }
2078
2079         /* Disable output path(s). */
2080         list_for_each_entry(outp, &atom->outp, head) {
2081                 const struct drm_encoder_helper_funcs *help;
2082                 struct drm_encoder *encoder;
2083
2084                 encoder = outp->encoder;
2085                 help = encoder->helper_private;
2086
2087                 NV_ATOMIC(drm, "%s: clr %02x (set %02x)\n", encoder->name,
2088                           outp->clr.mask, outp->set.mask);
2089
2090                 if (outp->clr.mask) {
2091                         help->atomic_disable(encoder, state);
2092                         outp->disabled = true;
2093                         interlock[NV50_DISP_INTERLOCK_CORE] |= 1;
2094                 }
2095         }
2096
2097         /* Flush disable. */
2098         if (interlock[NV50_DISP_INTERLOCK_CORE]) {
2099                 if (atom->flush_disable) {
2100                         nv50_disp_atomic_commit_wndw(state, interlock);
2101                         nv50_disp_atomic_commit_core(state, interlock);
2102                         memset(interlock, 0x00, sizeof(interlock));
2103
2104                         flushed = true;
2105                 }
2106         }
2107
2108         if (flushed)
2109                 nv50_crc_atomic_release_notifier_contexts(state);
2110         nv50_crc_atomic_init_notifier_contexts(state);
2111
2112         /* Update output path(s). */
2113         list_for_each_entry(outp, &atom->outp, head) {
2114                 const struct drm_encoder_helper_funcs *help;
2115                 struct drm_encoder *encoder;
2116
2117                 encoder = outp->encoder;
2118                 help = encoder->helper_private;
2119
2120                 NV_ATOMIC(drm, "%s: set %02x (clr %02x)\n", encoder->name,
2121                           outp->set.mask, outp->clr.mask);
2122
2123                 if (outp->set.mask) {
2124                         help->atomic_enable(encoder, state);
2125                         outp->enabled = true;
2126                         interlock[NV50_DISP_INTERLOCK_CORE] = 1;
2127                 }
2128         }
2129
2130         /* Update head(s). */
2131         for_each_oldnew_crtc_in_state(state, crtc, old_crtc_state, new_crtc_state, i) {
2132                 struct nv50_head_atom *asyh = nv50_head_atom(new_crtc_state);
2133                 struct nv50_head *head = nv50_head(crtc);
2134
2135                 NV_ATOMIC(drm, "%s: set %04x (clr %04x)\n", crtc->name,
2136                           asyh->set.mask, asyh->clr.mask);
2137
2138                 if (asyh->set.mask) {
2139                         nv50_head_flush_set(head, asyh);
2140                         interlock[NV50_DISP_INTERLOCK_CORE] = 1;
2141                 }
2142
2143                 if (new_crtc_state->active) {
2144                         if (!old_crtc_state->active) {
2145                                 drm_crtc_vblank_on(crtc);
2146                                 pm_runtime_get_noresume(dev->dev);
2147                         }
2148                         if (new_crtc_state->event)
2149                                 drm_crtc_vblank_get(crtc);
2150                 }
2151         }
2152
2153         /* Update window->head assignment.
2154          *
2155          * This has to happen in an update that's not interlocked with
2156          * any window channels to avoid hitting HW error checks.
2157          *
2158          * TODO: Proper handling of window ownership (Turing apparently
2159          *       supports non-fixed mappings).
2160          */
2161         if (core->assign_windows) {
2162                 core->func->wndw.owner(core);
2163                 nv50_disp_atomic_commit_core(state, interlock);
2164                 core->assign_windows = false;
2165                 interlock[NV50_DISP_INTERLOCK_CORE] = 0;
2166         }
2167
2168         /* Finish updating head(s)...
2169          *
2170          * NVD is rather picky about both where window assignments can change,
2171          * *and* about certain core and window channel states matching.
2172          *
2173          * The EFI GOP driver on newer GPUs configures window channels with a
2174          * different output format to what we do, and the core channel update
2175          * in the assign_windows case above would result in a state mismatch.
2176          *
2177          * Delay some of the head update until after that point to work around
2178          * the issue.  This only affects the initial modeset.
2179          *
2180          * TODO: handle this better when adding flexible window mapping
2181          */
2182         for_each_oldnew_crtc_in_state(state, crtc, old_crtc_state, new_crtc_state, i) {
2183                 struct nv50_head_atom *asyh = nv50_head_atom(new_crtc_state);
2184                 struct nv50_head *head = nv50_head(crtc);
2185
2186                 NV_ATOMIC(drm, "%s: set %04x (clr %04x)\n", crtc->name,
2187                           asyh->set.mask, asyh->clr.mask);
2188
2189                 if (asyh->set.mask) {
2190                         nv50_head_flush_set_wndw(head, asyh);
2191                         interlock[NV50_DISP_INTERLOCK_CORE] = 1;
2192                 }
2193         }
2194
2195         /* Update plane(s). */
2196         for_each_new_plane_in_state(state, plane, new_plane_state, i) {
2197                 struct nv50_wndw_atom *asyw = nv50_wndw_atom(new_plane_state);
2198                 struct nv50_wndw *wndw = nv50_wndw(plane);
2199
2200                 NV_ATOMIC(drm, "%s: set %02x (clr %02x)\n", plane->name,
2201                           asyw->set.mask, asyw->clr.mask);
2202                 if (!asyw->set.mask &&
2203                     (!asyw->clr.mask || atom->flush_disable))
2204                         continue;
2205
2206                 nv50_wndw_flush_set(wndw, interlock, asyw);
2207         }
2208
2209         /* Flush update. */
2210         nv50_disp_atomic_commit_wndw(state, interlock);
2211
2212         if (interlock[NV50_DISP_INTERLOCK_CORE]) {
2213                 if (interlock[NV50_DISP_INTERLOCK_BASE] ||
2214                     interlock[NV50_DISP_INTERLOCK_OVLY] ||
2215                     interlock[NV50_DISP_INTERLOCK_WNDW] ||
2216                     !atom->state.legacy_cursor_update)
2217                         nv50_disp_atomic_commit_core(state, interlock);
2218                 else
2219                         disp->core->func->update(disp->core, interlock, false);
2220         }
2221
2222         if (atom->lock_core)
2223                 mutex_unlock(&disp->mutex);
2224
2225         list_for_each_entry_safe(outp, outt, &atom->outp, head) {
2226                 list_del(&outp->head);
2227                 kfree(outp);
2228         }
2229
2230         /* Wait for HW to signal completion. */
2231         for_each_new_plane_in_state(state, plane, new_plane_state, i) {
2232                 struct nv50_wndw_atom *asyw = nv50_wndw_atom(new_plane_state);
2233                 struct nv50_wndw *wndw = nv50_wndw(plane);
2234                 int ret = nv50_wndw_wait_armed(wndw, asyw);
2235                 if (ret)
2236                         NV_ERROR(drm, "%s: timeout\n", plane->name);
2237         }
2238
2239         for_each_new_crtc_in_state(state, crtc, new_crtc_state, i) {
2240                 if (new_crtc_state->event) {
2241                         unsigned long flags;
2242                         /* Get correct count/ts if racing with vblank irq */
2243                         if (new_crtc_state->active)
2244                                 drm_crtc_accurate_vblank_count(crtc);
2245                         spin_lock_irqsave(&crtc->dev->event_lock, flags);
2246                         drm_crtc_send_vblank_event(crtc, new_crtc_state->event);
2247                         spin_unlock_irqrestore(&crtc->dev->event_lock, flags);
2248
2249                         new_crtc_state->event = NULL;
2250                         if (new_crtc_state->active)
2251                                 drm_crtc_vblank_put(crtc);
2252                 }
2253         }
2254
2255         nv50_crc_atomic_start_reporting(state);
2256         if (!flushed)
2257                 nv50_crc_atomic_release_notifier_contexts(state);
2258
2259         drm_atomic_helper_commit_hw_done(state);
2260         drm_atomic_helper_cleanup_planes(dev, state);
2261         drm_atomic_helper_commit_cleanup_done(state);
2262         drm_atomic_state_put(state);
2263
2264         /* Drop the RPM ref we got from nv50_disp_atomic_commit() */
2265         pm_runtime_mark_last_busy(dev->dev);
2266         pm_runtime_put_autosuspend(dev->dev);
2267 }
2268
2269 static void
2270 nv50_disp_atomic_commit_work(struct work_struct *work)
2271 {
2272         struct drm_atomic_state *state =
2273                 container_of(work, typeof(*state), commit_work);
2274         nv50_disp_atomic_commit_tail(state);
2275 }
2276
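/* Atomic commit entry point: take a runtime PM reference, prepare planes and
 * notifiers, swap in the new state, then run the commit tail either inline or
 * from the unbound workqueue for nonblocking commits.  The tail drops the
 * extra RPM reference it is handed here.
 */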
2277 static int
2278 nv50_disp_atomic_commit(struct drm_device *dev,
2279                         struct drm_atomic_state *state, bool nonblock)
2280 {
2281         struct drm_plane_state *new_plane_state;
2282         struct drm_plane *plane;
2283         int ret, i;
2284
2285         ret = pm_runtime_get_sync(dev->dev);
2286         if (ret < 0 && ret != -EACCES) {
2287                 pm_runtime_put_autosuspend(dev->dev);
2288                 return ret;
2289         }
2290
2291         ret = drm_atomic_helper_setup_commit(state, nonblock);
2292         if (ret)
2293                 goto done;
2294
2295         INIT_WORK(&state->commit_work, nv50_disp_atomic_commit_work);
2296
2297         ret = drm_atomic_helper_prepare_planes(dev, state);
2298         if (ret)
2299                 goto done;
2300
2301         if (!nonblock) {
2302                 ret = drm_atomic_helper_wait_for_fences(dev, state, true);
2303                 if (ret)
2304                         goto err_cleanup;
2305         }
2306
2307         ret = drm_atomic_helper_swap_state(state, true);
2308         if (ret)
2309                 goto err_cleanup;
2310
2311         for_each_new_plane_in_state(state, plane, new_plane_state, i) {
2312                 struct nv50_wndw_atom *asyw = nv50_wndw_atom(new_plane_state);
2313                 struct nv50_wndw *wndw = nv50_wndw(plane);
2314
2315                 if (asyw->set.image)
2316                         nv50_wndw_ntfy_enable(wndw, asyw);
2317         }
2318
2319         drm_atomic_state_get(state);
2320
2321         /*
2322          * Grab another RPM ref for the commit tail, which will release the
2323          * ref when it's finished
2324          */
2325         pm_runtime_get_noresume(dev->dev);
2326
2327         if (nonblock)
2328                 queue_work(system_unbound_wq, &state->commit_work);
2329         else
2330                 nv50_disp_atomic_commit_tail(state);
2331
2332 err_cleanup:
2333         if (ret)
2334                 drm_atomic_helper_cleanup_planes(dev, state);
2335 done:
2336         pm_runtime_put_autosuspend(dev->dev);
2337         return ret;
2338 }
2339
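/* Find (or allocate and track) the per-encoder atom used to record output
 * path changes for this atomic state.
 */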
2340 static struct nv50_outp_atom *
2341 nv50_disp_outp_atomic_add(struct nv50_atom *atom, struct drm_encoder *encoder)
2342 {
2343         struct nv50_outp_atom *outp;
2344
2345         list_for_each_entry(outp, &atom->outp, head) {
2346                 if (outp->encoder == encoder)
2347                         return outp;
2348         }
2349
2350         outp = kzalloc(sizeof(*outp), GFP_KERNEL);
2351         if (!outp)
2352                 return ERR_PTR(-ENOMEM);
2353
2354         list_add(&outp->head, &atom->outp);
2355         outp->encoder = encoder;
2356         return outp;
2357 }
2358
2359 static int
2360 nv50_disp_outp_atomic_check_clr(struct nv50_atom *atom,
2361                                 struct drm_connector_state *old_connector_state)
2362 {
2363         struct drm_encoder *encoder = old_connector_state->best_encoder;
2364         struct drm_crtc_state *old_crtc_state, *new_crtc_state;
2365         struct drm_crtc *crtc;
2366         struct nv50_outp_atom *outp;
2367
2368         if (!(crtc = old_connector_state->crtc))
2369                 return 0;
2370
2371         old_crtc_state = drm_atomic_get_old_crtc_state(&atom->state, crtc);
2372         new_crtc_state = drm_atomic_get_new_crtc_state(&atom->state, crtc);
2373         if (old_crtc_state->active && drm_atomic_crtc_needs_modeset(new_crtc_state)) {
2374                 outp = nv50_disp_outp_atomic_add(atom, encoder);
2375                 if (IS_ERR(outp))
2376                         return PTR_ERR(outp);
2377
2378                 if (outp->encoder->encoder_type == DRM_MODE_ENCODER_DPMST ||
2379                     nouveau_encoder(outp->encoder)->dcb->type == DCB_OUTPUT_DP)
2380                         atom->flush_disable = true;
2381                 outp->clr.ctrl = true;
2382                 atom->lock_core = true;
2383         }
2384
2385         return 0;
2386 }
2387
2388 static int
2389 nv50_disp_outp_atomic_check_set(struct nv50_atom *atom,
2390                                 struct drm_connector_state *connector_state)
2391 {
2392         struct drm_encoder *encoder = connector_state->best_encoder;
2393         struct drm_crtc_state *new_crtc_state;
2394         struct drm_crtc *crtc;
2395         struct nv50_outp_atom *outp;
2396
2397         if (!(crtc = connector_state->crtc))
2398                 return 0;
2399
2400         new_crtc_state = drm_atomic_get_new_crtc_state(&atom->state, crtc);
2401         if (new_crtc_state->active && drm_atomic_crtc_needs_modeset(new_crtc_state)) {
2402                 outp = nv50_disp_outp_atomic_add(atom, encoder);
2403                 if (IS_ERR(outp))
2404                         return PTR_ERR(outp);
2405
2406                 outp->set.ctrl = true;
2407                 atom->lock_core = true;
2408         }
2409
2410         return 0;
2411 }
2412
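/* Driver atomic_check: apply static window->head mappings where the core
 * requires them, pull in affected planes when colour management changes, run
 * the generic DRM checks, record output path set/clr state, and run the MST
 * and CRC output checks.
 */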
2413 static int
2414 nv50_disp_atomic_check(struct drm_device *dev, struct drm_atomic_state *state)
2415 {
2416         struct nv50_atom *atom = nv50_atom(state);
2417         struct nv50_core *core = nv50_disp(dev)->core;
2418         struct drm_connector_state *old_connector_state, *new_connector_state;
2419         struct drm_connector *connector;
2420         struct drm_crtc_state *new_crtc_state;
2421         struct drm_crtc *crtc;
2422         struct nv50_head *head;
2423         struct nv50_head_atom *asyh;
2424         int ret, i;
2425
2426         if (core->assign_windows && core->func->head->static_wndw_map) {
2427                 drm_for_each_crtc(crtc, dev) {
2428                         new_crtc_state = drm_atomic_get_crtc_state(state,
2429                                                                    crtc);
2430                         if (IS_ERR(new_crtc_state))
2431                                 return PTR_ERR(new_crtc_state);
2432
2433                         head = nv50_head(crtc);
2434                         asyh = nv50_head_atom(new_crtc_state);
2435                         core->func->head->static_wndw_map(head, asyh);
2436                 }
2437         }
2438
2439         /* We need to handle colour management on a per-plane basis. */
2440         for_each_new_crtc_in_state(state, crtc, new_crtc_state, i) {
2441                 if (new_crtc_state->color_mgmt_changed) {
2442                         ret = drm_atomic_add_affected_planes(state, crtc);
2443                         if (ret)
2444                                 return ret;
2445                 }
2446         }
2447
2448         ret = drm_atomic_helper_check(dev, state);
2449         if (ret)
2450                 return ret;
2451
2452         for_each_oldnew_connector_in_state(state, connector, old_connector_state, new_connector_state, i) {
2453                 ret = nv50_disp_outp_atomic_check_clr(atom, old_connector_state);
2454                 if (ret)
2455                         return ret;
2456
2457                 ret = nv50_disp_outp_atomic_check_set(atom, new_connector_state);
2458                 if (ret)
2459                         return ret;
2460         }
2461
2462         ret = drm_dp_mst_atomic_check(state);
2463         if (ret)
2464                 return ret;
2465
2466         nv50_crc_atomic_check_outp(atom);
2467
2468         return 0;
2469 }
2470
2471 static void
2472 nv50_disp_atomic_state_clear(struct drm_atomic_state *state)
2473 {
2474         struct nv50_atom *atom = nv50_atom(state);
2475         struct nv50_outp_atom *outp, *outt;
2476
2477         list_for_each_entry_safe(outp, outt, &atom->outp, head) {
2478                 list_del(&outp->head);
2479                 kfree(outp);
2480         }
2481
2482         drm_atomic_state_default_clear(state);
2483 }
2484
2485 static void
2486 nv50_disp_atomic_state_free(struct drm_atomic_state *state)
2487 {
2488         struct nv50_atom *atom = nv50_atom(state);
2489         drm_atomic_state_default_release(&atom->state);
2490         kfree(atom);
2491 }
2492
2493 static struct drm_atomic_state *
2494 nv50_disp_atomic_state_alloc(struct drm_device *dev)
2495 {
2496         struct nv50_atom *atom;
2497         if (!(atom = kzalloc(sizeof(*atom), GFP_KERNEL)) ||
2498             drm_atomic_state_init(dev, &atom->state) < 0) {
2499                 kfree(atom);
2500                 return NULL;
2501         }
2502         INIT_LIST_HEAD(&atom->outp);
2503         return &atom->state;
2504 }
2505
2506 static const struct drm_mode_config_funcs
2507 nv50_disp_func = {
2508         .fb_create = nouveau_user_framebuffer_create,
2509         .output_poll_changed = drm_fb_helper_output_poll_changed,
2510         .atomic_check = nv50_disp_atomic_check,
2511         .atomic_commit = nv50_disp_atomic_commit,
2512         .atomic_state_alloc = nv50_disp_atomic_state_alloc,
2513         .atomic_state_clear = nv50_disp_atomic_state_clear,
2514         .atomic_state_free = nv50_disp_atomic_state_free,
2515 };
2516
2517 static const struct drm_mode_config_helper_funcs
2518 nv50_disp_helper_func = {
2519         .atomic_commit_setup = drm_dp_mst_atomic_setup_commit,
2520 };
2521
2522 /******************************************************************************
2523  * Init
2524  *****************************************************************************/
2525
2526 static void
2527 nv50_display_fini(struct drm_device *dev, bool runtime, bool suspend)
2528 {
2529         struct nouveau_drm *drm = nouveau_drm(dev);
2530         struct drm_encoder *encoder;
2531
2532         list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
2533                 if (encoder->encoder_type != DRM_MODE_ENCODER_DPMST)
2534                         nv50_mstm_fini(nouveau_encoder(encoder));
2535         }
2536
2537         if (!runtime)
2538                 cancel_work_sync(&drm->hpd_work);
2539 }
2540
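/* Read back the OR state already programmed (e.g. by the VBIOS/EFI GOP) for a
 * single encoder, and seed the corresponding CRTC and connector state so the
 * first atomic commit starts from what the hardware is actually driving.
 */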
2541 static inline void
2542 nv50_display_read_hw_or_state(struct drm_device *dev, struct nv50_disp *disp,
2543                               struct nouveau_encoder *outp)
2544 {
2545         struct drm_crtc *crtc;
2546         struct drm_connector_list_iter conn_iter;
2547         struct drm_connector *conn;
2548         struct nv50_head_atom *armh;
2549         const u32 encoder_mask = drm_encoder_mask(&outp->base.base);
2550         bool found_conn = false, found_head = false;
2551         u8 proto;
2552         int head_idx;
2553         int ret;
2554
2555         switch (outp->dcb->type) {
2556         case DCB_OUTPUT_TMDS:
2557                 ret = nvif_outp_inherit_tmds(&outp->outp, &proto);
2558                 break;
2559         case DCB_OUTPUT_DP:
2560                 ret = nvif_outp_inherit_dp(&outp->outp, &proto);
2561                 break;
2562         case DCB_OUTPUT_LVDS:
2563                 ret = nvif_outp_inherit_lvds(&outp->outp, &proto);
2564                 break;
2565         case DCB_OUTPUT_ANALOG:
2566                 ret = nvif_outp_inherit_rgb_crt(&outp->outp, &proto);
2567                 break;
2568         default:
2569                 drm_dbg_kms(dev, "Readback for %s not implemented yet, skipping\n",
2570                             outp->base.base.name);
2571                 drm_WARN_ON(dev, true);
2572                 return;
2573         }
2574
2575         if (ret < 0)
2576                 return;
2577
2578         head_idx = ret;
2579
2580         drm_for_each_crtc(crtc, dev) {
2581                 if (crtc->index != head_idx)
2582                         continue;
2583
2584                 armh = nv50_head_atom(crtc->state);
2585                 found_head = true;
2586                 break;
2587         }
2588         if (drm_WARN_ON(dev, !found_head))
2589                 return;
2590
2591         /* Figure out which connector is being used by this encoder */
2592         drm_connector_list_iter_begin(dev, &conn_iter);
2593         nouveau_for_each_non_mst_connector_iter(conn, &conn_iter) {
2594                 if (nouveau_connector(conn)->index == outp->dcb->connector) {
2595                         found_conn = true;
2596                         break;
2597                 }
2598         }
2599         drm_connector_list_iter_end(&conn_iter);
2600         if (drm_WARN_ON(dev, !found_conn))
2601                 return;
2602
2603         armh->state.encoder_mask = encoder_mask;
2604         armh->state.connector_mask = drm_connector_mask(conn);
2605         armh->state.active = true;
2606         armh->state.enable = true;
2607         pm_runtime_get_noresume(dev->dev);
2608
2609         outp->crtc = crtc;
2610         outp->ctrl = NVVAL(NV507D, SOR_SET_CONTROL, PROTOCOL, proto) | BIT(crtc->index);
2611
2612         drm_connector_get(conn);
2613         conn->state->crtc = crtc;
2614         conn->state->best_encoder = &outp->base.base;
2615 }
2616
2617 /* Read back the currently programmed display state */
2618 static void
2619 nv50_display_read_hw_state(struct nouveau_drm *drm)
2620 {
2621         struct drm_device *dev = drm->dev;
2622         struct drm_encoder *encoder;
2623         struct drm_modeset_acquire_ctx ctx;
2624         struct nv50_disp *disp = nv50_disp(dev);
2625         int ret;
2626
2627         DRM_MODESET_LOCK_ALL_BEGIN(dev, ctx, 0, ret);
2628
2629         drm_for_each_encoder(encoder, dev) {
2630                 if (encoder->encoder_type == DRM_MODE_ENCODER_DPMST)
2631                         continue;
2632
2633                 nv50_display_read_hw_or_state(dev, disp, nouveau_encoder(encoder));
2634         }
2635
2636         DRM_MODESET_LOCK_ALL_END(dev, ctx, ret);
2637 }
2638
2639 static int
2640 nv50_display_init(struct drm_device *dev, bool resume, bool runtime)
2641 {
2642         struct nv50_core *core = nv50_disp(dev)->core;
2643         struct drm_encoder *encoder;
2644
2645         if (resume || runtime)
2646                 core->func->init(core);
2647
2648         list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
2649                 if (encoder->encoder_type != DRM_MODE_ENCODER_DPMST) {
2650                         struct nouveau_encoder *nv_encoder =
2651                                 nouveau_encoder(encoder);
2652                         nv50_mstm_init(nv_encoder, runtime);
2653                 }
2654         }
2655
2656         if (!resume)
2657                 nv50_display_read_hw_state(nouveau_drm(dev));
2658
2659         return 0;
2660 }
2661
2662 static void
2663 nv50_display_destroy(struct drm_device *dev)
2664 {
2665         struct nv50_disp *disp = nv50_disp(dev);
2666
2667         nv50_audio_component_fini(nouveau_drm(dev));
2668
2669         nvif_object_unmap(&disp->caps);
2670         nvif_object_dtor(&disp->caps);
2671         nv50_core_del(&disp->core);
2672
2673         nouveau_bo_unmap(disp->sync);
2674         if (disp->sync)
2675                 nouveau_bo_unpin(disp->sync);
2676         nouveau_bo_ref(NULL, &disp->sync);
2677
2678         nouveau_display(dev)->priv = NULL;
2679         kfree(disp);
2680 }
2681
2682 int
2683 nv50_display_create(struct drm_device *dev)
2684 {
2685         struct nouveau_drm *drm = nouveau_drm(dev);
2686         struct drm_connector *connector, *tmp;
2687         struct nv50_disp *disp;
2688         int ret, i;
2689         bool has_mst = false;
2690
2691         disp = kzalloc(sizeof(*disp), GFP_KERNEL);
2692         if (!disp)
2693                 return -ENOMEM;
2694
2695         mutex_init(&disp->mutex);
2696
2697         nouveau_display(dev)->priv = disp;
2698         nouveau_display(dev)->dtor = nv50_display_destroy;
2699         nouveau_display(dev)->init = nv50_display_init;
2700         nouveau_display(dev)->fini = nv50_display_fini;
2701         disp->disp = &nouveau_display(dev)->disp;
2702         dev->mode_config.funcs = &nv50_disp_func;
2703         dev->mode_config.helper_private = &nv50_disp_helper_func;
2704         dev->mode_config.quirk_addfb_prefer_xbgr_30bpp = true;
2705         dev->mode_config.normalize_zpos = true;
2706
2707         /* small shared memory area we use for notifiers and semaphores */
2708         ret = nouveau_bo_new(&drm->client, 4096, 0x1000,
2709                              NOUVEAU_GEM_DOMAIN_VRAM,
2710                              0, 0x0000, NULL, NULL, &disp->sync);
2711         if (!ret) {
2712                 ret = nouveau_bo_pin(disp->sync, NOUVEAU_GEM_DOMAIN_VRAM, true);
2713                 if (!ret) {
2714                         ret = nouveau_bo_map(disp->sync);
2715                         if (ret)
2716                                 nouveau_bo_unpin(disp->sync);
2717                 }
2718                 if (ret)
2719                         nouveau_bo_ref(NULL, &disp->sync);
2720         }
2721
2722         if (ret)
2723                 goto out;
2724
2725         /* allocate master evo channel */
2726         ret = nv50_core_new(drm, &disp->core);
2727         if (ret)
2728                 goto out;
2729
2730         disp->core->func->init(disp->core);
2731         if (disp->core->func->caps_init) {
2732                 ret = disp->core->func->caps_init(drm, disp);
2733                 if (ret)
2734                         goto out;
2735         }
2736
2737         /* Assign the correct format modifiers */
2738         if (disp->disp->object.oclass >= TU102_DISP)
2739                 nouveau_display(dev)->format_modifiers = wndwc57e_modifiers;
2740         else
2741         if (drm->client.device.info.family >= NV_DEVICE_INFO_V0_FERMI)
2742                 nouveau_display(dev)->format_modifiers = disp90xx_modifiers;
2743         else
2744                 nouveau_display(dev)->format_modifiers = disp50xx_modifiers;
2745
2746         /* FIXME: 256x256 cursors are supported on Kepler; however, unlike Maxwell and later
2747          * generations, Kepler requires that we use small pages (4K) for cursor scanout surfaces. The
2748          * proper fix for this is to teach nouveau to migrate fbs being used for the cursor plane to
2749          * small page allocations in prepare_fb(). When this is implemented, we should also force
2750          * large pages (128K) for ovly fbs in order to fix Kepler ovlys.
2751          * But until then, just limit cursors to 128x128 - which is small enough to avoid ever using
2752          * large pages.
2753          */
2754         if (disp->disp->object.oclass >= GM107_DISP) {
2755                 dev->mode_config.cursor_width = 256;
2756                 dev->mode_config.cursor_height = 256;
2757         } else if (disp->disp->object.oclass >= GK104_DISP) {
2758                 dev->mode_config.cursor_width = 128;
2759                 dev->mode_config.cursor_height = 128;
2760         } else {
2761                 dev->mode_config.cursor_width = 64;
2762                 dev->mode_config.cursor_height = 64;
2763         }
2764
2765         /* create encoder/connector objects based on VBIOS DCB table */
2766         for_each_set_bit(i, &disp->disp->outp_mask, sizeof(disp->disp->outp_mask) * 8) {
2767                 struct nouveau_encoder *outp;
2768
2769                 outp = kzalloc(sizeof(*outp), GFP_KERNEL);
2770                 if (!outp)
2771                         break;
2772
2773                 ret = nvif_outp_ctor(disp->disp, "kmsOutp", i, &outp->outp);
2774                 if (ret) {
2775                         kfree(outp);
2776                         continue;
2777                 }
2778
2779                 connector = nouveau_connector_create(dev, outp->outp.info.conn);
2780                 if (IS_ERR(connector)) {
2781                         nvif_outp_dtor(&outp->outp);
2782                         kfree(outp);
2783                         continue;
2784                 }
2785
2786                 outp->base.base.possible_crtcs = outp->outp.info.heads;
2787                 outp->base.base.possible_clones = 0;
2788                 outp->conn = nouveau_connector(connector);
2789
2790                 outp->dcb = kzalloc(sizeof(*outp->dcb), GFP_KERNEL);
2791                 if (!outp->dcb)
2792                         break;
2793
2794                 switch (outp->outp.info.proto) {
2795                 case NVIF_OUTP_RGB_CRT:
2796                         outp->dcb->type = DCB_OUTPUT_ANALOG;
2797                         outp->dcb->crtconf.maxfreq = outp->outp.info.rgb_crt.freq_max;
2798                         break;
2799                 case NVIF_OUTP_TMDS:
2800                         outp->dcb->type = DCB_OUTPUT_TMDS;
2801                         outp->dcb->duallink_possible = outp->outp.info.tmds.dual;
2802                         break;
2803                 case NVIF_OUTP_LVDS:
2804                         outp->dcb->type = DCB_OUTPUT_LVDS;
2805                         outp->dcb->lvdsconf.use_acpi_for_edid = outp->outp.info.lvds.acpi_edid;
2806                         break;
2807                 case NVIF_OUTP_DP:
2808                         outp->dcb->type = DCB_OUTPUT_DP;
2809                         outp->dcb->dpconf.link_nr = outp->outp.info.dp.link_nr;
2810                         outp->dcb->dpconf.link_bw = outp->outp.info.dp.link_bw;
2811                         if (outp->outp.info.dp.mst)
2812                                 has_mst = true;
2813                         break;
2814                 default:
2815                         WARN_ON(1);
2816                         continue;
2817                 }
2818
2819                 outp->dcb->heads = outp->outp.info.heads;
2820                 outp->dcb->connector = outp->outp.info.conn;
2821                 outp->dcb->i2c_index = outp->outp.info.ddc;
2822
                switch (outp->outp.info.type) {
                case NVIF_OUTP_DAC : ret = nv50_dac_create(outp); break;
                case NVIF_OUTP_SOR : ret = nv50_sor_create(outp); break;
                case NVIF_OUTP_PIOR: ret = nv50_pior_create(outp); break;
                default:
                        WARN_ON(1);
                        continue;
                }

                if (ret) {
                        NV_WARN(drm, "failed to create encoder %d/%d/%d: %d\n",
                                        i, outp->outp.info.type, outp->outp.info.proto, ret);
                }
        }

        /* cull any connectors we created that don't have an encoder */
        list_for_each_entry_safe(connector, tmp, &dev->mode_config.connector_list, head) {
                if (connector->possible_encoders)
                        continue;

                NV_WARN(drm, "%s has no encoders, removing\n",
                        connector->name);
                connector->funcs->destroy(connector);
        }

        /* create crtc objects to represent the hw heads */
        for_each_set_bit(i, &disp->disp->head_mask, sizeof(disp->disp->head_mask) * 8) {
                struct nv50_head *head;

                head = nv50_head_create(dev, i);
                if (IS_ERR(head)) {
                        ret = PTR_ERR(head);
                        goto out;
                }

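                /*
                 * If any DP output advertised MST support, give each head its
                 * own MST stream encoder (MSTO) so it can drive an MST display.
                 */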
                if (has_mst) {
                        head->msto = nv50_msto_new(dev, head, i);
                        if (IS_ERR(head->msto)) {
                                ret = PTR_ERR(head->msto);
                                head->msto = NULL;
                                goto out;
                        }

                        /*
                         * FIXME: This is a hack to work around the following
                         * issues:
                         *
                         * https://gitlab.gnome.org/GNOME/mutter/issues/759
                         * https://gitlab.freedesktop.org/xorg/xserver/merge_requests/277
                         *
                         * Once these issues are closed, this should be
                         * removed.
                         */
                        head->msto->encoder.possible_crtcs = disp->disp->head_mask;
                }
        }

        /* Disable vblank irqs aggressively for power-saving, safe on nv50+ */
        dev->vblank_disable_immediate = true;

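        /*
         * Register with the audio component framework, so the HDA audio driver
         * can query ELDs for HDMI/DP audio on this GPU.
         */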
        nv50_audio_component_init(drm);

out:
        if (ret)
                nv50_display_destroy(dev);
        return ret;
}

/******************************************************************************
 * Format modifiers
 *****************************************************************************/

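/*
 * Modifier lists advertised to userspace, selected at display creation time
 * based on the display class.  Each list offers DRM_FORMAT_MOD_LINEAR as a
 * fallback and is terminated by DRM_FORMAT_MOD_INVALID; the block-linear
 * entries encode the fields annotated in the diagrams below.
 */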
/****************************************************************
 *            Log2(block height) ----------------------------+  *
 *            Page Kind ----------------------------------+  |  *
 *            Gob Height/Page Kind Generation ------+     |  |  *
 *                          Sector layout -------+  |     |  |  *
 *                          Compression ------+  |  |     |  |  */
const u64 disp50xx_modifiers[] = { /*         |  |  |     |  |  */
        DRM_FORMAT_MOD_NVIDIA_BLOCK_LINEAR_2D(0, 1, 1, 0x7a, 0),
        DRM_FORMAT_MOD_NVIDIA_BLOCK_LINEAR_2D(0, 1, 1, 0x7a, 1),
        DRM_FORMAT_MOD_NVIDIA_BLOCK_LINEAR_2D(0, 1, 1, 0x7a, 2),
        DRM_FORMAT_MOD_NVIDIA_BLOCK_LINEAR_2D(0, 1, 1, 0x7a, 3),
        DRM_FORMAT_MOD_NVIDIA_BLOCK_LINEAR_2D(0, 1, 1, 0x7a, 4),
        DRM_FORMAT_MOD_NVIDIA_BLOCK_LINEAR_2D(0, 1, 1, 0x7a, 5),
        DRM_FORMAT_MOD_NVIDIA_BLOCK_LINEAR_2D(0, 1, 1, 0x78, 0),
        DRM_FORMAT_MOD_NVIDIA_BLOCK_LINEAR_2D(0, 1, 1, 0x78, 1),
        DRM_FORMAT_MOD_NVIDIA_BLOCK_LINEAR_2D(0, 1, 1, 0x78, 2),
        DRM_FORMAT_MOD_NVIDIA_BLOCK_LINEAR_2D(0, 1, 1, 0x78, 3),
        DRM_FORMAT_MOD_NVIDIA_BLOCK_LINEAR_2D(0, 1, 1, 0x78, 4),
        DRM_FORMAT_MOD_NVIDIA_BLOCK_LINEAR_2D(0, 1, 1, 0x78, 5),
        DRM_FORMAT_MOD_NVIDIA_BLOCK_LINEAR_2D(0, 1, 1, 0x70, 0),
        DRM_FORMAT_MOD_NVIDIA_BLOCK_LINEAR_2D(0, 1, 1, 0x70, 1),
        DRM_FORMAT_MOD_NVIDIA_BLOCK_LINEAR_2D(0, 1, 1, 0x70, 2),
        DRM_FORMAT_MOD_NVIDIA_BLOCK_LINEAR_2D(0, 1, 1, 0x70, 3),
        DRM_FORMAT_MOD_NVIDIA_BLOCK_LINEAR_2D(0, 1, 1, 0x70, 4),
        DRM_FORMAT_MOD_NVIDIA_BLOCK_LINEAR_2D(0, 1, 1, 0x70, 5),
        DRM_FORMAT_MOD_LINEAR,
        DRM_FORMAT_MOD_INVALID
};

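/*
 * Same field layout as above; only the gob-height generation and page kinds
 * differ for the display hardware that selects this table.
 */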
/****************************************************************
 *            Log2(block height) ----------------------------+  *
 *            Page Kind ----------------------------------+  |  *
 *            Gob Height/Page Kind Generation ------+     |  |  *
 *                          Sector layout -------+  |     |  |  *
 *                          Compression ------+  |  |     |  |  */
const u64 disp90xx_modifiers[] = { /*         |  |  |     |  |  */
        DRM_FORMAT_MOD_NVIDIA_BLOCK_LINEAR_2D(0, 1, 0, 0xfe, 0),
        DRM_FORMAT_MOD_NVIDIA_BLOCK_LINEAR_2D(0, 1, 0, 0xfe, 1),
        DRM_FORMAT_MOD_NVIDIA_BLOCK_LINEAR_2D(0, 1, 0, 0xfe, 2),
        DRM_FORMAT_MOD_NVIDIA_BLOCK_LINEAR_2D(0, 1, 0, 0xfe, 3),
        DRM_FORMAT_MOD_NVIDIA_BLOCK_LINEAR_2D(0, 1, 0, 0xfe, 4),
        DRM_FORMAT_MOD_NVIDIA_BLOCK_LINEAR_2D(0, 1, 0, 0xfe, 5),
        DRM_FORMAT_MOD_LINEAR,
        DRM_FORMAT_MOD_INVALID
};