drivers/gpu/drm/msm/disp/dpu1/dpu_encoder_phys_cmd.c
1 /*
2  * Copyright (c) 2015-2018 The Linux Foundation. All rights reserved.
3  *
4  * This program is free software; you can redistribute it and/or modify
5  * it under the terms of the GNU General Public License version 2 and
6  * only version 2 as published by the Free Software Foundation.
7  *
8  * This program is distributed in the hope that it will be useful,
9  * but WITHOUT ANY WARRANTY; without even the implied warranty of
10  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
11  * GNU General Public License for more details.
12  *
13  */
14
15 #define pr_fmt(fmt)     "[drm:%s:%d] " fmt, __func__, __LINE__
16 #include "dpu_encoder_phys.h"
17 #include "dpu_hw_interrupts.h"
18 #include "dpu_core_irq.h"
19 #include "dpu_formats.h"
20 #include "dpu_trace.h"
21
22 #define DPU_DEBUG_CMDENC(e, fmt, ...) DPU_DEBUG("enc%d intf%d " fmt, \
23                 (e) && (e)->base.parent ? \
24                 (e)->base.parent->base.id : -1, \
25                 (e) ? (e)->base.intf_idx - INTF_0 : -1, ##__VA_ARGS__)
26
27 #define DPU_ERROR_CMDENC(e, fmt, ...) DPU_ERROR("enc%d intf%d " fmt, \
28                 (e) && (e)->base.parent ? \
29                 (e)->base.parent->base.id : -1, \
30                 (e) ? (e)->base.intf_idx - INTF_0 : -1, ##__VA_ARGS__)
31
32 #define to_dpu_encoder_phys_cmd(x) \
33         container_of(x, struct dpu_encoder_phys_cmd, base)
34
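/*
 * Number of consecutive pingpong-done timeouts tolerated before the
 * frame-done event is escalated with DPU_ENCODER_FRAME_EVENT_PANEL_DEAD
 * (see _dpu_encoder_phys_cmd_handle_ppdone_timeout()).
 */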
35 #define PP_TIMEOUT_MAX_TRIALS   10
36
37 /*
38  * Tearcheck sync start and continue thresholds are empirically found
39  * based on common panels. In the future, we may want to allow panels to
40  * override these default values.
41  */
42 #define DEFAULT_TEARCHECK_SYNC_THRESH_START     4
43 #define DEFAULT_TEARCHECK_SYNC_THRESH_CONTINUE  4
44
45 #define DPU_ENC_WR_PTR_START_TIMEOUT_US 20000
46
47 static bool dpu_encoder_phys_cmd_is_master(struct dpu_encoder_phys *phys_enc)
48 {
49         return phys_enc->split_role != ENC_ROLE_SLAVE;
50 }
51
52 static bool dpu_encoder_phys_cmd_mode_fixup(
53                 struct dpu_encoder_phys *phys_enc,
54                 const struct drm_display_mode *mode,
55                 struct drm_display_mode *adj_mode)
56 {
57         if (phys_enc)
58                 DPU_DEBUG_CMDENC(to_dpu_encoder_phys_cmd(phys_enc), "\n");
59         return true;
60 }
61
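/*
 * Program the CTL block so that the interface driven by this encoder is
 * routed in command mode, with the currently selected stream and 3D blend
 * configuration.
 */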
62 static void _dpu_encoder_phys_cmd_update_intf_cfg(
63                 struct dpu_encoder_phys *phys_enc)
64 {
65         struct dpu_encoder_phys_cmd *cmd_enc =
66                         to_dpu_encoder_phys_cmd(phys_enc);
67         struct dpu_hw_ctl *ctl;
68         struct dpu_hw_intf_cfg intf_cfg = { 0 };
69
70         if (!phys_enc)
71                 return;
72
73         ctl = phys_enc->hw_ctl;
74         if (!ctl || !ctl->ops.setup_intf_cfg)
75                 return;
76
77         intf_cfg.intf = phys_enc->intf_idx;
78         intf_cfg.intf_mode_sel = DPU_CTL_MODE_SEL_CMD;
79         intf_cfg.stream_sel = cmd_enc->stream_sel;
80         intf_cfg.mode_3d = dpu_encoder_helper_get_3d_blend_mode(phys_enc);
81         ctl->ops.setup_intf_cfg(ctl, &intf_cfg);
82 }
83
84 static void dpu_encoder_phys_cmd_pp_tx_done_irq(void *arg, int irq_idx)
85 {
86         struct dpu_encoder_phys *phys_enc = arg;
87         unsigned long lock_flags;
88         int new_cnt;
89         u32 event = DPU_ENCODER_FRAME_EVENT_DONE;
90
91         if (!phys_enc || !phys_enc->hw_pp)
92                 return;
93
94         DPU_ATRACE_BEGIN("pp_done_irq");
95         /* notify all synchronous clients first, then asynchronous clients */
96         if (phys_enc->parent_ops->handle_frame_done)
97                 phys_enc->parent_ops->handle_frame_done(phys_enc->parent,
98                                 phys_enc, event);
99
100         spin_lock_irqsave(phys_enc->enc_spinlock, lock_flags);
101         new_cnt = atomic_add_unless(&phys_enc->pending_kickoff_cnt, -1, 0);
102         spin_unlock_irqrestore(phys_enc->enc_spinlock, lock_flags);
103
104         trace_dpu_enc_phys_cmd_pp_tx_done(DRMID(phys_enc->parent),
105                                           phys_enc->hw_pp->idx - PINGPONG_0,
106                                           new_cnt, event);
107
108         /* Signal any waiting atomic commit thread */
109         wake_up_all(&phys_enc->pending_kickoff_wq);
110         DPU_ATRACE_END("pp_done_irq");
111 }
112
113 static void dpu_encoder_phys_cmd_pp_rd_ptr_irq(void *arg, int irq_idx)
114 {
115         struct dpu_encoder_phys *phys_enc = arg;
116         struct dpu_encoder_phys_cmd *cmd_enc;
117
118         if (!phys_enc || !phys_enc->hw_pp)
119                 return;
120
121         DPU_ATRACE_BEGIN("rd_ptr_irq");
122         cmd_enc = to_dpu_encoder_phys_cmd(phys_enc);
123
124         if (phys_enc->parent_ops->handle_vblank_virt)
125                 phys_enc->parent_ops->handle_vblank_virt(phys_enc->parent,
126                         phys_enc);
127
128         atomic_add_unless(&cmd_enc->pending_vblank_cnt, -1, 0);
129         wake_up_all(&cmd_enc->pending_vblank_wq);
130         DPU_ATRACE_END("rd_ptr_irq");
131 }
132
133 static void dpu_encoder_phys_cmd_ctl_start_irq(void *arg, int irq_idx)
134 {
135         struct dpu_encoder_phys *phys_enc = arg;
136         struct dpu_encoder_phys_cmd *cmd_enc;
137
138         if (!phys_enc || !phys_enc->hw_ctl)
139                 return;
140
141         DPU_ATRACE_BEGIN("ctl_start_irq");
142         cmd_enc = to_dpu_encoder_phys_cmd(phys_enc);
143
144         atomic_add_unless(&phys_enc->pending_ctlstart_cnt, -1, 0);
145
146         /* Signal any thread waiting for the ctl start interrupt */
147         wake_up_all(&phys_enc->pending_kickoff_wq);
148         DPU_ATRACE_END("ctl_start_irq");
149 }
150
151 static void dpu_encoder_phys_cmd_underrun_irq(void *arg, int irq_idx)
152 {
153         struct dpu_encoder_phys *phys_enc = arg;
154
155         if (!phys_enc)
156                 return;
157
158         if (phys_enc->parent_ops->handle_underrun_virt)
159                 phys_enc->parent_ops->handle_underrun_virt(phys_enc->parent,
160                         phys_enc);
161 }
162
163 static void _dpu_encoder_phys_cmd_setup_irq_hw_idx(
164                 struct dpu_encoder_phys *phys_enc)
165 {
166         struct dpu_encoder_irq *irq;
167
168         irq = &phys_enc->irq[INTR_IDX_CTL_START];
169         irq->hw_idx = phys_enc->hw_ctl->idx;
170         irq->irq_idx = -EINVAL;
171
172         irq = &phys_enc->irq[INTR_IDX_PINGPONG];
173         irq->hw_idx = phys_enc->hw_pp->idx;
174         irq->irq_idx = -EINVAL;
175
176         irq = &phys_enc->irq[INTR_IDX_RDPTR];
177         irq->hw_idx = phys_enc->hw_pp->idx;
178         irq->irq_idx = -EINVAL;
179
180         irq = &phys_enc->irq[INTR_IDX_UNDERRUN];
181         irq->hw_idx = phys_enc->intf_idx;
182         irq->irq_idx = -EINVAL;
183 }
184
185 static void dpu_encoder_phys_cmd_mode_set(
186                 struct dpu_encoder_phys *phys_enc,
187                 struct drm_display_mode *mode,
188                 struct drm_display_mode *adj_mode)
189 {
190         struct dpu_encoder_phys_cmd *cmd_enc =
191                 to_dpu_encoder_phys_cmd(phys_enc);
192
193         if (!phys_enc || !mode || !adj_mode) {
194                 DPU_ERROR("invalid args\n");
195                 return;
196         }
197         phys_enc->cached_mode = *adj_mode;
198         DPU_DEBUG_CMDENC(cmd_enc, "caching mode:\n");
199         drm_mode_debug_printmodeline(adj_mode);
200
201         _dpu_encoder_phys_cmd_setup_irq_hw_idx(phys_enc);
202 }
203
204 static int _dpu_encoder_phys_cmd_handle_ppdone_timeout(
205                 struct dpu_encoder_phys *phys_enc)
206 {
207         struct dpu_encoder_phys_cmd *cmd_enc =
208                         to_dpu_encoder_phys_cmd(phys_enc);
209         u32 frame_event = DPU_ENCODER_FRAME_EVENT_ERROR;
210         bool do_log = false;
211
212         if (!phys_enc || !phys_enc->hw_pp || !phys_enc->hw_ctl)
213                 return -EINVAL;
214
215         cmd_enc->pp_timeout_report_cnt++;
216         if (cmd_enc->pp_timeout_report_cnt == PP_TIMEOUT_MAX_TRIALS) {
217                 frame_event |= DPU_ENCODER_FRAME_EVENT_PANEL_DEAD;
218                 do_log = true;
219         } else if (cmd_enc->pp_timeout_report_cnt == 1) {
220                 do_log = true;
221         }
222
223         trace_dpu_enc_phys_cmd_pdone_timeout(DRMID(phys_enc->parent),
224                      phys_enc->hw_pp->idx - PINGPONG_0,
225                      cmd_enc->pp_timeout_report_cnt,
226                      atomic_read(&phys_enc->pending_kickoff_cnt),
227                      frame_event);
228
229         /* to avoid flooding, only log the first timeout and the "dead" timeout */
230         if (do_log) {
231                 DRM_ERROR("id:%d pp:%d kickoff timeout %d cnt %d koff_cnt %d\n",
232                           DRMID(phys_enc->parent),
233                           phys_enc->hw_pp->idx - PINGPONG_0,
234                           phys_enc->hw_ctl->idx - CTL_0,
235                           cmd_enc->pp_timeout_report_cnt,
236                           atomic_read(&phys_enc->pending_kickoff_cnt));
237
238                 dpu_encoder_helper_unregister_irq(phys_enc, INTR_IDX_RDPTR);
239         }
240
241         atomic_add_unless(&phys_enc->pending_kickoff_cnt, -1, 0);
242
243         /* request a ctl reset before the next kickoff */
244         phys_enc->enable_state = DPU_ENC_ERR_NEEDS_HW_RESET;
245
246         if (phys_enc->parent_ops->handle_frame_done)
247                 phys_enc->parent_ops->handle_frame_done(
248                                 phys_enc->parent, phys_enc, frame_event);
249
250         return -ETIMEDOUT;
251 }
252
253 static int _dpu_encoder_phys_cmd_wait_for_idle(
254                 struct dpu_encoder_phys *phys_enc)
255 {
256         struct dpu_encoder_phys_cmd *cmd_enc =
257                         to_dpu_encoder_phys_cmd(phys_enc);
258         struct dpu_encoder_wait_info wait_info;
259         int ret;
260
261         if (!phys_enc) {
262                 DPU_ERROR("invalid encoder\n");
263                 return -EINVAL;
264         }
265
266         wait_info.wq = &phys_enc->pending_kickoff_wq;
267         wait_info.atomic_cnt = &phys_enc->pending_kickoff_cnt;
268         wait_info.timeout_ms = KICKOFF_TIMEOUT_MS;
269
270         ret = dpu_encoder_helper_wait_for_irq(phys_enc, INTR_IDX_PINGPONG,
271                         &wait_info);
272         if (ret == -ETIMEDOUT)
273                 _dpu_encoder_phys_cmd_handle_ppdone_timeout(phys_enc);
274         else if (!ret)
275                 cmd_enc->pp_timeout_report_cnt = 0;
276
277         return ret;
278 }
279
280 static int dpu_encoder_phys_cmd_control_vblank_irq(
281                 struct dpu_encoder_phys *phys_enc,
282                 bool enable)
283 {
284         int ret = 0;
285         int refcount;
286
287         if (!phys_enc || !phys_enc->hw_pp) {
288                 DPU_ERROR("invalid encoder\n");
289                 return -EINVAL;
290         }
291
292         refcount = atomic_read(&phys_enc->vblank_refcount);
293
294         /* Slave encoders don't report vblank */
295         if (!dpu_encoder_phys_cmd_is_master(phys_enc))
296                 goto end;
297
298         /* protect against negative */
299         if (!enable && refcount == 0) {
300                 ret = -EINVAL;
301                 goto end;
302         }
303
304         DRM_DEBUG_KMS("id:%u pp:%d enable=%s/%d\n", DRMID(phys_enc->parent),
305                       phys_enc->hw_pp->idx - PINGPONG_0,
306                       enable ? "true" : "false", refcount);
307
308         if (enable && atomic_inc_return(&phys_enc->vblank_refcount) == 1)
309                 ret = dpu_encoder_helper_register_irq(phys_enc, INTR_IDX_RDPTR);
310         else if (!enable && atomic_dec_return(&phys_enc->vblank_refcount) == 0)
311                 ret = dpu_encoder_helper_unregister_irq(phys_enc,
312                                 INTR_IDX_RDPTR);
313
314 end:
315         if (ret) {
316                 DRM_ERROR("vblank irq err id:%u pp:%d ret:%d, enable %s/%d\n",
317                           DRMID(phys_enc->parent),
318                           phys_enc->hw_pp->idx - PINGPONG_0, ret,
319                           enable ? "true" : "false", refcount);
320         }
321
322         return ret;
323 }
324
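/*
 * Register or unregister all interrupts used by a command mode encoder.
 * The CTL_START interrupt is only hooked up on the master encoder, since
 * only the master waits for it at commit time.
 */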
325 static void dpu_encoder_phys_cmd_irq_control(struct dpu_encoder_phys *phys_enc,
326                 bool enable)
327 {
328         struct dpu_encoder_phys_cmd *cmd_enc;
329
330         if (!phys_enc)
331                 return;
332
333         cmd_enc = to_dpu_encoder_phys_cmd(phys_enc);
334
335         trace_dpu_enc_phys_cmd_irq_ctrl(DRMID(phys_enc->parent),
336                         phys_enc->hw_pp->idx - PINGPONG_0,
337                         enable, atomic_read(&phys_enc->vblank_refcount));
338
339         if (enable) {
340                 dpu_encoder_helper_register_irq(phys_enc, INTR_IDX_PINGPONG);
341                 dpu_encoder_helper_register_irq(phys_enc, INTR_IDX_UNDERRUN);
342                 dpu_encoder_phys_cmd_control_vblank_irq(phys_enc, true);
343
344                 if (dpu_encoder_phys_cmd_is_master(phys_enc))
345                         dpu_encoder_helper_register_irq(phys_enc,
346                                         INTR_IDX_CTL_START);
347         } else {
348                 if (dpu_encoder_phys_cmd_is_master(phys_enc))
349                         dpu_encoder_helper_unregister_irq(phys_enc,
350                                         INTR_IDX_CTL_START);
351
352                 dpu_encoder_helper_unregister_irq(phys_enc, INTR_IDX_UNDERRUN);
353                 dpu_encoder_phys_cmd_control_vblank_irq(phys_enc, false);
354                 dpu_encoder_helper_unregister_irq(phys_enc, INTR_IDX_PINGPONG);
355         }
356 }
357
358 static void dpu_encoder_phys_cmd_tearcheck_config(
359                 struct dpu_encoder_phys *phys_enc)
360 {
361         struct dpu_encoder_phys_cmd *cmd_enc =
362                 to_dpu_encoder_phys_cmd(phys_enc);
363         struct dpu_hw_tear_check tc_cfg = { 0 };
364         struct drm_display_mode *mode;
365         bool tc_enable = true;
366         u32 vsync_hz;
367         struct msm_drm_private *priv;
368         struct dpu_kms *dpu_kms;
369
370         if (!phys_enc || !phys_enc->hw_pp) {
371                 DPU_ERROR("invalid encoder\n");
372                 return;
373         }
374         mode = &phys_enc->cached_mode;
375
376         DPU_DEBUG_CMDENC(cmd_enc, "pp %d\n", phys_enc->hw_pp->idx - PINGPONG_0);
377
378         if (!phys_enc->hw_pp->ops.setup_tearcheck ||
379                 !phys_enc->hw_pp->ops.enable_tearcheck) {
380                 DPU_DEBUG_CMDENC(cmd_enc, "tearcheck not supported\n");
381                 return;
382         }
383
384         dpu_kms = phys_enc->dpu_kms;
385         if (!dpu_kms || !dpu_kms->dev || !dpu_kms->dev->dev_private) {
386                 DPU_ERROR("invalid device\n");
387                 return;
388         }
389         priv = dpu_kms->dev->dev_private;
390
391         /*
392          * TE default: dsi byte clock calculated based on 70 fps;
393          * around 14 ms to complete a kickoff cycle if TE is disabled;
394          * vclk_line based on 60 fps; write is faster than read;
395          * init == start == rdptr;
396          *
397          * vsync_count is the ratio of the MDP VSYNC clock frequency to the
398          * LCD panel frequency, divided by the number of rows (lines) in the LCD panel.
399          */
400         vsync_hz = dpu_kms_get_clk_rate(dpu_kms, "vsync");
401         if (!vsync_hz) {
402                 DPU_DEBUG_CMDENC(cmd_enc, "invalid - vsync_hz %u\n",
403                                  vsync_hz);
404                 return;
405         }
406
407         tc_cfg.vsync_count = vsync_hz /
408                                 (mode->vtotal * drm_mode_vrefresh(mode));
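        /*
         * Illustrative example (hypothetical numbers, not from any specific
         * panel): with a 19.2 MHz vsync clock and a mode with vtotal = 2000
         * lines at a 60 Hz refresh rate, vsync_count = 19200000 / (2000 * 60)
         * = 160 vsync-clock ticks per panel line.
         */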
409
410         /* enable external TE after kickoff to avoid premature autorefresh */
411         tc_cfg.hw_vsync_mode = 0;
412
413         /*
414          * By setting sync_cfg_height to near the max register value, we essentially
415          * disable the dpu hw generated TE signal, since the hw TE will arrive first.
416          * The only caveat is if, due to an error, we hit wrap-around.
417          */
418         tc_cfg.sync_cfg_height = 0xFFF0;
419         tc_cfg.vsync_init_val = mode->vdisplay;
420         tc_cfg.sync_threshold_start = DEFAULT_TEARCHECK_SYNC_THRESH_START;
421         tc_cfg.sync_threshold_continue = DEFAULT_TEARCHECK_SYNC_THRESH_CONTINUE;
422         tc_cfg.start_pos = mode->vdisplay;
423         tc_cfg.rd_ptr_irq = mode->vdisplay + 1;
424
425         DPU_DEBUG_CMDENC(cmd_enc,
426                 "tc %d vsync_clk_speed_hz %u vtotal %u vrefresh %u\n",
427                 phys_enc->hw_pp->idx - PINGPONG_0, vsync_hz,
428                 mode->vtotal, drm_mode_vrefresh(mode));
429         DPU_DEBUG_CMDENC(cmd_enc,
430                 "tc %d enable %u start_pos %u rd_ptr_irq %u\n",
431                 phys_enc->hw_pp->idx - PINGPONG_0, tc_enable, tc_cfg.start_pos,
432                 tc_cfg.rd_ptr_irq);
433         DPU_DEBUG_CMDENC(cmd_enc,
434                 "tc %d hw_vsync_mode %u vsync_count %u vsync_init_val %u\n",
435                 phys_enc->hw_pp->idx - PINGPONG_0, tc_cfg.hw_vsync_mode,
436                 tc_cfg.vsync_count, tc_cfg.vsync_init_val);
437         DPU_DEBUG_CMDENC(cmd_enc,
438                 "tc %d cfgheight %u thresh_start %u thresh_cont %u\n",
439                 phys_enc->hw_pp->idx - PINGPONG_0, tc_cfg.sync_cfg_height,
440                 tc_cfg.sync_threshold_start, tc_cfg.sync_threshold_continue);
441
442         phys_enc->hw_pp->ops.setup_tearcheck(phys_enc->hw_pp, &tc_cfg);
443         phys_enc->hw_pp->ops.enable_tearcheck(phys_enc->hw_pp, tc_enable);
444 }
445
446 static void _dpu_encoder_phys_cmd_pingpong_config(
447                 struct dpu_encoder_phys *phys_enc)
448 {
449         struct dpu_encoder_phys_cmd *cmd_enc =
450                 to_dpu_encoder_phys_cmd(phys_enc);
451
452         if (!phys_enc || !phys_enc->hw_ctl || !phys_enc->hw_pp
453                         || !phys_enc->hw_ctl->ops.setup_intf_cfg) {
454                 DPU_ERROR("invalid arg(s), enc %d\n", phys_enc != NULL);
455                 return;
456         }
457
458         DPU_DEBUG_CMDENC(cmd_enc, "pp %d, enabling mode:\n",
459                         phys_enc->hw_pp->idx - PINGPONG_0);
460         drm_mode_debug_printmodeline(&phys_enc->cached_mode);
461
462         _dpu_encoder_phys_cmd_update_intf_cfg(phys_enc);
463         dpu_encoder_phys_cmd_tearcheck_config(phys_enc);
464 }
465
466 static bool dpu_encoder_phys_cmd_needs_single_flush(
467                 struct dpu_encoder_phys *phys_enc)
468 {
469         /*
470          * we do separate flush for each CTL and let
471          * CTL_START synchronize them
472          */
473         return false;
474 }
475
476 static void dpu_encoder_phys_cmd_enable_helper(
477                 struct dpu_encoder_phys *phys_enc)
478 {
479         struct dpu_hw_ctl *ctl;
480         u32 flush_mask = 0;
481
482         if (!phys_enc || !phys_enc->hw_ctl || !phys_enc->hw_pp) {
483                 DPU_ERROR("invalid arg(s), encoder %d\n", phys_enc != NULL);
484                 return;
485         }
486
487         dpu_encoder_helper_split_config(phys_enc, phys_enc->intf_idx);
488
489         _dpu_encoder_phys_cmd_pingpong_config(phys_enc);
490
491         if (!dpu_encoder_phys_cmd_is_master(phys_enc))
492                 return;
493
494         ctl = phys_enc->hw_ctl;
495         ctl->ops.get_bitmask_intf(ctl, &flush_mask, phys_enc->intf_idx);
496         ctl->ops.update_pending_flush(ctl, flush_mask);
497 }
498
499 static void dpu_encoder_phys_cmd_enable(struct dpu_encoder_phys *phys_enc)
500 {
501         struct dpu_encoder_phys_cmd *cmd_enc =
502                 to_dpu_encoder_phys_cmd(phys_enc);
503
504         if (!phys_enc || !phys_enc->hw_pp) {
505                 DPU_ERROR("invalid phys encoder\n");
506                 return;
507         }
508
509         DPU_DEBUG_CMDENC(cmd_enc, "pp %d\n", phys_enc->hw_pp->idx - PINGPONG_0);
510
511         if (phys_enc->enable_state == DPU_ENC_ENABLED) {
512                 DPU_ERROR("already enabled\n");
513                 return;
514         }
515
516         dpu_encoder_phys_cmd_enable_helper(phys_enc);
517         phys_enc->enable_state = DPU_ENC_ENABLED;
518 }
519
520 static void _dpu_encoder_phys_cmd_connect_te(
521                 struct dpu_encoder_phys *phys_enc, bool enable)
522 {
523         if (!phys_enc || !phys_enc->hw_pp ||
524                         !phys_enc->hw_pp->ops.connect_external_te)
525                 return;
526
527         trace_dpu_enc_phys_cmd_connect_te(DRMID(phys_enc->parent), enable);
528         phys_enc->hw_pp->ops.connect_external_te(phys_enc->hw_pp, enable);
529 }
530
531 static void dpu_encoder_phys_cmd_prepare_idle_pc(
532                 struct dpu_encoder_phys *phys_enc)
533 {
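        /*
         * Disconnect the external TE pin before idle power-collapse; it is
         * reconnected from dpu_encoder_phys_cmd_handle_post_kickoff().
         */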
534         _dpu_encoder_phys_cmd_connect_te(phys_enc, false);
535 }
536
537 static int dpu_encoder_phys_cmd_get_line_count(
538                 struct dpu_encoder_phys *phys_enc)
539 {
540         struct dpu_hw_pingpong *hw_pp;
541
542         if (!phys_enc || !phys_enc->hw_pp)
543                 return -EINVAL;
544
545         if (!dpu_encoder_phys_cmd_is_master(phys_enc))
546                 return -EINVAL;
547
548         hw_pp = phys_enc->hw_pp;
549         if (!hw_pp->ops.get_line_count)
550                 return -EINVAL;
551
552         return hw_pp->ops.get_line_count(hw_pp);
553 }
554
555 static void dpu_encoder_phys_cmd_disable(struct dpu_encoder_phys *phys_enc)
556 {
557         struct dpu_encoder_phys_cmd *cmd_enc =
558                 to_dpu_encoder_phys_cmd(phys_enc);
559
560         if (!phys_enc || !phys_enc->hw_pp) {
561                 DPU_ERROR("invalid encoder\n");
562                 return;
563         }
564         DRM_DEBUG_KMS("id:%u pp:%d state:%d\n", DRMID(phys_enc->parent),
565                       phys_enc->hw_pp->idx - PINGPONG_0,
566                       phys_enc->enable_state);
567
568         if (phys_enc->enable_state == DPU_ENC_DISABLED) {
569                 DPU_ERROR_CMDENC(cmd_enc, "already disabled\n");
570                 return;
571         }
572
573         if (phys_enc->hw_pp->ops.enable_tearcheck)
574                 phys_enc->hw_pp->ops.enable_tearcheck(phys_enc->hw_pp, false);
575         phys_enc->enable_state = DPU_ENC_DISABLED;
576 }
577
578 static void dpu_encoder_phys_cmd_destroy(struct dpu_encoder_phys *phys_enc)
579 {
580         struct dpu_encoder_phys_cmd *cmd_enc =
581                 to_dpu_encoder_phys_cmd(phys_enc);
582
583         if (!phys_enc) {
584                 DPU_ERROR("invalid encoder\n");
585                 return;
586         }
587         kfree(cmd_enc);
588 }
589
590 static void dpu_encoder_phys_cmd_get_hw_resources(
591                 struct dpu_encoder_phys *phys_enc,
592                 struct dpu_encoder_hw_resources *hw_res)
593 {
594         hw_res->intfs[phys_enc->intf_idx - INTF_0] = INTF_MODE_CMD;
595 }
596
597 static void dpu_encoder_phys_cmd_prepare_for_kickoff(
598                 struct dpu_encoder_phys *phys_enc)
599 {
600         struct dpu_encoder_phys_cmd *cmd_enc =
601                         to_dpu_encoder_phys_cmd(phys_enc);
602         int ret;
603
604         if (!phys_enc || !phys_enc->hw_pp) {
605                 DPU_ERROR("invalid encoder\n");
606                 return;
607         }
608         DRM_DEBUG_KMS("id:%u pp:%d pending_cnt:%d\n", DRMID(phys_enc->parent),
609                       phys_enc->hw_pp->idx - PINGPONG_0,
610                       atomic_read(&phys_enc->pending_kickoff_cnt));
611
612         /*
613          * Mark kickoff request as outstanding. If there is more than one
614          * outstanding request, we have to wait for the previous one to complete.
615          */
616         ret = _dpu_encoder_phys_cmd_wait_for_idle(phys_enc);
617         if (ret) {
618                 /* force pending_kickoff_cnt 0 to discard failed kickoff */
619                 atomic_set(&phys_enc->pending_kickoff_cnt, 0);
620                 DRM_ERROR("failed wait_for_idle: id:%u ret:%d pp:%d\n",
621                           DRMID(phys_enc->parent), ret,
622                           phys_enc->hw_pp->idx - PINGPONG_0);
623         }
624
625         DPU_DEBUG_CMDENC(cmd_enc, "pp:%d pending_cnt %d\n",
626                         phys_enc->hw_pp->idx - PINGPONG_0,
627                         atomic_read(&phys_enc->pending_kickoff_cnt));
628 }
629
630 static int _dpu_encoder_phys_cmd_wait_for_ctl_start(
631                 struct dpu_encoder_phys *phys_enc)
632 {
633         struct dpu_encoder_phys_cmd *cmd_enc =
634                         to_dpu_encoder_phys_cmd(phys_enc);
635         struct dpu_encoder_wait_info wait_info;
636         int ret;
637
638         if (!phys_enc || !phys_enc->hw_ctl) {
639                 DPU_ERROR("invalid argument(s)\n");
640                 return -EINVAL;
641         }
642
643         wait_info.wq = &phys_enc->pending_kickoff_wq;
644         wait_info.atomic_cnt = &phys_enc->pending_ctlstart_cnt;
645         wait_info.timeout_ms = KICKOFF_TIMEOUT_MS;
646
647         ret = dpu_encoder_helper_wait_for_irq(phys_enc, INTR_IDX_CTL_START,
648                         &wait_info);
649         if (ret == -ETIMEDOUT) {
650                 DPU_ERROR_CMDENC(cmd_enc, "ctl start interrupt wait failed\n");
651                 ret = -EINVAL;
652         }
654
655         return ret;
656 }
657
658 static int dpu_encoder_phys_cmd_wait_for_tx_complete(
659                 struct dpu_encoder_phys *phys_enc)
660 {
661         int rc;
662         struct dpu_encoder_phys_cmd *cmd_enc;
663
664         if (!phys_enc)
665                 return -EINVAL;
666
667         cmd_enc = to_dpu_encoder_phys_cmd(phys_enc);
668
669         rc = _dpu_encoder_phys_cmd_wait_for_idle(phys_enc);
670         if (rc) {
671                 DRM_ERROR("failed wait_for_idle: id:%u ret:%d intf:%d\n",
672                           DRMID(phys_enc->parent), rc,
673                           phys_enc->intf_idx - INTF_0);
674         }
675
676         return rc;
677 }
678
679 static int dpu_encoder_phys_cmd_wait_for_commit_done(
680                 struct dpu_encoder_phys *phys_enc)
681 {
682         int rc = 0;
683         struct dpu_encoder_phys_cmd *cmd_enc;
684
685         if (!phys_enc)
686                 return -EINVAL;
687
688         cmd_enc = to_dpu_encoder_phys_cmd(phys_enc);
689
690         /* only required for master controller */
691         if (dpu_encoder_phys_cmd_is_master(phys_enc))
692                 rc = _dpu_encoder_phys_cmd_wait_for_ctl_start(phys_enc);
693
694         /* required for both controllers */
695         if (!rc && cmd_enc->serialize_wait4pp)
696                 dpu_encoder_phys_cmd_prepare_for_kickoff(phys_enc);
697
698         return rc;
699 }
700
701 static int dpu_encoder_phys_cmd_wait_for_vblank(
702                 struct dpu_encoder_phys *phys_enc)
703 {
704         int rc = 0;
705         struct dpu_encoder_phys_cmd *cmd_enc;
706         struct dpu_encoder_wait_info wait_info;
707
708         if (!phys_enc)
709                 return -EINVAL;
710
711         cmd_enc = to_dpu_encoder_phys_cmd(phys_enc);
712
713         /* only required for master controller */
714         if (!dpu_encoder_phys_cmd_is_master(phys_enc))
715                 return rc;
716
717         wait_info.wq = &cmd_enc->pending_vblank_wq;
718         wait_info.atomic_cnt = &cmd_enc->pending_vblank_cnt;
719         wait_info.timeout_ms = KICKOFF_TIMEOUT_MS;
720
721         atomic_inc(&cmd_enc->pending_vblank_cnt);
722
723         rc = dpu_encoder_helper_wait_for_irq(phys_enc, INTR_IDX_RDPTR,
724                         &wait_info);
725
726         return rc;
727 }
728
729 static void dpu_encoder_phys_cmd_handle_post_kickoff(
730                 struct dpu_encoder_phys *phys_enc)
731 {
732         if (!phys_enc)
733                 return;
734
735         /*
736          * re-enable external TE, either for the first time after enabling
737          * or if disabled for Autorefresh
738          */
739         _dpu_encoder_phys_cmd_connect_te(phys_enc, true);
740 }
741
742 static void dpu_encoder_phys_cmd_trigger_start(
743                 struct dpu_encoder_phys *phys_enc)
744 {
745         if (!phys_enc)
746                 return;
747
748         dpu_encoder_helper_trigger_start(phys_enc);
749 }
750
751 static void dpu_encoder_phys_cmd_init_ops(
752                 struct dpu_encoder_phys_ops *ops)
753 {
754         ops->is_master = dpu_encoder_phys_cmd_is_master;
755         ops->mode_set = dpu_encoder_phys_cmd_mode_set;
756         ops->mode_fixup = dpu_encoder_phys_cmd_mode_fixup;
757         ops->enable = dpu_encoder_phys_cmd_enable;
758         ops->disable = dpu_encoder_phys_cmd_disable;
759         ops->destroy = dpu_encoder_phys_cmd_destroy;
760         ops->get_hw_resources = dpu_encoder_phys_cmd_get_hw_resources;
761         ops->control_vblank_irq = dpu_encoder_phys_cmd_control_vblank_irq;
762         ops->wait_for_commit_done = dpu_encoder_phys_cmd_wait_for_commit_done;
763         ops->prepare_for_kickoff = dpu_encoder_phys_cmd_prepare_for_kickoff;
764         ops->wait_for_tx_complete = dpu_encoder_phys_cmd_wait_for_tx_complete;
765         ops->wait_for_vblank = dpu_encoder_phys_cmd_wait_for_vblank;
766         ops->trigger_start = dpu_encoder_phys_cmd_trigger_start;
767         ops->needs_single_flush = dpu_encoder_phys_cmd_needs_single_flush;
768         ops->irq_control = dpu_encoder_phys_cmd_irq_control;
769         ops->restore = dpu_encoder_phys_cmd_enable_helper;
770         ops->prepare_idle_pc = dpu_encoder_phys_cmd_prepare_idle_pc;
771         ops->handle_post_kickoff = dpu_encoder_phys_cmd_handle_post_kickoff;
772         ops->get_line_count = dpu_encoder_phys_cmd_get_line_count;
773 }
774
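/**
 * dpu_encoder_phys_cmd_init - construct a command-mode physical encoder
 * @p: init parameters, including the parent encoder, interface index and
 *     dpu_kms handle
 *
 * Return: pointer to the embedded struct dpu_encoder_phys on success,
 * ERR_PTR(-ENOMEM) if the allocation fails.
 */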
775 struct dpu_encoder_phys *dpu_encoder_phys_cmd_init(
776                 struct dpu_enc_phys_init_params *p)
777 {
778         struct dpu_encoder_phys *phys_enc = NULL;
779         struct dpu_encoder_phys_cmd *cmd_enc = NULL;
780         struct dpu_encoder_irq *irq;
781         int i, ret = 0;
782
783         DPU_DEBUG("intf %d\n", p->intf_idx - INTF_0);
784
785         cmd_enc = kzalloc(sizeof(*cmd_enc), GFP_KERNEL);
786         if (!cmd_enc) {
787                 ret = -ENOMEM;
788                 DPU_ERROR("failed to allocate\n");
789                 return ERR_PTR(ret);
790         }
791         phys_enc = &cmd_enc->base;
792         phys_enc->hw_mdptop = p->dpu_kms->hw_mdp;
793         phys_enc->intf_idx = p->intf_idx;
794
795         dpu_encoder_phys_cmd_init_ops(&phys_enc->ops);
796         phys_enc->parent = p->parent;
797         phys_enc->parent_ops = p->parent_ops;
798         phys_enc->dpu_kms = p->dpu_kms;
799         phys_enc->split_role = p->split_role;
800         phys_enc->intf_mode = INTF_MODE_CMD;
801         phys_enc->enc_spinlock = p->enc_spinlock;
802         cmd_enc->stream_sel = 0;
803         phys_enc->enable_state = DPU_ENC_DISABLED;
804         for (i = 0; i < INTR_IDX_MAX; i++) {
805                 irq = &phys_enc->irq[i];
806                 INIT_LIST_HEAD(&irq->cb.list);
807                 irq->irq_idx = -EINVAL;
808                 irq->hw_idx = -EINVAL;
809                 irq->cb.arg = phys_enc;
810         }
811
812         irq = &phys_enc->irq[INTR_IDX_CTL_START];
813         irq->name = "ctl_start";
814         irq->intr_type = DPU_IRQ_TYPE_CTL_START;
815         irq->intr_idx = INTR_IDX_CTL_START;
816         irq->cb.func = dpu_encoder_phys_cmd_ctl_start_irq;
817
818         irq = &phys_enc->irq[INTR_IDX_PINGPONG];
819         irq->name = "pp_done";
820         irq->intr_type = DPU_IRQ_TYPE_PING_PONG_COMP;
821         irq->intr_idx = INTR_IDX_PINGPONG;
822         irq->cb.func = dpu_encoder_phys_cmd_pp_tx_done_irq;
823
824         irq = &phys_enc->irq[INTR_IDX_RDPTR];
825         irq->name = "pp_rd_ptr";
826         irq->intr_type = DPU_IRQ_TYPE_PING_PONG_RD_PTR;
827         irq->intr_idx = INTR_IDX_RDPTR;
828         irq->cb.func = dpu_encoder_phys_cmd_pp_rd_ptr_irq;
829
830         irq = &phys_enc->irq[INTR_IDX_UNDERRUN];
831         irq->name = "underrun";
832         irq->intr_type = DPU_IRQ_TYPE_INTF_UNDER_RUN;
833         irq->intr_idx = INTR_IDX_UNDERRUN;
834         irq->cb.func = dpu_encoder_phys_cmd_underrun_irq;
835
836         atomic_set(&phys_enc->vblank_refcount, 0);
837         atomic_set(&phys_enc->pending_kickoff_cnt, 0);
838         atomic_set(&phys_enc->pending_ctlstart_cnt, 0);
839         atomic_set(&cmd_enc->pending_vblank_cnt, 0);
840         init_waitqueue_head(&phys_enc->pending_kickoff_wq);
841         init_waitqueue_head(&cmd_enc->pending_vblank_wq);
842
843         DPU_DEBUG_CMDENC(cmd_enc, "created\n");
844
845         return phys_enc;
848 }