/*
 * Copyright (c) 2015-2018 The Linux Foundation. All rights reserved.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 and
 * only version 2 as published by the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 */
15 #define pr_fmt(fmt) "[drm:%s:%d] " fmt, __func__, __LINE__
16 #include "dpu_encoder_phys.h"
17 #include "dpu_hw_interrupts.h"
18 #include "dpu_core_irq.h"
19 #include "dpu_formats.h"
20 #include "dpu_trace.h"
/* Debug/error helpers that tag messages with the parent encoder id and
 * interface index; both tolerate a NULL encoder pointer (print -1).
 */
#define DPU_DEBUG_CMDENC(e, fmt, ...) DPU_DEBUG("enc%d intf%d " fmt, \
		(e) && (e)->base.parent ? \
		(e)->base.parent->base.id : -1, \
		(e) ? (e)->base.intf_idx - INTF_0 : -1, ##__VA_ARGS__)

#define DPU_ERROR_CMDENC(e, fmt, ...) DPU_ERROR("enc%d intf%d " fmt, \
		(e) && (e)->base.parent ? \
		(e)->base.parent->base.id : -1, \
		(e) ? (e)->base.intf_idx - INTF_0 : -1, ##__VA_ARGS__)

#define to_dpu_encoder_phys_cmd(x) \
	container_of(x, struct dpu_encoder_phys_cmd, base)

/* consecutive pp-done timeouts tolerated before declaring the panel dead */
#define PP_TIMEOUT_MAX_TRIALS	10

/*
 * Tearcheck sync start and continue thresholds are empirically found
 * based on common panels In the future, may want to allow panels to override
 * these default values
 */
#define DEFAULT_TEARCHECK_SYNC_THRESH_START	4
#define DEFAULT_TEARCHECK_SYNC_THRESH_CONTINUE	4

#define DPU_ENC_WR_PTR_START_TIMEOUT_US 20000
47 static bool dpu_encoder_phys_cmd_is_master(struct dpu_encoder_phys *phys_enc)
49 return (phys_enc->split_role != ENC_ROLE_SLAVE) ? true : false;
52 static bool dpu_encoder_phys_cmd_mode_fixup(
53 struct dpu_encoder_phys *phys_enc,
54 const struct drm_display_mode *mode,
55 struct drm_display_mode *adj_mode)
58 DPU_DEBUG_CMDENC(to_dpu_encoder_phys_cmd(phys_enc), "\n");
62 static void _dpu_encoder_phys_cmd_update_intf_cfg(
63 struct dpu_encoder_phys *phys_enc)
65 struct dpu_encoder_phys_cmd *cmd_enc =
66 to_dpu_encoder_phys_cmd(phys_enc);
67 struct dpu_hw_ctl *ctl;
68 struct dpu_hw_intf_cfg intf_cfg = { 0 };
73 ctl = phys_enc->hw_ctl;
74 if (!ctl || !ctl->ops.setup_intf_cfg)
77 intf_cfg.intf = phys_enc->intf_idx;
78 intf_cfg.intf_mode_sel = DPU_CTL_MODE_SEL_CMD;
79 intf_cfg.stream_sel = cmd_enc->stream_sel;
80 intf_cfg.mode_3d = dpu_encoder_helper_get_3d_blend_mode(phys_enc);
81 ctl->ops.setup_intf_cfg(ctl, &intf_cfg);
84 static void dpu_encoder_phys_cmd_pp_tx_done_irq(void *arg, int irq_idx)
86 struct dpu_encoder_phys *phys_enc = arg;
87 unsigned long lock_flags;
89 u32 event = DPU_ENCODER_FRAME_EVENT_DONE;
91 if (!phys_enc || !phys_enc->hw_pp)
94 DPU_ATRACE_BEGIN("pp_done_irq");
95 /* notify all synchronous clients first, then asynchronous clients */
96 if (phys_enc->parent_ops->handle_frame_done)
97 phys_enc->parent_ops->handle_frame_done(phys_enc->parent,
100 spin_lock_irqsave(phys_enc->enc_spinlock, lock_flags);
101 new_cnt = atomic_add_unless(&phys_enc->pending_kickoff_cnt, -1, 0);
102 spin_unlock_irqrestore(phys_enc->enc_spinlock, lock_flags);
104 trace_dpu_enc_phys_cmd_pp_tx_done(DRMID(phys_enc->parent),
105 phys_enc->hw_pp->idx - PINGPONG_0,
108 /* Signal any waiting atomic commit thread */
109 wake_up_all(&phys_enc->pending_kickoff_wq);
110 DPU_ATRACE_END("pp_done_irq");
113 static void dpu_encoder_phys_cmd_pp_rd_ptr_irq(void *arg, int irq_idx)
115 struct dpu_encoder_phys *phys_enc = arg;
116 struct dpu_encoder_phys_cmd *cmd_enc;
118 if (!phys_enc || !phys_enc->hw_pp)
121 DPU_ATRACE_BEGIN("rd_ptr_irq");
122 cmd_enc = to_dpu_encoder_phys_cmd(phys_enc);
124 if (phys_enc->parent_ops->handle_vblank_virt)
125 phys_enc->parent_ops->handle_vblank_virt(phys_enc->parent,
128 atomic_add_unless(&cmd_enc->pending_vblank_cnt, -1, 0);
129 wake_up_all(&cmd_enc->pending_vblank_wq);
130 DPU_ATRACE_END("rd_ptr_irq");
133 static void dpu_encoder_phys_cmd_ctl_start_irq(void *arg, int irq_idx)
135 struct dpu_encoder_phys *phys_enc = arg;
136 struct dpu_encoder_phys_cmd *cmd_enc;
138 if (!phys_enc || !phys_enc->hw_ctl)
141 DPU_ATRACE_BEGIN("ctl_start_irq");
142 cmd_enc = to_dpu_encoder_phys_cmd(phys_enc);
144 atomic_add_unless(&phys_enc->pending_ctlstart_cnt, -1, 0);
146 /* Signal any waiting ctl start interrupt */
147 wake_up_all(&phys_enc->pending_kickoff_wq);
148 DPU_ATRACE_END("ctl_start_irq");
151 static void dpu_encoder_phys_cmd_underrun_irq(void *arg, int irq_idx)
153 struct dpu_encoder_phys *phys_enc = arg;
158 if (phys_enc->parent_ops->handle_underrun_virt)
159 phys_enc->parent_ops->handle_underrun_virt(phys_enc->parent,
163 static void _dpu_encoder_phys_cmd_setup_irq_hw_idx(
164 struct dpu_encoder_phys *phys_enc)
166 struct dpu_encoder_irq *irq;
168 irq = &phys_enc->irq[INTR_IDX_CTL_START];
169 irq->hw_idx = phys_enc->hw_ctl->idx;
170 irq->irq_idx = -EINVAL;
172 irq = &phys_enc->irq[INTR_IDX_PINGPONG];
173 irq->hw_idx = phys_enc->hw_pp->idx;
174 irq->irq_idx = -EINVAL;
176 irq = &phys_enc->irq[INTR_IDX_RDPTR];
177 irq->hw_idx = phys_enc->hw_pp->idx;
178 irq->irq_idx = -EINVAL;
180 irq = &phys_enc->irq[INTR_IDX_UNDERRUN];
181 irq->hw_idx = phys_enc->intf_idx;
182 irq->irq_idx = -EINVAL;
185 static void dpu_encoder_phys_cmd_mode_set(
186 struct dpu_encoder_phys *phys_enc,
187 struct drm_display_mode *mode,
188 struct drm_display_mode *adj_mode)
190 struct dpu_encoder_phys_cmd *cmd_enc =
191 to_dpu_encoder_phys_cmd(phys_enc);
193 if (!phys_enc || !mode || !adj_mode) {
194 DPU_ERROR("invalid args\n");
197 phys_enc->cached_mode = *adj_mode;
198 DPU_DEBUG_CMDENC(cmd_enc, "caching mode:\n");
199 drm_mode_debug_printmodeline(adj_mode);
201 _dpu_encoder_phys_cmd_setup_irq_hw_idx(phys_enc);
204 static int _dpu_encoder_phys_cmd_handle_ppdone_timeout(
205 struct dpu_encoder_phys *phys_enc)
207 struct dpu_encoder_phys_cmd *cmd_enc =
208 to_dpu_encoder_phys_cmd(phys_enc);
209 u32 frame_event = DPU_ENCODER_FRAME_EVENT_ERROR;
212 if (!phys_enc || !phys_enc->hw_pp || !phys_enc->hw_ctl)
215 cmd_enc->pp_timeout_report_cnt++;
216 if (cmd_enc->pp_timeout_report_cnt == PP_TIMEOUT_MAX_TRIALS) {
217 frame_event |= DPU_ENCODER_FRAME_EVENT_PANEL_DEAD;
219 } else if (cmd_enc->pp_timeout_report_cnt == 1) {
223 trace_dpu_enc_phys_cmd_pdone_timeout(DRMID(phys_enc->parent),
224 phys_enc->hw_pp->idx - PINGPONG_0,
225 cmd_enc->pp_timeout_report_cnt,
226 atomic_read(&phys_enc->pending_kickoff_cnt),
229 /* to avoid flooding, only log first time, and "dead" time */
231 DRM_ERROR("id:%d pp:%d kickoff timeout %d cnt %d koff_cnt %d\n",
232 DRMID(phys_enc->parent),
233 phys_enc->hw_pp->idx - PINGPONG_0,
234 phys_enc->hw_ctl->idx - CTL_0,
235 cmd_enc->pp_timeout_report_cnt,
236 atomic_read(&phys_enc->pending_kickoff_cnt));
238 dpu_encoder_helper_unregister_irq(phys_enc, INTR_IDX_RDPTR);
241 atomic_add_unless(&phys_enc->pending_kickoff_cnt, -1, 0);
243 /* request a ctl reset before the next kickoff */
244 phys_enc->enable_state = DPU_ENC_ERR_NEEDS_HW_RESET;
246 if (phys_enc->parent_ops->handle_frame_done)
247 phys_enc->parent_ops->handle_frame_done(
248 phys_enc->parent, phys_enc, frame_event);
253 static int _dpu_encoder_phys_cmd_wait_for_idle(
254 struct dpu_encoder_phys *phys_enc)
256 struct dpu_encoder_phys_cmd *cmd_enc =
257 to_dpu_encoder_phys_cmd(phys_enc);
258 struct dpu_encoder_wait_info wait_info;
262 DPU_ERROR("invalid encoder\n");
266 wait_info.wq = &phys_enc->pending_kickoff_wq;
267 wait_info.atomic_cnt = &phys_enc->pending_kickoff_cnt;
268 wait_info.timeout_ms = KICKOFF_TIMEOUT_MS;
270 ret = dpu_encoder_helper_wait_for_irq(phys_enc, INTR_IDX_PINGPONG,
272 if (ret == -ETIMEDOUT)
273 _dpu_encoder_phys_cmd_handle_ppdone_timeout(phys_enc);
275 cmd_enc->pp_timeout_report_cnt = 0;
280 static int dpu_encoder_phys_cmd_control_vblank_irq(
281 struct dpu_encoder_phys *phys_enc,
287 if (!phys_enc || !phys_enc->hw_pp) {
288 DPU_ERROR("invalid encoder\n");
292 refcount = atomic_read(&phys_enc->vblank_refcount);
294 /* Slave encoders don't report vblank */
295 if (!dpu_encoder_phys_cmd_is_master(phys_enc))
298 /* protect against negative */
299 if (!enable && refcount == 0) {
304 DRM_DEBUG_KMS("id:%u pp:%d enable=%s/%d\n", DRMID(phys_enc->parent),
305 phys_enc->hw_pp->idx - PINGPONG_0,
306 enable ? "true" : "false", refcount);
308 if (enable && atomic_inc_return(&phys_enc->vblank_refcount) == 1)
309 ret = dpu_encoder_helper_register_irq(phys_enc, INTR_IDX_RDPTR);
310 else if (!enable && atomic_dec_return(&phys_enc->vblank_refcount) == 0)
311 ret = dpu_encoder_helper_unregister_irq(phys_enc,
316 DRM_ERROR("vblank irq err id:%u pp:%d ret:%d, enable %s/%d\n",
317 DRMID(phys_enc->parent),
318 phys_enc->hw_pp->idx - PINGPONG_0, ret,
319 enable ? "true" : "false", refcount);
325 static void dpu_encoder_phys_cmd_irq_control(struct dpu_encoder_phys *phys_enc,
328 struct dpu_encoder_phys_cmd *cmd_enc;
333 cmd_enc = to_dpu_encoder_phys_cmd(phys_enc);
335 trace_dpu_enc_phys_cmd_irq_ctrl(DRMID(phys_enc->parent),
336 phys_enc->hw_pp->idx - PINGPONG_0,
337 enable, atomic_read(&phys_enc->vblank_refcount));
340 dpu_encoder_helper_register_irq(phys_enc, INTR_IDX_PINGPONG);
341 dpu_encoder_helper_register_irq(phys_enc, INTR_IDX_UNDERRUN);
342 dpu_encoder_phys_cmd_control_vblank_irq(phys_enc, true);
344 if (dpu_encoder_phys_cmd_is_master(phys_enc))
345 dpu_encoder_helper_register_irq(phys_enc,
348 if (dpu_encoder_phys_cmd_is_master(phys_enc))
349 dpu_encoder_helper_unregister_irq(phys_enc,
352 dpu_encoder_helper_unregister_irq(phys_enc, INTR_IDX_UNDERRUN);
353 dpu_encoder_phys_cmd_control_vblank_irq(phys_enc, false);
354 dpu_encoder_helper_unregister_irq(phys_enc, INTR_IDX_PINGPONG);
358 static void dpu_encoder_phys_cmd_tearcheck_config(
359 struct dpu_encoder_phys *phys_enc)
361 struct dpu_encoder_phys_cmd *cmd_enc =
362 to_dpu_encoder_phys_cmd(phys_enc);
363 struct dpu_hw_tear_check tc_cfg = { 0 };
364 struct drm_display_mode *mode;
365 bool tc_enable = true;
367 struct msm_drm_private *priv;
368 struct dpu_kms *dpu_kms;
370 if (!phys_enc || !phys_enc->hw_pp) {
371 DPU_ERROR("invalid encoder\n");
374 mode = &phys_enc->cached_mode;
376 DPU_DEBUG_CMDENC(cmd_enc, "pp %d\n", phys_enc->hw_pp->idx - PINGPONG_0);
378 if (!phys_enc->hw_pp->ops.setup_tearcheck ||
379 !phys_enc->hw_pp->ops.enable_tearcheck) {
380 DPU_DEBUG_CMDENC(cmd_enc, "tearcheck not supported\n");
384 dpu_kms = phys_enc->dpu_kms;
385 if (!dpu_kms || !dpu_kms->dev || !dpu_kms->dev->dev_private) {
386 DPU_ERROR("invalid device\n");
389 priv = dpu_kms->dev->dev_private;
392 * TE default: dsi byte clock calculated base on 70 fps;
393 * around 14 ms to complete a kickoff cycle if te disabled;
394 * vclk_line base on 60 fps; write is faster than read;
395 * init == start == rdptr;
397 * vsync_count is ratio of MDP VSYNC clock frequency to LCD panel
398 * frequency divided by the no. of rows (lines) in the LCDpanel.
400 vsync_hz = dpu_kms_get_clk_rate(dpu_kms, "vsync");
402 DPU_DEBUG_CMDENC(cmd_enc, "invalid - vsync_hz %u\n",
407 tc_cfg.vsync_count = vsync_hz /
408 (mode->vtotal * drm_mode_vrefresh(mode));
410 /* enable external TE after kickoff to avoid premature autorefresh */
411 tc_cfg.hw_vsync_mode = 0;
414 * By setting sync_cfg_height to near max register value, we essentially
415 * disable dpu hw generated TE signal, since hw TE will arrive first.
416 * Only caveat is if due to error, we hit wrap-around.
418 tc_cfg.sync_cfg_height = 0xFFF0;
419 tc_cfg.vsync_init_val = mode->vdisplay;
420 tc_cfg.sync_threshold_start = DEFAULT_TEARCHECK_SYNC_THRESH_START;
421 tc_cfg.sync_threshold_continue = DEFAULT_TEARCHECK_SYNC_THRESH_CONTINUE;
422 tc_cfg.start_pos = mode->vdisplay;
423 tc_cfg.rd_ptr_irq = mode->vdisplay + 1;
425 DPU_DEBUG_CMDENC(cmd_enc,
426 "tc %d vsync_clk_speed_hz %u vtotal %u vrefresh %u\n",
427 phys_enc->hw_pp->idx - PINGPONG_0, vsync_hz,
428 mode->vtotal, drm_mode_vrefresh(mode));
429 DPU_DEBUG_CMDENC(cmd_enc,
430 "tc %d enable %u start_pos %u rd_ptr_irq %u\n",
431 phys_enc->hw_pp->idx - PINGPONG_0, tc_enable, tc_cfg.start_pos,
433 DPU_DEBUG_CMDENC(cmd_enc,
434 "tc %d hw_vsync_mode %u vsync_count %u vsync_init_val %u\n",
435 phys_enc->hw_pp->idx - PINGPONG_0, tc_cfg.hw_vsync_mode,
436 tc_cfg.vsync_count, tc_cfg.vsync_init_val);
437 DPU_DEBUG_CMDENC(cmd_enc,
438 "tc %d cfgheight %u thresh_start %u thresh_cont %u\n",
439 phys_enc->hw_pp->idx - PINGPONG_0, tc_cfg.sync_cfg_height,
440 tc_cfg.sync_threshold_start, tc_cfg.sync_threshold_continue);
442 phys_enc->hw_pp->ops.setup_tearcheck(phys_enc->hw_pp, &tc_cfg);
443 phys_enc->hw_pp->ops.enable_tearcheck(phys_enc->hw_pp, tc_enable);
446 static void _dpu_encoder_phys_cmd_pingpong_config(
447 struct dpu_encoder_phys *phys_enc)
449 struct dpu_encoder_phys_cmd *cmd_enc =
450 to_dpu_encoder_phys_cmd(phys_enc);
452 if (!phys_enc || !phys_enc->hw_ctl || !phys_enc->hw_pp
453 || !phys_enc->hw_ctl->ops.setup_intf_cfg) {
454 DPU_ERROR("invalid arg(s), enc %d\n", phys_enc != 0);
458 DPU_DEBUG_CMDENC(cmd_enc, "pp %d, enabling mode:\n",
459 phys_enc->hw_pp->idx - PINGPONG_0);
460 drm_mode_debug_printmodeline(&phys_enc->cached_mode);
462 _dpu_encoder_phys_cmd_update_intf_cfg(phys_enc);
463 dpu_encoder_phys_cmd_tearcheck_config(phys_enc);
466 static bool dpu_encoder_phys_cmd_needs_single_flush(
467 struct dpu_encoder_phys *phys_enc)
470 * we do separate flush for each CTL and let
471 * CTL_START synchronize them
476 static void dpu_encoder_phys_cmd_enable_helper(
477 struct dpu_encoder_phys *phys_enc)
479 struct dpu_hw_ctl *ctl;
482 if (!phys_enc || !phys_enc->hw_ctl || !phys_enc->hw_pp) {
483 DPU_ERROR("invalid arg(s), encoder %d\n", phys_enc != 0);
487 dpu_encoder_helper_split_config(phys_enc, phys_enc->intf_idx);
489 _dpu_encoder_phys_cmd_pingpong_config(phys_enc);
491 if (!dpu_encoder_phys_cmd_is_master(phys_enc))
494 ctl = phys_enc->hw_ctl;
495 ctl->ops.get_bitmask_intf(ctl, &flush_mask, phys_enc->intf_idx);
496 ctl->ops.update_pending_flush(ctl, flush_mask);
499 static void dpu_encoder_phys_cmd_enable(struct dpu_encoder_phys *phys_enc)
501 struct dpu_encoder_phys_cmd *cmd_enc =
502 to_dpu_encoder_phys_cmd(phys_enc);
504 if (!phys_enc || !phys_enc->hw_pp) {
505 DPU_ERROR("invalid phys encoder\n");
509 DPU_DEBUG_CMDENC(cmd_enc, "pp %d\n", phys_enc->hw_pp->idx - PINGPONG_0);
511 if (phys_enc->enable_state == DPU_ENC_ENABLED) {
512 DPU_ERROR("already enabled\n");
516 dpu_encoder_phys_cmd_enable_helper(phys_enc);
517 phys_enc->enable_state = DPU_ENC_ENABLED;
520 static void _dpu_encoder_phys_cmd_connect_te(
521 struct dpu_encoder_phys *phys_enc, bool enable)
523 if (!phys_enc || !phys_enc->hw_pp ||
524 !phys_enc->hw_pp->ops.connect_external_te)
527 trace_dpu_enc_phys_cmd_connect_te(DRMID(phys_enc->parent), enable);
528 phys_enc->hw_pp->ops.connect_external_te(phys_enc->hw_pp, enable);
531 static void dpu_encoder_phys_cmd_prepare_idle_pc(
532 struct dpu_encoder_phys *phys_enc)
534 _dpu_encoder_phys_cmd_connect_te(phys_enc, false);
537 static int dpu_encoder_phys_cmd_get_line_count(
538 struct dpu_encoder_phys *phys_enc)
540 struct dpu_hw_pingpong *hw_pp;
542 if (!phys_enc || !phys_enc->hw_pp)
545 if (!dpu_encoder_phys_cmd_is_master(phys_enc))
548 hw_pp = phys_enc->hw_pp;
549 if (!hw_pp->ops.get_line_count)
552 return hw_pp->ops.get_line_count(hw_pp);
555 static void dpu_encoder_phys_cmd_disable(struct dpu_encoder_phys *phys_enc)
557 struct dpu_encoder_phys_cmd *cmd_enc =
558 to_dpu_encoder_phys_cmd(phys_enc);
560 if (!phys_enc || !phys_enc->hw_pp) {
561 DPU_ERROR("invalid encoder\n");
564 DRM_DEBUG_KMS("id:%u pp:%d state:%d\n", DRMID(phys_enc->parent),
565 phys_enc->hw_pp->idx - PINGPONG_0,
566 phys_enc->enable_state);
568 if (phys_enc->enable_state == DPU_ENC_DISABLED) {
569 DPU_ERROR_CMDENC(cmd_enc, "already disabled\n");
573 if (phys_enc->hw_pp->ops.enable_tearcheck)
574 phys_enc->hw_pp->ops.enable_tearcheck(phys_enc->hw_pp, false);
575 phys_enc->enable_state = DPU_ENC_DISABLED;
/*
 * Free the encoder allocated by dpu_encoder_phys_cmd_init(); container_of
 * on the base pointer recovers the enclosing cmd encoder for kfree.
 */
static void dpu_encoder_phys_cmd_destroy(struct dpu_encoder_phys *phys_enc)
{
	struct dpu_encoder_phys_cmd *cmd_enc =
		to_dpu_encoder_phys_cmd(phys_enc);

	if (!phys_enc) {
		DPU_ERROR("invalid encoder\n");
		return;
	}
	kfree(cmd_enc);
}
590 static void dpu_encoder_phys_cmd_get_hw_resources(
591 struct dpu_encoder_phys *phys_enc,
592 struct dpu_encoder_hw_resources *hw_res)
594 hw_res->intfs[phys_enc->intf_idx - INTF_0] = INTF_MODE_CMD;
597 static void dpu_encoder_phys_cmd_prepare_for_kickoff(
598 struct dpu_encoder_phys *phys_enc)
600 struct dpu_encoder_phys_cmd *cmd_enc =
601 to_dpu_encoder_phys_cmd(phys_enc);
604 if (!phys_enc || !phys_enc->hw_pp) {
605 DPU_ERROR("invalid encoder\n");
608 DRM_DEBUG_KMS("id:%u pp:%d pending_cnt:%d\n", DRMID(phys_enc->parent),
609 phys_enc->hw_pp->idx - PINGPONG_0,
610 atomic_read(&phys_enc->pending_kickoff_cnt));
613 * Mark kickoff request as outstanding. If there are more than one,
614 * outstanding, then we have to wait for the previous one to complete
616 ret = _dpu_encoder_phys_cmd_wait_for_idle(phys_enc);
618 /* force pending_kickoff_cnt 0 to discard failed kickoff */
619 atomic_set(&phys_enc->pending_kickoff_cnt, 0);
620 DRM_ERROR("failed wait_for_idle: id:%u ret:%d pp:%d\n",
621 DRMID(phys_enc->parent), ret,
622 phys_enc->hw_pp->idx - PINGPONG_0);
625 DPU_DEBUG_CMDENC(cmd_enc, "pp:%d pending_cnt %d\n",
626 phys_enc->hw_pp->idx - PINGPONG_0,
627 atomic_read(&phys_enc->pending_kickoff_cnt));
630 static int _dpu_encoder_phys_cmd_wait_for_ctl_start(
631 struct dpu_encoder_phys *phys_enc)
633 struct dpu_encoder_phys_cmd *cmd_enc =
634 to_dpu_encoder_phys_cmd(phys_enc);
635 struct dpu_encoder_wait_info wait_info;
638 if (!phys_enc || !phys_enc->hw_ctl) {
639 DPU_ERROR("invalid argument(s)\n");
643 wait_info.wq = &phys_enc->pending_kickoff_wq;
644 wait_info.atomic_cnt = &phys_enc->pending_ctlstart_cnt;
645 wait_info.timeout_ms = KICKOFF_TIMEOUT_MS;
647 ret = dpu_encoder_helper_wait_for_irq(phys_enc, INTR_IDX_CTL_START,
649 if (ret == -ETIMEDOUT) {
650 DPU_ERROR_CMDENC(cmd_enc, "ctl start interrupt wait failed\n");
658 static int dpu_encoder_phys_cmd_wait_for_tx_complete(
659 struct dpu_encoder_phys *phys_enc)
662 struct dpu_encoder_phys_cmd *cmd_enc;
667 cmd_enc = to_dpu_encoder_phys_cmd(phys_enc);
669 rc = _dpu_encoder_phys_cmd_wait_for_idle(phys_enc);
671 DRM_ERROR("failed wait_for_idle: id:%u ret:%d intf:%d\n",
672 DRMID(phys_enc->parent), rc,
673 phys_enc->intf_idx - INTF_0);
679 static int dpu_encoder_phys_cmd_wait_for_commit_done(
680 struct dpu_encoder_phys *phys_enc)
683 struct dpu_encoder_phys_cmd *cmd_enc;
688 cmd_enc = to_dpu_encoder_phys_cmd(phys_enc);
690 /* only required for master controller */
691 if (dpu_encoder_phys_cmd_is_master(phys_enc))
692 rc = _dpu_encoder_phys_cmd_wait_for_ctl_start(phys_enc);
694 /* required for both controllers */
695 if (!rc && cmd_enc->serialize_wait4pp)
696 dpu_encoder_phys_cmd_prepare_for_kickoff(phys_enc);
701 static int dpu_encoder_phys_cmd_wait_for_vblank(
702 struct dpu_encoder_phys *phys_enc)
705 struct dpu_encoder_phys_cmd *cmd_enc;
706 struct dpu_encoder_wait_info wait_info;
711 cmd_enc = to_dpu_encoder_phys_cmd(phys_enc);
713 /* only required for master controller */
714 if (!dpu_encoder_phys_cmd_is_master(phys_enc))
717 wait_info.wq = &cmd_enc->pending_vblank_wq;
718 wait_info.atomic_cnt = &cmd_enc->pending_vblank_cnt;
719 wait_info.timeout_ms = KICKOFF_TIMEOUT_MS;
721 atomic_inc(&cmd_enc->pending_vblank_cnt);
723 rc = dpu_encoder_helper_wait_for_irq(phys_enc, INTR_IDX_RDPTR,
729 static void dpu_encoder_phys_cmd_handle_post_kickoff(
730 struct dpu_encoder_phys *phys_enc)
736 * re-enable external TE, either for the first time after enabling
737 * or if disabled for Autorefresh
739 _dpu_encoder_phys_cmd_connect_te(phys_enc, true);
/* Kick the frame: delegate to the shared trigger-start helper. */
static void dpu_encoder_phys_cmd_trigger_start(
		struct dpu_encoder_phys *phys_enc)
{
	if (!phys_enc)
		return;

	dpu_encoder_helper_trigger_start(phys_enc);
}
751 static void dpu_encoder_phys_cmd_init_ops(
752 struct dpu_encoder_phys_ops *ops)
754 ops->is_master = dpu_encoder_phys_cmd_is_master;
755 ops->mode_set = dpu_encoder_phys_cmd_mode_set;
756 ops->mode_fixup = dpu_encoder_phys_cmd_mode_fixup;
757 ops->enable = dpu_encoder_phys_cmd_enable;
758 ops->disable = dpu_encoder_phys_cmd_disable;
759 ops->destroy = dpu_encoder_phys_cmd_destroy;
760 ops->get_hw_resources = dpu_encoder_phys_cmd_get_hw_resources;
761 ops->control_vblank_irq = dpu_encoder_phys_cmd_control_vblank_irq;
762 ops->wait_for_commit_done = dpu_encoder_phys_cmd_wait_for_commit_done;
763 ops->prepare_for_kickoff = dpu_encoder_phys_cmd_prepare_for_kickoff;
764 ops->wait_for_tx_complete = dpu_encoder_phys_cmd_wait_for_tx_complete;
765 ops->wait_for_vblank = dpu_encoder_phys_cmd_wait_for_vblank;
766 ops->trigger_start = dpu_encoder_phys_cmd_trigger_start;
767 ops->needs_single_flush = dpu_encoder_phys_cmd_needs_single_flush;
768 ops->irq_control = dpu_encoder_phys_cmd_irq_control;
769 ops->restore = dpu_encoder_phys_cmd_enable_helper;
770 ops->prepare_idle_pc = dpu_encoder_phys_cmd_prepare_idle_pc;
771 ops->handle_post_kickoff = dpu_encoder_phys_cmd_handle_post_kickoff;
772 ops->get_line_count = dpu_encoder_phys_cmd_get_line_count;
775 struct dpu_encoder_phys *dpu_encoder_phys_cmd_init(
776 struct dpu_enc_phys_init_params *p)
778 struct dpu_encoder_phys *phys_enc = NULL;
779 struct dpu_encoder_phys_cmd *cmd_enc = NULL;
780 struct dpu_encoder_irq *irq;
783 DPU_DEBUG("intf %d\n", p->intf_idx - INTF_0);
785 cmd_enc = kzalloc(sizeof(*cmd_enc), GFP_KERNEL);
788 DPU_ERROR("failed to allocate\n");
791 phys_enc = &cmd_enc->base;
792 phys_enc->hw_mdptop = p->dpu_kms->hw_mdp;
793 phys_enc->intf_idx = p->intf_idx;
795 dpu_encoder_phys_cmd_init_ops(&phys_enc->ops);
796 phys_enc->parent = p->parent;
797 phys_enc->parent_ops = p->parent_ops;
798 phys_enc->dpu_kms = p->dpu_kms;
799 phys_enc->split_role = p->split_role;
800 phys_enc->intf_mode = INTF_MODE_CMD;
801 phys_enc->enc_spinlock = p->enc_spinlock;
802 cmd_enc->stream_sel = 0;
803 phys_enc->enable_state = DPU_ENC_DISABLED;
804 for (i = 0; i < INTR_IDX_MAX; i++) {
805 irq = &phys_enc->irq[i];
806 INIT_LIST_HEAD(&irq->cb.list);
807 irq->irq_idx = -EINVAL;
808 irq->hw_idx = -EINVAL;
809 irq->cb.arg = phys_enc;
812 irq = &phys_enc->irq[INTR_IDX_CTL_START];
813 irq->name = "ctl_start";
814 irq->intr_type = DPU_IRQ_TYPE_CTL_START;
815 irq->intr_idx = INTR_IDX_CTL_START;
816 irq->cb.func = dpu_encoder_phys_cmd_ctl_start_irq;
818 irq = &phys_enc->irq[INTR_IDX_PINGPONG];
819 irq->name = "pp_done";
820 irq->intr_type = DPU_IRQ_TYPE_PING_PONG_COMP;
821 irq->intr_idx = INTR_IDX_PINGPONG;
822 irq->cb.func = dpu_encoder_phys_cmd_pp_tx_done_irq;
824 irq = &phys_enc->irq[INTR_IDX_RDPTR];
825 irq->name = "pp_rd_ptr";
826 irq->intr_type = DPU_IRQ_TYPE_PING_PONG_RD_PTR;
827 irq->intr_idx = INTR_IDX_RDPTR;
828 irq->cb.func = dpu_encoder_phys_cmd_pp_rd_ptr_irq;
830 irq = &phys_enc->irq[INTR_IDX_UNDERRUN];
831 irq->name = "underrun";
832 irq->intr_type = DPU_IRQ_TYPE_INTF_UNDER_RUN;
833 irq->intr_idx = INTR_IDX_UNDERRUN;
834 irq->cb.func = dpu_encoder_phys_cmd_underrun_irq;
836 atomic_set(&phys_enc->vblank_refcount, 0);
837 atomic_set(&phys_enc->pending_kickoff_cnt, 0);
838 atomic_set(&phys_enc->pending_ctlstart_cnt, 0);
839 atomic_set(&cmd_enc->pending_vblank_cnt, 0);
840 init_waitqueue_head(&phys_enc->pending_kickoff_wq);
841 init_waitqueue_head(&cmd_enc->pending_vblank_wq);
843 DPU_DEBUG_CMDENC(cmd_enc, "created\n");