// SPDX-License-Identifier: GPL-2.0-only
/*
 * Copyright (c) 2022-2023 Qualcomm Innovation Center, Inc. All rights reserved.
 * Copyright (c) 2015-2018, The Linux Foundation. All rights reserved.
 */

#include "dpu_hwio.h"
#include "dpu_hw_catalog.h"
#include "dpu_hw_intf.h"
#include "dpu_kms.h"
#include "dpu_trace.h"

#include <linux/iopoll.h>

#define INTF_TIMING_ENGINE_EN 0x000
#define INTF_CONFIG 0x004
#define INTF_HSYNC_CTL 0x008
#define INTF_VSYNC_PERIOD_F0 0x00C
#define INTF_VSYNC_PERIOD_F1 0x010
#define INTF_VSYNC_PULSE_WIDTH_F0 0x014
#define INTF_VSYNC_PULSE_WIDTH_F1 0x018
#define INTF_DISPLAY_V_START_F0 0x01C
#define INTF_DISPLAY_V_START_F1 0x020
#define INTF_DISPLAY_V_END_F0 0x024
#define INTF_DISPLAY_V_END_F1 0x028
#define INTF_ACTIVE_V_START_F0 0x02C
#define INTF_ACTIVE_V_START_F1 0x030
#define INTF_ACTIVE_V_END_F0 0x034
#define INTF_ACTIVE_V_END_F1 0x038
#define INTF_DISPLAY_HCTL 0x03C
#define INTF_ACTIVE_HCTL 0x040
#define INTF_BORDER_COLOR 0x044
#define INTF_UNDERFLOW_COLOR 0x048
#define INTF_HSYNC_SKEW 0x04C
#define INTF_POLARITY_CTL 0x050
#define INTF_TEST_CTL 0x054
#define INTF_TP_COLOR0 0x058
#define INTF_TP_COLOR1 0x05C
#define INTF_CONFIG2 0x060
#define INTF_DISPLAY_DATA_HCTL 0x064
#define INTF_ACTIVE_DATA_HCTL 0x068

#define INTF_DSI_CMD_MODE_TRIGGER_EN 0x084
#define INTF_PANEL_FORMAT 0x090

#define INTF_FRAME_LINE_COUNT_EN 0x0A8
#define INTF_FRAME_COUNT 0x0AC
#define INTF_LINE_COUNT 0x0B0

#define INTF_DEFLICKER_CONFIG 0x0F0
#define INTF_DEFLICKER_STRNG_COEFF 0x0F4
#define INTF_DEFLICKER_WEAK_COEFF 0x0F8

#define INTF_TPG_ENABLE 0x100
#define INTF_TPG_MAIN_CONTROL 0x104
#define INTF_TPG_VIDEO_CONFIG 0x108
#define INTF_TPG_COMPONENT_LIMITS 0x10C
#define INTF_TPG_RECTANGLE 0x110
#define INTF_TPG_INITIAL_VALUE 0x114
#define INTF_TPG_BLK_WHITE_PATTERN_FRAMES 0x118
#define INTF_TPG_RGB_MAPPING 0x11C
#define INTF_PROG_FETCH_START 0x170
#define INTF_PROG_ROT_START 0x174

#define INTF_MISR_CTRL 0x180
#define INTF_MISR_SIGNATURE 0x184

#define INTF_MUX 0x25C
#define INTF_STATUS 0x26C
#define INTF_AVR_CONTROL 0x270
#define INTF_AVR_MODE 0x274
#define INTF_AVR_TRIGGER 0x278
#define INTF_AVR_VTOTAL 0x27C
#define INTF_TEAR_MDP_VSYNC_SEL 0x280
#define INTF_TEAR_TEAR_CHECK_EN 0x284
#define INTF_TEAR_SYNC_CONFIG_VSYNC 0x288
#define INTF_TEAR_SYNC_CONFIG_HEIGHT 0x28C
#define INTF_TEAR_SYNC_WRCOUNT 0x290
#define INTF_TEAR_VSYNC_INIT_VAL 0x294
#define INTF_TEAR_INT_COUNT_VAL 0x298
#define INTF_TEAR_SYNC_THRESH 0x29C
#define INTF_TEAR_START_POS 0x2A0
#define INTF_TEAR_RD_PTR_IRQ 0x2A4
#define INTF_TEAR_WR_PTR_IRQ 0x2A8
#define INTF_TEAR_OUT_LINE_COUNT 0x2AC
#define INTF_TEAR_LINE_COUNT 0x2B0
#define INTF_TEAR_AUTOREFRESH_CONFIG 0x2B4

#define INTF_CFG_ACTIVE_H_EN BIT(29)
#define INTF_CFG_ACTIVE_V_EN BIT(30)

#define INTF_CFG2_DATABUS_WIDEN BIT(0)
#define INTF_CFG2_DATA_HCTL_EN BIT(4)
#define INTF_CFG2_DCE_DATA_COMPRESS BIT(12)

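/*
 * dpu_hw_intf_setup_timing_engine - program the video mode timing generator
 * @ctx: DPU intf structure
 * @p: timing parameters (sync widths, porches, active size, polarities)
 * @fmt: output pixel format, used to build INTF_PANEL_FORMAT
 */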
static void dpu_hw_intf_setup_timing_engine(struct dpu_hw_intf *ctx,
		const struct dpu_hw_intf_timing_params *p,
		const struct dpu_format *fmt)
{
	struct dpu_hw_blk_reg_map *c = &ctx->hw;
	u32 hsync_period, vsync_period;
	u32 display_v_start, display_v_end;
	u32 hsync_start_x, hsync_end_x;
	u32 hsync_data_start_x, hsync_data_end_x;
	u32 active_h_start, active_h_end;
	u32 active_v_start, active_v_end;
	u32 active_hctl, display_hctl, hsync_ctl;
	u32 polarity_ctl, den_polarity;
	u32 panel_format;
	u32 intf_cfg, intf_cfg2 = 0;
	u32 display_data_hctl = 0, active_data_hctl = 0;
	u32 data_width;
	bool dp_intf = false;

	/* read interface_cfg */
	intf_cfg = DPU_REG_READ(c, INTF_CONFIG);

	if (ctx->cap->type == INTF_DP)
		dp_intf = true;

	hsync_period = p->hsync_pulse_width + p->h_back_porch + p->width +
			p->h_front_porch;
	vsync_period = p->vsync_pulse_width + p->v_back_porch + p->height +
			p->v_front_porch;

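	/*
	 * The vertical start/end values are expressed in pixel clocks (hence
	 * the scaling of the line counts by hsync_period) and offset by the
	 * hsync skew.
	 */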
	display_v_start = ((p->vsync_pulse_width + p->v_back_porch) *
			hsync_period) + p->hsync_skew;
	display_v_end = ((vsync_period - p->v_front_porch) * hsync_period) +
			p->hsync_skew - 1;

	hsync_start_x = p->h_back_porch + p->hsync_pulse_width;
	hsync_end_x = hsync_period - p->h_front_porch - 1;

	if (p->width != p->xres) { /* border fill added */
		active_h_start = hsync_start_x;
		active_h_end = active_h_start + p->xres - 1;
	} else {
		active_h_start = 0;
		active_h_end = 0;
	}

	if (p->height != p->yres) { /* border fill added */
		active_v_start = display_v_start;
		active_v_end = active_v_start + (p->yres * hsync_period) - 1;
	} else {
		active_v_start = 0;
		active_v_end = 0;
	}

	if (active_h_end) {
		active_hctl = (active_h_end << 16) | active_h_start;
		intf_cfg |= INTF_CFG_ACTIVE_H_EN;
	} else {
		active_hctl = 0;
	}

	if (active_v_end)
		intf_cfg |= INTF_CFG_ACTIVE_V_EN;

	hsync_ctl = (hsync_period << 16) | p->hsync_pulse_width;
	display_hctl = (hsync_end_x << 16) | hsync_start_x;

	/*
	 * DATA_HCTL_EN controls data timing which can be different from
	 * video timing. It is recommended to enable it for all cases, except
	 * if compression is enabled in 1 pixel per clock mode
	 */
	if (p->wide_bus_en)
		intf_cfg2 |= INTF_CFG2_DATABUS_WIDEN | INTF_CFG2_DATA_HCTL_EN;

	data_width = p->width;

	hsync_data_start_x = hsync_start_x;
	hsync_data_end_x = hsync_start_x + data_width - 1;

	display_data_hctl = (hsync_data_end_x << 16) | hsync_data_start_x;

	if (dp_intf) {
		/* DP timing adjustment */
		display_v_start += p->hsync_pulse_width + p->h_back_porch;
		display_v_end -= p->h_front_porch;

		active_h_start = hsync_start_x;
		active_h_end = active_h_start + p->xres - 1;
		active_v_start = display_v_start;
		active_v_end = active_v_start + (p->yres * hsync_period) - 1;

		active_hctl = (active_h_end << 16) | active_h_start;
		display_hctl = active_hctl;

		intf_cfg |= INTF_CFG_ACTIVE_H_EN | INTF_CFG_ACTIVE_V_EN;
	}

	den_polarity = 0;
	polarity_ctl = (den_polarity << 2) | /* DEN Polarity */
		(p->vsync_polarity << 1) | /* VSYNC Polarity */
		(p->hsync_polarity << 0); /* HSYNC Polarity */

	if (!DPU_FORMAT_IS_YUV(fmt))
		panel_format = (fmt->bits[C0_G_Y] |
				(fmt->bits[C1_B_Cb] << 2) |
				(fmt->bits[C2_R_Cr] << 4) |
				(0x21 << 8));
	else
		/* Interface treats all the pixel data in RGB888 format */
		panel_format = (COLOR_8BIT |
				(COLOR_8BIT << 2) |
				(COLOR_8BIT << 4) |
				(0x21 << 8));

	DPU_REG_WRITE(c, INTF_HSYNC_CTL, hsync_ctl);
	DPU_REG_WRITE(c, INTF_VSYNC_PERIOD_F0, vsync_period * hsync_period);
	DPU_REG_WRITE(c, INTF_VSYNC_PULSE_WIDTH_F0,
			p->vsync_pulse_width * hsync_period);
	DPU_REG_WRITE(c, INTF_DISPLAY_HCTL, display_hctl);
	DPU_REG_WRITE(c, INTF_DISPLAY_V_START_F0, display_v_start);
	DPU_REG_WRITE(c, INTF_DISPLAY_V_END_F0, display_v_end);
	DPU_REG_WRITE(c, INTF_ACTIVE_HCTL, active_hctl);
	DPU_REG_WRITE(c, INTF_ACTIVE_V_START_F0, active_v_start);
	DPU_REG_WRITE(c, INTF_ACTIVE_V_END_F0, active_v_end);
	DPU_REG_WRITE(c, INTF_BORDER_COLOR, p->border_clr);
	DPU_REG_WRITE(c, INTF_UNDERFLOW_COLOR, p->underflow_clr);
	DPU_REG_WRITE(c, INTF_HSYNC_SKEW, p->hsync_skew);
	DPU_REG_WRITE(c, INTF_POLARITY_CTL, polarity_ctl);
	DPU_REG_WRITE(c, INTF_FRAME_LINE_COUNT_EN, 0x3);
	DPU_REG_WRITE(c, INTF_CONFIG, intf_cfg);
	DPU_REG_WRITE(c, INTF_PANEL_FORMAT, panel_format);
	if (ctx->cap->features & BIT(DPU_DATA_HCTL_EN)) {
		DPU_REG_WRITE(c, INTF_CONFIG2, intf_cfg2);
		DPU_REG_WRITE(c, INTF_DISPLAY_DATA_HCTL, display_data_hctl);
		DPU_REG_WRITE(c, INTF_ACTIVE_DATA_HCTL, active_data_hctl);
	}
}

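/*
 * dpu_hw_intf_enable_timing_engine - start or stop the timing engine
 * @intf: DPU intf structure
 * @enable: non-zero to enable, zero to disable
 */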
static void dpu_hw_intf_enable_timing_engine(
		struct dpu_hw_intf *intf,
		u8 enable)
{
	struct dpu_hw_blk_reg_map *c = &intf->hw;
	/* Note: Display interface select is handled in top block hw layer */
	DPU_REG_WRITE(c, INTF_TIMING_ENGINE_EN, enable != 0);
}

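/*
 * dpu_hw_intf_setup_prg_fetch - configure the programmable fetch start point
 * @intf: DPU intf structure
 * @fetch: enable flag and fetch start position, which must lie outside the
 *         active lines (see the note in the function body)
 */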
static void dpu_hw_intf_setup_prg_fetch(
		struct dpu_hw_intf *intf,
		const struct dpu_hw_intf_prog_fetch *fetch)
{
	struct dpu_hw_blk_reg_map *c = &intf->hw;
	int fetch_enable;

	/*
	 * Fetch should always be outside the active lines. If the fetching
	 * is programmed within active region, hardware behavior is unknown.
	 */

	fetch_enable = DPU_REG_READ(c, INTF_CONFIG);
	if (fetch->enable) {
		fetch_enable |= BIT(31);
		DPU_REG_WRITE(c, INTF_PROG_FETCH_START,
				fetch->fetch_start);
	} else {
		fetch_enable &= ~BIT(31);
	}

	DPU_REG_WRITE(c, INTF_CONFIG, fetch_enable);
}

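/*
 * dpu_hw_intf_bind_pingpong_blk - select the PINGPONG block feeding this intf
 * @intf: DPU intf structure
 * @pp: PINGPONG index to bind, or 0 (none) to detach the interface (mux 0xf)
 */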
static void dpu_hw_intf_bind_pingpong_blk(
		struct dpu_hw_intf *intf,
		const enum dpu_pingpong pp)
{
	struct dpu_hw_blk_reg_map *c = &intf->hw;
	u32 mux_cfg;

	mux_cfg = DPU_REG_READ(c, INTF_MUX);
	mux_cfg &= ~0xf;

	if (pp)
		mux_cfg |= (pp - PINGPONG_0) & 0x7;
	else
		mux_cfg |= 0xf;

	DPU_REG_WRITE(c, INTF_MUX, mux_cfg);
}

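/*
 * dpu_hw_intf_get_status - report whether the interface is enabled and, if it
 * is, its current frame and line counters
 * @intf: DPU intf structure
 * @s: status structure filled in from the hardware registers
 */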
static void dpu_hw_intf_get_status(
		struct dpu_hw_intf *intf,
		struct dpu_hw_intf_status *s)
{
	struct dpu_hw_blk_reg_map *c = &intf->hw;
	unsigned long cap = intf->cap->features;

	if (cap & BIT(DPU_INTF_STATUS_SUPPORTED))
		s->is_en = DPU_REG_READ(c, INTF_STATUS) & BIT(0);
	else
		s->is_en = DPU_REG_READ(c, INTF_TIMING_ENGINE_EN);

	s->is_prog_fetch_en = !!(DPU_REG_READ(c, INTF_CONFIG) & BIT(31));
	if (s->is_en) {
		s->frame_count = DPU_REG_READ(c, INTF_FRAME_COUNT);
		s->line_count = DPU_REG_READ(c, INTF_LINE_COUNT);
	} else {
		s->line_count = 0;
		s->frame_count = 0;
	}
}

static u32 dpu_hw_intf_get_line_count(struct dpu_hw_intf *intf)
{
	struct dpu_hw_blk_reg_map *c;

	if (!intf)
		return 0;

	c = &intf->hw;

	return DPU_REG_READ(c, INTF_LINE_COUNT);
}

static void dpu_hw_intf_setup_misr(struct dpu_hw_intf *intf)
{
	dpu_hw_setup_misr(&intf->hw, INTF_MISR_CTRL, 0x1);
}

static int dpu_hw_intf_collect_misr(struct dpu_hw_intf *intf, u32 *misr_value)
{
	return dpu_hw_collect_misr(&intf->hw, INTF_MISR_CTRL, INTF_MISR_SIGNATURE, misr_value);
}

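/*
 * dpu_hw_intf_enable_te - program and enable the INTF tear check block
 * @intf: DPU intf structure
 * @te: tear check configuration; hw_vsync_mode selects the external TE input
 *      (BIT(20)) instead of the internal vsync counter alone
 */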
static int dpu_hw_intf_enable_te(struct dpu_hw_intf *intf,
		struct dpu_hw_tear_check *te)
{
	struct dpu_hw_blk_reg_map *c;
	int cfg;

	if (!intf)
		return -EINVAL;

	c = &intf->hw;

	cfg = BIT(19); /* VSYNC_COUNTER_EN */
	if (te->hw_vsync_mode)
		cfg |= BIT(20);

	cfg |= te->vsync_count;

	DPU_REG_WRITE(c, INTF_TEAR_SYNC_CONFIG_VSYNC, cfg);
	DPU_REG_WRITE(c, INTF_TEAR_SYNC_CONFIG_HEIGHT, te->sync_cfg_height);
	DPU_REG_WRITE(c, INTF_TEAR_VSYNC_INIT_VAL, te->vsync_init_val);
	DPU_REG_WRITE(c, INTF_TEAR_RD_PTR_IRQ, te->rd_ptr_irq);
	DPU_REG_WRITE(c, INTF_TEAR_START_POS, te->start_pos);
	DPU_REG_WRITE(c, INTF_TEAR_SYNC_THRESH,
			((te->sync_threshold_continue << 16) |
			 te->sync_threshold_start));
	DPU_REG_WRITE(c, INTF_TEAR_SYNC_WRCOUNT,
			(te->start_pos + te->sync_threshold_start + 1));

	DPU_REG_WRITE(c, INTF_TEAR_TEAR_CHECK_EN, 1);

	return 0;
}

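/*
 * dpu_hw_intf_setup_autorefresh_config - enable or disable hw autorefresh
 * @intf: DPU intf structure
 * @frame_count: autorefresh frame interval, programmed only when enabling
 * @enable: true to enable autorefresh, false to clear the enable bit
 */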
static void dpu_hw_intf_setup_autorefresh_config(struct dpu_hw_intf *intf,
		u32 frame_count, bool enable)
{
	struct dpu_hw_blk_reg_map *c;
	u32 refresh_cfg;

	c = &intf->hw;
	refresh_cfg = DPU_REG_READ(c, INTF_TEAR_AUTOREFRESH_CONFIG);
	if (enable)
		refresh_cfg = BIT(31) | frame_count;
	else
		refresh_cfg &= ~BIT(31);

	DPU_REG_WRITE(c, INTF_TEAR_AUTOREFRESH_CONFIG, refresh_cfg);
}

/*
 * dpu_hw_intf_get_autorefresh_config - Get autorefresh config from HW
 * @intf: DPU intf structure
 * @frame_count: Used to return the current frame count from hw
 *
 * Returns: True if autorefresh enabled, false if disabled.
 */
static bool dpu_hw_intf_get_autorefresh_config(struct dpu_hw_intf *intf,
		u32 *frame_count)
{
	u32 val = DPU_REG_READ(&intf->hw, INTF_TEAR_AUTOREFRESH_CONFIG);

	if (frame_count != NULL)
		*frame_count = val & 0xffff;
	return !!((val & BIT(31)) >> 31);
}

static int dpu_hw_intf_disable_te(struct dpu_hw_intf *intf)
{
	struct dpu_hw_blk_reg_map *c;

	if (!intf)
		return -EINVAL;

	c = &intf->hw;
	DPU_REG_WRITE(c, INTF_TEAR_TEAR_CHECK_EN, 0);
	return 0;
}

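/*
 * dpu_hw_intf_connect_external_te - switch tear check to/from the external TE
 * @intf: DPU intf structure
 * @enable_external_te: true to use the external TE signal, false to disconnect
 *
 * Returns: the previous state of the external TE bit, so callers can restore it.
 */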
static int dpu_hw_intf_connect_external_te(struct dpu_hw_intf *intf,
		bool enable_external_te)
{
	struct dpu_hw_blk_reg_map *c = &intf->hw;
	u32 cfg;
	int orig;

	if (!intf)
		return -EINVAL;

	c = &intf->hw;
	cfg = DPU_REG_READ(c, INTF_TEAR_SYNC_CONFIG_VSYNC);
	orig = (bool)(cfg & BIT(20));
	if (enable_external_te)
		cfg |= BIT(20);
	else
		cfg &= ~BIT(20);
	DPU_REG_WRITE(c, INTF_TEAR_SYNC_CONFIG_VSYNC, cfg);
	trace_dpu_intf_connect_ext_te(intf->idx - INTF_0, cfg);

	return orig;
}

static int dpu_hw_intf_get_vsync_info(struct dpu_hw_intf *intf,
		struct dpu_hw_pp_vsync_info *info)
{
	struct dpu_hw_blk_reg_map *c = &intf->hw;
	u32 val;

	if (!intf || !info)
		return -EINVAL;

	c = &intf->hw;

	val = DPU_REG_READ(c, INTF_TEAR_VSYNC_INIT_VAL);
	info->rd_ptr_init_val = val & 0xffff;

	val = DPU_REG_READ(c, INTF_TEAR_INT_COUNT_VAL);
	info->rd_ptr_frame_count = (val & 0xffff0000) >> 16;
	info->rd_ptr_line_count = val & 0xffff;

	val = DPU_REG_READ(c, INTF_TEAR_LINE_COUNT);
	info->wr_ptr_line_count = val & 0xffff;

	val = DPU_REG_READ(c, INTF_FRAME_COUNT);
	info->intf_frame_count = val;

	return 0;
}

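/*
 * dpu_hw_intf_vsync_sel - select the MDP vsync source for the tear check block
 * @intf: DPU intf structure
 * @vsync_source: vsync source index, only the low 4 bits are programmed
 */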
static void dpu_hw_intf_vsync_sel(struct dpu_hw_intf *intf,
		u32 vsync_source)
{
	struct dpu_hw_blk_reg_map *c;

	if (!intf)
		return;

	c = &intf->hw;

	DPU_REG_WRITE(c, INTF_TEAR_MDP_VSYNC_SEL, (vsync_source & 0xf));
}

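/*
 * dpu_hw_intf_disable_autorefresh - safely disable hw autorefresh if enabled
 * @intf: DPU intf structure
 * @encoder_id: encoder id, used only for log messages
 * @vdisplay: vertical display size, used to detect an in-progress frame push
 */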
static void dpu_hw_intf_disable_autorefresh(struct dpu_hw_intf *intf,
		uint32_t encoder_id, u16 vdisplay)
{
	struct dpu_hw_pp_vsync_info info;
	int trial = 0;

	/* If autorefresh is already disabled, we have nothing to do */
	if (!dpu_hw_intf_get_autorefresh_config(intf, NULL))
		return;

	/*
	 * If autorefresh is enabled, disable it and make sure it is safe to
	 * proceed with the current frame commit/push. The sequence followed is:
	 * 1. Disable TE
	 * 2. Disable autorefresh config
	 * 3. Poll for frame transfer ongoing to be false
	 * 4. Enable TE back
	 */

	dpu_hw_intf_connect_external_te(intf, false);
	dpu_hw_intf_setup_autorefresh_config(intf, 0, false);

	do {
		udelay(DPU_ENC_MAX_POLL_TIMEOUT_US);
		if ((trial * DPU_ENC_MAX_POLL_TIMEOUT_US)
				> (KICKOFF_TIMEOUT_MS * USEC_PER_MSEC)) {
			DPU_ERROR("enc%d intf%d disable autorefresh failed\n",
					encoder_id, intf->idx - INTF_0);
			break;
		}

		trial++;

		dpu_hw_intf_get_vsync_info(intf, &info);
	} while (info.wr_ptr_line_count > 0 &&
			info.wr_ptr_line_count < vdisplay);

	dpu_hw_intf_connect_external_te(intf, true);

	DPU_DEBUG("enc%d intf%d disabled autorefresh\n",
			encoder_id, intf->idx - INTF_0);

}

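/*
 * dpu_hw_intf_program_intf_cmd_cfg - apply command mode INTF configuration
 * @ctx: DPU intf structure
 * @cmd_mode_cfg: currently only the data_compress flag is honoured, which sets
 *                the DCE data compression bit in INTF_CONFIG2
 */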
static void dpu_hw_intf_program_intf_cmd_cfg(struct dpu_hw_intf *ctx,
		struct dpu_hw_intf_cmd_mode_cfg *cmd_mode_cfg)
{
	u32 intf_cfg2 = DPU_REG_READ(&ctx->hw, INTF_CONFIG2);

	if (cmd_mode_cfg->data_compress)
		intf_cfg2 |= INTF_CFG2_DCE_DATA_COMPRESS;

	DPU_REG_WRITE(&ctx->hw, INTF_CONFIG2, intf_cfg2);
}

static void _setup_intf_ops(struct dpu_hw_intf_ops *ops,
		unsigned long cap, const struct dpu_mdss_version *mdss_rev)
{
	ops->setup_timing_gen = dpu_hw_intf_setup_timing_engine;
	ops->setup_prg_fetch = dpu_hw_intf_setup_prg_fetch;
	ops->get_status = dpu_hw_intf_get_status;
	ops->enable_timing = dpu_hw_intf_enable_timing_engine;
	ops->get_line_count = dpu_hw_intf_get_line_count;
	if (cap & BIT(DPU_INTF_INPUT_CTRL))
		ops->bind_pingpong_blk = dpu_hw_intf_bind_pingpong_blk;
	ops->setup_misr = dpu_hw_intf_setup_misr;
	ops->collect_misr = dpu_hw_intf_collect_misr;

	if (cap & BIT(DPU_INTF_TE)) {
		ops->enable_tearcheck = dpu_hw_intf_enable_te;
		ops->disable_tearcheck = dpu_hw_intf_disable_te;
		ops->connect_external_te = dpu_hw_intf_connect_external_te;
		ops->vsync_sel = dpu_hw_intf_vsync_sel;
		ops->disable_autorefresh = dpu_hw_intf_disable_autorefresh;
	}

	if (mdss_rev->core_major_ver >= 7)
		ops->program_intf_cmd_cfg = dpu_hw_intf_program_intf_cmd_cfg;
}

struct dpu_hw_intf *dpu_hw_intf_init(const struct dpu_intf_cfg *cfg,
		void __iomem *addr, const struct dpu_mdss_version *mdss_rev)
{
	struct dpu_hw_intf *c;

	if (cfg->type == INTF_NONE) {
		DPU_DEBUG("Skip intf %d with type NONE\n", cfg->id - INTF_0);
		return NULL;
	}

	c = kzalloc(sizeof(*c), GFP_KERNEL);
	if (!c)
		return ERR_PTR(-ENOMEM);

	c->hw.blk_addr = addr + cfg->base;
	c->hw.log_mask = DPU_DBG_MASK_INTF;

	/*
	 * Assign ops
	 */
	c->idx = cfg->id;
	c->cap = cfg;
	_setup_intf_ops(&c->ops, c->cap->features, mdss_rev);

	return c;
}

void dpu_hw_intf_destroy(struct dpu_hw_intf *intf)
{
	kfree(intf);
}