// SPDX-License-Identifier: GPL-2.0
/*
 * Copyright (C) 2020 Unisoc Inc.
 */

#include <linux/component.h>
#include <linux/delay.h>
#include <linux/module.h>
#include <linux/of.h>
#include <linux/platform_device.h>
#include <linux/regmap.h>
#include <video/mipi_display.h>

#include <drm/drm_atomic_helper.h>
#include <drm/drm_bridge.h>
#include <drm/drm_of.h>
#include <drm/drm_probe_helper.h>

#include "sprd_drm.h"
#include "sprd_dpu.h"
#include "sprd_dsi.h"

#define SOFT_RESET 0x04
#define MASK_PROTOCOL_INT 0x0C
#define MASK_INTERNAL_INT 0x14
#define DSI_MODE_CFG 0x18

#define VIRTUAL_CHANNEL_ID 0x1C
#define GEN_RX_VCID GENMASK(1, 0)
#define VIDEO_PKT_VCID GENMASK(3, 2)

#define DPI_VIDEO_FORMAT 0x20
#define DPI_VIDEO_MODE_FORMAT GENMASK(5, 0)
#define LOOSELY18_EN BIT(6)

#define VIDEO_PKT_CONFIG 0x24
#define VIDEO_PKT_SIZE GENMASK(15, 0)
#define VIDEO_LINE_CHUNK_NUM GENMASK(31, 16)

#define VIDEO_LINE_HBLK_TIME 0x28
#define VIDEO_LINE_HBP_TIME GENMASK(15, 0)
#define VIDEO_LINE_HSA_TIME GENMASK(31, 16)

#define VIDEO_LINE_TIME 0x2C

#define VIDEO_VBLK_LINES 0x30
#define VFP_LINES GENMASK(9, 0)
#define VBP_LINES GENMASK(19, 10)
#define VSA_LINES GENMASK(29, 20)

#define VIDEO_VACTIVE_LINES 0x34

#define VID_MODE_CFG 0x38
#define VID_MODE_TYPE GENMASK(1, 0)
#define LP_VSA_EN BIT(8)
#define LP_VBP_EN BIT(9)
#define LP_VFP_EN BIT(10)
#define LP_VACT_EN BIT(11)
#define LP_HBP_EN BIT(12)
#define LP_HFP_EN BIT(13)
#define FRAME_BTA_ACK_EN BIT(14)

#define TIMEOUT_CNT_CLK_CONFIG 0x40
#define HTX_TO_CONFIG 0x44
#define LRX_H_TO_CONFIG 0x48

#define TX_ESC_CLK_CONFIG 0x5C

#define CMD_MODE_CFG 0x68
#define TEAR_FX_EN BIT(0)

#define GEN_HDR 0x6C
#define GEN_DT GENMASK(5, 0)
#define GEN_VC GENMASK(7, 6)

#define GEN_PLD_DATA 0x70

#define PHY_CLK_LANE_LP_CTRL 0x74
#define PHY_CLKLANE_TX_REQ_HS BIT(0)
#define AUTO_CLKLANE_CTRL_EN BIT(1)

#define PHY_INTERFACE_CTRL 0x78
#define RF_PHY_SHUTDOWN BIT(0)
#define RF_PHY_RESET_N BIT(1)
#define RF_PHY_CLK_EN BIT(2)

#define CMD_MODE_STATUS 0x98
#define GEN_CMD_RDATA_FIFO_EMPTY BIT(1)
#define GEN_CMD_WDATA_FIFO_EMPTY BIT(3)
#define GEN_CMD_CMD_FIFO_EMPTY BIT(5)
#define GEN_CMD_RDCMD_DONE BIT(7)

#define PHY_STATUS 0x9C
#define PHY_LOCK BIT(1)

#define PHY_MIN_STOP_TIME 0xA0
#define PHY_LANE_NUM_CONFIG 0xA4

#define PHY_CLKLANE_TIME_CONFIG 0xA8
#define PHY_CLKLANE_LP_TO_HS_TIME GENMASK(15, 0)
#define PHY_CLKLANE_HS_TO_LP_TIME GENMASK(31, 16)

#define PHY_DATALANE_TIME_CONFIG 0xAC
#define PHY_DATALANE_LP_TO_HS_TIME GENMASK(15, 0)
#define PHY_DATALANE_HS_TO_LP_TIME GENMASK(31, 16)

#define MAX_READ_TIME 0xB0

#define RX_PKT_CHECK_CONFIG 0xB4
#define RX_PKT_ECC_EN BIT(0)
#define RX_PKT_CRC_EN BIT(1)

#define TA_EN 0xB8

#define EOTP_EN 0xBC
#define TX_EOTP_EN BIT(0)
#define RX_EOTP_EN BIT(1)

#define VIDEO_NULLPKT_SIZE 0xC0
#define DCS_WM_PKT_SIZE 0xC4

#define VIDEO_SIG_DELAY_CONFIG 0xD0
#define VIDEO_SIG_DELAY GENMASK(23, 0)

#define PHY_TST_CTRL0 0xF0
#define PHY_TESTCLR BIT(0)
#define PHY_TESTCLK BIT(1)

#define PHY_TST_CTRL1 0xF4
#define PHY_TESTDIN GENMASK(7, 0)
#define PHY_TESTDOUT GENMASK(15, 8)
#define PHY_TESTEN BIT(16)

#define host_to_dsi(host) \
	container_of(host, struct sprd_dsi, host)

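/*
 * Small MMIO helpers used throughout this file:
 * dsi_reg_rd() returns a masked register field shifted down to bit 0,
 * dsi_reg_wr() does a read-modify-write of a field at the given shift,
 * dsi_reg_up() updates only the bits selected by @mask.
 */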
static inline u32
dsi_reg_rd(struct dsi_context *ctx, u32 offset, u32 mask,
	   u32 shift)
{
	return (readl(ctx->base + offset) & mask) >> shift;
}

static inline void
dsi_reg_wr(struct dsi_context *ctx, u32 offset, u32 mask,
	   u32 shift, u32 val)
{
	u32 ret;

	ret = readl(ctx->base + offset);
	ret &= ~mask;
	ret |= (val << shift) & mask;
	writel(ret, ctx->base + offset);
}

static inline void
dsi_reg_up(struct dsi_context *ctx, u32 offset, u32 mask,
	   u32 val)
{
	u32 ret = readl(ctx->base + offset);

	writel((ret & ~mask) | (val & mask), ctx->base + offset);
}

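/*
 * The D-PHY configuration registers are not memory mapped directly; they are
 * reached through the PHY_TST_CTRL0/1 test bus: the register address is
 * latched with PHY_TESTEN high on a PHY_TESTCLK pulse, then the data byte is
 * clocked in with PHY_TESTEN low. The helpers below expose that sequence as
 * a regmap bus.
 */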
static int regmap_tst_io_write(void *context, u32 reg, u32 val)
{
	struct sprd_dsi *dsi = context;
	struct dsi_context *ctx = &dsi->ctx;

	if (val > 0xff || reg > 0xff)
		return -EINVAL;

	drm_dbg(dsi->drm, "reg = 0x%02x, val = 0x%02x\n", reg, val);

	dsi_reg_up(ctx, PHY_TST_CTRL1, PHY_TESTEN, PHY_TESTEN);
	dsi_reg_wr(ctx, PHY_TST_CTRL1, PHY_TESTDIN, 0, reg);
	dsi_reg_up(ctx, PHY_TST_CTRL0, PHY_TESTCLK, PHY_TESTCLK);
	dsi_reg_up(ctx, PHY_TST_CTRL0, PHY_TESTCLK, 0);
	dsi_reg_up(ctx, PHY_TST_CTRL1, PHY_TESTEN, 0);
	dsi_reg_wr(ctx, PHY_TST_CTRL1, PHY_TESTDIN, 0, val);
	dsi_reg_up(ctx, PHY_TST_CTRL0, PHY_TESTCLK, PHY_TESTCLK);
	dsi_reg_up(ctx, PHY_TST_CTRL0, PHY_TESTCLK, 0);

	return 0;
}

static int regmap_tst_io_read(void *context, u32 reg, u32 *val)
{
	struct sprd_dsi *dsi = context;
	struct dsi_context *ctx = &dsi->ctx;
	int ret;

	if (reg > 0xff)
		return -EINVAL;

	dsi_reg_up(ctx, PHY_TST_CTRL1, PHY_TESTEN, PHY_TESTEN);
	dsi_reg_wr(ctx, PHY_TST_CTRL1, PHY_TESTDIN, 0, reg);
	dsi_reg_up(ctx, PHY_TST_CTRL0, PHY_TESTCLK, PHY_TESTCLK);
	dsi_reg_up(ctx, PHY_TST_CTRL0, PHY_TESTCLK, 0);
	dsi_reg_up(ctx, PHY_TST_CTRL1, PHY_TESTEN, 0);

	udelay(1);

	ret = dsi_reg_rd(ctx, PHY_TST_CTRL1, PHY_TESTDOUT, 8);
	if (ret < 0)
		return ret;

	*val = ret;

	drm_dbg(dsi->drm, "reg = 0x%02x, val = 0x%02x\n", reg, *val);
	return 0;
}

static struct regmap_bus regmap_tst_io = {
	.reg_write = regmap_tst_io_write,
	.reg_read = regmap_tst_io_read,
};

static const struct regmap_config byte_config = {
	.reg_bits = 8,
	.val_bits = 8,
};

static int dphy_wait_pll_locked(struct dsi_context *ctx)
{
	struct sprd_dsi *dsi = container_of(ctx, struct sprd_dsi, ctx);
	int i;

	for (i = 0; i < 50000; i++) {
		if (dsi_reg_rd(ctx, PHY_STATUS, PHY_LOCK, 1))
			return 0;
		udelay(3);
	}

	drm_err(dsi->drm, "dphy pll can not be locked\n");
	return -ETIMEDOUT;
}

static int dsi_wait_tx_payload_fifo_empty(struct dsi_context *ctx)
{
	int i;

	for (i = 0; i < 5000; i++) {
		if (dsi_reg_rd(ctx, CMD_MODE_STATUS, GEN_CMD_WDATA_FIFO_EMPTY, 3))
			return 0;
		udelay(1);
	}

	return -ETIMEDOUT;
}

static int dsi_wait_tx_cmd_fifo_empty(struct dsi_context *ctx)
{
	int i;

	for (i = 0; i < 5000; i++) {
		if (dsi_reg_rd(ctx, CMD_MODE_STATUS, GEN_CMD_CMD_FIFO_EMPTY, 5))
			return 0;
		udelay(1);
	}

	return -ETIMEDOUT;
}

static int dsi_wait_rd_resp_completed(struct dsi_context *ctx)
{
	int i;

	for (i = 0; i < 10000; i++) {
		if (dsi_reg_rd(ctx, CMD_MODE_STATUS, GEN_CMD_RDCMD_DONE, 7))
			return 0;
		udelay(10);
	}

	return -ETIMEDOUT;
}

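/*
 * Bytes per pixel scaled by 100, so that fractional packings such as
 * 18 bpp (2.25 bytes/pixel) can be handled with integer arithmetic.
 */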
static u16 calc_bytes_per_pixel_x100(int coding)
{
	u16 bpp_x100;

	switch (coding) {
	case COLOR_CODE_16BIT_CONFIG1:
	case COLOR_CODE_16BIT_CONFIG2:
	case COLOR_CODE_16BIT_CONFIG3:
		bpp_x100 = 200;
		break;
	case COLOR_CODE_18BIT_CONFIG1:
	case COLOR_CODE_18BIT_CONFIG2:
		bpp_x100 = 225;
		break;
	case COLOR_CODE_24BIT:
		bpp_x100 = 300;
		break;
	case COLOR_CODE_COMPRESSTION:
		bpp_x100 = 100;
		break;
	case COLOR_CODE_20BIT_YCC422_LOOSELY:
		bpp_x100 = 250;
		break;
	case COLOR_CODE_24BIT_YCC422:
		bpp_x100 = 300;
		break;
	case COLOR_CODE_16BIT_YCC422:
		bpp_x100 = 200;
		break;
	case COLOR_CODE_30BIT:
		bpp_x100 = 375;
		break;
	case COLOR_CODE_36BIT:
		bpp_x100 = 450;
		break;
	case COLOR_CODE_12BIT_YCC420:
		bpp_x100 = 150;
		break;
	default:
		DRM_ERROR("invalid color coding\n");
		bpp_x100 = 0;
		break;
	}

	return bpp_x100;
}

static u8 calc_video_size_step(int coding)
{
	switch (coding) {
	case COLOR_CODE_16BIT_CONFIG1:
	case COLOR_CODE_16BIT_CONFIG2:
	case COLOR_CODE_16BIT_CONFIG3:
	case COLOR_CODE_18BIT_CONFIG1:
	case COLOR_CODE_18BIT_CONFIG2:
	case COLOR_CODE_24BIT:
	case COLOR_CODE_COMPRESSTION:
		return 1;
	case COLOR_CODE_20BIT_YCC422_LOOSELY:
	case COLOR_CODE_24BIT_YCC422:
	case COLOR_CODE_16BIT_YCC422:
	case COLOR_CODE_30BIT:
	case COLOR_CODE_36BIT:
	case COLOR_CODE_12BIT_YCC420:
		return 2;
	default:
		DRM_ERROR("invalid color coding\n");
		return 0;
	}
}

static u16 round_video_size(int coding, u16 video_size)
{
	switch (coding) {
	case COLOR_CODE_16BIT_YCC422:
	case COLOR_CODE_24BIT_YCC422:
	case COLOR_CODE_20BIT_YCC422_LOOSELY:
	case COLOR_CODE_12BIT_YCC420:
		/* round up active H pixels to a multiple of 2 */
		if ((video_size % 2) != 0)
			video_size += 1;
		break;
	default:
		break;
	}

	return video_size;
}

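/*
 * Vendor-specific pixel format value used to request a DSC-compressed
 * stream; it does not correspond to any value of the standard
 * enum mipi_dsi_pixel_format.
 */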
#define SPRD_MIPI_DSI_FMT_DSC 0xff
static u32 fmt_to_coding(u32 fmt)
{
	switch (fmt) {
	case MIPI_DSI_FMT_RGB565:
		return COLOR_CODE_16BIT_CONFIG1;
	case MIPI_DSI_FMT_RGB666:
	case MIPI_DSI_FMT_RGB666_PACKED:
		return COLOR_CODE_18BIT_CONFIG1;
	case MIPI_DSI_FMT_RGB888:
		return COLOR_CODE_24BIT;
	case SPRD_MIPI_DSI_FMT_DSC:
		return COLOR_CODE_COMPRESSTION;
	default:
		DRM_ERROR("Unsupported format (%d)\n", fmt);
		return COLOR_CODE_24BIT;
	}
}

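/*
 * Convert a duration in nanoseconds into byte-clock cycles, rounding up.
 * The 1000000 divisor assumes byte_clk is expressed in kHz, so that
 * ns * kHz / 1000000 yields whole cycles.
 */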
#define ns_to_cycle(ns, byte_clk) \
	DIV_ROUND_UP((ns) * (byte_clk), 1000000)

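/*
 * Basic host controller bring-up: mask all interrupts, disable EoTp
 * transmission and reception, enable RX ECC and CRC checking, program the
 * escape clock divider and the LP<->HS transition times in byte-clock
 * cycles, then release the soft reset.
 */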
static void sprd_dsi_init(struct dsi_context *ctx)
{
	struct sprd_dsi *dsi = container_of(ctx, struct sprd_dsi, ctx);
	u32 byte_clk = dsi->slave->hs_rate / 8;
	u16 data_hs2lp, data_lp2hs, clk_hs2lp, clk_lp2hs;
	u16 max_rd_time;
	int div;

	writel(0, ctx->base + SOFT_RESET);
	writel(0xffffffff, ctx->base + MASK_PROTOCOL_INT);
	writel(0xffffffff, ctx->base + MASK_INTERNAL_INT);
	writel(1, ctx->base + DSI_MODE_CFG);
	dsi_reg_up(ctx, EOTP_EN, RX_EOTP_EN, 0);
	dsi_reg_up(ctx, EOTP_EN, TX_EOTP_EN, 0);
	dsi_reg_up(ctx, RX_PKT_CHECK_CONFIG, RX_PKT_ECC_EN, RX_PKT_ECC_EN);
	dsi_reg_up(ctx, RX_PKT_CHECK_CONFIG, RX_PKT_CRC_EN, RX_PKT_CRC_EN);
	writel(1, ctx->base + TA_EN);
	dsi_reg_up(ctx, VIRTUAL_CHANNEL_ID, VIDEO_PKT_VCID, 0);
	dsi_reg_up(ctx, VIRTUAL_CHANNEL_ID, GEN_RX_VCID, 0);

	div = DIV_ROUND_UP(byte_clk, dsi->slave->lp_rate);
	writel(div, ctx->base + TX_ESC_CLK_CONFIG);

	max_rd_time = ns_to_cycle(ctx->max_rd_time, byte_clk);
	writel(max_rd_time, ctx->base + MAX_READ_TIME);

	data_hs2lp = ns_to_cycle(ctx->data_hs2lp, byte_clk);
	data_lp2hs = ns_to_cycle(ctx->data_lp2hs, byte_clk);
	clk_hs2lp = ns_to_cycle(ctx->clk_hs2lp, byte_clk);
	clk_lp2hs = ns_to_cycle(ctx->clk_lp2hs, byte_clk);
	dsi_reg_wr(ctx, PHY_DATALANE_TIME_CONFIG,
		   PHY_DATALANE_HS_TO_LP_TIME, 16, data_hs2lp);
	dsi_reg_wr(ctx, PHY_DATALANE_TIME_CONFIG,
		   PHY_DATALANE_LP_TO_HS_TIME, 0, data_lp2hs);
	dsi_reg_wr(ctx, PHY_CLKLANE_TIME_CONFIG,
		   PHY_CLKLANE_HS_TO_LP_TIME, 16, clk_hs2lp);
	dsi_reg_wr(ctx, PHY_CLKLANE_TIME_CONFIG,
		   PHY_CLKLANE_LP_TO_HS_TIME, 0, clk_lp2hs);

	writel(1, ctx->base + SOFT_RESET);
}

/*
 * Shut down the host controller and mask its interrupts.
 */
static void sprd_dsi_fini(struct dsi_context *ctx)
{
	writel(0xffffffff, ctx->base + MASK_PROTOCOL_INT);
	writel(0xffffffff, ctx->base + MASK_INTERNAL_INT);
	writel(0, ctx->base + SOFT_RESET);
}

/*
 * Configure DPI (video mode) transmission. When not in burst mode, compute
 * the video and null packet sizes as needed, and configure the timers that
 * let the data lanes and/or clock lane return to LP whenever the bandwidth
 * is not filled with pixel data.
 */
static int sprd_dsi_dpi_video(struct dsi_context *ctx)
{
	struct sprd_dsi *dsi = container_of(ctx, struct sprd_dsi, ctx);
	struct videomode *vm = &ctx->vm;
	u32 byte_clk = dsi->slave->hs_rate / 8;
	u16 bpp_x100;
	u16 video_size;
	u32 ratio_x1000;
	u16 null_pkt_size = 0;
	u8 video_size_step;
	u32 hs_to;
	u32 total_bytes;
	u32 bytes_per_chunk;
	u32 chunks = 0;
	u32 bytes_left = 0;
	u32 chunk_overhead;
	const u8 pkt_header = 6;
	u8 coding;
	int div;
	u16 hline;
	u16 byte_cycle;

	coding = fmt_to_coding(dsi->slave->format);
	video_size = round_video_size(coding, vm->hactive);
	bpp_x100 = calc_bytes_per_pixel_x100(coding);
	video_size_step = calc_video_size_step(coding);
	ratio_x1000 = byte_clk * 1000 / (vm->pixelclock / 1000);
	hline = vm->hactive + vm->hsync_len + vm->hfront_porch +
		vm->hback_porch;

	writel(0, ctx->base + SOFT_RESET);
	dsi_reg_wr(ctx, VID_MODE_CFG, FRAME_BTA_ACK_EN, 15, ctx->frame_ack_en);
	dsi_reg_wr(ctx, DPI_VIDEO_FORMAT, DPI_VIDEO_MODE_FORMAT, 0, coding);
	dsi_reg_wr(ctx, VID_MODE_CFG, VID_MODE_TYPE, 0, ctx->burst_mode);
	byte_cycle = 95 * hline * ratio_x1000 / 100000;
	dsi_reg_wr(ctx, VIDEO_SIG_DELAY_CONFIG, VIDEO_SIG_DELAY, 0, byte_cycle);
	byte_cycle = hline * ratio_x1000 / 1000;
	writel(byte_cycle, ctx->base + VIDEO_LINE_TIME);
	byte_cycle = vm->hsync_len * ratio_x1000 / 1000;
	dsi_reg_wr(ctx, VIDEO_LINE_HBLK_TIME, VIDEO_LINE_HSA_TIME, 16, byte_cycle);
	byte_cycle = vm->hback_porch * ratio_x1000 / 1000;
	dsi_reg_wr(ctx, VIDEO_LINE_HBLK_TIME, VIDEO_LINE_HBP_TIME, 0, byte_cycle);
	writel(vm->vactive, ctx->base + VIDEO_VACTIVE_LINES);
	dsi_reg_wr(ctx, VIDEO_VBLK_LINES, VFP_LINES, 0, vm->vfront_porch);
	dsi_reg_wr(ctx, VIDEO_VBLK_LINES, VBP_LINES, 10, vm->vback_porch);
	dsi_reg_wr(ctx, VIDEO_VBLK_LINES, VSA_LINES, 20, vm->vsync_len);
	dsi_reg_up(ctx, VID_MODE_CFG, LP_HBP_EN | LP_HFP_EN | LP_VACT_EN |
		   LP_VFP_EN | LP_VBP_EN | LP_VSA_EN, LP_HBP_EN | LP_HFP_EN |
		   LP_VACT_EN | LP_VFP_EN | LP_VBP_EN | LP_VSA_EN);

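	/*
	 * Derive the HS TX / LP RX timeout and split it into a clock divider
	 * and counter pair: pick the largest divisor not exceeding 0x80 that
	 * divides hs_to evenly.
	 */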
	hs_to = (hline * vm->vactive) + (2 * bpp_x100) / 100;
	for (div = 0x80; (div < hs_to) && (div > 2); div--) {
		if ((hs_to % div) == 0) {
			writel(div, ctx->base + TIMEOUT_CNT_CLK_CONFIG);
			writel(hs_to / div, ctx->base + LRX_H_TO_CONFIG);
			writel(hs_to / div, ctx->base + HTX_TO_CONFIG);
			break;
		}
	}

	if (ctx->burst_mode == VIDEO_BURST_WITH_SYNC_PULSES) {
		dsi_reg_wr(ctx, VIDEO_PKT_CONFIG, VIDEO_PKT_SIZE, 0, video_size);
		writel(0, ctx->base + VIDEO_NULLPKT_SIZE);
		dsi_reg_up(ctx, VIDEO_PKT_CONFIG, VIDEO_LINE_CHUNK_NUM, 0);
	} else {
		/* non burst transmission */
		null_pkt_size = 0;

		/* bytes to be sent - first as one chunk */
		bytes_per_chunk = vm->hactive * bpp_x100 / 100 + pkt_header;

		/* hline total bytes from the DPI interface */
		total_bytes = (vm->hactive + vm->hfront_porch) *
			      ratio_x1000 / dsi->slave->lanes / 1000;

		/* check if the pixels actually fit on the DSI link */
		if (total_bytes < bytes_per_chunk) {
			drm_err(dsi->drm, "current resolution can not be set\n");
			return -EINVAL;
		}

		chunk_overhead = total_bytes - bytes_per_chunk;

		/* overhead higher than 1 -> enable multi packets */
		if (chunk_overhead > 1) {
			/* multi packets */
			for (video_size = video_size_step;
			     video_size < vm->hactive;
			     video_size += video_size_step) {
				if (vm->hactive * 1000 / video_size % 1000)
					continue;

				chunks = vm->hactive / video_size;
				bytes_per_chunk = bpp_x100 * video_size / 100
						  + pkt_header;
				if (total_bytes >= (bytes_per_chunk * chunks)) {
					bytes_left = total_bytes -
						     bytes_per_chunk * chunks;
					break;
				}
			}

			/* prevent overflow (unsigned - unsigned) */
			if (bytes_left > (pkt_header * chunks)) {
				null_pkt_size = (bytes_left -
						 pkt_header * chunks) / chunks;
				/* avoid register overflow */
				if (null_pkt_size > 1023)
					null_pkt_size = 1023;
			}

		} else {
			/* single packet */
			chunks = 1;

			/* round up to a multiple of the video size step */
			for (video_size = vm->hactive;
			     (video_size % video_size_step) != 0;
			     video_size++)
				;
		}

		dsi_reg_wr(ctx, VIDEO_PKT_CONFIG, VIDEO_PKT_SIZE, 0, video_size);
		writel(null_pkt_size, ctx->base + VIDEO_NULLPKT_SIZE);
		dsi_reg_wr(ctx, VIDEO_PKT_CONFIG, VIDEO_LINE_CHUNK_NUM, 16, chunks);
	}

	writel(ctx->int0_mask, ctx->base + MASK_PROTOCOL_INT);
	writel(ctx->int1_mask, ctx->base + MASK_INTERNAL_INT);
	writel(1, ctx->base + SOFT_RESET);

	return 0;
}

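/*
 * Configure eDPI (command mode) transmission: program the DCS write-memory
 * packet size to the active line width, limited by what the TX FIFO can hold
 * for the current pixel format.
 */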
static void sprd_dsi_edpi_video(struct dsi_context *ctx)
{
	struct sprd_dsi *dsi = container_of(ctx, struct sprd_dsi, ctx);
	const u32 fifo_depth = 1096;
	const u32 word_length = 4;
	u32 hactive = ctx->vm.hactive;
	u32 bpp_x100;
	u32 max_fifo_len;
	u8 coding;

	coding = fmt_to_coding(dsi->slave->format);
	bpp_x100 = calc_bytes_per_pixel_x100(coding);
	max_fifo_len = word_length * fifo_depth * 100 / bpp_x100;

	writel(0, ctx->base + SOFT_RESET);
	dsi_reg_wr(ctx, DPI_VIDEO_FORMAT, DPI_VIDEO_MODE_FORMAT, 0, coding);
	dsi_reg_wr(ctx, CMD_MODE_CFG, TEAR_FX_EN, 0, ctx->te_ack_en);

	if (max_fifo_len > hactive)
		writel(hactive, ctx->base + DCS_WM_PKT_SIZE);
	else
		writel(max_fifo_len, ctx->base + DCS_WM_PKT_SIZE);

	writel(ctx->int0_mask, ctx->base + MASK_PROTOCOL_INT);
	writel(ctx->int1_mask, ctx->base + MASK_INTERNAL_INT);
	writel(1, ctx->base + SOFT_RESET);
}

/*
 * Send a packet on the generic interface. This function busy-waits for the
 * command and payload FIFOs to drain; the wait is bounded to roughly
 * (param_length / 4) x DSIH_FIFO_ACTIVE_WAIT register accesses.
 *
 * Null and blanking packets cannot be sent through this path because of a
 * controller restriction.
 */
static int sprd_dsi_wr_pkt(struct dsi_context *ctx, u8 vc, u8 type,
			   const u8 *param, u16 len)
{
	struct sprd_dsi *dsi = container_of(ctx, struct sprd_dsi, ctx);
	u8 wc_lsbyte, wc_msbyte;
	u32 payload;
	int i, j, ret;

	if (vc > 3)
		return -EINVAL;

	/* 1st: for long packet, must config payload first */
	ret = dsi_wait_tx_payload_fifo_empty(ctx);
	if (ret) {
		drm_err(dsi->drm, "tx payload fifo is not empty\n");
		return ret;
	}

	if (len > 2) {
		for (i = 0, j = 0; i < len; i += j) {
			payload = 0;
			for (j = 0; (j < 4) && ((j + i) < (len)); j++)
				payload |= param[i + j] << (j * 8);

			writel(payload, ctx->base + GEN_PLD_DATA);
		}
		wc_lsbyte = len & 0xff;
		wc_msbyte = len >> 8;
	} else {
		wc_lsbyte = (len > 0) ? param[0] : 0;
		wc_msbyte = (len > 1) ? param[1] : 0;
	}

	/* 2nd: then set packet header */
	ret = dsi_wait_tx_cmd_fifo_empty(ctx);
	if (ret) {
		drm_err(dsi->drm, "tx cmd fifo is not empty\n");
		return ret;
	}

	writel(type | (vc << 6) | (wc_lsbyte << 8) | (wc_msbyte << 16),
	       ctx->base + GEN_HDR);

	return 0;
}

/*
 * Send a READ packet to the peripheral over the generic interface; this
 * forces command mode and stops video mode because it relies on BTA.
 *
 * The function busy-waits for the command buffer to drain and for the read
 * response to arrive, bounded to roughly 2 x DSIH_FIFO_ACTIVE_WAIT.
 *
 * Note that this function enables BTA.
 */
static int sprd_dsi_rd_pkt(struct dsi_context *ctx, u8 vc, u8 type,
			   u8 msb_byte, u8 lsb_byte,
			   u8 *buffer, u8 bytes_to_read)
{
	struct sprd_dsi *dsi = container_of(ctx, struct sprd_dsi, ctx);
	int i, ret;
	int count = 0;
	u32 temp;

	if (vc > 3)
		return -EINVAL;

	/* 1st: send read command to peripheral */
	ret = dsi_reg_rd(ctx, CMD_MODE_STATUS, GEN_CMD_CMD_FIFO_EMPTY, 5);
	if (!ret)
		return -EIO;

	writel(type | (vc << 6) | (lsb_byte << 8) | (msb_byte << 16),
	       ctx->base + GEN_HDR);

	/* 2nd: wait for the peripheral response to complete */
	ret = dsi_wait_rd_resp_completed(ctx);
	if (ret) {
		drm_err(dsi->drm, "read response timed out\n");
		return ret;
	}

	/* 3rd: get data from rx payload fifo */
	ret = dsi_reg_rd(ctx, CMD_MODE_STATUS, GEN_CMD_RDATA_FIFO_EMPTY, 1);
	if (ret) {
		drm_err(dsi->drm, "rx payload fifo empty\n");
		return -EIO;
	}

	for (i = 0; i < 100; i++) {
		temp = readl(ctx->base + GEN_PLD_DATA);

		if (count < bytes_to_read)
			buffer[count++] = temp & 0xff;
		if (count < bytes_to_read)
			buffer[count++] = (temp >> 8) & 0xff;
		if (count < bytes_to_read)
			buffer[count++] = (temp >> 16) & 0xff;
		if (count < bytes_to_read)
			buffer[count++] = (temp >> 24) & 0xff;

		ret = dsi_reg_rd(ctx, CMD_MODE_STATUS, GEN_CMD_RDATA_FIFO_EMPTY, 1);
		if (ret)
			return count;
	}

	return 0;
}

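/* DSI_MODE_CFG: writing 1 selects command mode, 0 selects video mode. */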
static void sprd_dsi_set_work_mode(struct dsi_context *ctx, u8 mode)
{
	if (mode == DSI_MODE_CMD)
		writel(1, ctx->base + DSI_MODE_CFG);
	else
		writel(0, ctx->base + DSI_MODE_CFG);
}

static void sprd_dsi_state_reset(struct dsi_context *ctx)
{
	writel(0, ctx->base + SOFT_RESET);
	udelay(100);
	writel(1, ctx->base + SOFT_RESET);
}

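/*
 * D-PHY bring-up: hold the PHY in reset/shutdown, pulse TESTCLR to clear the
 * test interface, program the PLL and timing parameters, release shutdown
 * and reset, enable the clock lane, set the lane count, and finally wait for
 * the PLL to lock.
 */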
static int sprd_dphy_init(struct dsi_context *ctx)
{
	struct sprd_dsi *dsi = container_of(ctx, struct sprd_dsi, ctx);
	int ret;

	dsi_reg_up(ctx, PHY_INTERFACE_CTRL, RF_PHY_RESET_N, 0);
	dsi_reg_up(ctx, PHY_INTERFACE_CTRL, RF_PHY_SHUTDOWN, 0);
	dsi_reg_up(ctx, PHY_INTERFACE_CTRL, RF_PHY_CLK_EN, 0);

	dsi_reg_up(ctx, PHY_TST_CTRL0, PHY_TESTCLR, 0);
	dsi_reg_up(ctx, PHY_TST_CTRL0, PHY_TESTCLR, PHY_TESTCLR);
	dsi_reg_up(ctx, PHY_TST_CTRL0, PHY_TESTCLR, 0);

	dphy_pll_config(ctx);
	dphy_timing_config(ctx);

	dsi_reg_up(ctx, PHY_INTERFACE_CTRL, RF_PHY_SHUTDOWN, RF_PHY_SHUTDOWN);
	dsi_reg_up(ctx, PHY_INTERFACE_CTRL, RF_PHY_RESET_N, RF_PHY_RESET_N);
	writel(0x1C, ctx->base + PHY_MIN_STOP_TIME);
	dsi_reg_up(ctx, PHY_INTERFACE_CTRL, RF_PHY_CLK_EN, RF_PHY_CLK_EN);
	writel(dsi->slave->lanes - 1, ctx->base + PHY_LANE_NUM_CONFIG);

	ret = dphy_wait_pll_locked(ctx);
	if (ret) {
		drm_err(dsi->drm, "dphy init failed\n");
		return ret;
	}

	return 0;
}

static void sprd_dphy_fini(struct dsi_context *ctx)
{
	dsi_reg_up(ctx, PHY_INTERFACE_CTRL, RF_PHY_RESET_N, 0);
	dsi_reg_up(ctx, PHY_INTERFACE_CTRL, RF_PHY_SHUTDOWN, 0);
	dsi_reg_up(ctx, PHY_INTERFACE_CTRL, RF_PHY_RESET_N, RF_PHY_RESET_N);
}

static void sprd_dsi_encoder_mode_set(struct drm_encoder *encoder,
				      struct drm_display_mode *mode,
				      struct drm_display_mode *adj_mode)
{
	struct sprd_dsi *dsi = encoder_to_dsi(encoder);

	drm_display_mode_to_videomode(adj_mode, &dsi->ctx.vm);
}

static void sprd_dsi_encoder_enable(struct drm_encoder *encoder)
{
	struct sprd_dsi *dsi = encoder_to_dsi(encoder);
	struct sprd_dpu *dpu = to_sprd_crtc(encoder->crtc);
	struct dsi_context *ctx = &dsi->ctx;

	if (ctx->enabled) {
		drm_warn(dsi->drm, "dsi is initialized\n");
		return;
	}

	sprd_dsi_init(ctx);
	if (ctx->work_mode == DSI_MODE_VIDEO)
		sprd_dsi_dpi_video(ctx);
	else
		sprd_dsi_edpi_video(ctx);

	sprd_dphy_init(ctx);

	sprd_dsi_set_work_mode(ctx, ctx->work_mode);
	sprd_dsi_state_reset(ctx);

	if (dsi->slave->mode_flags & MIPI_DSI_CLOCK_NON_CONTINUOUS) {
		dsi_reg_up(ctx, PHY_CLK_LANE_LP_CTRL, AUTO_CLKLANE_CTRL_EN,
			   AUTO_CLKLANE_CTRL_EN);
	} else {
		dsi_reg_up(ctx, PHY_CLK_LANE_LP_CTRL, RF_PHY_CLK_EN, RF_PHY_CLK_EN);
		dsi_reg_up(ctx, PHY_CLK_LANE_LP_CTRL, PHY_CLKLANE_TX_REQ_HS,
			   PHY_CLKLANE_TX_REQ_HS);
		dphy_wait_pll_locked(ctx);
	}

	sprd_dpu_run(dpu);

	ctx->enabled = true;
}

static void sprd_dsi_encoder_disable(struct drm_encoder *encoder)
{
	struct sprd_dsi *dsi = encoder_to_dsi(encoder);
	struct sprd_dpu *dpu = to_sprd_crtc(encoder->crtc);
	struct dsi_context *ctx = &dsi->ctx;

	if (!ctx->enabled) {
		drm_warn(dsi->drm, "dsi isn't initialized\n");
		return;
	}

	sprd_dpu_stop(dpu);
	sprd_dphy_fini(ctx);
	sprd_dsi_fini(ctx);

	ctx->enabled = false;
}

static const struct drm_encoder_helper_funcs sprd_encoder_helper_funcs = {
	.mode_set = sprd_dsi_encoder_mode_set,
	.enable = sprd_dsi_encoder_enable,
	.disable = sprd_dsi_encoder_disable
};

static const struct drm_encoder_funcs sprd_encoder_funcs = {
	.destroy = drm_encoder_cleanup,
};

static int sprd_dsi_encoder_init(struct sprd_dsi *dsi,
				 struct device *dev)
{
	struct drm_encoder *encoder = &dsi->encoder;
	u32 crtc_mask;
	int ret;

	crtc_mask = drm_of_find_possible_crtcs(dsi->drm, dev->of_node);
	if (!crtc_mask) {
		drm_err(dsi->drm, "failed to find crtc mask\n");
		return -EINVAL;
	}

	drm_dbg(dsi->drm, "find possible crtcs: 0x%08x\n", crtc_mask);

	encoder->possible_crtcs = crtc_mask;
	ret = drm_encoder_init(dsi->drm, encoder, &sprd_encoder_funcs,
			       DRM_MODE_ENCODER_DSI, NULL);
	if (ret) {
		drm_err(dsi->drm, "failed to init dsi encoder\n");
		return ret;
	}

	drm_encoder_helper_add(encoder, &sprd_encoder_helper_funcs);

	return 0;
}

static int sprd_dsi_bridge_init(struct sprd_dsi *dsi,
				struct device *dev)
{
	int ret;

	dsi->panel_bridge = devm_drm_of_get_bridge(dev, dev->of_node, 1, 0);
	if (IS_ERR(dsi->panel_bridge))
		return PTR_ERR(dsi->panel_bridge);

	ret = drm_bridge_attach(&dsi->encoder, dsi->panel_bridge, NULL, 0);
	if (ret)
		return ret;

	return 0;
}

static int sprd_dsi_context_init(struct sprd_dsi *dsi,
				 struct device *dev)
{
	struct platform_device *pdev = to_platform_device(dev);
	struct dsi_context *ctx = &dsi->ctx;
	struct resource *res;

	res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
	if (!res) {
		dev_err(dev, "failed to get I/O resource\n");
		return -EINVAL;
	}

	ctx->base = devm_ioremap(dev, res->start, resource_size(res));
	if (!ctx->base) {
		drm_err(dsi->drm, "failed to map dsi host registers\n");
		return -ENXIO;
	}

	ctx->regmap = devm_regmap_init(dev, &regmap_tst_io, dsi, &byte_config);
	if (IS_ERR(ctx->regmap)) {
		drm_err(dsi->drm, "dphy regmap init failed\n");
		return PTR_ERR(ctx->regmap);
	}

	ctx->data_hs2lp = 120;
	ctx->data_lp2hs = 500;
	ctx->clk_hs2lp = 4;
	ctx->clk_lp2hs = 15;
	ctx->max_rd_time = 6000;
	ctx->int0_mask = 0xffffffff;
	ctx->int1_mask = 0xffffffff;
	ctx->enabled = true;

	return 0;
}

static int sprd_dsi_bind(struct device *dev, struct device *master, void *data)
{
	struct drm_device *drm = data;
	struct sprd_dsi *dsi = dev_get_drvdata(dev);
	int ret;

	dsi->drm = drm;

	ret = sprd_dsi_encoder_init(dsi, dev);
	if (ret)
		return ret;

	ret = sprd_dsi_bridge_init(dsi, dev);
	if (ret)
		return ret;

	ret = sprd_dsi_context_init(dsi, dev);
	if (ret)
		return ret;

	return 0;
}

static void sprd_dsi_unbind(struct device *dev,
			    struct device *master, void *data)
{
	struct sprd_dsi *dsi = dev_get_drvdata(dev);

	drm_of_panel_bridge_remove(dev->of_node, 1, 0);

	drm_encoder_cleanup(&dsi->encoder);
}

static const struct component_ops dsi_component_ops = {
	.bind = sprd_dsi_bind,
	.unbind = sprd_dsi_unbind,
};

static int sprd_dsi_host_attach(struct mipi_dsi_host *host,
				struct mipi_dsi_device *slave)
{
	struct sprd_dsi *dsi = host_to_dsi(host);
	struct dsi_context *ctx = &dsi->ctx;

	dsi->slave = slave;

	if (slave->mode_flags & MIPI_DSI_MODE_VIDEO)
		ctx->work_mode = DSI_MODE_VIDEO;
	else
		ctx->work_mode = DSI_MODE_CMD;

	if (slave->mode_flags & MIPI_DSI_MODE_VIDEO_BURST)
		ctx->burst_mode = VIDEO_BURST_WITH_SYNC_PULSES;
	else if (slave->mode_flags & MIPI_DSI_MODE_VIDEO_SYNC_PULSE)
		ctx->burst_mode = VIDEO_NON_BURST_WITH_SYNC_PULSES;
	else
		ctx->burst_mode = VIDEO_NON_BURST_WITH_SYNC_EVENTS;

	return component_add(host->dev, &dsi_component_ops);
}

static int sprd_dsi_host_detach(struct mipi_dsi_host *host,
				struct mipi_dsi_device *slave)
{
	component_del(host->dev, &dsi_component_ops);

	return 0;
}

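/*
 * Generic transfer hook: messages with an rx buffer are issued as a read
 * (up to two parameter bytes are taken from the tx buffer); anything else
 * with a tx buffer is sent as a write packet.
 */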
static ssize_t sprd_dsi_host_transfer(struct mipi_dsi_host *host,
				      const struct mipi_dsi_msg *msg)
{
	struct sprd_dsi *dsi = host_to_dsi(host);
	const u8 *tx_buf = msg->tx_buf;

	if (msg->rx_buf && msg->rx_len) {
		u8 lsb = (msg->tx_len > 0) ? tx_buf[0] : 0;
		u8 msb = (msg->tx_len > 1) ? tx_buf[1] : 0;

		return sprd_dsi_rd_pkt(&dsi->ctx, msg->channel, msg->type,
				       msb, lsb, msg->rx_buf, msg->rx_len);
	}

	if (msg->tx_buf && msg->tx_len)
		return sprd_dsi_wr_pkt(&dsi->ctx, msg->channel, msg->type,
				       tx_buf, msg->tx_len);

	return 0;
}

static const struct mipi_dsi_host_ops sprd_dsi_host_ops = {
	.attach = sprd_dsi_host_attach,
	.detach = sprd_dsi_host_detach,
	.transfer = sprd_dsi_host_transfer,
};

static const struct of_device_id dsi_match_table[] = {
	{ .compatible = "sprd,sharkl3-dsi-host" },
	{ /* sentinel */ },
};

static int sprd_dsi_probe(struct platform_device *pdev)
{
	struct device *dev = &pdev->dev;
	struct sprd_dsi *dsi;

	dsi = devm_kzalloc(dev, sizeof(*dsi), GFP_KERNEL);
	if (!dsi)
		return -ENOMEM;

	dev_set_drvdata(dev, dsi);

	dsi->host.ops = &sprd_dsi_host_ops;
	dsi->host.dev = dev;

	return mipi_dsi_host_register(&dsi->host);
}

static void sprd_dsi_remove(struct platform_device *pdev)
{
	struct sprd_dsi *dsi = dev_get_drvdata(&pdev->dev);

	mipi_dsi_host_unregister(&dsi->host);
}

struct platform_driver sprd_dsi_driver = {
	.probe = sprd_dsi_probe,
	.remove_new = sprd_dsi_remove,
	.driver = {
		.name = "sprd-dsi-drv",
		.of_match_table = dsi_match_table,
	},
};

MODULE_AUTHOR("Leon He <leon.he@unisoc.com>");
MODULE_AUTHOR("Kevin Tang <kevin.tang@unisoc.com>");
MODULE_DESCRIPTION("Unisoc MIPI DSI HOST Controller Driver");
MODULE_LICENSE("GPL v2");