// SPDX-License-Identifier: GPL-2.0
/*
 * ZynqMP DisplayPort Subsystem - KMS API
 *
 * Copyright (C) 2017 - 2021 Xilinx, Inc.
 *
 * Authors:
 * - Hyun Woo Kwon <hyun.kwon@xilinx.com>
 * - Laurent Pinchart <laurent.pinchart@ideasonboard.com>
 */

#include <drm/drm_atomic.h>
#include <drm/drm_atomic_helper.h>
#include <drm/drm_blend.h>
#include <drm/drm_bridge.h>
#include <drm/drm_bridge_connector.h>
#include <drm/drm_connector.h>
#include <drm/drm_crtc.h>
#include <drm/drm_device.h>
#include <drm/drm_drv.h>
#include <drm/drm_encoder.h>
#include <drm/drm_fbdev_dma.h>
#include <drm/drm_fourcc.h>
#include <drm/drm_framebuffer.h>
#include <drm/drm_gem_dma_helper.h>
#include <drm/drm_gem_framebuffer_helper.h>
#include <drm/drm_managed.h>
#include <drm/drm_mode_config.h>
#include <drm/drm_plane.h>
#include <drm/drm_probe_helper.h>
#include <drm/drm_simple_kms_helper.h>
#include <drm/drm_vblank.h>

#include <linux/clk.h>
#include <linux/delay.h>
#include <linux/pm_runtime.h>
#include <linux/spinlock.h>

#include "zynqmp_disp.h"
#include "zynqmp_dp.h"
#include "zynqmp_dpsub.h"
#include "zynqmp_kms.h"

static inline struct zynqmp_dpsub *to_zynqmp_dpsub(struct drm_device *drm)
{
	return container_of(drm, struct zynqmp_dpsub_drm, dev)->dpsub;
}

/* -----------------------------------------------------------------------------
 * DRM Planes
 */

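/*
 * Validate the new plane state against its CRTC: scaling is not supported,
 * the plane must cover the full CRTC, and it cannot be updated while the
 * CRTC is disabled.
 */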
static int zynqmp_dpsub_plane_atomic_check(struct drm_plane *plane,
					   struct drm_atomic_state *state)
{
	struct drm_plane_state *new_plane_state = drm_atomic_get_new_plane_state(state,
										  plane);
	struct drm_crtc_state *crtc_state;

	if (!new_plane_state->crtc)
		return 0;

	crtc_state = drm_atomic_get_crtc_state(state, new_plane_state->crtc);
	if (IS_ERR(crtc_state))
		return PTR_ERR(crtc_state);

	return drm_atomic_helper_check_plane_state(new_plane_state,
						   crtc_state,
						   DRM_PLANE_NO_SCALING,
						   DRM_PLANE_NO_SCALING,
						   false, false);
}

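/*
 * Disable the display layer backing the plane. When disabling the graphics
 * layer, also turn off global alpha blending in the blender.
 */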
static void zynqmp_dpsub_plane_atomic_disable(struct drm_plane *plane,
					      struct drm_atomic_state *state)
{
	struct drm_plane_state *old_state = drm_atomic_get_old_plane_state(state,
									   plane);
	struct zynqmp_dpsub *dpsub = to_zynqmp_dpsub(plane->dev);
	struct zynqmp_disp_layer *layer = dpsub->layers[plane->index];

	if (!old_state->fb)
		return;

	zynqmp_disp_layer_disable(layer);

	if (plane->index == ZYNQMP_DPSUB_LAYER_GFX)
		zynqmp_disp_blend_set_global_alpha(dpsub->disp, false,
						   plane->state->alpha >> 8);
}

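/*
 * Update the layer backing the plane: reprogram the format if it has changed
 * (disabling the layer first when needed), update the framebuffer, refresh
 * global alpha for the graphics layer, and re-enable the layer in non-live
 * mode when the format changed.
 */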
static void zynqmp_dpsub_plane_atomic_update(struct drm_plane *plane,
					     struct drm_atomic_state *state)
{
	struct drm_plane_state *old_state = drm_atomic_get_old_plane_state(state, plane);
	struct drm_plane_state *new_state = drm_atomic_get_new_plane_state(state, plane);
	struct zynqmp_dpsub *dpsub = to_zynqmp_dpsub(plane->dev);
	struct zynqmp_disp_layer *layer = dpsub->layers[plane->index];
	bool format_changed = false;

	if (!old_state->fb ||
	    old_state->fb->format->format != new_state->fb->format->format)
		format_changed = true;

	/*
	 * If the format has changed (including going from a previously
	 * disabled state to any format), reconfigure the format. Disable the
	 * plane first if needed.
	 */
	if (format_changed) {
		if (old_state->fb)
			zynqmp_disp_layer_disable(layer);

		zynqmp_disp_layer_set_format(layer, new_state->fb->format);
	}

	zynqmp_disp_layer_update(layer, new_state);

	if (plane->index == ZYNQMP_DPSUB_LAYER_GFX)
		zynqmp_disp_blend_set_global_alpha(dpsub->disp, true,
						   plane->state->alpha >> 8);

	/* Enable or re-enable the plane if the format has changed. */
	if (format_changed)
		zynqmp_disp_layer_enable(layer, ZYNQMP_DPSUB_LAYER_NONLIVE);
}

static const struct drm_plane_helper_funcs zynqmp_dpsub_plane_helper_funcs = {
	.atomic_check = zynqmp_dpsub_plane_atomic_check,
	.atomic_update = zynqmp_dpsub_plane_atomic_update,
	.atomic_disable = zynqmp_dpsub_plane_atomic_disable,
};

static const struct drm_plane_funcs zynqmp_dpsub_plane_funcs = {
	.update_plane = drm_atomic_helper_update_plane,
	.disable_plane = drm_atomic_helper_disable_plane,
	.destroy = drm_plane_cleanup,
	.reset = drm_atomic_helper_plane_reset,
	.atomic_duplicate_state = drm_atomic_helper_plane_duplicate_state,
	.atomic_destroy_state = drm_atomic_helper_plane_destroy_state,
};

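/*
 * Create one DRM plane per display layer, query the formats supported by the
 * layer, and expose an immutable zpos property on both planes and an alpha
 * property on the graphics plane.
 */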
static int zynqmp_dpsub_create_planes(struct zynqmp_dpsub *dpsub)
{
	unsigned int i;
	int ret;

	for (i = 0; i < ARRAY_SIZE(dpsub->drm->planes); i++) {
		struct zynqmp_disp_layer *layer = dpsub->layers[i];
		struct drm_plane *plane = &dpsub->drm->planes[i];
		enum drm_plane_type type;
		unsigned int num_formats;
		u32 *formats;

		formats = zynqmp_disp_layer_drm_formats(layer, &num_formats);
		if (!formats)
			return -ENOMEM;

		/* Graphics layer is primary, and video layer is overlay. */
		type = i == ZYNQMP_DPSUB_LAYER_VID
		     ? DRM_PLANE_TYPE_OVERLAY : DRM_PLANE_TYPE_PRIMARY;
		ret = drm_universal_plane_init(&dpsub->drm->dev, plane, 0,
					       &zynqmp_dpsub_plane_funcs,
					       formats, num_formats,
					       NULL, type, NULL);
		kfree(formats);
		if (ret)
			return ret;

		drm_plane_helper_add(plane, &zynqmp_dpsub_plane_helper_funcs);

		drm_plane_create_zpos_immutable_property(plane, i);
		if (i == ZYNQMP_DPSUB_LAYER_GFX)
			drm_plane_create_alpha_property(plane);
	}

	return 0;
}

/* -----------------------------------------------------------------------------
 * DRM CRTC
 */

static inline struct zynqmp_dpsub *crtc_to_dpsub(struct drm_crtc *crtc)
{
	return container_of(crtc, struct zynqmp_dpsub_drm, crtc)->dpsub;
}

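/*
 * Enable the CRTC: resume the device, configure and enable the pixel clock,
 * enable the display controller, and wait for the timing generator to
 * stabilize before the first frame is sent out.
 */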
static void zynqmp_dpsub_crtc_atomic_enable(struct drm_crtc *crtc,
					    struct drm_atomic_state *state)
{
	struct zynqmp_dpsub *dpsub = crtc_to_dpsub(crtc);
	struct drm_display_mode *adjusted_mode = &crtc->state->adjusted_mode;
	int ret, vrefresh;

	pm_runtime_get_sync(dpsub->dev);

	zynqmp_disp_setup_clock(dpsub->disp, adjusted_mode->clock * 1000);

	ret = clk_prepare_enable(dpsub->vid_clk);
	if (ret) {
		dev_err(dpsub->dev, "failed to enable the pixel clock\n");
		pm_runtime_put_sync(dpsub->dev);
		return;
	}

	zynqmp_disp_enable(dpsub->disp);

	/* Delay of 3 vblank intervals for the timing generator to stabilize. */
	vrefresh = (adjusted_mode->clock * 1000) /
		   (adjusted_mode->vtotal * adjusted_mode->htotal);
	msleep(3 * 1000 / vrefresh);
}

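/*
 * Disable the CRTC: disable the primary plane if it is still active, stop the
 * display controller, turn vblank reporting off, complete any pending vblank
 * event, and release the pixel clock and the runtime PM reference.
 */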
static void zynqmp_dpsub_crtc_atomic_disable(struct drm_crtc *crtc,
					     struct drm_atomic_state *state)
{
	struct zynqmp_dpsub *dpsub = crtc_to_dpsub(crtc);
	struct drm_plane_state *old_plane_state;

	/*
	 * Disable the plane if active. The old plane state can be NULL in the
	 * .shutdown() path if the plane is already disabled; skip
	 * zynqmp_dpsub_plane_atomic_disable() in that case.
	 */
	old_plane_state = drm_atomic_get_old_plane_state(state, crtc->primary);
	if (old_plane_state)
		zynqmp_dpsub_plane_atomic_disable(crtc->primary, state);

	zynqmp_disp_disable(dpsub->disp);

	drm_crtc_vblank_off(crtc);

	spin_lock_irq(&crtc->dev->event_lock);
	if (crtc->state->event) {
		drm_crtc_send_vblank_event(crtc, crtc->state->event);
		crtc->state->event = NULL;
	}
	spin_unlock_irq(&crtc->dev->event_lock);

	clk_disable_unprepare(dpsub->vid_clk);
	pm_runtime_put_sync(dpsub->dev);
}

static int zynqmp_dpsub_crtc_atomic_check(struct drm_crtc *crtc,
					  struct drm_atomic_state *state)
{
	return drm_atomic_add_affected_planes(state, crtc);
}

static void zynqmp_dpsub_crtc_atomic_begin(struct drm_crtc *crtc,
					   struct drm_atomic_state *state)
{
	drm_crtc_vblank_on(crtc);
}

static void zynqmp_dpsub_crtc_atomic_flush(struct drm_crtc *crtc,
					   struct drm_atomic_state *state)
{
	if (crtc->state->event) {
		struct drm_pending_vblank_event *event;

		/* Consume the flip_done event from atomic helper. */
		event = crtc->state->event;
		crtc->state->event = NULL;

		event->pipe = drm_crtc_index(crtc);

		WARN_ON(drm_crtc_vblank_get(crtc) != 0);

		spin_lock_irq(&crtc->dev->event_lock);
		drm_crtc_arm_vblank_event(crtc, event);
		spin_unlock_irq(&crtc->dev->event_lock);
	}
}

static const struct drm_crtc_helper_funcs zynqmp_dpsub_crtc_helper_funcs = {
	.atomic_enable = zynqmp_dpsub_crtc_atomic_enable,
	.atomic_disable = zynqmp_dpsub_crtc_atomic_disable,
	.atomic_check = zynqmp_dpsub_crtc_atomic_check,
	.atomic_begin = zynqmp_dpsub_crtc_atomic_begin,
	.atomic_flush = zynqmp_dpsub_crtc_atomic_flush,
};

static int zynqmp_dpsub_crtc_enable_vblank(struct drm_crtc *crtc)
{
	struct zynqmp_dpsub *dpsub = crtc_to_dpsub(crtc);

	zynqmp_dp_enable_vblank(dpsub->dp);

	return 0;
}

static void zynqmp_dpsub_crtc_disable_vblank(struct drm_crtc *crtc)
{
	struct zynqmp_dpsub *dpsub = crtc_to_dpsub(crtc);

	zynqmp_dp_disable_vblank(dpsub->dp);
}

static const struct drm_crtc_funcs zynqmp_dpsub_crtc_funcs = {
	.destroy = drm_crtc_cleanup,
	.set_config = drm_atomic_helper_set_config,
	.page_flip = drm_atomic_helper_page_flip,
	.reset = drm_atomic_helper_crtc_reset,
	.atomic_duplicate_state = drm_atomic_helper_crtc_duplicate_state,
	.atomic_destroy_state = drm_atomic_helper_crtc_destroy_state,
	.enable_vblank = zynqmp_dpsub_crtc_enable_vblank,
	.disable_vblank = zynqmp_dpsub_crtc_disable_vblank,
};

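/* Create the single CRTC, using the graphics plane as its primary plane. */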
static int zynqmp_dpsub_create_crtc(struct zynqmp_dpsub *dpsub)
{
	struct drm_plane *plane = &dpsub->drm->planes[ZYNQMP_DPSUB_LAYER_GFX];
	struct drm_crtc *crtc = &dpsub->drm->crtc;
	int ret;

	ret = drm_crtc_init_with_planes(&dpsub->drm->dev, crtc, plane,
					NULL, &zynqmp_dpsub_crtc_funcs, NULL);
	if (ret < 0)
		return ret;

	drm_crtc_helper_add(crtc, &zynqmp_dpsub_crtc_helper_funcs);

	/* Start with vertical blanking interrupt reporting disabled. */
	drm_crtc_vblank_off(crtc);

	return 0;
}

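/* Allow both planes to be used with the (only) CRTC. */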
static void zynqmp_dpsub_map_crtc_to_plane(struct zynqmp_dpsub *dpsub)
{
	u32 possible_crtcs = drm_crtc_mask(&dpsub->drm->crtc);
	unsigned int i;

	for (i = 0; i < ARRAY_SIZE(dpsub->drm->planes); i++)
		dpsub->drm->planes[i].possible_crtcs = possible_crtcs;
}

/**
 * zynqmp_dpsub_drm_handle_vblank - Handle the vblank event
 * @dpsub: DisplayPort subsystem
 *
 * This function handles the vblank interrupt and sends an event to the CRTC
 * object. It is called by the DP vblank interrupt handler.
 */
void zynqmp_dpsub_drm_handle_vblank(struct zynqmp_dpsub *dpsub)
{
	drm_crtc_handle_vblank(&dpsub->drm->crtc);
}

/* -----------------------------------------------------------------------------
 * Dumb Buffer & Framebuffer Allocation
 */

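/*
 * Create a dumb buffer, rounding the pitch up to the alignment required by
 * the display DMA engine.
 */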
static int zynqmp_dpsub_dumb_create(struct drm_file *file_priv,
				    struct drm_device *drm,
				    struct drm_mode_create_dumb *args)
{
	struct zynqmp_dpsub *dpsub = to_zynqmp_dpsub(drm);
	unsigned int pitch = DIV_ROUND_UP(args->width * args->bpp, 8);

	/* Enforce the alignment constraints of the DMA engine. */
	args->pitch = ALIGN(pitch, dpsub->dma_align);

	return drm_gem_dma_dumb_create_internal(file_priv, drm, args);
}

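/*
 * Create a framebuffer from userspace, with pitches rounded up to the
 * alignment required by the display DMA engine.
 */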
static struct drm_framebuffer *
zynqmp_dpsub_fb_create(struct drm_device *drm, struct drm_file *file_priv,
		       const struct drm_mode_fb_cmd2 *mode_cmd)
{
	struct zynqmp_dpsub *dpsub = to_zynqmp_dpsub(drm);
	struct drm_mode_fb_cmd2 cmd = *mode_cmd;
	unsigned int i;

	/* Enforce the alignment constraints of the DMA engine. */
	for (i = 0; i < ARRAY_SIZE(cmd.pitches); ++i)
		cmd.pitches[i] = ALIGN(cmd.pitches[i], dpsub->dma_align);

	return drm_gem_fb_create(drm, file_priv, &cmd);
}

static const struct drm_mode_config_funcs zynqmp_dpsub_mode_config_funcs = {
	.fb_create = zynqmp_dpsub_fb_create,
	.atomic_check = drm_atomic_helper_check,
	.atomic_commit = drm_atomic_helper_commit,
};

/* -----------------------------------------------------------------------------
 * DRM/KMS Driver
 */

DEFINE_DRM_GEM_DMA_FOPS(zynqmp_dpsub_drm_fops);

static const struct drm_driver zynqmp_dpsub_drm_driver = {
	.driver_features = DRIVER_MODESET | DRIVER_GEM |
			   DRIVER_ATOMIC,

	DRM_GEM_DMA_DRIVER_OPS_WITH_DUMB_CREATE(zynqmp_dpsub_dumb_create),

	.fops = &zynqmp_dpsub_drm_fops,

	.name = "zynqmp-dpsub",
	.desc = "Xilinx DisplayPort Subsystem Driver",
	.date = "20130509",
	.major = 1,
	.minor = 0,
};

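/*
 * Create the KMS pipeline: planes, CRTC, a simple encoder with the DP bridge
 * chain attached, and a connector driven by the bridge chain.
 */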
static int zynqmp_dpsub_kms_init(struct zynqmp_dpsub *dpsub)
{
	struct drm_encoder *encoder = &dpsub->drm->encoder;
	struct drm_connector *connector;
	int ret;

	/* Create the planes and the CRTC. */
	ret = zynqmp_dpsub_create_planes(dpsub);
	if (ret)
		return ret;

	ret = zynqmp_dpsub_create_crtc(dpsub);
	if (ret < 0)
		return ret;

	zynqmp_dpsub_map_crtc_to_plane(dpsub);

	/* Create the encoder and attach the bridge. */
	encoder->possible_crtcs |= drm_crtc_mask(&dpsub->drm->crtc);
	drm_simple_encoder_init(&dpsub->drm->dev, encoder, DRM_MODE_ENCODER_NONE);

	ret = drm_bridge_attach(encoder, dpsub->bridge, NULL,
				DRM_BRIDGE_ATTACH_NO_CONNECTOR);
	if (ret) {
		dev_err(dpsub->dev, "failed to attach bridge to encoder\n");
		return ret;
	}

	/* Create the connector for the chain of bridges. */
	connector = drm_bridge_connector_init(&dpsub->drm->dev, encoder);
	if (IS_ERR(connector)) {
		dev_err(dpsub->dev, "failed to create connector\n");
		return PTR_ERR(connector);
	}

	ret = drm_connector_attach_encoder(connector, encoder);
	if (ret < 0) {
		dev_err(dpsub->dev, "failed to attach connector to encoder\n");
		return ret;
	}

	return 0;
}

static void zynqmp_dpsub_drm_release(struct drm_device *drm, void *res)
{
	struct zynqmp_dpsub_drm *dpdrm = res;

	zynqmp_dpsub_release(dpdrm->dpsub);
}

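/**
 * zynqmp_dpsub_drm_init - Initialize the DRM/KMS device
 * @dpsub: DisplayPort subsystem
 *
 * Allocate the DRM device, initialize mode setting, vblank handling and the
 * KMS pipeline, register the device and set up fbdev emulation.
 *
 * Return: 0 on success, or a negative error code otherwise.
 */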
int zynqmp_dpsub_drm_init(struct zynqmp_dpsub *dpsub)
{
	struct zynqmp_dpsub_drm *dpdrm;
	struct drm_device *drm;
	int ret;

	/*
	 * Allocate the drm_device and immediately add a cleanup action to
	 * release the zynqmp_dpsub instance. If any of those operations fail,
	 * dpsub->drm will remain NULL, which tells the caller that it must
	 * clean up manually.
	 */
	dpdrm = devm_drm_dev_alloc(dpsub->dev, &zynqmp_dpsub_drm_driver,
				   struct zynqmp_dpsub_drm, dev);
	if (IS_ERR(dpdrm))
		return PTR_ERR(dpdrm);

	dpdrm->dpsub = dpsub;
	drm = &dpdrm->dev;

	ret = drmm_add_action(drm, zynqmp_dpsub_drm_release, dpdrm);
	if (ret < 0)
		return ret;

	dpsub->drm = dpdrm;

	/* Initialize mode config, vblank and the KMS poll helper. */
	ret = drmm_mode_config_init(drm);
	if (ret < 0)
		return ret;

	drm->mode_config.funcs = &zynqmp_dpsub_mode_config_funcs;
	drm->mode_config.min_width = 0;
	drm->mode_config.min_height = 0;
	drm->mode_config.max_width = ZYNQMP_DISP_MAX_WIDTH;
	drm->mode_config.max_height = ZYNQMP_DISP_MAX_HEIGHT;

	ret = drm_vblank_init(drm, 1);
	if (ret)
		return ret;

	drm_kms_helper_poll_init(drm);

	ret = zynqmp_dpsub_kms_init(dpsub);
	if (ret < 0)
		goto err_poll_fini;

	/* Reset all components and register the DRM device. */
	drm_mode_config_reset(drm);

	ret = drm_dev_register(drm, 0);
	if (ret < 0)
		goto err_poll_fini;

	/* Initialize fbdev generic emulation. */
	drm_fbdev_dma_setup(drm, 24);

	return 0;

err_poll_fini:
	drm_kms_helper_poll_fini(drm);
	return ret;
}

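/**
 * zynqmp_dpsub_drm_cleanup - Clean up the DRM/KMS device
 * @dpsub: DisplayPort subsystem
 *
 * Unregister the DRM device, shut down the atomic state and stop the KMS
 * poll helper.
 */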
void zynqmp_dpsub_drm_cleanup(struct zynqmp_dpsub *dpsub)
{
	struct drm_device *drm = &dpsub->drm->dev;

	drm_dev_unregister(drm);
	drm_atomic_helper_shutdown(drm);
	drm_kms_helper_poll_fini(drm);
}