1// Copyright (C) 2016 Jolla Ltd.
2// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
3
4#include "qgstvideorenderersink_p.h"
5
6#include <QtMultimedia/qvideoframe.h>
7#include <QtMultimedia/qvideosink.h>
8#include <QtMultimedia/private/qvideoframe_p.h>
9#include <QtGui/rhi/qrhi.h>
10#include <QtCore/qcoreapplication.h>
11#include <QtCore/qdebug.h>
12#include <QtCore/qloggingcategory.h>
13#include <QtCore/private/qfactoryloader_p.h>
14#include <QtCore/private/quniquehandle_p.h>
15
16#include <common/qgst_debug_p.h>
17#include <common/qgstreamermetadata_p.h>
18#include <common/qgstreamervideosink_p.h>
19#include <common/qgstutils_p.h>
20#include <common/qgstvideobuffer_p.h>
21
22#include <gst/video/video.h>
23#include <gst/video/gstvideometa.h>
24
25
26#if QT_CONFIG(gstreamer_gl)
27#include <gst/gl/gl.h>
28#endif // #if QT_CONFIG(gstreamer_gl)
29
30// DMA support
31#if QT_CONFIG(gstreamer_gl_egl) && QT_CONFIG(linux_dmabuf)
32# include <gst/allocators/gstdmabuf.h>
33#endif
34
35// NOLINTBEGIN(readability-convert-member-functions-to-static)
36
37Q_STATIC_LOGGING_CATEGORY(qLcGstVideoRenderer, "qt.multimedia.gstvideorenderer");
38
39QT_BEGIN_NAMESPACE
40
41QGstVideoRenderer::QGstVideoRenderer(QGstreamerVideoSink *sink)
42 : m_sink(sink), m_surfaceCaps(createSurfaceCaps(sink))
43{
44 QObject::connect(
45 sender: sink, signal: &QGstreamerVideoSink::aboutToBeDestroyed, context: this,
46 slot: [this] {
47 QMutexLocker locker(&m_sinkMutex);
48 m_sink = nullptr;
49 },
50 type: Qt::DirectConnection);
51}
52
// Defaulted out-of-line destructor (kept in the .cpp so member types only need
// to be complete here).
QGstVideoRenderer::~QGstVideoRenderer() = default;
54
55QGstCaps QGstVideoRenderer::createSurfaceCaps([[maybe_unused]] QGstreamerVideoSink *sink)
56{
57 QGstCaps caps = QGstCaps::create();
58
59 // All the formats that both we and gstreamer support
60 auto formats = QList<QVideoFrameFormat::PixelFormat>()
61 << QVideoFrameFormat::Format_YUV420P
62 << QVideoFrameFormat::Format_YUV422P
63 << QVideoFrameFormat::Format_YV12
64 << QVideoFrameFormat::Format_UYVY
65 << QVideoFrameFormat::Format_YUYV
66 << QVideoFrameFormat::Format_NV12
67 << QVideoFrameFormat::Format_NV21
68 << QVideoFrameFormat::Format_AYUV
69 << QVideoFrameFormat::Format_P010
70 << QVideoFrameFormat::Format_XRGB8888
71 << QVideoFrameFormat::Format_XBGR8888
72 << QVideoFrameFormat::Format_RGBX8888
73 << QVideoFrameFormat::Format_BGRX8888
74 << QVideoFrameFormat::Format_ARGB8888
75 << QVideoFrameFormat::Format_ABGR8888
76 << QVideoFrameFormat::Format_RGBA8888
77 << QVideoFrameFormat::Format_BGRA8888
78 << QVideoFrameFormat::Format_Y8
79 << QVideoFrameFormat::Format_Y16
80 ;
81#if QT_CONFIG(gstreamer_gl)
82 QRhi *rhi = sink->rhi();
83 if (rhi && rhi->backend() == QRhi::OpenGLES2) {
84 caps.addPixelFormats(formats, GST_CAPS_FEATURE_MEMORY_GL_MEMORY);
85# if QT_CONFIG(gstreamer_gl_egl) && QT_CONFIG(linux_dmabuf)
86 if (sink->eglDisplay() && sink->eglImageTargetTexture2D()) {
87 // We currently do not handle planar DMA buffers, as it's somewhat unclear how to
88 // convert the planar EGLImage into something we can use from OpenGL
89 auto singlePlaneFormats = QList<QVideoFrameFormat::PixelFormat>()
90 << QVideoFrameFormat::Format_UYVY
91 << QVideoFrameFormat::Format_YUYV
92 << QVideoFrameFormat::Format_AYUV
93 << QVideoFrameFormat::Format_XRGB8888
94 << QVideoFrameFormat::Format_XBGR8888
95 << QVideoFrameFormat::Format_RGBX8888
96 << QVideoFrameFormat::Format_BGRX8888
97 << QVideoFrameFormat::Format_ARGB8888
98 << QVideoFrameFormat::Format_ABGR8888
99 << QVideoFrameFormat::Format_RGBA8888
100 << QVideoFrameFormat::Format_BGRA8888
101 << QVideoFrameFormat::Format_Y8
102 << QVideoFrameFormat::Format_Y16
103 ;
104 caps.addPixelFormats(formats: singlePlaneFormats, GST_CAPS_FEATURE_MEMORY_DMABUF);
105 }
106# endif
107 }
108#endif
109 caps.addPixelFormats(formats);
110 return caps;
111}
112
// Handles the renderer's private event types (posted from the streaming
// thread) on this object's own thread.
void QGstVideoRenderer::customEvent(QEvent *event)
{
QT_WARNING_PUSH
QT_WARNING_DISABLE_GCC("-Wswitch") // case value not in enumerated type ‘QEvent::Type’

    switch (event->type()) {
    case renderFramesEvent: {
        // LATER: we currently show every frame. however it may be reasonable to drop frames
        // here if the queue contains more than one frame
        while (std::optional<RenderBufferState> nextState = m_bufferQueue.dequeue())
            handleNewBuffer(std::move(*nextState));
        return;
    }
    case stopEvent: {
        // Drop the pipeline-owned frame; re-publish the current app-facing
        // frame (presumably to keep the last image visible — TODO confirm).
        m_currentPipelineFrame = {};
        updateCurrentVideoFrame(m_currentVideoFrame);
        return;
    }

    default:
        return;
    }
QT_WARNING_POP
}
137
138
139void QGstVideoRenderer::handleNewBuffer(RenderBufferState state)
140{
141 auto videoBuffer = std::make_unique<QGstVideoBuffer>(args&: state.buffer, args&: state.videoInfo, args&: m_sink,
142 args&: state.format, args&: state.memoryFormat);
143 QVideoFrame frame = QVideoFramePrivate::createFrame(buffer: std::move(videoBuffer), format: state.format);
144 QGstUtils::setFrameTimeStampsFromBuffer(frame: &frame, buffer: state.buffer.get());
145 m_currentPipelineFrame = std::move(frame);
146
147 if (!m_isActive) {
148 qCDebug(qLcGstVideoRenderer) << " showing empty video frame";
149 updateCurrentVideoFrame({});
150 return;
151 }
152
153 updateCurrentVideoFrame(m_currentPipelineFrame);
154}
155
// Returns the caps built once in the constructor (see createSurfaceCaps).
const QGstCaps &QGstVideoRenderer::caps()
{
    return m_surfaceCaps;
}
160
161bool QGstVideoRenderer::start(const QGstCaps& caps)
162{
163 qCDebug(qLcGstVideoRenderer) << "QGstVideoRenderer::start" << caps;
164
165 auto optionalFormatAndVideoInfo = caps.formatAndVideoInfo();
166 if (optionalFormatAndVideoInfo) {
167 std::tie(args&: m_format, args&: m_videoInfo) = std::move(*optionalFormatAndVideoInfo);
168 } else {
169 m_format = {};
170 m_videoInfo = {};
171 }
172 m_capsMemoryFormat = caps.memoryFormat();
173
174 // NOTE: m_format will not be fully populated until GST_EVENT_TAG is processed
175
176 return true;
177}
178
179void QGstVideoRenderer::stop()
180{
181 qCDebug(qLcGstVideoRenderer) << "QGstVideoRenderer::stop";
182
183 m_bufferQueue.clear();
184 QCoreApplication::postEvent(receiver: this, event: new QEvent(stopEvent));
185}
186
// GstBaseSink unlock hook; nothing blocks in this renderer, so only log.
void QGstVideoRenderer::unlock()
{
    qCDebug(qLcGstVideoRenderer) << "QGstVideoRenderer::unlock";
}
191
// GstBaseSink propose_allocation hook: we add no pools/metas, just accept.
bool QGstVideoRenderer::proposeAllocation(GstQuery *)
{
    qCDebug(qLcGstVideoRenderer) << "QGstVideoRenderer::proposeAllocation";
    return true;
}
197
198GstFlowReturn QGstVideoRenderer::render(GstBuffer *buffer)
199{
200 qCDebug(qLcGstVideoRenderer) << "QGstVideoRenderer::render";
201
202 if (m_flushing) {
203 qCDebug(qLcGstVideoRenderer)
204 << " buffer received while flushing the sink ... discarding buffer";
205 return GST_FLOW_FLUSHING;
206 }
207
208 GstVideoCropMeta *meta = gst_buffer_get_video_crop_meta(buffer);
209 if (meta) {
210 QRect vp(meta->x, meta->y, meta->width, meta->height);
211 if (m_format.viewport() != vp) {
212 qCDebug(qLcGstVideoRenderer)
213 << Q_FUNC_INFO << " Update viewport on Metadata: [" << meta->height << "x"
214 << meta->width << " | " << meta->x << "x" << meta->y << "]";
215 // Update viewport if data is not the same
216 m_format.setViewport(vp);
217 }
218 }
219
220 // Some gst elements, like v4l2h264dec, can provide Direct Memory Access buffers (DMA-BUF)
221 // without specifying it in their caps. So we check the memory format manually:
222 QGstCaps::MemoryFormat bufferMemoryFormat = [&] {
223 if (m_capsMemoryFormat != QGstCaps::CpuMemory)
224 return m_capsMemoryFormat;
225
226 [[maybe_unused]] GstMemory *mem = gst_buffer_peek_memory(buffer, idx: 0);
227#if QT_CONFIG(gstreamer_gl_egl) && QT_CONFIG(linux_dmabuf)
228 if (gst_is_dmabuf_memory(mem))
229 return QGstCaps::DMABuf;
230#endif
231#if QT_CONFIG(gstreamer_gl)
232 if (gst_is_gl_memory(mem))
233 return QGstCaps::GLTexture;
234#endif
235 return QGstCaps::CpuMemory;
236 }();
237
238 RenderBufferState state{
239 .buffer: QGstBufferHandle{ buffer, QGstBufferHandle::NeedsRef },
240 .format: m_format,
241 .videoInfo: m_videoInfo,
242 .memoryFormat: bufferMemoryFormat,
243 };
244
245 qCDebug(qLcGstVideoRenderer) << " sending video frame";
246
247 qsizetype sizeOfQueue = m_bufferQueue.enqueue(value: std::move(state));
248 if (sizeOfQueue == 1)
249 // we only need to wake up, if we don't have a pending frame
250 QCoreApplication::postEvent(receiver: this, event: new QEvent(renderFramesEvent));
251
252 return GST_FLOW_OK;
253}
254
255bool QGstVideoRenderer::query(GstQuery *query)
256{
257#if QT_CONFIG(gstreamer_gl)
258 if (GST_QUERY_TYPE(query) == GST_QUERY_CONTEXT) {
259 const gchar *type = nullptr;
260 gst_query_parse_context_type(query, context_type: &type);
261
262 QLatin1StringView typeStr(type);
263 if (typeStr != QLatin1StringView("gst.gl.GLDisplay")
264 && typeStr != QLatin1StringView("gst.gl.local_context")) {
265 return false;
266 }
267
268 QMutexLocker locker(&m_sinkMutex);
269 if (!m_sink)
270 return false;
271
272 auto *gstGlContext = typeStr == QLatin1StringView("gst.gl.GLDisplay")
273 ? m_sink->gstGlDisplayContext() : m_sink->gstGlLocalContext();
274 if (!gstGlContext)
275 return false;
276
277 gst_query_set_context(query, context: gstGlContext);
278
279 return true;
280 }
281#else
282 Q_UNUSED(query);
283#endif
284 return false;
285}
286
287void QGstVideoRenderer::gstEvent(GstEvent *event)
288{
289 qCDebug(qLcGstVideoRenderer) << "QGstVideoRenderer::gstEvent:" << event;
290
291 switch (GST_EVENT_TYPE(event)) {
292 case GST_EVENT_TAG:
293 return gstEventHandleTag(event);
294 case GST_EVENT_EOS:
295 return gstEventHandleEOS(event);
296 case GST_EVENT_FLUSH_START:
297 return gstEventHandleFlushStart(event);
298 case GST_EVENT_FLUSH_STOP:
299 return gstEventHandleFlushStop(event);
300
301 default:
302 return;
303 }
304}
305
306void QGstVideoRenderer::setActive(bool isActive)
307{
308 if (isActive == m_isActive)
309 return;
310
311 m_isActive = isActive;
312 if (isActive)
313 updateCurrentVideoFrame(m_currentPipelineFrame);
314 else
315 updateCurrentVideoFrame({});
316}
317
318void QGstVideoRenderer::updateCurrentVideoFrame(QVideoFrame frame)
319{
320 m_currentVideoFrame = std::move(frame);
321 if (m_sink)
322 m_sink->setVideoFrame(m_currentVideoFrame);
323}
324
325void QGstVideoRenderer::gstEventHandleTag(GstEvent *event)
326{
327 GstTagList *taglist = nullptr;
328 gst_event_parse_tag(event, taglist: &taglist);
329 if (!taglist)
330 return;
331
332 qCDebug(qLcGstVideoRenderer) << "QGstVideoRenderer::gstEventHandleTag:" << taglist;
333
334 QGString value;
335 if (!gst_tag_list_get_string(list: taglist, GST_TAG_IMAGE_ORIENTATION, value: &value))
336 return;
337
338 RotationResult parsed = parseRotationTag(value.get());
339
340 m_format.setMirrored(parsed.flip);
341 m_format.setRotation(parsed.rotation);
342}
343
// End-of-stream: treat like a stop (clear queue, post stopEvent).
void QGstVideoRenderer::gstEventHandleEOS(GstEvent *)
{
    stop();
}
348
void QGstVideoRenderer::gstEventHandleFlushStart(GstEvent *)
{
    // "data is to be discarded"
    // Set the flag first so render() rejects buffers while we drain the queue.
    m_flushing = true;
    m_bufferQueue.clear();
}
355
void QGstVideoRenderer::gstEventHandleFlushStop(GstEvent *)
{
    // "data is allowed again"
    m_flushing = false;
}
361
// Parent class vtable captured in class_init; used by the vfuncs to chain up.
static GstVideoSinkClass *gvrs_sink_parent_class;
// Out-of-band constructor argument: g_object_new() cannot pass arguments to
// instance_init, so createSink() stashes the Qt sink here first.
// thread_local — presumably to tolerate concurrent sink creation on different
// threads; TODO confirm.
static thread_local QGstreamerVideoSink *gvrs_current_sink;

// Declares a local `sink` by reinterpreting a GObject-style pointer as ours.
#define VO_SINK(s) QGstVideoRendererSink *sink(reinterpret_cast<QGstVideoRendererSink *>(s))
366
367QGstVideoRendererSinkElement QGstVideoRendererSink::createSink(QGstreamerVideoSink *sink)
368{
369 setSink(sink);
370 QGstVideoRendererSink *gstSink = reinterpret_cast<QGstVideoRendererSink *>(
371 g_object_new(object_type: QGstVideoRendererSink::get_type(), first_property_name: nullptr));
372
373 return QGstVideoRendererSinkElement{
374 gstSink,
375 QGstElement::NeedsRef,
376 };
377}
378
// Stashes the sink for the upcoming instance_init call (see gvrs_current_sink).
void QGstVideoRendererSink::setSink(QGstreamerVideoSink *sink)
{
    gvrs_current_sink = sink;
}
383
384GType QGstVideoRendererSink::get_type()
385{
386 static const GTypeInfo info =
387 {
388 .class_size: sizeof(QGstVideoRendererSinkClass), // class_size
389 .base_init: base_init, // base_init
390 .base_finalize: nullptr, // base_finalize
391 .class_init: class_init, // class_init
392 .class_finalize: nullptr, // class_finalize
393 .class_data: nullptr, // class_data
394 .instance_size: sizeof(QGstVideoRendererSink), // instance_size
395 .n_preallocs: 0, // n_preallocs
396 .instance_init: instance_init, // instance_init
397 .value_table: nullptr // value_table
398 };
399
400 static const GType type = g_type_register_static(GST_TYPE_VIDEO_SINK, type_name: "QGstVideoRendererSink",
401 info: &info, flags: GTypeFlags(0));
402
403 return type;
404}
405
406void QGstVideoRendererSink::class_init(gpointer g_class, gpointer class_data)
407{
408 Q_UNUSED(class_data);
409
410 gvrs_sink_parent_class = reinterpret_cast<GstVideoSinkClass *>(g_type_class_peek_parent(g_class));
411
412 GstVideoSinkClass *video_sink_class = reinterpret_cast<GstVideoSinkClass *>(g_class);
413 video_sink_class->show_frame = QGstVideoRendererSink::show_frame;
414
415 GstBaseSinkClass *base_sink_class = reinterpret_cast<GstBaseSinkClass *>(g_class);
416 base_sink_class->get_caps = QGstVideoRendererSink::get_caps;
417 base_sink_class->set_caps = QGstVideoRendererSink::set_caps;
418 base_sink_class->propose_allocation = QGstVideoRendererSink::propose_allocation;
419 base_sink_class->stop = QGstVideoRendererSink::stop;
420 base_sink_class->unlock = QGstVideoRendererSink::unlock;
421 base_sink_class->query = QGstVideoRendererSink::query;
422 base_sink_class->event = QGstVideoRendererSink::event;
423
424 GstElementClass *element_class = reinterpret_cast<GstElementClass *>(g_class);
425 element_class->change_state = QGstVideoRendererSink::change_state;
426 gst_element_class_set_metadata(klass: element_class,
427 longname: "Qt built-in video renderer sink",
428 classification: "Sink/Video",
429 description: "Qt default built-in video renderer sink",
430 author: "The Qt Company");
431
432 GObjectClass *object_class = reinterpret_cast<GObjectClass *>(g_class);
433 object_class->finalize = QGstVideoRendererSink::finalize;
434}
435
436void QGstVideoRendererSink::base_init(gpointer g_class)
437{
438 static GstStaticPadTemplate sink_pad_template = GST_STATIC_PAD_TEMPLATE(
439 "sink", GST_PAD_SINK, GST_PAD_ALWAYS, GST_STATIC_CAPS(
440 "video/x-raw, "
441 "framerate = (fraction) [ 0, MAX ], "
442 "width = (int) [ 1, MAX ], "
443 "height = (int) [ 1, MAX ]"));
444
445 gst_element_class_add_pad_template(
446 GST_ELEMENT_CLASS(g_class), templ: gst_static_pad_template_get(pad_template: &sink_pad_template));
447}
448
449void QGstVideoRendererSink::instance_init(GTypeInstance *instance, gpointer g_class)
450{
451 Q_UNUSED(g_class);
452 VO_SINK(instance);
453
454 Q_ASSERT(gvrs_current_sink);
455
456 sink->renderer = new QGstVideoRenderer(gvrs_current_sink);
457 sink->renderer->moveToThread(thread: gvrs_current_sink->thread());
458 gvrs_current_sink = nullptr;
459}
460
// GObject finalize: destroys the renderer created in instance_init, then
// chains up to the parent finalize.
void QGstVideoRendererSink::finalize(GObject *object)
{
    VO_SINK(object);

    delete sink->renderer;

    // Chain up
    G_OBJECT_CLASS(gvrs_sink_parent_class)->finalize(object);
}
470
471GstStateChangeReturn QGstVideoRendererSink::change_state(
472 GstElement *element, GstStateChange transition)
473{
474 GstStateChangeReturn ret =
475 GST_ELEMENT_CLASS(gvrs_sink_parent_class)->change_state(element, transition);
476 qCDebug(qLcGstVideoRenderer) << "QGstVideoRenderer::change_state:" << transition << ret;
477 return ret;
478}
479
480GstCaps *QGstVideoRendererSink::get_caps(GstBaseSink *base, GstCaps *filter)
481{
482 VO_SINK(base);
483
484 QGstCaps caps = sink->renderer->caps();
485 if (filter)
486 caps = QGstCaps(gst_caps_intersect(caps1: caps.caps(), caps2: filter), QGstCaps::HasRef);
487
488 return caps.release();
489}
490
491gboolean QGstVideoRendererSink::set_caps(GstBaseSink *base, GstCaps *gcaps)
492{
493 VO_SINK(base);
494 auto caps = QGstCaps(gcaps, QGstCaps::NeedsRef);
495
496 qCDebug(qLcGstVideoRenderer) << "set_caps:" << caps;
497
498 if (!caps) {
499 sink->renderer->stop();
500 return TRUE;
501 }
502
503 return sink->renderer->start(caps);
504}
505
// GstBaseSink propose_allocation vfunc: thin forwarder to the renderer.
gboolean QGstVideoRendererSink::propose_allocation(GstBaseSink *base, GstQuery *query)
{
    VO_SINK(base);
    return sink->renderer->proposeAllocation(query);
}
511
// GstBaseSink stop vfunc: thin forwarder to the renderer; always succeeds.
gboolean QGstVideoRendererSink::stop(GstBaseSink *base)
{
    VO_SINK(base);
    sink->renderer->stop();
    return TRUE;
}
518
// GstBaseSink unlock vfunc: thin forwarder to the renderer; always succeeds.
gboolean QGstVideoRendererSink::unlock(GstBaseSink *base)
{
    VO_SINK(base);
    sink->renderer->unlock();
    return TRUE;
}
525
// GstVideoSink show_frame vfunc: hands each buffer to the renderer.
GstFlowReturn QGstVideoRendererSink::show_frame(GstVideoSink *base, GstBuffer *buffer)
{
    VO_SINK(base);
    return sink->renderer->render(buffer);
}
531
532gboolean QGstVideoRendererSink::query(GstBaseSink *base, GstQuery *query)
533{
534 VO_SINK(base);
535 if (sink->renderer->query(query))
536 return TRUE;
537
538 return GST_BASE_SINK_CLASS(gvrs_sink_parent_class)->query(base, query);
539}
540
// GstBaseSink event vfunc: let the renderer observe the event, then always
// chain up so default handling still runs.
gboolean QGstVideoRendererSink::event(GstBaseSink *base, GstEvent * event)
{
    VO_SINK(base);
    sink->renderer->gstEvent(event);
    return GST_BASE_SINK_CLASS(gvrs_sink_parent_class)->event(base, event);
}
547
548QGstVideoRendererSinkElement::QGstVideoRendererSinkElement(QGstVideoRendererSink *element,
549 RefMode mode)
550 : QGstBaseSink{
551 qGstCheckedCast<GstBaseSink>(arg: element),
552 mode,
553 }
554{
555}
556
// Forwards the active state to the wrapped renderer (see
// QGstVideoRenderer::setActive).
void QGstVideoRendererSinkElement::setActive(bool isActive)
{
    qGstVideoRendererSink()->renderer->setActive(isActive);
}
561
// Downcasts the wrapped GstElement back to our concrete sink type.
QGstVideoRendererSink *QGstVideoRendererSinkElement::qGstVideoRendererSink() const
{
    return reinterpret_cast<QGstVideoRendererSink *>(element());
}
566
567QT_END_NAMESPACE
568

source code of qtmultimedia/src/plugins/multimedia/gstreamer/common/qgstvideorenderersink.cpp