// Copyright (C) 2016 Jolla Ltd.
// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only

#include <qvideoframe.h>
#include <qvideosink.h>
#include <QDebug>
#include <QMap>
#include <QThread>
#include <QEvent>
#include <QCoreApplication>

#include <private/qfactoryloader_p.h>
#include "qgstvideobuffer_p.h"
#include "qgstreamervideosink_p.h"

#include "qgstvideorenderersink_p.h"

#include <gst/video/video.h>
#include <gst/video/gstvideometa.h>
#include <qloggingcategory.h>
#include <qdebug.h>

#include "qgstutils_p.h"

#include <rhi/qrhi.h>
#if QT_CONFIG(gstreamer_gl)
#include <gst/gl/gl.h>
#endif // #if QT_CONFIG(gstreamer_gl)

// DMA support
#if QT_CONFIG(linux_dmabuf)
#include <gst/allocators/gstdmabuf.h>
#endif

//#define DEBUG_VIDEO_SURFACE_SINK

static Q_LOGGING_CATEGORY(qLcGstVideoRenderer, "qt.multimedia.gstvideorenderer")

QT_BEGIN_NAMESPACE

QGstVideoRenderer::QGstVideoRenderer(QGstreamerVideoSink *sink)
    : m_sink(sink)
{
    createSurfaceCaps();
}

QGstVideoRenderer::~QGstVideoRenderer()
{
}

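// Build the set of caps this sink advertises upstream: all pixel formats shared by
// Qt and GStreamer in system memory, plus GLMemory (and, when EGL image support is
// available, DMABuf) caps features if rendering goes through an OpenGL ES 2 RHI.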
void QGstVideoRenderer::createSurfaceCaps()
{
    QRhi *rhi = m_sink->rhi();
    Q_UNUSED(rhi);

    auto caps = QGstCaps::create();

    // All the formats that both we and gstreamer support
    auto formats = QList<QVideoFrameFormat::PixelFormat>()
                   << QVideoFrameFormat::Format_YUV420P
                   << QVideoFrameFormat::Format_YUV422P
                   << QVideoFrameFormat::Format_YV12
                   << QVideoFrameFormat::Format_UYVY
                   << QVideoFrameFormat::Format_YUYV
                   << QVideoFrameFormat::Format_NV12
                   << QVideoFrameFormat::Format_NV21
                   << QVideoFrameFormat::Format_AYUV
                   << QVideoFrameFormat::Format_P010
                   << QVideoFrameFormat::Format_XRGB8888
                   << QVideoFrameFormat::Format_XBGR8888
                   << QVideoFrameFormat::Format_RGBX8888
                   << QVideoFrameFormat::Format_BGRX8888
                   << QVideoFrameFormat::Format_ARGB8888
                   << QVideoFrameFormat::Format_ABGR8888
                   << QVideoFrameFormat::Format_RGBA8888
                   << QVideoFrameFormat::Format_BGRA8888
                   << QVideoFrameFormat::Format_Y8
                   << QVideoFrameFormat::Format_Y16
                   ;
#if QT_CONFIG(gstreamer_gl)
    if (rhi && rhi->backend() == QRhi::OpenGLES2) {
        caps.addPixelFormats(formats, GST_CAPS_FEATURE_MEMORY_GL_MEMORY);
#if QT_CONFIG(linux_dmabuf)
        if (m_sink->eglDisplay() && m_sink->eglImageTargetTexture2D()) {
            // We currently do not handle planar DMA buffers, as it's somewhat unclear how to
            // convert the planar EGLImage into something we can use from OpenGL
            auto singlePlaneFormats = QList<QVideoFrameFormat::PixelFormat>()
                   << QVideoFrameFormat::Format_UYVY
                   << QVideoFrameFormat::Format_YUYV
                   << QVideoFrameFormat::Format_AYUV
                   << QVideoFrameFormat::Format_XRGB8888
                   << QVideoFrameFormat::Format_XBGR8888
                   << QVideoFrameFormat::Format_RGBX8888
                   << QVideoFrameFormat::Format_BGRX8888
                   << QVideoFrameFormat::Format_ARGB8888
                   << QVideoFrameFormat::Format_ABGR8888
                   << QVideoFrameFormat::Format_RGBA8888
                   << QVideoFrameFormat::Format_BGRA8888
                   << QVideoFrameFormat::Format_Y8
                   << QVideoFrameFormat::Format_Y16
                   ;
            caps.addPixelFormats(singlePlaneFormats, GST_CAPS_FEATURE_MEMORY_DMABUF);
        }
#endif
    }
#endif
    caps.addPixelFormats(formats);

    m_surfaceCaps = caps;
}

QGstCaps QGstVideoRenderer::caps()
{
    QMutexLocker locker(&m_mutex);

    return m_surfaceCaps;
}

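// start() is invoked from the GStreamer streaming thread (via set_caps). It stores the
// new caps and waits up to one second for the Qt thread to pick them up in handleEvent();
// if the Qt thread stays blocked, the start request is abandoned.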
bool QGstVideoRenderer::start(const QGstCaps& caps)
{
    qCDebug(qLcGstVideoRenderer) << "QGstVideoRenderer::start" << caps.toString();
    QMutexLocker locker(&m_mutex);

    m_frameMirrored = false;
    m_frameRotationAngle = QVideoFrame::Rotation0;

    if (m_active) {
        m_flush = true;
        m_stop = true;
    }

    m_startCaps = caps;

    /*
        Waiting for start() to be invoked in the main thread may block
        if gstreamer blocks the main thread until this call is finished.
        This situation is rare and usually caused by setState(Null)
        while the pipeline is being prerolled.

        The proper solution to this involves controlling the gstreamer
        pipeline from a thread other than the video surface's.

        Currently start() fails if wait() times out.
    */
    if (!waitForAsyncEvent(&locker, &m_setupCondition, 1000) && !m_startCaps.isNull()) {
        qWarning() << "Failed to start video surface: the main thread is blocked.";
        m_startCaps = {};
    }

    return m_active;
}

void QGstVideoRenderer::stop()
{
    QMutexLocker locker(&m_mutex);

    if (!m_active)
        return;

    m_flush = true;
    m_stop = true;

    m_startCaps = {};

    waitForAsyncEvent(&locker, &m_setupCondition, 500);
}

void QGstVideoRenderer::unlock()
{
    QMutexLocker locker(&m_mutex);

    m_setupCondition.wakeAll();
    m_renderCondition.wakeAll();
}

bool QGstVideoRenderer::proposeAllocation(GstQuery *query)
{
    Q_UNUSED(query);
    QMutexLocker locker(&m_mutex);
    return m_active;
}

void QGstVideoRenderer::flush()
{
    QMutexLocker locker(&m_mutex);

    m_flush = true;
    m_renderBuffer = nullptr;
    m_renderCondition.wakeAll();

    notify();
}

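// render() is called on the streaming thread for every buffer to be shown. The buffer is
// handed over to the Qt thread, and we wait up to 300 ms for it to be delivered to the
// QVideoSink before reporting the flow return.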
GstFlowReturn QGstVideoRenderer::render(GstBuffer *buffer)
{
    QMutexLocker locker(&m_mutex);
    qCDebug(qLcGstVideoRenderer) << "QGstVideoRenderer::render";

    m_renderReturn = GST_FLOW_OK;
    m_renderBuffer = buffer;

    waitForAsyncEvent(&locker, &m_renderCondition, 300);

    m_renderBuffer = nullptr;

    return m_renderReturn;
}

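// Answer "gst.gl.local_context" context queries with the sink's wrapped local GL
// context so that upstream GL elements can use it.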
bool QGstVideoRenderer::query(GstQuery *query)
{
#if QT_CONFIG(gstreamer_gl)
    if (GST_QUERY_TYPE(query) == GST_QUERY_CONTEXT) {
        const gchar *type;
        gst_query_parse_context_type(query, &type);

        if (strcmp(type, "gst.gl.local_context") != 0)
            return false;

        auto *gstGlContext = m_sink->gstGlLocalContext();
        if (!gstGlContext)
            return false;

        gst_query_set_context(query, gstGlContext);

        return true;
    }
#else
    Q_UNUSED(query);
#endif
    return false;
}

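// Track GST_TAG_IMAGE_ORIENTATION tags ("rotate-<angle>" / "flip-rotate-<angle>") and
// remember the mirror/rotation state to apply to subsequent video frames.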
void QGstVideoRenderer::gstEvent(GstEvent *event)
{
    if (GST_EVENT_TYPE(event) != GST_EVENT_TAG)
        return;

    GstTagList *taglist = nullptr;
    gst_event_parse_tag(event, &taglist);
    if (!taglist)
        return;

    gchar *value = nullptr;
    if (!gst_tag_list_get_string(taglist, GST_TAG_IMAGE_ORIENTATION, &value))
        return;

    constexpr const char rotate[] = "rotate-";
    constexpr const char flipRotate[] = "flip-rotate-";
    constexpr size_t rotateLen = sizeof(rotate) - 1;
    constexpr size_t flipRotateLen = sizeof(flipRotate) - 1;

    bool mirrored = false;
    int rotationAngle = 0;

    if (!strncmp(rotate, value, rotateLen)) {
        rotationAngle = atoi(value + rotateLen);
    } else if (!strncmp(flipRotate, value, flipRotateLen)) {
        // To flip by horizontal axis is the same as to mirror by vertical axis
        // and rotate by 180 degrees.
        mirrored = true;
        rotationAngle = (180 + atoi(value + flipRotateLen)) % 360;
    }

    QMutexLocker locker(&m_mutex);
    m_frameMirrored = mirrored;
    switch (rotationAngle) {
    case 0: m_frameRotationAngle = QVideoFrame::Rotation0; break;
    case 90: m_frameRotationAngle = QVideoFrame::Rotation90; break;
    case 180: m_frameRotationAngle = QVideoFrame::Rotation180; break;
    case 270: m_frameRotationAngle = QVideoFrame::Rotation270; break;
    default: m_frameRotationAngle = QVideoFrame::Rotation0;
    }
}

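// Runs on the Qt thread. An UpdateRequest posted by notify() drains all pending
// requests from the streaming thread via handleEvent().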
bool QGstVideoRenderer::event(QEvent *event)
{
    if (event->type() == QEvent::UpdateRequest) {
        QMutexLocker locker(&m_mutex);

        if (m_notified) {
            while (handleEvent(&locker)) {}
            m_notified = false;
        }
        return true;
    }

    return QObject::event(event);
}

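// Handles one pending request (flush, stop, new caps, or a buffer to render), dropping
// the mutex around calls into the QGstreamerVideoSink. Returns false once there is
// nothing left to process.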
bool QGstVideoRenderer::handleEvent(QMutexLocker<QMutex> *locker)
{
    if (m_flush) {
        m_flush = false;
        if (m_active) {
            locker->unlock();

            if (m_sink && !m_flushed)
                m_sink->setVideoFrame(QVideoFrame());
            m_flushed = true;
            locker->relock();
        }
    } else if (m_stop) {
        m_stop = false;

        if (m_active) {
            m_active = false;
            m_flushed = true;
        }
    } else if (!m_startCaps.isNull()) {
        Q_ASSERT(!m_active);

        auto startCaps = m_startCaps;
        m_startCaps = {};

        if (m_sink) {
            locker->unlock();

            m_flushed = true;
            m_format = startCaps.formatForCaps(&m_videoInfo);
            memoryFormat = startCaps.memoryFormat();

            locker->relock();
            m_active = m_format.isValid();
        } else if (m_active) {
            m_active = false;
            m_flushed = true;
        }

    } else if (m_renderBuffer) {
        GstBuffer *buffer = m_renderBuffer;
        m_renderBuffer = nullptr;
        m_renderReturn = GST_FLOW_ERROR;

        qCDebug(qLcGstVideoRenderer) << "QGstVideoRenderer::handleEvent(renderBuffer)" << m_active << m_sink;
        if (m_active && m_sink) {
            gst_buffer_ref(buffer);

            locker->unlock();

            m_flushed = false;

            auto meta = gst_buffer_get_video_crop_meta(buffer);
            if (meta) {
                QRect vp(meta->x, meta->y, meta->width, meta->height);
                if (m_format.viewport() != vp) {
                    qCDebug(qLcGstVideoRenderer) << Q_FUNC_INFO << " Update viewport on Metadata: [" << meta->height << "x" << meta->width << " | " << meta->x << "x" << meta->y << "]";
                    // Update viewport if data is not the same
                    m_format.setViewport(vp);
                }
            }

            if (m_sink->inStoppedState()) {
                qCDebug(qLcGstVideoRenderer) << "    sending empty video frame";
                m_sink->setVideoFrame(QVideoFrame());
            } else {
                QGstVideoBuffer *videoBuffer = new QGstVideoBuffer(buffer, m_videoInfo, m_sink, m_format, memoryFormat);
                QVideoFrame frame(videoBuffer, m_format);
                QGstUtils::setFrameTimeStamps(&frame, buffer);
                frame.setMirrored(m_frameMirrored);
                frame.setRotationAngle(m_frameRotationAngle);

                qCDebug(qLcGstVideoRenderer) << "    sending video frame";
                m_sink->setVideoFrame(frame);
            }

            gst_buffer_unref(buffer);

            locker->relock();

            m_renderReturn = GST_FLOW_OK;
        }

        m_renderCondition.wakeAll();
    } else {
        m_setupCondition.wakeAll();

        return false;
    }
    return true;
}

void QGstVideoRenderer::notify()
{
    if (!m_notified) {
        m_notified = true;
        QCoreApplication::postEvent(this, new QEvent(QEvent::UpdateRequest));
    }
}

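// When already on the renderer's own thread, process events synchronously; otherwise
// post an UpdateRequest to the Qt thread and block on the condition until it is handled
// or the timeout expires.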
bool QGstVideoRenderer::waitForAsyncEvent(
        QMutexLocker<QMutex> *locker, QWaitCondition *condition, unsigned long time)
{
    if (QThread::currentThread() == thread()) {
        while (handleEvent(locker)) {}
        m_notified = false;

        return true;
    }

    notify();

    return condition->wait(&m_mutex, time);
}

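// GObject/GStreamer boilerplate wrapping QGstVideoRenderer in a GstVideoSink subclass.
// g_object_new() takes no custom constructor arguments, so the QGstreamerVideoSink is
// passed to instance_init() through the thread-local gvrs_current_sink below.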
static GstVideoSinkClass *gvrs_sink_parent_class;
static thread_local QGstreamerVideoSink *gvrs_current_sink;

#define VO_SINK(s) QGstVideoRendererSink *sink(reinterpret_cast<QGstVideoRendererSink *>(s))

QGstVideoRendererSink *QGstVideoRendererSink::createSink(QGstreamerVideoSink *sink)
{
    setSink(sink);
    QGstVideoRendererSink *gstSink = reinterpret_cast<QGstVideoRendererSink *>(
            g_object_new(QGstVideoRendererSink::get_type(), nullptr));

    g_signal_connect(G_OBJECT(gstSink), "notify::show-preroll-frame", G_CALLBACK(handleShowPrerollChange), gstSink);

    return gstSink;
}

void QGstVideoRendererSink::setSink(QGstreamerVideoSink *sink)
{
    gvrs_current_sink = sink;
}

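// Registers the GType on first use and, as a side effect, registers the element factory
// "qtvideosink" for this process (gst_element_register() is called with a null plugin,
// so the registration is in-process only). A sketch of how a custom pipeline could then
// pick it up, illustrative only and assuming GStreamer has been initialised:
//   GError *error = nullptr;
//   GstElement *pipeline = gst_parse_launch("videotestsrc ! qtvideosink", &error);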
GType QGstVideoRendererSink::get_type()
{
    static const GTypeInfo info =
    {
        sizeof(QGstVideoRendererSinkClass),     // class_size
        base_init,                              // base_init
        nullptr,                                // base_finalize
        class_init,                             // class_init
        nullptr,                                // class_finalize
        nullptr,                                // class_data
        sizeof(QGstVideoRendererSink),          // instance_size
        0,                                      // n_preallocs
        instance_init,                          // instance_init
        nullptr                                 // value_table
    };

    static const GType type = []() {
        const auto result = g_type_register_static(
                GST_TYPE_VIDEO_SINK, "QGstVideoRendererSink", &info, GTypeFlags(0));

        // Register the sink type to be used in custom pipelines.
        // Once the surface is ready, the sink can be used.
        gst_element_register(nullptr, "qtvideosink", GST_RANK_PRIMARY, result);

        return result;
    }();

    return type;
}

void QGstVideoRendererSink::class_init(gpointer g_class, gpointer class_data)
{
    Q_UNUSED(class_data);

    gvrs_sink_parent_class = reinterpret_cast<GstVideoSinkClass *>(g_type_class_peek_parent(g_class));

    GstVideoSinkClass *video_sink_class = reinterpret_cast<GstVideoSinkClass *>(g_class);
    video_sink_class->show_frame = QGstVideoRendererSink::show_frame;

    GstBaseSinkClass *base_sink_class = reinterpret_cast<GstBaseSinkClass *>(g_class);
    base_sink_class->get_caps = QGstVideoRendererSink::get_caps;
    base_sink_class->set_caps = QGstVideoRendererSink::set_caps;
    base_sink_class->propose_allocation = QGstVideoRendererSink::propose_allocation;
    base_sink_class->stop = QGstVideoRendererSink::stop;
    base_sink_class->unlock = QGstVideoRendererSink::unlock;
    base_sink_class->query = QGstVideoRendererSink::query;
    base_sink_class->event = QGstVideoRendererSink::event;

    GstElementClass *element_class = reinterpret_cast<GstElementClass *>(g_class);
    element_class->change_state = QGstVideoRendererSink::change_state;
    gst_element_class_set_metadata(element_class,
        "Qt built-in video renderer sink",
        "Sink/Video",
        "Qt default built-in video renderer sink",
        "The Qt Company");

    GObjectClass *object_class = reinterpret_cast<GObjectClass *>(g_class);
    object_class->finalize = QGstVideoRendererSink::finalize;
}

void QGstVideoRendererSink::base_init(gpointer g_class)
{
    static GstStaticPadTemplate sink_pad_template = GST_STATIC_PAD_TEMPLATE(
            "sink", GST_PAD_SINK, GST_PAD_ALWAYS, GST_STATIC_CAPS(
                    "video/x-raw, "
                    "framerate = (fraction) [ 0, MAX ], "
                    "width = (int) [ 1, MAX ], "
                    "height = (int) [ 1, MAX ]"));

    gst_element_class_add_pad_template(
            GST_ELEMENT_CLASS(g_class), gst_static_pad_template_get(&sink_pad_template));
}

void QGstVideoRendererSink::instance_init(GTypeInstance *instance, gpointer g_class)
{
    Q_UNUSED(g_class);
    VO_SINK(instance);

    Q_ASSERT(gvrs_current_sink);

    sink->renderer = new QGstVideoRenderer(gvrs_current_sink);
    sink->renderer->moveToThread(gvrs_current_sink->thread());
    gvrs_current_sink = nullptr;
}

void QGstVideoRendererSink::finalize(GObject *object)
{
    VO_SINK(object);

    delete sink->renderer;

    // Chain up
    G_OBJECT_CLASS(gvrs_sink_parent_class)->finalize(object);
}

void QGstVideoRendererSink::handleShowPrerollChange(GObject *o, GParamSpec *p, gpointer d)
{
    Q_UNUSED(o);
    Q_UNUSED(p);
    QGstVideoRendererSink *sink = reinterpret_cast<QGstVideoRendererSink *>(d);

    gboolean showPrerollFrame = true; // "show-preroll-frame" property is true by default
    g_object_get(G_OBJECT(sink), "show-preroll-frame", &showPrerollFrame, nullptr);

    if (!showPrerollFrame) {
        GstState state = GST_STATE_VOID_PENDING;
        GstClockTime timeout = 10000000; // 10 ms
        gst_element_get_state(GST_ELEMENT(sink), &state, nullptr, timeout);
        // show-preroll-frame being set to 'false' while in GST_STATE_PAUSED means
        // the QMediaPlayer was stopped from the paused state.
        // We need to flush the current frame.
        if (state == GST_STATE_PAUSED)
            sink->renderer->flush();
    }
}

GstStateChangeReturn QGstVideoRendererSink::change_state(
        GstElement *element, GstStateChange transition)
{
    QGstVideoRendererSink *sink = reinterpret_cast<QGstVideoRendererSink *>(element);

    gboolean showPrerollFrame = true; // "show-preroll-frame" property is true by default
    g_object_get(G_OBJECT(element), "show-preroll-frame", &showPrerollFrame, nullptr);

    // If show-preroll-frame is 'false' when transitioning from GST_STATE_PLAYING to
    // GST_STATE_PAUSED, it means the QMediaPlayer was stopped.
    // We need to flush the current frame.
    if (transition == GST_STATE_CHANGE_PLAYING_TO_PAUSED && !showPrerollFrame)
        sink->renderer->flush();

    return GST_ELEMENT_CLASS(gvrs_sink_parent_class)->change_state(element, transition);
}

GstCaps *QGstVideoRendererSink::get_caps(GstBaseSink *base, GstCaps *filter)
{
    VO_SINK(base);

    QGstCaps caps = sink->renderer->caps();
    if (filter)
        caps = QGstCaps(gst_caps_intersect(caps.get(), filter), QGstCaps::HasRef);

    gst_caps_ref(caps.get());
    return caps.get();
}

gboolean QGstVideoRendererSink::set_caps(GstBaseSink *base, GstCaps *gcaps)
{
    VO_SINK(base);

    auto caps = QGstCaps(gcaps, QGstCaps::NeedsRef);

    qCDebug(qLcGstVideoRenderer) << "set_caps:" << caps.toString();

    if (caps.isNull()) {
        sink->renderer->stop();

        return TRUE;
    } else if (sink->renderer->start(caps)) {
        return TRUE;
    } else {
        return FALSE;
    }
}

gboolean QGstVideoRendererSink::propose_allocation(GstBaseSink *base, GstQuery *query)
{
    VO_SINK(base);
    return sink->renderer->proposeAllocation(query);
}

gboolean QGstVideoRendererSink::stop(GstBaseSink *base)
{
    VO_SINK(base);
    sink->renderer->stop();
    return TRUE;
}

gboolean QGstVideoRendererSink::unlock(GstBaseSink *base)
{
    VO_SINK(base);
    sink->renderer->unlock();
    return TRUE;
}

GstFlowReturn QGstVideoRendererSink::show_frame(GstVideoSink *base, GstBuffer *buffer)
{
    VO_SINK(base);
    return sink->renderer->render(buffer);
}

gboolean QGstVideoRendererSink::query(GstBaseSink *base, GstQuery *query)
{
    VO_SINK(base);
    if (sink->renderer->query(query))
        return TRUE;

    return GST_BASE_SINK_CLASS(gvrs_sink_parent_class)->query(base, query);
}

gboolean QGstVideoRendererSink::event(GstBaseSink *base, GstEvent *event)
{
    VO_SINK(base);
    sink->renderer->gstEvent(event);
    return GST_BASE_SINK_CLASS(gvrs_sink_parent_class)->event(base, event);
}

QT_END_NAMESPACE

#include "moc_qgstvideorenderersink_p.cpp"

// Source: qtmultimedia/src/plugins/multimedia/gstreamer/common/qgstvideorenderersink.cpp