1/****************************************************************************
2**
3** Copyright (C) 2016 The Qt Company Ltd.
4** Contact: https://www.qt.io/licensing/
5**
6** This file is part of the Qt Toolkit.
7**
8** $QT_BEGIN_LICENSE:LGPL$
9** Commercial License Usage
10** Licensees holding valid commercial Qt licenses may use this file in
11** accordance with the commercial license agreement provided with the
12** Software or, alternatively, in accordance with the terms contained in
13** a written agreement between you and The Qt Company. For licensing terms
14** and conditions see https://www.qt.io/terms-conditions. For further
15** information use the contact form at https://www.qt.io/contact-us.
16**
17** GNU Lesser General Public License Usage
18** Alternatively, this file may be used under the terms of the GNU Lesser
19** General Public License version 3 as published by the Free Software
20** Foundation and appearing in the file LICENSE.LGPL3 included in the
21** packaging of this file. Please review the following information to
22** ensure the GNU Lesser General Public License version 3 requirements
23** will be met: https://www.gnu.org/licenses/lgpl-3.0.html.
24**
25** GNU General Public License Usage
26** Alternatively, this file may be used under the terms of the GNU
27** General Public License version 2.0 or (at your option) the GNU General
28** Public license version 3 or any later version approved by the KDE Free
29** Qt Foundation. The licenses are as published by the Free Software
30** Foundation and appearing in the file LICENSE.GPL2 and LICENSE.GPL3
31** included in the packaging of this file. Please review the following
32** information to ensure the GNU General Public License requirements will
33** be met: https://www.gnu.org/licenses/gpl-2.0.html and
34** https://www.gnu.org/licenses/gpl-3.0.html.
35**
36** $QT_END_LICENSE$
37**
38****************************************************************************/
39
40#include <private/qgstreamerplayersession_p.h>
41#include <private/qgstreamerbushelper_p.h>
42
43#include <private/qgstreameraudioprobecontrol_p.h>
44#include <private/qgstreamervideoprobecontrol_p.h>
45#include <private/qgstreamervideorendererinterface_p.h>
46#if !GST_CHECK_VERSION(1,0,0)
47#include <private/gstvideoconnector_p.h>
48#endif
49#include <private/qgstutils_p.h>
50#include <private/qvideosurfacegstsink_p.h>
51
52#include <gst/gstvalue.h>
53#include <gst/base/gstbasesrc.h>
54
55#include <QtMultimedia/qmediametadata.h>
56#include <QtCore/qdatetime.h>
57#include <QtCore/qdebug.h>
58#include <QtCore/qsize.h>
59#include <QtCore/qtimer.h>
60#include <QtCore/qdebug.h>
61#include <QtCore/qdir.h>
62#include <QtCore/qstandardpaths.h>
63#include <qvideorenderercontrol.h>
64#include <QUrlQuery>
65
66//#define DEBUG_PLAYBIN
67
68QT_BEGIN_NAMESPACE
69
70static bool usePlaybinVolume()
71{
72 static enum { Yes, No, Unknown } status = Unknown;
73 if (status == Unknown) {
74 QByteArray v = qgetenv(varName: "QT_GSTREAMER_USE_PLAYBIN_VOLUME");
75 bool value = !v.isEmpty() && v != "0" && v != "false";
76 if (value)
77 status = Yes;
78 else
79 status = No;
80 }
81 return status == Yes;
82}
83
84typedef enum {
85 GST_PLAY_FLAG_VIDEO = 0x00000001,
86 GST_PLAY_FLAG_AUDIO = 0x00000002,
87 GST_PLAY_FLAG_TEXT = 0x00000004,
88 GST_PLAY_FLAG_VIS = 0x00000008,
89 GST_PLAY_FLAG_SOFT_VOLUME = 0x00000010,
90 GST_PLAY_FLAG_NATIVE_AUDIO = 0x00000020,
91 GST_PLAY_FLAG_NATIVE_VIDEO = 0x00000040,
92 GST_PLAY_FLAG_DOWNLOAD = 0x00000080,
93 GST_PLAY_FLAG_BUFFERING = 0x000000100
94} GstPlayFlags;
95
96#if !GST_CHECK_VERSION(1,0,0)
97#define DEFAULT_RAW_CAPS \
98 "video/x-raw-yuv; " \
99 "video/x-raw-rgb; " \
100 "video/x-raw-gray; " \
101 "video/x-surface; " \
102 "video/x-android-buffer; " \
103 "audio/x-raw-int; " \
104 "audio/x-raw-float; " \
105 "text/plain; " \
106 "text/x-pango-markup; " \
107 "video/x-dvd-subpicture; " \
108 "subpicture/x-pgs"
109
110static GstStaticCaps static_RawCaps = GST_STATIC_CAPS(DEFAULT_RAW_CAPS);
111#endif
112
113QGstreamerPlayerSession::QGstreamerPlayerSession(QObject *parent)
114 : QObject(parent)
115{
116 initPlaybin();
117}
118
119void QGstreamerPlayerSession::initPlaybin()
120{
121 m_playbin = gst_element_factory_make(QT_GSTREAMER_PLAYBIN_ELEMENT_NAME, name: nullptr);
122 if (m_playbin) {
123 //GST_PLAY_FLAG_NATIVE_VIDEO omits configuration of ffmpegcolorspace and videoscale,
124 //since those elements are included in the video output bin when necessary.
125 int flags = GST_PLAY_FLAG_VIDEO | GST_PLAY_FLAG_AUDIO;
126 QByteArray envFlags = qgetenv(varName: "QT_GSTREAMER_PLAYBIN_FLAGS");
127 if (!envFlags.isEmpty()) {
128 flags |= envFlags.toInt();
129#if !GST_CHECK_VERSION(1,0,0)
130 } else {
131 flags |= GST_PLAY_FLAG_NATIVE_VIDEO;
132#endif
133 }
134 g_object_set(G_OBJECT(m_playbin), first_property_name: "flags", flags, nullptr);
135
136 const QByteArray envAudioSink = qgetenv(varName: "QT_GSTREAMER_PLAYBIN_AUDIOSINK");
137 GstElement *audioSink = gst_element_factory_make(factoryname: envAudioSink.isEmpty() ? "autoaudiosink" : envAudioSink, name: "audiosink");
138 if (audioSink) {
139 if (usePlaybinVolume()) {
140 m_audioSink = audioSink;
141 m_volumeElement = m_playbin;
142 } else {
143 m_volumeElement = gst_element_factory_make(factoryname: "volume", name: "volumeelement");
144 if (m_volumeElement) {
145 m_audioSink = gst_bin_new(name: "audio-output-bin");
146
147 gst_bin_add_many(GST_BIN(m_audioSink), element_1: m_volumeElement, audioSink, nullptr);
148 gst_element_link(src: m_volumeElement, dest: audioSink);
149
150 GstPad *pad = gst_element_get_static_pad(element: m_volumeElement, name: "sink");
151 gst_element_add_pad(GST_ELEMENT(m_audioSink), pad: gst_ghost_pad_new(name: "sink", target: pad));
152 gst_object_unref(GST_OBJECT(pad));
153 } else {
154 m_audioSink = audioSink;
155 m_volumeElement = m_playbin;
156 }
157 }
158
159 g_object_set(G_OBJECT(m_playbin), first_property_name: "audio-sink", m_audioSink, nullptr);
160 addAudioBufferProbe();
161 }
162 }
163
164#if GST_CHECK_VERSION(1,0,0)
165 static const auto convDesc = qEnvironmentVariable(varName: "QT_GSTREAMER_PLAYBIN_CONVERT");
166 GError *err = nullptr;
167 auto convPipeline = !convDesc.isEmpty() ? convDesc.toLatin1().constData() : "identity";
168 auto convElement = gst_parse_launch(pipeline_description: convPipeline, error: &err);
169 if (err) {
170 qWarning() << "Error:" << convDesc << ":" << QLatin1String(err->message);
171 g_clear_error(err: &err);
172 }
173 m_videoIdentity = convElement;
174#else
175 m_videoIdentity = GST_ELEMENT(g_object_new(gst_video_connector_get_type(), 0)); // floating ref
176 g_signal_connect(G_OBJECT(m_videoIdentity), "connection-failed", G_CALLBACK(insertColorSpaceElement), (gpointer)this);
177 m_colorSpace = gst_element_factory_make(QT_GSTREAMER_COLORCONVERSION_ELEMENT_NAME, "ffmpegcolorspace-vo");
178
179 // might not get a parent, take ownership to avoid leak
180 qt_gst_object_ref_sink(GST_OBJECT(m_colorSpace));
181#endif
182
183 m_nullVideoSink = gst_element_factory_make(factoryname: "fakesink", name: nullptr);
184 g_object_set(G_OBJECT(m_nullVideoSink), first_property_name: "sync", true, nullptr);
185 gst_object_ref(GST_OBJECT(m_nullVideoSink));
186
187 m_videoOutputBin = gst_bin_new(name: "video-output-bin");
188 // might not get a parent, take ownership to avoid leak
189 qt_gst_object_ref_sink(GST_OBJECT(m_videoOutputBin));
190
191 GstElement *videoOutputSink = m_videoIdentity;
192#if QT_CONFIG(gstreamer_gl)
193 if (QGstUtils::useOpenGL()) {
194 videoOutputSink = gst_element_factory_make(factoryname: "glupload", name: nullptr);
195 GstElement *colorConvert = gst_element_factory_make(factoryname: "glcolorconvert", name: nullptr);
196 gst_bin_add_many(GST_BIN(m_videoOutputBin), element_1: videoOutputSink, colorConvert, m_videoIdentity, m_nullVideoSink, nullptr);
197 gst_element_link_many(element_1: videoOutputSink, element_2: colorConvert, m_videoIdentity, nullptr);
198 } else {
199 gst_bin_add_many(GST_BIN(m_videoOutputBin), element_1: m_videoIdentity, m_nullVideoSink, nullptr);
200 }
201#else
202 gst_bin_add_many(GST_BIN(m_videoOutputBin), m_videoIdentity, m_nullVideoSink, nullptr);
203#endif
204 gst_element_link(src: m_videoIdentity, dest: m_nullVideoSink);
205
206 m_videoSink = m_nullVideoSink;
207
208 // add ghostpads
209 GstPad *pad = gst_element_get_static_pad(element: videoOutputSink, name: "sink");
210 gst_element_add_pad(GST_ELEMENT(m_videoOutputBin), pad: gst_ghost_pad_new(name: "sink", target: pad));
211 gst_object_unref(GST_OBJECT(pad));
212
213 if (m_playbin != 0) {
214 // Sort out messages
215 setBus(gst_element_get_bus(element: m_playbin));
216
217 g_object_set(G_OBJECT(m_playbin), first_property_name: "video-sink", m_videoOutputBin, nullptr);
218
219 g_signal_connect(G_OBJECT(m_playbin), "notify::source", G_CALLBACK(playbinNotifySource), this);
220 g_signal_connect(G_OBJECT(m_playbin), "element-added", G_CALLBACK(handleElementAdded), this);
221
222 if (usePlaybinVolume()) {
223 updateVolume();
224 updateMuted();
225 g_signal_connect(G_OBJECT(m_playbin), "notify::volume", G_CALLBACK(handleVolumeChange), this);
226 g_signal_connect(G_OBJECT(m_playbin), "notify::mute", G_CALLBACK(handleMutedChange), this);
227 }
228
229 g_signal_connect(G_OBJECT(m_playbin), "video-changed", G_CALLBACK(handleStreamsChange), this);
230 g_signal_connect(G_OBJECT(m_playbin), "audio-changed", G_CALLBACK(handleStreamsChange), this);
231 g_signal_connect(G_OBJECT(m_playbin), "text-changed", G_CALLBACK(handleStreamsChange), this);
232
233#if QT_CONFIG(gstreamer_app)
234 g_signal_connect(G_OBJECT(m_playbin), "deep-notify::source", G_CALLBACK(configureAppSrcElement), this);
235#endif
236
237 m_pipeline = m_playbin;
238 gst_object_ref(GST_OBJECT(m_pipeline));
239 }
240}
241
242QGstreamerPlayerSession::~QGstreamerPlayerSession()
243{
244 if (m_pipeline) {
245 stop();
246
247 removeVideoBufferProbe();
248 removeAudioBufferProbe();
249
250 delete m_busHelper;
251 m_busHelper = nullptr;
252 resetElements();
253 }
254}
255
256template <class T>
257static inline void resetGstObject(T *&obj, T *v = nullptr)
258{
259 if (obj)
260 gst_object_unref(GST_OBJECT(obj));
261
262 obj = v;
263}
264
265void QGstreamerPlayerSession::resetElements()
266{
267 setBus(nullptr);
268 resetGstObject(obj&: m_playbin);
269 resetGstObject(obj&: m_pipeline);
270#if !GST_CHECK_VERSION(1,0,0)
271 resetGstObject(m_colorSpace);
272#endif
273 resetGstObject(obj&: m_nullVideoSink);
274 resetGstObject(obj&: m_videoOutputBin);
275
276 m_audioSink = nullptr;
277 m_volumeElement = nullptr;
278 m_videoIdentity = nullptr;
279 m_pendingVideoSink = nullptr;
280 m_videoSink = nullptr;
281}
282
283GstElement *QGstreamerPlayerSession::playbin() const
284{
285 return m_playbin;
286}
287
288#if QT_CONFIG(gstreamer_app)
289void QGstreamerPlayerSession::configureAppSrcElement(GObject* object, GObject *orig, GParamSpec *pspec, QGstreamerPlayerSession* self)
290{
291 Q_UNUSED(object);
292 Q_UNUSED(pspec);
293
294 if (!self->appsrc())
295 return;
296
297 GstElement *appsrc;
298 g_object_get(object: orig, first_property_name: "source", &appsrc, nullptr);
299
300 if (!self->appsrc()->setup(appsrc))
301 qWarning()<<"Could not setup appsrc element";
302
303 g_object_unref(G_OBJECT(appsrc));
304}
305#endif
306
307void QGstreamerPlayerSession::loadFromStream(const QNetworkRequest &request, QIODevice *appSrcStream)
308{
309#if QT_CONFIG(gstreamer_app)
310#ifdef DEBUG_PLAYBIN
311 qDebug() << Q_FUNC_INFO;
312#endif
313 m_request = request;
314 m_duration = 0;
315 m_lastPosition = 0;
316
317 if (!m_appSrc)
318 m_appSrc = new QGstAppSrc(this);
319 m_appSrc->setStream(appSrcStream);
320
321 if (!parsePipeline() && m_playbin) {
322 m_tags.clear();
323 emit tagsChanged();
324
325 g_object_set(G_OBJECT(m_playbin), first_property_name: "uri", "appsrc://", nullptr);
326
327 if (!m_streamTypes.isEmpty()) {
328 m_streamProperties.clear();
329 m_streamTypes.clear();
330
331 emit streamsChanged();
332 }
333 }
334#endif
335}
336
337void QGstreamerPlayerSession::loadFromUri(const QNetworkRequest &request)
338{
339#ifdef DEBUG_PLAYBIN
340 qDebug() << Q_FUNC_INFO << request.url();
341#endif
342 m_request = request;
343 m_duration = 0;
344 m_lastPosition = 0;
345
346#if QT_CONFIG(gstreamer_app)
347 if (m_appSrc) {
348 m_appSrc->deleteLater();
349 m_appSrc = 0;
350 }
351#endif
352
353 if (!parsePipeline() && m_playbin) {
354 m_tags.clear();
355 emit tagsChanged();
356
357 g_object_set(G_OBJECT(m_playbin), first_property_name: "uri", m_request.url().toEncoded().constData(), nullptr);
358
359 if (!m_streamTypes.isEmpty()) {
360 m_streamProperties.clear();
361 m_streamTypes.clear();
362
363 emit streamsChanged();
364 }
365 }
366}
367
368bool QGstreamerPlayerSession::parsePipeline()
369{
370 if (m_request.url().scheme() != QLatin1String("gst-pipeline")) {
371 if (!m_playbin) {
372 resetElements();
373 initPlaybin();
374 updateVideoRenderer();
375 }
376 return false;
377 }
378
379 // Set current surface to video sink before creating a pipeline.
380 auto renderer = qobject_cast<QVideoRendererControl *>(object: m_videoOutput);
381 if (renderer)
382 QVideoSurfaceGstSink::setSurface(renderer->surface());
383
384 QString url = m_request.url().toString(options: QUrl::RemoveScheme);
385 QString desc = QUrl::fromPercentEncoding(url.toLatin1().constData());
386 GError *err = nullptr;
387 GstElement *pipeline = gst_parse_launch(pipeline_description: desc.toLatin1().constData(), error: &err);
388 if (err) {
389 auto errstr = QLatin1String(err->message);
390 qWarning() << "Error:" << desc << ":" << errstr;
391 emit error(error: QMediaPlayer::FormatError, errorString: errstr);
392 g_clear_error(err: &err);
393 }
394
395 return setPipeline(pipeline);
396}
397
398static void gst_foreach(GstIterator *it, const std::function<bool(GstElement *)> &cmp)
399{
400#if GST_CHECK_VERSION(1,0,0)
401 GValue value = G_VALUE_INIT;
402 while (gst_iterator_next (it, elem: &value) == GST_ITERATOR_OK) {
403 auto child = static_cast<GstElement*>(g_value_get_object(value: &value));
404#else
405 GstElement *child = nullptr;
406 while (gst_iterator_next(it, reinterpret_cast<gpointer *>(&child)) == GST_ITERATOR_OK) {
407#endif
408 if (cmp(child))
409 break;
410 }
411
412 gst_iterator_free(it);
413#if GST_CHECK_VERSION(1,0,0)
414 g_value_unset(value: &value);
415#endif
416}
417
418bool QGstreamerPlayerSession::setPipeline(GstElement *pipeline)
419{
420 GstBus *bus = pipeline ? gst_element_get_bus(element: pipeline) : nullptr;
421 if (!bus)
422 return false;
423
424 if (m_playbin)
425 gst_element_set_state(element: m_playbin, state: GST_STATE_NULL);
426
427 resetElements();
428 setBus(bus);
429 m_pipeline = pipeline;
430
431 if (m_renderer) {
432 gst_foreach(it: gst_bin_iterate_sinks(GST_BIN(pipeline)),
433 cmp: [this](GstElement *child) {
434 if (qstrcmp(GST_OBJECT_NAME(child), str2: "qtvideosink") == 0) {
435 m_renderer->setVideoSink(child);
436 return true;
437 }
438 return false;
439 });
440 }
441
442#if QT_CONFIG(gstreamer_app)
443 if (m_appSrc) {
444 gst_foreach(it: gst_bin_iterate_sources(GST_BIN(pipeline)),
445 cmp: [this](GstElement *child) {
446 if (qstrcmp(str1: qt_gst_element_get_factory_name(element: child), str2: "appsrc") == 0) {
447 m_appSrc->setup(child);
448 return true;
449 }
450 return false;
451 });
452 }
453#endif
454
455 emit pipelineChanged();
456 return true;
457}
458
459void QGstreamerPlayerSession::setBus(GstBus *bus)
460{
461 resetGstObject(obj&: m_bus, v: bus);
462
463 // It might still accept gst messages.
464 if (m_busHelper)
465 m_busHelper->deleteLater();
466 m_busHelper = nullptr;
467
468 if (!m_bus)
469 return;
470
471 m_busHelper = new QGstreamerBusHelper(m_bus, this);
472 m_busHelper->installMessageFilter(filter: this);
473
474 if (m_videoOutput)
475 m_busHelper->installMessageFilter(filter: m_videoOutput);
476}
477
478qint64 QGstreamerPlayerSession::duration() const
479{
480 return m_duration;
481}
482
483qint64 QGstreamerPlayerSession::position() const
484{
485 gint64 position = 0;
486
487 if (m_pipeline && qt_gst_element_query_position(element: m_pipeline, format: GST_FORMAT_TIME, cur: &position))
488 m_lastPosition = position / 1000000;
489 return m_lastPosition;
490}
491
492qreal QGstreamerPlayerSession::playbackRate() const
493{
494 return m_playbackRate;
495}
496
497void QGstreamerPlayerSession::setPlaybackRate(qreal rate)
498{
499#ifdef DEBUG_PLAYBIN
500 qDebug() << Q_FUNC_INFO << rate;
501#endif
502 if (!qFuzzyCompare(p1: m_playbackRate, p2: rate)) {
503 m_playbackRate = rate;
504 if (m_pipeline && m_seekable) {
505 qint64 from = rate > 0 ? position() : 0;
506 qint64 to = rate > 0 ? duration() : position();
507 gst_element_seek(element: m_pipeline, rate, format: GST_FORMAT_TIME,
508 flags: GstSeekFlags(GST_SEEK_FLAG_FLUSH),
509 start_type: GST_SEEK_TYPE_SET, start: from * 1000000,
510 stop_type: GST_SEEK_TYPE_SET, stop: to * 1000000);
511 }
512 emit playbackRateChanged(m_playbackRate);
513 }
514}
515
516QMediaTimeRange QGstreamerPlayerSession::availablePlaybackRanges() const
517{
518 QMediaTimeRange ranges;
519
520 if (duration() <= 0)
521 return ranges;
522
523#if GST_CHECK_VERSION(0, 10, 31)
524 //GST_FORMAT_TIME would be more appropriate, but unfortunately it's not supported.
525 //with GST_FORMAT_PERCENT media is treated as encoded with constant bitrate.
526 GstQuery* query = gst_query_new_buffering(format: GST_FORMAT_PERCENT);
527
528 if (!gst_element_query(element: m_pipeline, query)) {
529 gst_query_unref(q: query);
530 return ranges;
531 }
532
533 gint64 rangeStart = 0;
534 gint64 rangeStop = 0;
535 for (guint index = 0; index < gst_query_get_n_buffering_ranges(query); index++) {
536 if (gst_query_parse_nth_buffering_range(query, index, start: &rangeStart, stop: &rangeStop))
537 ranges.addInterval(start: rangeStart * duration() / 100,
538 end: rangeStop * duration() / 100);
539 }
540
541 gst_query_unref(q: query);
542#endif
543
544 if (ranges.isEmpty() && !isLiveSource() && isSeekable())
545 ranges.addInterval(start: 0, end: duration());
546
547#ifdef DEBUG_PLAYBIN
548 qDebug() << ranges;
549#endif
550
551 return ranges;
552}
553
554int QGstreamerPlayerSession::activeStream(QMediaStreamsControl::StreamType streamType) const
555{
556 int streamNumber = -1;
557 if (m_playbin) {
558 switch (streamType) {
559 case QMediaStreamsControl::AudioStream:
560 g_object_get(G_OBJECT(m_playbin), first_property_name: "current-audio", &streamNumber, nullptr);
561 break;
562 case QMediaStreamsControl::VideoStream:
563 g_object_get(G_OBJECT(m_playbin), first_property_name: "current-video", &streamNumber, nullptr);
564 break;
565 case QMediaStreamsControl::SubPictureStream:
566 g_object_get(G_OBJECT(m_playbin), first_property_name: "current-text", &streamNumber, nullptr);
567 break;
568 default:
569 break;
570 }
571 }
572
573 if (streamNumber >= 0)
574 streamNumber += m_playbin2StreamOffset.value(akey: streamType,adefaultValue: 0);
575
576 return streamNumber;
577}
578
579void QGstreamerPlayerSession::setActiveStream(QMediaStreamsControl::StreamType streamType, int streamNumber)
580{
581#ifdef DEBUG_PLAYBIN
582 qDebug() << Q_FUNC_INFO << streamType << streamNumber;
583#endif
584
585 if (streamNumber >= 0)
586 streamNumber -= m_playbin2StreamOffset.value(akey: streamType,adefaultValue: 0);
587
588 if (m_playbin) {
589 switch (streamType) {
590 case QMediaStreamsControl::AudioStream:
591 g_object_set(G_OBJECT(m_playbin), first_property_name: "current-audio", streamNumber, nullptr);
592 break;
593 case QMediaStreamsControl::VideoStream:
594 g_object_set(G_OBJECT(m_playbin), first_property_name: "current-video", streamNumber, nullptr);
595 break;
596 case QMediaStreamsControl::SubPictureStream:
597 g_object_set(G_OBJECT(m_playbin), first_property_name: "current-text", streamNumber, nullptr);
598 break;
599 default:
600 break;
601 }
602 }
603}
604
605int QGstreamerPlayerSession::volume() const
606{
607 return m_volume;
608}
609
610bool QGstreamerPlayerSession::isMuted() const
611{
612 return m_muted;
613}
614
615bool QGstreamerPlayerSession::isAudioAvailable() const
616{
617 return m_audioAvailable;
618}
619
620#if GST_CHECK_VERSION(1,0,0)
621static GstPadProbeReturn block_pad_cb(GstPad *pad, GstPadProbeInfo *info, gpointer user_data)
622#else
623static void block_pad_cb(GstPad *pad, gboolean blocked, gpointer user_data)
624#endif
625{
626 Q_UNUSED(pad);
627#if GST_CHECK_VERSION(1,0,0)
628 Q_UNUSED(info);
629 Q_UNUSED(user_data);
630 return GST_PAD_PROBE_OK;
631#else
632#ifdef DEBUG_PLAYBIN
633 qDebug() << "block_pad_cb, blocked:" << blocked;
634#endif
635 if (blocked && user_data) {
636 QGstreamerPlayerSession *session = reinterpret_cast<QGstreamerPlayerSession*>(user_data);
637 QMetaObject::invokeMethod(session, "finishVideoOutputChange", Qt::QueuedConnection);
638 }
639#endif
640}
641
642void QGstreamerPlayerSession::updateVideoRenderer()
643{
644#ifdef DEBUG_PLAYBIN
645 qDebug() << "Video sink has chaged, reload video output";
646#endif
647
648 if (m_videoOutput)
649 setVideoRenderer(m_videoOutput);
650}
651
652void QGstreamerPlayerSession::setVideoRenderer(QObject *videoOutput)
653{
654#ifdef DEBUG_PLAYBIN
655 qDebug() << Q_FUNC_INFO;
656#endif
657 if (m_videoOutput != videoOutput) {
658 if (m_videoOutput) {
659 disconnect(sender: m_videoOutput, SIGNAL(sinkChanged()),
660 receiver: this, SLOT(updateVideoRenderer()));
661 disconnect(sender: m_videoOutput, SIGNAL(readyChanged(bool)),
662 receiver: this, SLOT(updateVideoRenderer()));
663
664 m_busHelper->removeMessageFilter(filter: m_videoOutput);
665 }
666
667 m_videoOutput = videoOutput;
668
669 if (m_videoOutput) {
670 connect(sender: m_videoOutput, SIGNAL(sinkChanged()),
671 receiver: this, SLOT(updateVideoRenderer()));
672 connect(sender: m_videoOutput, SIGNAL(readyChanged(bool)),
673 receiver: this, SLOT(updateVideoRenderer()));
674
675 m_busHelper->installMessageFilter(filter: m_videoOutput);
676 }
677 }
678
679 m_renderer = qobject_cast<QGstreamerVideoRendererInterface*>(object: videoOutput);
680 emit rendererChanged();
681
682 // No sense to continue if custom pipeline requested.
683 if (!m_playbin)
684 return;
685
686 GstElement *videoSink = 0;
687 if (m_renderer && m_renderer->isReady())
688 videoSink = m_renderer->videoSink();
689
690 if (!videoSink)
691 videoSink = m_nullVideoSink;
692
693#ifdef DEBUG_PLAYBIN
694 qDebug() << "Set video output:" << videoOutput;
695 qDebug() << "Current sink:" << (m_videoSink ? GST_ELEMENT_NAME(m_videoSink) : "") << m_videoSink
696 << "pending:" << (m_pendingVideoSink ? GST_ELEMENT_NAME(m_pendingVideoSink) : "") << m_pendingVideoSink
697 << "new sink:" << (videoSink ? GST_ELEMENT_NAME(videoSink) : "") << videoSink;
698#endif
699
700 if (m_pendingVideoSink == videoSink ||
701 (m_pendingVideoSink == 0 && m_videoSink == videoSink)) {
702#ifdef DEBUG_PLAYBIN
703 qDebug() << "Video sink has not changed, skip video output reconfiguration";
704#endif
705 return;
706 }
707
708#ifdef DEBUG_PLAYBIN
709 qDebug() << "Reconfigure video output";
710#endif
711
712 if (m_state == QMediaPlayer::StoppedState) {
713#ifdef DEBUG_PLAYBIN
714 qDebug() << "The pipeline has not started yet, pending state:" << m_pendingState;
715#endif
716 //the pipeline has not started yet
717 flushVideoProbes();
718 m_pendingVideoSink = 0;
719 gst_element_set_state(element: m_videoSink, state: GST_STATE_NULL);
720 gst_element_set_state(element: m_playbin, state: GST_STATE_NULL);
721
722#if !GST_CHECK_VERSION(1,0,0)
723 if (m_usingColorspaceElement) {
724 gst_element_unlink(m_colorSpace, m_videoSink);
725 gst_bin_remove(GST_BIN(m_videoOutputBin), m_colorSpace);
726 } else {
727 gst_element_unlink(m_videoIdentity, m_videoSink);
728 }
729#endif
730
731 removeVideoBufferProbe();
732
733 gst_bin_remove(GST_BIN(m_videoOutputBin), element: m_videoSink);
734
735 m_videoSink = videoSink;
736
737 gst_bin_add(GST_BIN(m_videoOutputBin), element: m_videoSink);
738
739 bool linked = gst_element_link(src: m_videoIdentity, dest: m_videoSink);
740#if !GST_CHECK_VERSION(1,0,0)
741 m_usingColorspaceElement = false;
742 if (!linked) {
743 m_usingColorspaceElement = true;
744#ifdef DEBUG_PLAYBIN
745 qDebug() << "Failed to connect video output, inserting the colorspace element.";
746#endif
747 gst_bin_add(GST_BIN(m_videoOutputBin), m_colorSpace);
748 linked = gst_element_link_many(m_videoIdentity, m_colorSpace, m_videoSink, nullptr);
749 }
750#endif
751
752 if (!linked)
753 qWarning() << "Linking video output element failed";
754
755 if (g_object_class_find_property(G_OBJECT_GET_CLASS(m_videoSink), property_name: "show-preroll-frame") != 0) {
756 gboolean value = m_displayPrerolledFrame;
757 g_object_set(G_OBJECT(m_videoSink), first_property_name: "show-preroll-frame", value, nullptr);
758 }
759
760 addVideoBufferProbe();
761
762 switch (m_pendingState) {
763 case QMediaPlayer::PausedState:
764 gst_element_set_state(element: m_playbin, state: GST_STATE_PAUSED);
765 break;
766 case QMediaPlayer::PlayingState:
767 gst_element_set_state(element: m_playbin, state: GST_STATE_PLAYING);
768 break;
769 default:
770 break;
771 }
772
773 resumeVideoProbes();
774
775 } else {
776 if (m_pendingVideoSink) {
777#ifdef DEBUG_PLAYBIN
778 qDebug() << "already waiting for pad to be blocked, just change the pending sink";
779#endif
780 m_pendingVideoSink = videoSink;
781 return;
782 }
783
784 m_pendingVideoSink = videoSink;
785
786#ifdef DEBUG_PLAYBIN
787 qDebug() << "Blocking the video output pad...";
788#endif
789
790 //block pads, async to avoid locking in paused state
791 GstPad *srcPad = gst_element_get_static_pad(element: m_videoIdentity, name: "src");
792#if GST_CHECK_VERSION(1,0,0)
793 this->pad_probe_id = gst_pad_add_probe(pad: srcPad, mask: (GstPadProbeType)(GST_PAD_PROBE_TYPE_BUFFER | GST_PAD_PROBE_TYPE_BLOCKING), callback: block_pad_cb, user_data: this, destroy_data: nullptr);
794#else
795 gst_pad_set_blocked_async(srcPad, true, &block_pad_cb, this);
796#endif
797 gst_object_unref(GST_OBJECT(srcPad));
798
799 //Unpause the sink to avoid waiting until the buffer is processed
800 //while the sink is paused. The pad will be blocked as soon as the current
801 //buffer is processed.
802 if (m_state == QMediaPlayer::PausedState) {
803#ifdef DEBUG_PLAYBIN
804 qDebug() << "Starting video output to avoid blocking in paused state...";
805#endif
806 gst_element_set_state(element: m_videoSink, state: GST_STATE_PLAYING);
807 }
808 }
809}
810
811void QGstreamerPlayerSession::finishVideoOutputChange()
812{
813 if (!m_playbin || !m_pendingVideoSink)
814 return;
815
816#ifdef DEBUG_PLAYBIN
817 qDebug() << "finishVideoOutputChange" << m_pendingVideoSink;
818#endif
819
820 GstPad *srcPad = gst_element_get_static_pad(element: m_videoIdentity, name: "src");
821
822 if (!gst_pad_is_blocked(pad: srcPad)) {
823 //pad is not blocked, it's possible to swap outputs only in the null state
824 qWarning() << "Pad is not blocked yet, could not switch video sink";
825 GstState identityElementState = GST_STATE_NULL;
826 gst_element_get_state(element: m_videoIdentity, state: &identityElementState, pending: nullptr, GST_CLOCK_TIME_NONE);
827 if (identityElementState != GST_STATE_NULL) {
828 gst_object_unref(GST_OBJECT(srcPad));
829 return; //can't change vo yet, received async call from the previous change
830 }
831 }
832
833 if (m_pendingVideoSink == m_videoSink) {
834 qDebug() << "Abort, no change";
835 //video output was change back to the current one,
836 //no need to torment the pipeline, just unblock the pad
837 if (gst_pad_is_blocked(pad: srcPad))
838#if GST_CHECK_VERSION(1,0,0)
839 gst_pad_remove_probe(pad: srcPad, id: this->pad_probe_id);
840#else
841 gst_pad_set_blocked_async(srcPad, false, &block_pad_cb, 0);
842#endif
843
844 m_pendingVideoSink = 0;
845 gst_object_unref(GST_OBJECT(srcPad));
846 return;
847 }
848
849#if !GST_CHECK_VERSION(1,0,0)
850 if (m_usingColorspaceElement) {
851 gst_element_set_state(m_colorSpace, GST_STATE_NULL);
852 gst_element_set_state(m_videoSink, GST_STATE_NULL);
853
854 gst_element_unlink(m_colorSpace, m_videoSink);
855 gst_bin_remove(GST_BIN(m_videoOutputBin), m_colorSpace);
856 } else {
857#else
858 {
859#endif
860 gst_element_set_state(element: m_videoSink, state: GST_STATE_NULL);
861 gst_element_unlink(src: m_videoIdentity, dest: m_videoSink);
862 }
863
864 removeVideoBufferProbe();
865
866 gst_bin_remove(GST_BIN(m_videoOutputBin), element: m_videoSink);
867
868 m_videoSink = m_pendingVideoSink;
869 m_pendingVideoSink = 0;
870
871 gst_bin_add(GST_BIN(m_videoOutputBin), element: m_videoSink);
872
873 addVideoBufferProbe();
874
875 bool linked = gst_element_link(src: m_videoIdentity, dest: m_videoSink);
876#if !GST_CHECK_VERSION(1,0,0)
877 m_usingColorspaceElement = false;
878 if (!linked) {
879 m_usingColorspaceElement = true;
880#ifdef DEBUG_PLAYBIN
881 qDebug() << "Failed to connect video output, inserting the colorspace element.";
882#endif
883 gst_bin_add(GST_BIN(m_videoOutputBin), m_colorSpace);
884 linked = gst_element_link_many(m_videoIdentity, m_colorSpace, m_videoSink, nullptr);
885 }
886#endif
887
888 if (!linked)
889 qWarning() << "Linking video output element failed";
890
891#ifdef DEBUG_PLAYBIN
892 qDebug() << "notify the video connector it has to emit a new segment message...";
893#endif
894
895#if !GST_CHECK_VERSION(1,0,0)
896 //it's necessary to send a new segment event just before
897 //the first buffer pushed to the new sink
898 g_signal_emit_by_name(m_videoIdentity,
899 "resend-new-segment",
900 true //emit connection-failed signal
901 //to have a chance to insert colorspace element
902 );
903#endif
904
905 GstState state = GST_STATE_VOID_PENDING;
906
907 switch (m_pendingState) {
908 case QMediaPlayer::StoppedState:
909 state = GST_STATE_NULL;
910 break;
911 case QMediaPlayer::PausedState:
912 state = GST_STATE_PAUSED;
913 break;
914 case QMediaPlayer::PlayingState:
915 state = GST_STATE_PLAYING;
916 break;
917 }
918
919#if !GST_CHECK_VERSION(1,0,0)
920 if (m_usingColorspaceElement)
921 gst_element_set_state(m_colorSpace, state);
922#endif
923
924 gst_element_set_state(element: m_videoSink, state);
925
926 if (state == GST_STATE_NULL)
927 flushVideoProbes();
928
929 // Set state change that was deferred due the video output
930 // change being pending
931 gst_element_set_state(element: m_playbin, state);
932
933 if (state != GST_STATE_NULL)
934 resumeVideoProbes();
935
936 //don't have to wait here, it will unblock eventually
937 if (gst_pad_is_blocked(pad: srcPad))
938#if GST_CHECK_VERSION(1,0,0)
939 gst_pad_remove_probe(pad: srcPad, id: this->pad_probe_id);
940#else
941 gst_pad_set_blocked_async(srcPad, false, &block_pad_cb, 0);
942#endif
943
944 gst_object_unref(GST_OBJECT(srcPad));
945
946}
947
948#if !GST_CHECK_VERSION(1,0,0)
949
950void QGstreamerPlayerSession::insertColorSpaceElement(GstElement *element, gpointer data)
951{
952#ifdef DEBUG_PLAYBIN
953 qDebug() << Q_FUNC_INFO;
954#endif
955 Q_UNUSED(element);
956 QGstreamerPlayerSession* session = reinterpret_cast<QGstreamerPlayerSession*>(data);
957
958 if (session->m_usingColorspaceElement)
959 return;
960 session->m_usingColorspaceElement = true;
961
962#ifdef DEBUG_PLAYBIN
963 qDebug() << "Failed to connect video output, inserting the colorspace elemnt.";
964 qDebug() << "notify the video connector it has to emit a new segment message...";
965#endif
966 //it's necessary to send a new segment event just before
967 //the first buffer pushed to the new sink
968 g_signal_emit_by_name(session->m_videoIdentity,
969 "resend-new-segment",
970 false // don't emit connection-failed signal
971 );
972
973 gst_element_unlink(session->m_videoIdentity, session->m_videoSink);
974 gst_bin_add(GST_BIN(session->m_videoOutputBin), session->m_colorSpace);
975 gst_element_link_many(session->m_videoIdentity, session->m_colorSpace, session->m_videoSink, nullptr);
976
977 GstState state = GST_STATE_VOID_PENDING;
978
979 switch (session->m_pendingState) {
980 case QMediaPlayer::StoppedState:
981 state = GST_STATE_NULL;
982 break;
983 case QMediaPlayer::PausedState:
984 state = GST_STATE_PAUSED;
985 break;
986 case QMediaPlayer::PlayingState:
987 state = GST_STATE_PLAYING;
988 break;
989 }
990
991 gst_element_set_state(session->m_colorSpace, state);
992}
993
994#endif
995
996bool QGstreamerPlayerSession::isVideoAvailable() const
997{
998 return m_videoAvailable;
999}
1000
1001bool QGstreamerPlayerSession::isSeekable() const
1002{
1003 return m_seekable;
1004}
1005
1006bool QGstreamerPlayerSession::play()
1007{
1008#if GST_CHECK_VERSION(1,0,0)
1009 static bool dumpDot = qEnvironmentVariableIsSet(varName: "GST_DEBUG_DUMP_DOT_DIR");
1010 if (dumpDot)
1011 gst_debug_bin_to_dot_file_with_ts(GST_BIN(m_pipeline), details: GstDebugGraphDetails(GST_DEBUG_GRAPH_SHOW_ALL), file_name: "gst.play");
1012#endif
1013#ifdef DEBUG_PLAYBIN
1014 qDebug() << Q_FUNC_INFO;
1015#endif
1016
1017 m_everPlayed = false;
1018 if (m_pipeline) {
1019 m_pendingState = QMediaPlayer::PlayingState;
1020 if (gst_element_set_state(element: m_pipeline, state: GST_STATE_PLAYING) == GST_STATE_CHANGE_FAILURE) {
1021 qWarning() << "GStreamer; Unable to play -" << m_request.url().toString();
1022 m_pendingState = m_state = QMediaPlayer::StoppedState;
1023 emit stateChanged(state: m_state);
1024 } else {
1025 resumeVideoProbes();
1026 return true;
1027 }
1028 }
1029
1030 return false;
1031}
1032
1033bool QGstreamerPlayerSession::pause()
1034{
1035#ifdef DEBUG_PLAYBIN
1036 qDebug() << Q_FUNC_INFO;
1037#endif
1038 if (m_pipeline) {
1039 m_pendingState = QMediaPlayer::PausedState;
1040 if (m_pendingVideoSink != 0)
1041 return true;
1042
1043 if (gst_element_set_state(element: m_pipeline, state: GST_STATE_PAUSED) == GST_STATE_CHANGE_FAILURE) {
1044 qWarning() << "GStreamer; Unable to pause -" << m_request.url().toString();
1045 m_pendingState = m_state = QMediaPlayer::StoppedState;
1046 emit stateChanged(state: m_state);
1047 } else {
1048 resumeVideoProbes();
1049 return true;
1050 }
1051 }
1052
1053 return false;
1054}
1055
1056void QGstreamerPlayerSession::stop()
1057{
1058#ifdef DEBUG_PLAYBIN
1059 qDebug() << Q_FUNC_INFO;
1060#endif
1061 m_everPlayed = false;
1062 if (m_pipeline) {
1063
1064 if (m_renderer)
1065 m_renderer->stopRenderer();
1066
1067 flushVideoProbes();
1068 gst_element_set_state(element: m_pipeline, state: GST_STATE_NULL);
1069
1070 m_lastPosition = 0;
1071 QMediaPlayer::State oldState = m_state;
1072 m_pendingState = m_state = QMediaPlayer::StoppedState;
1073
1074 finishVideoOutputChange();
1075
1076 //we have to do it here, since gstreamer will not emit bus messages any more
1077 setSeekable(false);
1078 if (oldState != m_state)
1079 emit stateChanged(state: m_state);
1080 }
1081}
1082
1083bool QGstreamerPlayerSession::seek(qint64 ms)
1084{
1085#ifdef DEBUG_PLAYBIN
1086 qDebug() << Q_FUNC_INFO << ms;
1087#endif
1088 //seek locks when the video output sink is changing and pad is blocked
1089 if (m_pipeline && !m_pendingVideoSink && m_state != QMediaPlayer::StoppedState && m_seekable) {
1090 ms = qMax(a: ms,b: qint64(0));
1091 qint64 from = m_playbackRate > 0 ? ms : 0;
1092 qint64 to = m_playbackRate > 0 ? duration() : ms;
1093
1094 bool isSeeking = gst_element_seek(element: m_pipeline, rate: m_playbackRate, format: GST_FORMAT_TIME,
1095 flags: GstSeekFlags(GST_SEEK_FLAG_FLUSH),
1096 start_type: GST_SEEK_TYPE_SET, start: from * 1000000,
1097 stop_type: GST_SEEK_TYPE_SET, stop: to * 1000000);
1098 if (isSeeking)
1099 m_lastPosition = ms;
1100
1101 return isSeeking;
1102 }
1103
1104 return false;
1105}
1106
1107void QGstreamerPlayerSession::setVolume(int volume)
1108{
1109#ifdef DEBUG_PLAYBIN
1110 qDebug() << Q_FUNC_INFO << volume;
1111#endif
1112
1113 if (m_volume != volume) {
1114 m_volume = volume;
1115
1116 if (m_volumeElement)
1117 g_object_set(G_OBJECT(m_volumeElement), first_property_name: "volume", m_volume / 100.0, nullptr);
1118
1119 emit volumeChanged(volume: m_volume);
1120 }
1121}
1122
1123void QGstreamerPlayerSession::setMuted(bool muted)
1124{
1125#ifdef DEBUG_PLAYBIN
1126 qDebug() << Q_FUNC_INFO << muted;
1127#endif
1128 if (m_muted != muted) {
1129 m_muted = muted;
1130
1131 if (m_volumeElement)
1132 g_object_set(G_OBJECT(m_volumeElement), first_property_name: "mute", m_muted ? TRUE : FALSE, nullptr);
1133
1134 emit mutedStateChanged(muted: m_muted);
1135 }
1136}
1137
1138
1139void QGstreamerPlayerSession::setSeekable(bool seekable)
1140{
1141#ifdef DEBUG_PLAYBIN
1142 qDebug() << Q_FUNC_INFO << seekable;
1143#endif
1144 if (seekable != m_seekable) {
1145 m_seekable = seekable;
1146 emit seekableChanged(m_seekable);
1147 }
1148}
1149
1150bool QGstreamerPlayerSession::processBusMessage(const QGstreamerMessage &message)
1151{
1152 GstMessage* gm = message.rawMessage();
1153 if (gm) {
1154 //tag message comes from elements inside playbin, not from playbin itself
1155 if (GST_MESSAGE_TYPE(gm) == GST_MESSAGE_TAG) {
1156 GstTagList *tag_list;
1157 gst_message_parse_tag(message: gm, tag_list: &tag_list);
1158
1159 QMap<QByteArray, QVariant> newTags = QGstUtils::gstTagListToMap(list: tag_list);
1160 QMap<QByteArray, QVariant>::const_iterator it = newTags.constBegin();
1161 for ( ; it != newTags.constEnd(); ++it)
1162 m_tags.insert(akey: it.key(), avalue: it.value()); // overwrite existing tags
1163
1164 gst_tag_list_free(tag_list);
1165
1166 emit tagsChanged();
1167 } else if (GST_MESSAGE_TYPE(gm) == GST_MESSAGE_DURATION) {
1168 updateDuration();
1169 }
1170
1171#ifdef DEBUG_PLAYBIN
1172 if (m_sourceType == MMSSrc && qstrcmp(GST_OBJECT_NAME(GST_MESSAGE_SRC(gm)), "source") == 0) {
1173 qDebug() << "Message from MMSSrc: " << GST_MESSAGE_TYPE(gm);
1174 } else if (m_sourceType == RTSPSrc && qstrcmp(GST_OBJECT_NAME(GST_MESSAGE_SRC(gm)), "source") == 0) {
1175 qDebug() << "Message from RTSPSrc: " << GST_MESSAGE_TYPE(gm);
1176 } else {
1177 qDebug() << "Message from " << GST_OBJECT_NAME(GST_MESSAGE_SRC(gm)) << ":" << GST_MESSAGE_TYPE(gm);
1178 }
1179#endif
1180
1181 if (GST_MESSAGE_TYPE(gm) == GST_MESSAGE_BUFFERING) {
1182 int progress = 0;
1183 gst_message_parse_buffering(message: gm, percent: &progress);
1184 emit bufferingProgressChanged(percentFilled: progress);
1185 }
1186
1187 bool handlePlaybin2 = false;
1188 if (GST_MESSAGE_SRC(gm) == GST_OBJECT_CAST(m_pipeline)) {
1189 switch (GST_MESSAGE_TYPE(gm)) {
1190 case GST_MESSAGE_STATE_CHANGED:
1191 {
1192 GstState oldState;
1193 GstState newState;
1194 GstState pending;
1195
1196 gst_message_parse_state_changed(message: gm, oldstate: &oldState, newstate: &newState, pending: &pending);
1197
1198#ifdef DEBUG_PLAYBIN
1199 static QStringList states = {
1200 QStringLiteral("GST_STATE_VOID_PENDING"), QStringLiteral("GST_STATE_NULL"),
1201 QStringLiteral("GST_STATE_READY"), QStringLiteral("GST_STATE_PAUSED"),
1202 QStringLiteral("GST_STATE_PLAYING") };
1203
1204 qDebug() << QStringLiteral("state changed: old: %1 new: %2 pending: %3") \
1205 .arg(states[oldState]) \
1206 .arg(states[newState]) \
1207 .arg(states[pending]);
1208#endif
1209
1210 switch (newState) {
1211 case GST_STATE_VOID_PENDING:
1212 case GST_STATE_NULL:
1213 setSeekable(false);
1214 finishVideoOutputChange();
1215 if (m_state != QMediaPlayer::StoppedState)
1216 emit stateChanged(state: m_state = QMediaPlayer::StoppedState);
1217 break;
1218 case GST_STATE_READY:
1219 setSeekable(false);
1220 if (m_state != QMediaPlayer::StoppedState)
1221 emit stateChanged(state: m_state = QMediaPlayer::StoppedState);
1222 break;
1223 case GST_STATE_PAUSED:
1224 {
1225 QMediaPlayer::State prevState = m_state;
1226 m_state = QMediaPlayer::PausedState;
1227
1228 //check for seekable
1229 if (oldState == GST_STATE_READY) {
1230 if (m_sourceType == SoupHTTPSrc || m_sourceType == MMSSrc) {
1231 //since udpsrc is a live source, it is not applicable here
1232 m_everPlayed = true;
1233 }
1234
1235 getStreamsInfo();
1236 updateVideoResolutionTag();
1237
1238 //gstreamer doesn't give a reliable indication the duration
1239 //information is ready, GST_MESSAGE_DURATION is not sent by most elements
1240 //the duration is queried up to 5 times with increasing delay
1241 m_durationQueries = 5;
1242 // This should also update the seekable flag.
1243 updateDuration();
1244
1245 if (!qFuzzyCompare(p1: m_playbackRate, p2: qreal(1.0))) {
1246 qreal rate = m_playbackRate;
1247 m_playbackRate = 1.0;
1248 setPlaybackRate(rate);
1249 }
1250 }
1251
1252 if (m_state != prevState)
1253 emit stateChanged(state: m_state);
1254
1255 break;
1256 }
1257 case GST_STATE_PLAYING:
1258 m_everPlayed = true;
1259 if (m_state != QMediaPlayer::PlayingState) {
1260 emit stateChanged(state: m_state = QMediaPlayer::PlayingState);
1261
1262 // For rtsp streams duration information might not be available
1263 // until playback starts.
1264 if (m_duration <= 0) {
1265 m_durationQueries = 5;
1266 updateDuration();
1267 }
1268 }
1269
1270 break;
1271 }
1272 }
1273 break;
1274
1275 case GST_MESSAGE_EOS:
1276 emit playbackFinished();
1277 break;
1278
1279 case GST_MESSAGE_TAG:
1280 case GST_MESSAGE_STREAM_STATUS:
1281 case GST_MESSAGE_UNKNOWN:
1282 break;
1283 case GST_MESSAGE_ERROR: {
1284 GError *err;
1285 gchar *debug;
1286 gst_message_parse_error(message: gm, gerror: &err, debug: &debug);
1287 if (err->domain == GST_STREAM_ERROR && err->code == GST_STREAM_ERROR_CODEC_NOT_FOUND)
1288 processInvalidMedia(errorCode: QMediaPlayer::FormatError, errorString: tr(s: "Cannot play stream of type: <unknown>"));
1289 else
1290 processInvalidMedia(errorCode: QMediaPlayer::ResourceError, errorString: QString::fromUtf8(str: err->message));
1291 qWarning() << "Error:" << QString::fromUtf8(str: err->message);
1292 g_error_free(error: err);
1293 g_free(mem: debug);
1294 }
1295 break;
1296 case GST_MESSAGE_WARNING:
1297 {
1298 GError *err;
1299 gchar *debug;
1300 gst_message_parse_warning (message: gm, gerror: &err, debug: &debug);
1301 qWarning() << "Warning:" << QString::fromUtf8(str: err->message);
1302 g_error_free (error: err);
1303 g_free (mem: debug);
1304 }
1305 break;
1306 case GST_MESSAGE_INFO:
1307#ifdef DEBUG_PLAYBIN
1308 {
1309 GError *err;
1310 gchar *debug;
1311 gst_message_parse_info (gm, &err, &debug);
1312 qDebug() << "Info:" << QString::fromUtf8(err->message);
1313 g_error_free (err);
1314 g_free (debug);
1315 }
1316#endif
1317 break;
1318 case GST_MESSAGE_BUFFERING:
1319 case GST_MESSAGE_STATE_DIRTY:
1320 case GST_MESSAGE_STEP_DONE:
1321 case GST_MESSAGE_CLOCK_PROVIDE:
1322 case GST_MESSAGE_CLOCK_LOST:
1323 case GST_MESSAGE_NEW_CLOCK:
1324 case GST_MESSAGE_STRUCTURE_CHANGE:
1325 case GST_MESSAGE_APPLICATION:
1326 case GST_MESSAGE_ELEMENT:
1327 break;
1328 case GST_MESSAGE_SEGMENT_START:
1329 {
1330 const GstStructure *structure = gst_message_get_structure(message: gm);
1331 qint64 position = g_value_get_int64(value: gst_structure_get_value(structure, fieldname: "position"));
1332 position /= 1000000;
1333 m_lastPosition = position;
1334 emit positionChanged(position);
1335 }
1336 break;
1337 case GST_MESSAGE_SEGMENT_DONE:
1338 break;
1339 case GST_MESSAGE_LATENCY:
1340#if GST_CHECK_VERSION(0,10,13)
1341 case GST_MESSAGE_ASYNC_START:
1342 break;
1343 case GST_MESSAGE_ASYNC_DONE:
1344 {
1345 gint64 position = 0;
1346 if (qt_gst_element_query_position(element: m_pipeline, format: GST_FORMAT_TIME, cur: &position)) {
1347 position /= 1000000;
1348 m_lastPosition = position;
1349 emit positionChanged(position);
1350 }
1351 break;
1352 }
1353#if GST_CHECK_VERSION(0,10,23)
1354 case GST_MESSAGE_REQUEST_STATE:
1355#endif
1356#endif
1357 case GST_MESSAGE_ANY:
1358 break;
1359 default:
1360 break;
1361 }
1362 } else if (GST_MESSAGE_TYPE(gm) == GST_MESSAGE_ERROR) {
1363 GError *err;
1364 gchar *debug;
1365 gst_message_parse_error(message: gm, gerror: &err, debug: &debug);
1366 // If the source has given up, so do we.
1367 if (qstrcmp(GST_OBJECT_NAME(GST_MESSAGE_SRC(gm)), str2: "source") == 0) {
1368 bool everPlayed = m_everPlayed;
1369 // Try and differentiate network related resource errors from the others
1370 if (!m_request.url().isRelative() && m_request.url().scheme().compare(other: QLatin1String("file"), cs: Qt::CaseInsensitive) != 0 ) {
1371 if (everPlayed ||
1372 (err->domain == GST_RESOURCE_ERROR && (
1373 err->code == GST_RESOURCE_ERROR_BUSY ||
1374 err->code == GST_RESOURCE_ERROR_OPEN_READ ||
1375 err->code == GST_RESOURCE_ERROR_READ ||
1376 err->code == GST_RESOURCE_ERROR_SEEK ||
1377 err->code == GST_RESOURCE_ERROR_SYNC))) {
1378 processInvalidMedia(errorCode: QMediaPlayer::NetworkError, errorString: QString::fromUtf8(str: err->message));
1379 } else {
1380 processInvalidMedia(errorCode: QMediaPlayer::ResourceError, errorString: QString::fromUtf8(str: err->message));
1381 }
1382 }
1383 else
1384 processInvalidMedia(errorCode: QMediaPlayer::ResourceError, errorString: QString::fromUtf8(str: err->message));
1385 } else if (err->domain == GST_STREAM_ERROR
1386 && (err->code == GST_STREAM_ERROR_DECRYPT || err->code == GST_STREAM_ERROR_DECRYPT_NOKEY)) {
1387 processInvalidMedia(errorCode: QMediaPlayer::AccessDeniedError, errorString: QString::fromUtf8(str: err->message));
1388 } else {
1389 handlePlaybin2 = true;
1390 }
1391 if (!handlePlaybin2)
1392 qWarning() << "Error:" << QString::fromUtf8(str: err->message);
1393 g_error_free(error: err);
1394 g_free(mem: debug);
1395 } else if (GST_MESSAGE_TYPE(gm) == GST_MESSAGE_ELEMENT
1396 && qstrcmp(GST_OBJECT_NAME(GST_MESSAGE_SRC(gm)), str2: "source") == 0
1397 && m_sourceType == UDPSrc
1398 && gst_structure_has_name(structure: gst_message_get_structure(message: gm), name: "GstUDPSrcTimeout")) {
1399 //since udpsrc will not generate an error for the timeout event,
1400 //we need to process its element message here and treat it as an error.
1401 processInvalidMedia(errorCode: m_everPlayed ? QMediaPlayer::NetworkError : QMediaPlayer::ResourceError,
1402 errorString: tr(s: "UDP source timeout"));
1403 } else {
1404 handlePlaybin2 = true;
1405 }
1406
1407 if (handlePlaybin2) {
1408 if (GST_MESSAGE_TYPE(gm) == GST_MESSAGE_WARNING) {
1409 GError *err;
1410 gchar *debug;
1411 gst_message_parse_warning(message: gm, gerror: &err, debug: &debug);
1412 if (err->domain == GST_STREAM_ERROR && err->code == GST_STREAM_ERROR_CODEC_NOT_FOUND)
1413 emit error(error: int(QMediaPlayer::FormatError), errorString: tr(s: "Cannot play stream of type: <unknown>"));
1414 // GStreamer shows warning for HTTP playlists
1415 if (err && err->message)
1416 qWarning() << "Warning:" << QString::fromUtf8(str: err->message);
1417 g_error_free(error: err);
1418 g_free(mem: debug);
1419 } else if (GST_MESSAGE_TYPE(gm) == GST_MESSAGE_ERROR) {
1420 GError *err;
1421 gchar *debug;
1422 gst_message_parse_error(message: gm, gerror: &err, debug: &debug);
1423
1424 // Nearly all errors map to ResourceError
1425 QMediaPlayer::Error qerror = QMediaPlayer::ResourceError;
1426 if (err->domain == GST_STREAM_ERROR
1427 && (err->code == GST_STREAM_ERROR_DECRYPT
1428 || err->code == GST_STREAM_ERROR_DECRYPT_NOKEY)) {
1429 qerror = QMediaPlayer::AccessDeniedError;
1430 }
1431 processInvalidMedia(errorCode: qerror, errorString: QString::fromUtf8(str: err->message));
1432 if (err && err->message)
1433 qWarning() << "Error:" << QString::fromUtf8(str: err->message);
1434
1435 g_error_free(error: err);
1436 g_free(mem: debug);
1437 }
1438 }
1439 }
1440
1441 return false;
1442}
1443
1444void QGstreamerPlayerSession::getStreamsInfo()
1445{
1446 if (!m_playbin)
1447 return;
1448
1449 QList< QMap<QString,QVariant> > oldProperties = m_streamProperties;
1450 QList<QMediaStreamsControl::StreamType> oldTypes = m_streamTypes;
1451 QMap<QMediaStreamsControl::StreamType, int> oldOffset = m_playbin2StreamOffset;
1452
1453 //check if video is available:
1454 bool haveAudio = false;
1455 bool haveVideo = false;
1456 m_streamProperties.clear();
1457 m_streamTypes.clear();
1458 m_playbin2StreamOffset.clear();
1459
1460 gint audioStreamsCount = 0;
1461 gint videoStreamsCount = 0;
1462 gint textStreamsCount = 0;
1463
1464 g_object_get(G_OBJECT(m_playbin), first_property_name: "n-audio", &audioStreamsCount, nullptr);
1465 g_object_get(G_OBJECT(m_playbin), first_property_name: "n-video", &videoStreamsCount, nullptr);
1466 g_object_get(G_OBJECT(m_playbin), first_property_name: "n-text", &textStreamsCount, nullptr);
1467
1468 haveAudio = audioStreamsCount > 0;
1469 haveVideo = videoStreamsCount > 0;
1470
1471 m_playbin2StreamOffset[QMediaStreamsControl::AudioStream] = 0;
1472 m_playbin2StreamOffset[QMediaStreamsControl::VideoStream] = audioStreamsCount;
1473 m_playbin2StreamOffset[QMediaStreamsControl::SubPictureStream] = audioStreamsCount+videoStreamsCount;
1474
1475 for (int i=0; i<audioStreamsCount; i++)
1476 m_streamTypes.append(t: QMediaStreamsControl::AudioStream);
1477
1478 for (int i=0; i<videoStreamsCount; i++)
1479 m_streamTypes.append(t: QMediaStreamsControl::VideoStream);
1480
1481 for (int i=0; i<textStreamsCount; i++)
1482 m_streamTypes.append(t: QMediaStreamsControl::SubPictureStream);
1483
1484 for (int i=0; i<m_streamTypes.count(); i++) {
1485 QMediaStreamsControl::StreamType streamType = m_streamTypes[i];
1486 QMap<QString, QVariant> streamProperties;
1487
1488 int streamIndex = i - m_playbin2StreamOffset[streamType];
1489
1490 GstTagList *tags = 0;
1491 switch (streamType) {
1492 case QMediaStreamsControl::AudioStream:
1493 g_signal_emit_by_name(G_OBJECT(m_playbin), detailed_signal: "get-audio-tags", streamIndex, &tags);
1494 break;
1495 case QMediaStreamsControl::VideoStream:
1496 g_signal_emit_by_name(G_OBJECT(m_playbin), detailed_signal: "get-video-tags", streamIndex, &tags);
1497 break;
1498 case QMediaStreamsControl::SubPictureStream:
1499 g_signal_emit_by_name(G_OBJECT(m_playbin), detailed_signal: "get-text-tags", streamIndex, &tags);
1500 break;
1501 default:
1502 break;
1503 }
1504#if GST_CHECK_VERSION(1,0,0)
1505 if (tags && GST_IS_TAG_LIST(tags)) {
1506#else
1507 if (tags && gst_is_tag_list(tags)) {
1508#endif
1509 gchar *languageCode = 0;
1510 if (gst_tag_list_get_string(list: tags, GST_TAG_LANGUAGE_CODE, value: &languageCode))
1511 streamProperties[QMediaMetaData::Language] = QString::fromUtf8(str: languageCode);
1512
1513 //qDebug() << "language for setream" << i << QString::fromUtf8(languageCode);
1514 g_free (mem: languageCode);
1515 gst_tag_list_free(tags);
1516 }
1517
1518 m_streamProperties.append(t: streamProperties);
1519 }
1520
1521 bool emitAudioChanged = (haveAudio != m_audioAvailable);
1522 bool emitVideoChanged = (haveVideo != m_videoAvailable);
1523
1524 m_audioAvailable = haveAudio;
1525 m_videoAvailable = haveVideo;
1526
1527 if (emitAudioChanged) {
1528 emit audioAvailableChanged(audioAvailable: m_audioAvailable);
1529 }
1530 if (emitVideoChanged) {
1531 emit videoAvailableChanged(videoAvailable: m_videoAvailable);
1532 }
1533
1534 if (oldProperties != m_streamProperties || oldTypes != m_streamTypes || oldOffset != m_playbin2StreamOffset)
1535 emit streamsChanged();
1536}
1537
1538void QGstreamerPlayerSession::updateVideoResolutionTag()
1539{
1540 if (!m_videoIdentity)
1541 return;
1542
1543#ifdef DEBUG_PLAYBIN
1544 qDebug() << Q_FUNC_INFO;
1545#endif
1546 QSize size;
1547 QSize aspectRatio;
1548 GstPad *pad = gst_element_get_static_pad(element: m_videoIdentity, name: "src");
1549 GstCaps *caps = qt_gst_pad_get_current_caps(pad);
1550
1551 if (caps) {
1552 const GstStructure *structure = gst_caps_get_structure(caps, index: 0);
1553 gst_structure_get_int(structure, fieldname: "width", value: &size.rwidth());
1554 gst_structure_get_int(structure, fieldname: "height", value: &size.rheight());
1555
1556 gint aspectNum = 0;
1557 gint aspectDenum = 0;
1558 if (!size.isEmpty() && gst_structure_get_fraction(
1559 structure, fieldname: "pixel-aspect-ratio", value_numerator: &aspectNum, value_denominator: &aspectDenum)) {
1560 if (aspectDenum > 0)
1561 aspectRatio = QSize(aspectNum, aspectDenum);
1562 }
1563 gst_caps_unref(caps);
1564 }
1565
1566 gst_object_unref(GST_OBJECT(pad));
1567
1568 QSize currentSize = m_tags.value(akey: "resolution").toSize();
1569 QSize currentAspectRatio = m_tags.value(akey: "pixel-aspect-ratio").toSize();
1570
1571 if (currentSize != size || currentAspectRatio != aspectRatio) {
1572 if (aspectRatio.isEmpty())
1573 m_tags.remove(akey: "pixel-aspect-ratio");
1574
1575 if (size.isEmpty()) {
1576 m_tags.remove(akey: "resolution");
1577 } else {
1578 m_tags.insert(akey: "resolution", avalue: QVariant(size));
1579 if (!aspectRatio.isEmpty())
1580 m_tags.insert(akey: "pixel-aspect-ratio", avalue: QVariant(aspectRatio));
1581 }
1582
1583 emit tagsChanged();
1584 }
1585}
1586
1587void QGstreamerPlayerSession::updateDuration()
1588{
1589 gint64 gstDuration = 0;
1590 qint64 duration = 0;
1591
1592 if (m_pipeline && qt_gst_element_query_duration(element: m_pipeline, format: GST_FORMAT_TIME, cur: &gstDuration))
1593 duration = gstDuration / 1000000;
1594
1595 if (m_duration != duration) {
1596 m_duration = duration;
1597 emit durationChanged(duration: m_duration);
1598 }
1599
1600 gboolean seekable = false;
1601 if (m_duration > 0) {
1602 m_durationQueries = 0;
1603 GstQuery *query = gst_query_new_seeking(format: GST_FORMAT_TIME);
1604 if (gst_element_query(element: m_pipeline, query))
1605 gst_query_parse_seeking(query, format: 0, seekable: &seekable, segment_start: 0, segment_end: 0);
1606 gst_query_unref(q: query);
1607 }
1608 setSeekable(seekable);
1609
1610 if (m_durationQueries > 0) {
1611 //increase delay between duration requests
1612 int delay = 25 << (5 - m_durationQueries);
1613 QTimer::singleShot(msec: delay, receiver: this, SLOT(updateDuration()));
1614 m_durationQueries--;
1615 }
1616#ifdef DEBUG_PLAYBIN
1617 qDebug() << Q_FUNC_INFO << m_duration;
1618#endif
1619}
1620
1621void QGstreamerPlayerSession::playbinNotifySource(GObject *o, GParamSpec *p, gpointer d)
1622{
1623 Q_UNUSED(p);
1624
1625 GstElement *source = 0;
1626 g_object_get(object: o, first_property_name: "source", &source, nullptr);
1627 if (source == 0)
1628 return;
1629
1630#ifdef DEBUG_PLAYBIN
1631 qDebug() << "Playbin source added:" << G_OBJECT_CLASS_NAME(G_OBJECT_GET_CLASS(source));
1632#endif
1633
1634 // Set Headers
1635 const QByteArray userAgentString("User-Agent");
1636
1637 QGstreamerPlayerSession *self = reinterpret_cast<QGstreamerPlayerSession *>(d);
1638
1639 // User-Agent - special case, souphhtpsrc will always set something, even if
1640 // defined in extra-headers
1641 if (g_object_class_find_property(G_OBJECT_GET_CLASS(source), property_name: "user-agent") != 0) {
1642 g_object_set(G_OBJECT(source), first_property_name: "user-agent",
1643 self->m_request.rawHeader(headerName: userAgentString).constData(), nullptr);
1644 }
1645
1646 // The rest
1647 if (g_object_class_find_property(G_OBJECT_GET_CLASS(source), property_name: "extra-headers") != 0) {
1648 GstStructure *extras = qt_gst_structure_new_empty(name: "extras");
1649
1650 const auto rawHeaderList = self->m_request.rawHeaderList();
1651 for (const QByteArray &rawHeader : rawHeaderList) {
1652 if (rawHeader == userAgentString) // Filter User-Agent
1653 continue;
1654 else {
1655 GValue headerValue;
1656
1657 memset(s: &headerValue, c: 0, n: sizeof(GValue));
1658 g_value_init(value: &headerValue, G_TYPE_STRING);
1659
1660 g_value_set_string(value: &headerValue,
1661 v_string: self->m_request.rawHeader(headerName: rawHeader).constData());
1662
1663 gst_structure_set_value(structure: extras, fieldname: rawHeader.constData(), value: &headerValue);
1664 }
1665 }
1666
1667 if (gst_structure_n_fields(structure: extras) > 0)
1668 g_object_set(G_OBJECT(source), first_property_name: "extra-headers", extras, nullptr);
1669
1670 gst_structure_free(structure: extras);
1671 }
1672
1673 //set timeout property to 30 seconds
1674 const int timeout = 30;
1675 if (qstrcmp(G_OBJECT_CLASS_NAME(G_OBJECT_GET_CLASS(source)), str2: "GstUDPSrc") == 0) {
1676 quint64 convertedTimeout = timeout;
1677#if GST_CHECK_VERSION(1,0,0)
1678 // Gst 1.x -> nanosecond
1679 convertedTimeout *= 1000000000;
1680#else
1681 // Gst 0.10 -> microsecond
1682 convertedTimeout *= 1000000;
1683#endif
1684 g_object_set(G_OBJECT(source), first_property_name: "timeout", convertedTimeout, nullptr);
1685 self->m_sourceType = UDPSrc;
1686 //The udpsrc is always a live source.
1687 self->m_isLiveSource = true;
1688
        QUrlQuery query(self->m_request.url());
        const QString var = QLatin1String("udpsrc.caps");
        if (query.hasQueryItem(var)) {
            GstCaps *caps = gst_caps_from_string(query.queryItemValue(var).toLatin1().constData());
            g_object_set(G_OBJECT(source), "caps", caps, nullptr);
            gst_caps_unref(caps);
        }
    } else if (qstrcmp(G_OBJECT_CLASS_NAME(G_OBJECT_GET_CLASS(source)), "GstSoupHTTPSrc") == 0) {
        // souphttpsrc expects the timeout in seconds
        g_object_set(G_OBJECT(source), "timeout", guint(timeout), nullptr);
        self->m_sourceType = SoupHTTPSrc;
        // gst_base_src_is_live() is not reliable here, so check the source's
        // "is-live" property directly
        gboolean isLive = false;
        g_object_get(G_OBJECT(source), "is-live", &isLive, nullptr);
        self->m_isLiveSource = isLive;
    } else if (qstrcmp(G_OBJECT_CLASS_NAME(G_OBJECT_GET_CLASS(source)), "GstMMSSrc") == 0) {
        self->m_sourceType = MMSSrc;
        self->m_isLiveSource = gst_base_src_is_live(GST_BASE_SRC(source));
        g_object_set(G_OBJECT(source), "tcp-timeout", G_GUINT64_CONSTANT(timeout*1000000), nullptr);
    } else if (qstrcmp(G_OBJECT_CLASS_NAME(G_OBJECT_GET_CLASS(source)), "GstRTSPSrc") == 0) {
        // rtspsrc acts like a live source and will therefore only generate data in the PLAYING state.
        self->m_sourceType = RTSPSrc;
        self->m_isLiveSource = true;
        g_object_set(G_OBJECT(source), "buffer-mode", 1, nullptr);
    } else {
        self->m_sourceType = UnknownSrc;
        self->m_isLiveSource = gst_base_src_is_live(GST_BASE_SRC(source));
    }

#ifdef DEBUG_PLAYBIN
    if (self->m_isLiveSource)
        qDebug() << "Current source is a live source";
    else
        qDebug() << "Current source is a non-live source";
#endif

    if (self->m_videoSink)
        g_object_set(G_OBJECT(self->m_videoSink), "sync", !self->m_isLiveSource, nullptr);

    gst_object_unref(source);
}

bool QGstreamerPlayerSession::isLiveSource() const
{
    return m_isLiveSource;
}

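// GObject "notify::volume" callback. It fires on a GStreamer thread, so the
// actual handling is marshalled to the session's thread with a queued
// invocation of updateVolume().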
void QGstreamerPlayerSession::handleVolumeChange(GObject *o, GParamSpec *p, gpointer d)
{
    Q_UNUSED(o);
    Q_UNUSED(p);
    QGstreamerPlayerSession *session = reinterpret_cast<QGstreamerPlayerSession *>(d);
    QMetaObject::invokeMethod(session, "updateVolume", Qt::QueuedConnection);
}

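// Reads playbin's 0.0..1.0 volume, converts it to a rounded 0..100 percentage
// and emits volumeChanged() only when the value actually changed.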
void QGstreamerPlayerSession::updateVolume()
{
    double volume = 1.0;
    g_object_get(m_playbin, "volume", &volume, nullptr);

    if (m_volume != int(volume*100 + 0.5)) {
        m_volume = int(volume*100 + 0.5);
#ifdef DEBUG_PLAYBIN
        qDebug() << Q_FUNC_INFO << m_volume;
#endif
        emit volumeChanged(m_volume);
    }
}

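// GObject "notify::mute" callback; like the volume handler it is delivered on
// a GStreamer thread and dispatched to the session's thread via a queued call
// to updateMuted().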
void QGstreamerPlayerSession::handleMutedChange(GObject *o, GParamSpec *p, gpointer d)
{
    Q_UNUSED(o);
    Q_UNUSED(p);
    QGstreamerPlayerSession *session = reinterpret_cast<QGstreamerPlayerSession *>(d);
    QMetaObject::invokeMethod(session, "updateMuted", Qt::QueuedConnection);
}

void QGstreamerPlayerSession::updateMuted()
{
    gboolean muted = FALSE;
    g_object_get(G_OBJECT(m_playbin), "mute", &muted, nullptr);
    if (m_muted != muted) {
        m_muted = muted;
#ifdef DEBUG_PLAYBIN
        qDebug() << Q_FUNC_INFO << m_muted;
#endif
        emit mutedStateChanged(muted);
    }
}

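// Fallback for gst_element_factory_can_src_any_caps(), which is only available
// from GStreamer 0.10.33 onwards: walks the factory's static pad templates and
// returns TRUE if any source template can intersect the given caps.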
#if !GST_CHECK_VERSION(0, 10, 33)
static gboolean factory_can_src_any_caps (GstElementFactory *factory, const GstCaps *caps)
{
    GList *templates;

    g_return_val_if_fail(factory != nullptr, FALSE);
    g_return_val_if_fail(caps != nullptr, FALSE);

    templates = factory->staticpadtemplates;

    while (templates) {
        GstStaticPadTemplate *templ = (GstStaticPadTemplate *)templates->data;

        if (templ->direction == GST_PAD_SRC) {
            GstCaps *templcaps = gst_static_caps_get(&templ->static_caps);

            if (qt_gst_caps_can_intersect(caps, templcaps)) {
                gst_caps_unref(templcaps);
                return TRUE;
            }
            gst_caps_unref(templcaps);
        }
        templates = g_list_next(templates);
    }

    return FALSE;
}
#endif

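// "autoplug-select" callback connected to uridecodebin (see handleElementAdded()
// below). When a VAAPI decoder is offered but the configured video sink cannot
// handle any caps the decoder could produce, the decoder is skipped; otherwise
// the default GST_AUTOPLUG_SELECT_TRY is returned.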
GstAutoplugSelectResult QGstreamerPlayerSession::handleAutoplugSelect(GstBin *bin, GstPad *pad, GstCaps *caps, GstElementFactory *factory, QGstreamerPlayerSession *session)
{
    Q_UNUSED(bin);
    Q_UNUSED(pad);
    Q_UNUSED(caps);

    GstAutoplugSelectResult res = GST_AUTOPLUG_SELECT_TRY;

    // If VAAPI is available and can be used to decode but the current video sink cannot handle
    // the decoded format, don't use it.
    const gchar *factoryName = gst_plugin_feature_get_name(GST_PLUGIN_FEATURE(factory));
    if (g_str_has_prefix(factoryName, "vaapi")) {
        GstPad *sinkPad = gst_element_get_static_pad(session->m_videoSink, "sink");
#if GST_CHECK_VERSION(1,0,0)
        GstCaps *sinkCaps = gst_pad_query_caps(sinkPad, nullptr);
#else
        GstCaps *sinkCaps = gst_pad_get_caps(sinkPad);
#endif

#if !GST_CHECK_VERSION(0, 10, 33)
        if (!factory_can_src_any_caps(factory, sinkCaps))
#else
        if (!gst_element_factory_can_src_any_caps(factory, sinkCaps))
#endif
            res = GST_AUTOPLUG_SELECT_SKIP;

        gst_object_unref(sinkPad);
        gst_caps_unref(sinkCaps);
    }

    return res;
}

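// "element-added" callback. playbin adds queue2 and uridecodebin/decodebin
// elements dynamically, so they have to be configured from here: queue2 gets
// its on-disk buffering disabled, and decodebins are watched recursively so
// that nested queue2 elements are configured as well.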
void QGstreamerPlayerSession::handleElementAdded(GstBin *bin, GstElement *element, QGstreamerPlayerSession *session)
{
    Q_UNUSED(bin);
    // The queue2 element has to be configured to enable media downloading and
    // reporting of available ranges, but it is added to playbin2 dynamically,
    // so do it from this callback.

    gchar *elementName = gst_element_get_name(element);

    if (g_str_has_prefix(elementName, "queue2")) {
        // Disable on-disk buffering.
        g_object_set(G_OBJECT(element), "temp-template", nullptr, nullptr);
    } else if (g_str_has_prefix(elementName, "uridecodebin") ||
#if GST_CHECK_VERSION(1,0,0)
        g_str_has_prefix(elementName, "decodebin")) {
#else
        g_str_has_prefix(elementName, "decodebin2")) {
        if (g_str_has_prefix(elementName, "uridecodebin")) {
            // Add video/x-surface (VAAPI) to default raw formats
            g_object_set(G_OBJECT(element), "caps", gst_static_caps_get(&static_RawCaps), nullptr);
            // Listen for uridecodebin's autoplug-select to skip VAAPI usage when the current
            // video sink doesn't support it
            g_signal_connect(element, "autoplug-select", G_CALLBACK(handleAutoplugSelect), session);
        }
#endif
        // Listen for queue2 elements added to uridecodebin/decodebin2 as well.
        // Don't touch other bins since they may have unrelated queues.
        g_signal_connect(element, "element-added",
                         G_CALLBACK(handleElementAdded), session);
    }

    g_free(elementName);
}

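// Invoked when playbin reports that its streams changed; the stream information
// is re-collected on the session's thread via a queued call to getStreamsInfo().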
void QGstreamerPlayerSession::handleStreamsChange(GstBin *bin, gpointer user_data)
{
    Q_UNUSED(bin);

    QGstreamerPlayerSession* session = reinterpret_cast<QGstreamerPlayerSession*>(user_data);
    QMetaObject::invokeMethod(session, "getStreamsInfo", Qt::QueuedConnection);
}

// Handle invalid media properly: update the media status before signalling the error.
void QGstreamerPlayerSession::processInvalidMedia(QMediaPlayer::Error errorCode, const QString& errorString)
{
#ifdef DEBUG_PLAYBIN
    qDebug() << Q_FUNC_INFO;
#endif
    emit invalidMedia();
    stop();
    emit error(int(errorCode), errorString);
}

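// Toggles the video sink's "show-preroll-frame" property (only when the sink
// exposes it), which controls whether a frame is displayed while the pipeline
// is merely prerolled/paused rather than playing.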
void QGstreamerPlayerSession::showPrerollFrames(bool enabled)
{
#ifdef DEBUG_PLAYBIN
    qDebug() << Q_FUNC_INFO << enabled;
#endif
    if (enabled != m_displayPrerolledFrame && m_videoSink &&
            g_object_class_find_property(G_OBJECT_GET_CLASS(m_videoSink), "show-preroll-frame") != 0) {

        gboolean value = enabled;
        g_object_set(G_OBJECT(m_videoSink), "show-preroll-frame", value, nullptr);
        m_displayPrerolledFrame = enabled;
    }
}

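// Video/audio probe registration. Only one probe of each kind is supported at
// a time (enforced by the assertions); the probe is attached to the sink pad
// of the corresponding sink element.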
void QGstreamerPlayerSession::addProbe(QGstreamerVideoProbeControl* probe)
{
    Q_ASSERT(!m_videoProbe);
    m_videoProbe = probe;
    addVideoBufferProbe();
}

void QGstreamerPlayerSession::removeProbe(QGstreamerVideoProbeControl* probe)
{
    Q_ASSERT(m_videoProbe == probe);
    removeVideoBufferProbe();
    m_videoProbe = 0;
}

void QGstreamerPlayerSession::addProbe(QGstreamerAudioProbeControl* probe)
{
    Q_ASSERT(!m_audioProbe);
    m_audioProbe = probe;
    addAudioBufferProbe();
}

void QGstreamerPlayerSession::removeProbe(QGstreamerAudioProbeControl* probe)
{
    Q_ASSERT(m_audioProbe == probe);
    removeAudioBufferProbe();
    m_audioProbe = 0;
}

// Similar to stop(), but leaves m_everPlayed and m_lastPosition untouched and
// does not call setSeekable().
void QGstreamerPlayerSession::endOfMediaReset()
{
    if (m_renderer)
        m_renderer->stopRenderer();

    flushVideoProbes();
    gst_element_set_state(m_pipeline, GST_STATE_PAUSED);

    QMediaPlayer::State oldState = m_state;
    m_pendingState = m_state = QMediaPlayer::StoppedState;

    finishVideoOutputChange();

    if (oldState != m_state)
        emit stateChanged(m_state);
}

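// Helpers that attach or detach the registered probes on the sink pad of the
// video/audio sink. They are no-ops when no probe is registered.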
void QGstreamerPlayerSession::removeVideoBufferProbe()
{
    if (!m_videoProbe)
        return;

    GstPad *pad = gst_element_get_static_pad(m_videoSink, "sink");
    if (pad) {
        m_videoProbe->removeProbeFromPad(pad);
        gst_object_unref(GST_OBJECT(pad));
    }
}

void QGstreamerPlayerSession::addVideoBufferProbe()
{
    if (!m_videoProbe)
        return;

    GstPad *pad = gst_element_get_static_pad(m_videoSink, "sink");
    if (pad) {
        m_videoProbe->addProbeToPad(pad);
        gst_object_unref(GST_OBJECT(pad));
    }
}

void QGstreamerPlayerSession::removeAudioBufferProbe()
{
    if (!m_audioProbe)
        return;

    GstPad *pad = gst_element_get_static_pad(m_audioSink, "sink");
    if (pad) {
        m_audioProbe->removeProbeFromPad(pad);
        gst_object_unref(GST_OBJECT(pad));
    }
}

void QGstreamerPlayerSession::addAudioBufferProbe()
{
    if (!m_audioProbe)
        return;

    GstPad *pad = gst_element_get_static_pad(m_audioSink, "sink");
    if (pad) {
        m_audioProbe->addProbeToPad(pad);
        gst_object_unref(GST_OBJECT(pad));
    }
}

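// Puts the video probe into (or takes it out of) flushing mode so that buffers
// arriving while the pipeline is being reset or reconfigured can be ignored;
// see endOfMediaReset() above for one caller.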
void QGstreamerPlayerSession::flushVideoProbes()
{
    if (m_videoProbe)
        m_videoProbe->startFlushing();
}

void QGstreamerPlayerSession::resumeVideoProbes()
{
    if (m_videoProbe)
        m_videoProbe->stopFlushing();
}

QT_END_NAMESPACE

