// Copyright (C) 2016 The Qt Company Ltd.
// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only

#include <common/qgstreamermediaplayer_p.h>

#include <audio/qgstreameraudiodevice_p.h>
#include <common/qglist_helper_p.h>
#include <common/qgst_debug_p.h>
#include <common/qgst_discoverer_p.h>
#include <common/qgst_play_p.h>
#include <common/qgstpipeline_p.h>
#include <common/qgstreameraudiooutput_p.h>
#include <common/qgstreamermessage_p.h>
#include <common/qgstreamermetadata_p.h>
#include <common/qgstreamervideooutput_p.h>
#include <common/qgstreamervideosink_p.h>
#include <uri_handler/qgstreamer_qiodevice_handler_p.h>
#include <qgstreamerformatinfo_p.h>

#include <QtMultimedia/qaudiodevice.h>
#include <QtCore/qdebug.h>
#include <QtCore/qiodevice.h>
#include <QtCore/qloggingcategory.h>
#include <QtCore/qthread.h>
#include <QtCore/qurl.h>
#include <QtCore/private/quniquehandle_p.h>

// NOLINTBEGIN(readability-convert-member-functions-to-static)

static Q_LOGGING_CATEGORY(qLcMediaPlayer, "qt.multimedia.player")

QT_BEGIN_NAMESPACE

namespace {

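// Maps the media type of the first caps structure onto the player's TrackType;
// returns std::nullopt for media types this player does not handle.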
std::optional<QGstreamerMediaPlayer::TrackType> toTrackType(const QGstCaps &caps)
{
    using TrackType = QGstreamerMediaPlayer::TrackType;

    QByteArrayView type = caps.at(0).name();

    if (type.startsWith("video/x-raw"))
        return TrackType::VideoStream;
    if (type.startsWith("audio/x-raw"))
        return TrackType::AudioStream;
    if (type.startsWith("text"))
        return TrackType::SubtitleStream;

    return std::nullopt;
}

} // namespace

bool QGstreamerMediaPlayer::discover(const QUrl &url)
{
    QGst::QGstDiscoverer discoverer;

    using namespace std::chrono;
    using namespace std::chrono_literals;

    auto discoveryResult = discoverer.discover(url);
    if (discoveryResult) {
        // Make sure GstPlay is ready if play() is called from slots during discovery
        gst_play_set_uri(m_gstPlay.get(), url.toEncoded().constData());

        m_trackMetaData.fill({});
        seekableChanged(discoveryResult->isSeekable);
        if (discoveryResult->duration)
            m_duration = round<milliseconds>(*discoveryResult->duration);
        else
            m_duration = 0ms;
        durationChanged(m_duration);

        m_metaData = QGst::toContainerMetadata(*discoveryResult);

        videoAvailableChanged(!discoveryResult->videoStreams.empty());
        audioAvailableChanged(!discoveryResult->audioStreams.empty());

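        // m_trackMetaData and m_activeTrack are indexed by TrackType:
        // [0] video, [1] audio, [2] subtitles.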
        m_nativeSize.clear();
        for (const auto &videoInfo : discoveryResult->videoStreams) {
            m_trackMetaData[0].emplace_back(QGst::toStreamMetadata(videoInfo));
            QGstStructureView structure = videoInfo.caps.at(0);
            m_nativeSize.emplace_back(structure.nativeSize());
        }
        for (const auto &audioInfo : discoveryResult->audioStreams)
            m_trackMetaData[1].emplace_back(QGst::toStreamMetadata(audioInfo));
        for (const auto &subtitleInfo : discoveryResult->subtitleStreams)
            m_trackMetaData[2].emplace_back(QGst::toStreamMetadata(subtitleInfo));

        using Key = QMediaMetaData::Key;
        auto copyKeysToRootMetadata = [&](const QMediaMetaData &reference, QSpan<const Key> keys) {
            for (QMediaMetaData::Key key : keys) {
                QVariant referenceValue = reference.value(key);
                if (referenceValue.isValid())
                    m_metaData.insert(key, referenceValue);
            }
        };

        // FIXME: we duplicate some metadata for the first audio / video track.
        // In the future we will want to use e.g. the currently selected track.
        if (!m_trackMetaData[0].empty())
            copyKeysToRootMetadata(m_trackMetaData[0].front(),
                                   {
                                       Key::HasHdrContent,
                                       Key::Orientation,
                                       Key::Resolution,
                                       Key::VideoBitRate,
                                       Key::VideoCodec,
                                       Key::VideoFrameRate,
                                   });

        if (!m_trackMetaData[1].empty())
            copyKeysToRootMetadata(m_trackMetaData[1].front(),
                                   {
                                       Key::AudioBitRate,
                                       Key::AudioCodec,
                                   });

        if (!m_url.isEmpty())
            m_metaData.insert(QMediaMetaData::Key::Url, m_url);

        qCDebug(qLcMediaPlayer) << "metadata:" << m_metaData;
        qCDebug(qLcMediaPlayer) << "video metadata:" << m_trackMetaData[0];
        qCDebug(qLcMediaPlayer) << "audio metadata:" << m_trackMetaData[1];
        qCDebug(qLcMediaPlayer) << "subtitle metadata:" << m_trackMetaData[2];

        metaDataChanged();
        tracksChanged();
        m_activeTrack = {
            isVideoAvailable() ? 0 : -1,
            isAudioAvailable() ? 0 : -1,
            -1,
        };
        updateVideoTrackEnabled();
        updateAudioTrackEnabled();
        updateNativeSizeOnVideoOutput();
    }

    return bool(discoveryResult);
}

void QGstreamerMediaPlayer::decoderPadAddedCustomSource(const QGstElement &src, const QGstPad &pad)
{
    // GStreamer or application thread
    if (src != decoder)
        return;

    qCDebug(qLcMediaPlayer) << "Added pad" << pad.name() << "from" << src.name();

    QGstCaps caps = pad.queryCaps();

    std::optional<QGstreamerMediaPlayer::TrackType> type = toTrackType(caps);
    if (!type)
        return;

    customPipelinePads[*type] = pad;

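    // Link the new pad to the matching output element; when no application sink is
    // configured yet, a fakesink keeps the branch terminated.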
    switch (*type) {
    case VideoStream: {
        QGstElement sink = gstVideoOutput->gstreamerVideoSink()
                ? gstVideoOutput->gstreamerVideoSink()->gstSink()
                : QGstElement::createFromPipelineDescription("fakesink");

        customPipeline.add(sink);
        pad.link(sink.sink());
        customPipelineSinks[VideoStream] = sink;
        sink.syncStateWithParent();
        return;
    }
    case AudioStream: {
        QGstElement sink = gstAudioOutput ? gstAudioOutput->gstElement()
                                          : QGstElement::createFromPipelineDescription("fakesink");
        customPipeline.add(sink);
        pad.link(sink.sink());
        customPipelineSinks[AudioStream] = sink;
        sink.syncStateWithParent();
        return;
    }
    case SubtitleStream: {
        QGstElement sink = gstVideoOutput->gstreamerVideoSink()
                ? gstVideoOutput->gstreamerVideoSink()->gstSink()
                : QGstElement::createFromPipelineDescription("fakesink");
        customPipeline.add(sink);
        pad.link(sink.sink());
        customPipelineSinks[SubtitleStream] = sink;
        sink.syncStateWithParent();
        return;
    }

    default:
        Q_UNREACHABLE();
    }
}

void QGstreamerMediaPlayer::decoderPadRemovedCustomSource(const QGstElement &src,
                                                          const QGstPad &pad)
{
    if (src != decoder)
        return;

    // application thread!
    Q_ASSERT(thread()->isCurrentThread());

    qCDebug(qLcMediaPlayer) << "Removed pad" << pad.name() << "from" << src.name() << "for stream"
                            << pad.streamId();

    auto found = std::find(customPipelinePads.begin(), customPipelinePads.end(), pad);
    if (found == customPipelinePads.end())
        return;

    TrackType type = TrackType(std::distance(customPipelinePads.begin(), found));

    switch (type) {
    case VideoStream:
    case AudioStream:
    case SubtitleStream: {
        if (customPipelineSinks[type]) {
            customPipeline.stopAndRemoveElements(customPipelineSinks[type]);
            customPipelineSinks[type] = {};
        }
        return;
    }

    default:
        Q_UNREACHABLE();
    }
}

void QGstreamerMediaPlayer::resetStateForEmptyOrInvalidMedia()
{
    using namespace std::chrono_literals;
    m_nativeSize.clear();

    bool metadataNeedsSignal = !m_metaData.isEmpty();
    bool tracksNeedsSignal =
            std::any_of(m_trackMetaData.begin(), m_trackMetaData.end(), [](const auto &container) {
                return !container.empty();
            });

    m_metaData.clear();
    m_trackMetaData.fill({});
    m_duration = 0ms;
    seekableChanged(false);

    videoAvailableChanged(false);
    audioAvailableChanged(false);

    m_activeTrack.fill(-1);

    if (metadataNeedsSignal)
        metaDataChanged();
    if (tracksNeedsSignal)
        tracksChanged();
}

void QGstreamerMediaPlayer::updateNativeSizeOnVideoOutput()
{
    int activeVideoTrack = activeTrack(TrackType::VideoStream);
    bool hasVideoTrack = activeVideoTrack != -1;

    QSize nativeSize = hasVideoTrack ? m_nativeSize[activeTrack(TrackType::VideoStream)] : QSize{};

    QVariant orientation = hasVideoTrack
            ? m_trackMetaData[TrackType::VideoStream][activeTrack(TrackType::VideoStream)].value(
                      QMediaMetaData::Key::Orientation)
            : QVariant{};

    if (orientation.isValid()) {
        auto rotation = orientation.value<QtVideo::Rotation>();
        gstVideoOutput->setRotation(rotation);
    }
    gstVideoOutput->setNativeSize(nativeSize);
}

void QGstreamerMediaPlayer::seekToCurrentPosition()
{
    gst_play_seek(m_gstPlay.get(), gst_play_get_position(m_gstPlay.get()));
}

void QGstreamerMediaPlayer::updateVideoTrackEnabled()
{
    bool hasTrack = m_activeTrack[TrackType::VideoStream] != -1;
    bool hasSink = gstVideoOutput->gstreamerVideoSink() != nullptr;

    gstVideoOutput->setActive(hasTrack);
    gst_play_set_video_track_enabled(m_gstPlay.get(), hasTrack && hasSink);
}

void QGstreamerMediaPlayer::updateAudioTrackEnabled()
{
    bool hasTrack = m_activeTrack[TrackType::AudioStream] != -1;
    bool hasAudioOut = gstAudioOutput;

    gst_play_set_audio_track_enabled(m_gstPlay.get(), hasTrack && hasAudioOut);
}

void QGstreamerMediaPlayer::updateBufferProgress(float newProgress)
{
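    // Skip the signal if the progress value is effectively unchanged.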
    if (qFuzzyIsNull(newProgress - m_bufferProgress))
        return;

    m_bufferProgress = newProgress;
    bufferProgressChanged(m_bufferProgress);
}

void QGstreamerMediaPlayer::disconnectDecoderHandlers()
{
    auto handlers = std::initializer_list<QGObjectHandlerScopedConnection *>{ &sourceSetup };
    for (QGObjectHandlerScopedConnection *handler : handlers)
        handler->disconnect();
}

QMaybe<QPlatformMediaPlayer *> QGstreamerMediaPlayer::create(QMediaPlayer *parent)
{
    auto videoOutput = QGstreamerVideoOutput::create();
    if (!videoOutput)
        return videoOutput.error();

    return new QGstreamerMediaPlayer(videoOutput.value(), parent);
}

template <typename T>
void setSeekAccurate(T *config, gboolean accurate)
{
    gst_play_config_set_seek_accurate(config, accurate);
}

QGstreamerMediaPlayer::QGstreamerMediaPlayer(QGstreamerVideoOutput *videoOutput,
                                             QMediaPlayer *parent)
    : QObject(parent),
      QPlatformMediaPlayer(parent),
      gstVideoOutput(videoOutput),
      m_gstPlay{
          gst_play_new(nullptr),
          QGstPlayHandle::HasRef,
      },
      m_playbin{
          GST_PIPELINE_CAST(gst_play_get_pipeline(m_gstPlay.get())),
          QGstPipeline::HasRef,
      },
      m_gstPlayBus{
          QGstBusHandle{ gst_play_get_message_bus(m_gstPlay.get()), QGstBusHandle::HasRef },
      }
{
#if 1
    // LATER: remove this hack after meta-freescale decides not to pull in outdated APIs

    // QTBUG-131300: nxp deliberately reverted to an old gst-play API before the gst-play API
    // stabilized. compare:
    // https://github.com/nxp-imx/gst-plugins-bad/commit/ff04fa9ca1b79c98e836d8cdb26ac3502dafba41
    constexpr bool useNxpWorkaround = std::is_same_v<decltype(&gst_play_config_set_seek_accurate),
                                                     void (*)(GstPlay *, gboolean)>;

    QUniqueGstStructureHandle config{
        gst_play_get_config(m_gstPlay.get()),
    };

    if constexpr (useNxpWorkaround)
        setSeekAccurate(m_gstPlay.get(), true);
    else
        setSeekAccurate(config.get(), true);

    gst_play_set_config(m_gstPlay.get(), config.release());
#else
    QUniqueGstStructureHandle config{
        gst_play_get_config(m_gstPlay.get()),
    };
    gst_play_config_set_seek_accurate(config.get(), true);
    gst_play_set_config(m_gstPlay.get(), config.release());
#endif

    gstVideoOutput->setParent(this);

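    // Route playbin's video and subtitle output into the Qt video output; the audio
    // sink starts as a fakesink until setAudioOutput() installs a real one.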
    m_playbin.set("video-sink", gstVideoOutput->gstElement());
    m_playbin.set("text-sink", gstVideoOutput->gstSubtitleElement());
    m_playbin.set("audio-sink", QGstElement::createFromPipelineDescription("fakesink"));

    m_gstPlayBus.installMessageFilter(this);

    // we start without subtitles
    gst_play_set_subtitle_track_enabled(m_gstPlay.get(), false);

    sourceSetup = m_playbin.connect("source-setup", GCallback(sourceSetupCallback), this);

    m_activeTrack.fill(-1);

    // TODO: how to detect stalled media?
}

QGstreamerMediaPlayer::~QGstreamerMediaPlayer()
{
    if (customPipeline)
        cleanupCustomPipeline();

    m_gstPlayBus.removeMessageFilter(static_cast<QGstreamerBusMessageFilter *>(this));
    gst_bus_set_flushing(m_gstPlayBus.get(), TRUE);
    gst_play_stop(m_gstPlay.get());

    // NOTE: gst_play_stop is not sufficient, un-reffing m_gstPlay can deadlock
    m_playbin.setStateSync(GST_STATE_NULL);

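    // Hand the playbin throw-away fakesinks so that the Qt-owned sink elements are
    // detached before the pipeline is destroyed.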
    m_playbin.set("video-sink", QGstElement::createFromPipelineDescription("fakesink"));
    m_playbin.set("text-sink", QGstElement::createFromPipelineDescription("fakesink"));
    m_playbin.set("audio-sink", QGstElement::createFromPipelineDescription("fakesink"));
}

void QGstreamerMediaPlayer::updatePositionFromPipeline()
{
    using namespace std::chrono;

    positionChanged(round<milliseconds>(nanoseconds{
            gst_play_get_position(m_gstPlay.get()),
    }));
}

bool QGstreamerMediaPlayer::processBusMessage(const QGstreamerMessage &message)
{
    if (isCustomSource()) {
        constexpr bool traceBusMessages = true;
        if (traceBusMessages)
            qCDebug(qLcMediaPlayer) << "received bus message:" << message;

        switch (message.type()) {
        case GST_MESSAGE_WARNING:
            qWarning() << "received bus message:" << message;
            break;

        case GST_MESSAGE_INFO:
            qInfo() << "received bus message:" << message;
            break;

        case GST_MESSAGE_ERROR:
            qWarning() << "received bus message:" << message;
            customPipeline.dumpPipelineGraph("GST_MESSAGE_ERROR");
            break;

        case GST_MESSAGE_LATENCY:
            customPipeline.recalculateLatency();
            break;

        default:
            break;
        }
        return false;
    }

    switch (message.type()) {
    case GST_MESSAGE_APPLICATION:
        if (gst_play_is_play_message(message.message()))
            return processBusMessageApplication(message);
        return false;

    default:
        qCDebug(qLcMediaPlayer) << message;

        return false;
    }

    return false;
}

bool QGstreamerMediaPlayer::processBusMessageApplication(const QGstreamerMessage &message)
{
    using namespace std::chrono;
    GstPlayMessage type;
    gst_play_message_parse_type(message.message(), &type);
    qCDebug(qLcMediaPlayer) << QGstPlayMessageAdaptor{ message };

    switch (type) {
    case GST_PLAY_MESSAGE_URI_LOADED: {
        mediaStatusChanged(QMediaPlayer::LoadedMedia);
        return false;
    }

    case GST_PLAY_MESSAGE_POSITION_UPDATED: {
        if (state() == QMediaPlayer::PlaybackState::PlayingState) {

            constexpr bool usePayload = false;
            if constexpr (usePayload) {
                GstClockTime position;
                gst_play_message_parse_position_updated(message.message(), &position);
                positionChanged(round<milliseconds>(nanoseconds{ position }));
            } else {
                GstClockTime position = gst_play_get_position(m_gstPlay.get());
                positionChanged(round<milliseconds>(nanoseconds{ position }));
            }
        }
        return false;
    }
    case GST_PLAY_MESSAGE_DURATION_CHANGED: {
        GstClockTime duration;
        gst_play_message_parse_duration_updated(message.message(), &duration);
        milliseconds durationInMs = round<milliseconds>(nanoseconds{ duration });
        durationChanged(durationInMs);

        m_metaData.insert(QMediaMetaData::Duration, int(durationInMs.count()));
        metaDataChanged();

        return false;
    }
    case GST_PLAY_MESSAGE_BUFFERING: {
        guint percent;
        gst_play_message_parse_buffering_percent(message.message(), &percent);
        updateBufferProgress(percent * 0.01f);
        return false;
    }
    case GST_PLAY_MESSAGE_STATE_CHANGED: {
        GstPlayState state;
        gst_play_message_parse_state_changed(message.message(), &state);

        switch (state) {
        case GstPlayState::GST_PLAY_STATE_STOPPED:
            if (stateChangeToSkip) {
                qCDebug(qLcMediaPlayer) << " skipping StoppedState transition";

                stateChangeToSkip -= 1;
                return false;
            }
            stateChanged(QMediaPlayer::StoppedState);
            updateBufferProgress(0);
            return false;

        case GstPlayState::GST_PLAY_STATE_PAUSED:
            stateChanged(QMediaPlayer::PausedState);
            mediaStatusChanged(QMediaPlayer::BufferedMedia);
            gstVideoOutput->setActive(true);
            updateBufferProgress(1);
            return false;
        case GstPlayState::GST_PLAY_STATE_BUFFERING:
            mediaStatusChanged(QMediaPlayer::BufferingMedia);
            return false;
        case GstPlayState::GST_PLAY_STATE_PLAYING:
            stateChanged(QMediaPlayer::PlayingState);
            mediaStatusChanged(QMediaPlayer::BufferedMedia);
            gstVideoOutput->setActive(true);
            updateBufferProgress(1);

            return false;
        default:
            return false;
        }
    }
    case GST_PLAY_MESSAGE_MEDIA_INFO_UPDATED: {
        using namespace QGstPlaySupport;

        QUniqueGstPlayMediaInfoHandle info{};
        gst_play_message_parse_media_info_updated(message.message(), &info);

        seekableChanged(gst_play_media_info_is_seekable(info.get()));

        const gchar *title = gst_play_media_info_get_title(info.get());
        m_metaData.insert(QMediaMetaData::Title, QString::fromUtf8(title));

        metaDataChanged();
        tracksChanged();

        return false;
    }
    case GST_PLAY_MESSAGE_END_OF_STREAM: {
        if (doLoop()) {
            positionChanged(m_duration);
            qCDebug(qLcMediaPlayer) << "EOS: restarting loop";
            gst_play_play(m_gstPlay.get());
            positionChanged(0ms);

            // we will still get a GST_PLAY_MESSAGE_STATE_CHANGED message, which we will just ignore
            // for now
            stateChangeToSkip += 1;
        } else {
            qCDebug(qLcMediaPlayer) << "EOS: done";
            positionChanged(m_duration);
            mediaStatusChanged(QMediaPlayer::EndOfMedia);
            stateChanged(QMediaPlayer::StoppedState);
            gstVideoOutput->setActive(false);
        }

        return false;
    }
    case GST_PLAY_MESSAGE_ERROR:
    case GST_PLAY_MESSAGE_WARNING:
    case GST_PLAY_MESSAGE_VIDEO_DIMENSIONS_CHANGED:
    case GST_PLAY_MESSAGE_VOLUME_CHANGED:
    case GST_PLAY_MESSAGE_MUTE_CHANGED:
    case GST_PLAY_MESSAGE_SEEK_DONE:
        return false;

    default:
        Q_UNREACHABLE_RETURN(false);
    }
}

qint64 QGstreamerMediaPlayer::duration() const
{
    return m_duration.count();
}

bool QGstreamerMediaPlayer::hasMedia() const
{
    return !m_url.isEmpty() || m_stream;
}

bool QGstreamerMediaPlayer::hasValidMedia() const
{
    if (!hasMedia())
        return false;

    switch (mediaStatus()) {
    case QMediaPlayer::MediaStatus::NoMedia:
    case QMediaPlayer::MediaStatus::InvalidMedia:
        return false;

    default:
        return true;
    }
}

float QGstreamerMediaPlayer::bufferProgress() const
{
    return m_bufferProgress;
}

QMediaTimeRange QGstreamerMediaPlayer::availablePlaybackRanges() const
{
    return QMediaTimeRange();
}

qreal QGstreamerMediaPlayer::playbackRate() const
{
    return gst_play_get_rate(m_gstPlay.get());
}

void QGstreamerMediaPlayer::setPlaybackRate(qreal rate)
{
    if (isCustomSource()) {
        static std::once_flag flag;
        std::call_once(flag, [] {
            // CAVEAT: unsynchronised with pipeline state. Potentially prone to race conditions
            qWarning()
                    << "setPlaybackRate with custom gstreamer pipelines can cause pipeline hangs. "
                       "Use with care";
        });

        customPipeline.setPlaybackRate(rate);
        return;
    }

    if (rate == playbackRate())
        return;

    qCDebug(qLcMediaPlayer) << "gst_play_set_rate" << rate;
    gst_play_set_rate(m_gstPlay.get(), rate);
    playbackRateChanged(rate);
}

void QGstreamerMediaPlayer::setPosition(qint64 pos)
{
    std::chrono::milliseconds posInMs{ pos };

    setPosition(posInMs);
}

void QGstreamerMediaPlayer::setPosition(std::chrono::milliseconds pos)
{
    using namespace std::chrono;

    if (isCustomSource()) {
        static std::once_flag flag;
        std::call_once(flag, [] {
            // CAVEAT: unsynchronised with pipeline state. Potentially prone to race conditions
            qWarning() << "setPosition with custom gstreamer pipelines can cause pipeline hangs. "
                          "Use with care";
        });

        customPipeline.setPosition(pos);
        return;
    } else {
        qCDebug(qLcMediaPlayer) << "gst_play_seek" << pos;
        gst_play_seek(m_gstPlay.get(), nanoseconds(pos).count());

        if (mediaStatus() == QMediaPlayer::EndOfMedia)
            mediaStatusChanged(QMediaPlayer::LoadedMedia);
    }
    positionChanged(pos);
}

void QGstreamerMediaPlayer::play()
{
    if (isCustomSource()) {
        gstVideoOutput->setActive(true);
        customPipeline.setState(GST_STATE_PLAYING);
        stateChanged(QMediaPlayer::PlayingState);
        return;
    }

    QMediaPlayer::PlaybackState currentState = state();
    if (currentState == QMediaPlayer::PlayingState || !hasValidMedia())
        return;

    if (currentState != QMediaPlayer::PausedState)
        resetCurrentLoop();

    if (mediaStatus() == QMediaPlayer::EndOfMedia) {
        positionChanged(0);
        mediaStatusChanged(QMediaPlayer::LoadedMedia);
    }

    if (m_pendingSeek) {
        gst_play_seek(m_gstPlay.get(), m_pendingSeek->count());
        m_pendingSeek = std::nullopt;
    }

    qCDebug(qLcMediaPlayer) << "gst_play_play";
    gstVideoOutput->setActive(true);
    gst_play_play(m_gstPlay.get());
    stateChanged(QMediaPlayer::PlayingState);
}

void QGstreamerMediaPlayer::pause()
{
    if (isCustomSource()) {
        gstVideoOutput->setActive(true);
        customPipeline.setState(GST_STATE_PAUSED);
        stateChanged(QMediaPlayer::PausedState);
        return;
    }

    if (state() == QMediaPlayer::PausedState || !hasMedia()
        || m_resourceErrorState != ResourceErrorState::NoError)
        return;

    gstVideoOutput->setActive(true);

    qCDebug(qLcMediaPlayer) << "gst_play_pause";
    gst_play_pause(m_gstPlay.get());

    mediaStatusChanged(QMediaPlayer::BufferedMedia);
    stateChanged(QMediaPlayer::PausedState);
}

void QGstreamerMediaPlayer::stop()
{
    if (isCustomSource()) {
        customPipeline.setState(GST_STATE_READY);
        stateChanged(QMediaPlayer::StoppedState);
        gstVideoOutput->setActive(false);
        return;
    }

    using namespace std::chrono_literals;
    if (state() == QMediaPlayer::StoppedState) {
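        // Already stopped: only remember a seek back to the start; play() applies m_pendingSeek.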
        if (position() != 0) {
            m_pendingSeek = 0ms;
            positionChanged(0ms);
            mediaStatusChanged(QMediaPlayer::LoadedMedia);
        }
        return;
    }

    qCDebug(qLcMediaPlayer) << "gst_play_stop";
    gstVideoOutput->setActive(false);
    gst_play_stop(m_gstPlay.get());

    stateChanged(QMediaPlayer::StoppedState);

    mediaStatusChanged(QMediaPlayer::LoadedMedia);
    positionChanged(0ms);
}

const QGstPipeline &QGstreamerMediaPlayer::pipeline() const
{
    if (isCustomSource())
        return customPipeline;

    return m_playbin;
}

bool QGstreamerMediaPlayer::canPlayQrc() const
{
    return true;
}

QUrl QGstreamerMediaPlayer::media() const
{
    return m_url;
}

const QIODevice *QGstreamerMediaPlayer::mediaStream() const
{
    return m_stream;
}

void QGstreamerMediaPlayer::sourceSetupCallback([[maybe_unused]] GstElement *playbin,
                                                GstElement *source, QGstreamerMediaPlayer *)
{
    // gst_play thread

    const gchar *typeName = g_type_name_from_instance((GTypeInstance *)source);
    qCDebug(qLcMediaPlayer) << "Setting up source:" << typeName;

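    // RTSP tuning knobs; the defaults below can be overridden via environment variables,
    // e.g. QT_MEDIA_RTSP_LATENCY=200 QT_MEDIA_RTSP_DROP_ON_LATENCY=0 QT_MEDIA_RTSP_DO_RETRANSMISSION=1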
    if (typeName == std::string_view("GstRTSPSrc")) {
        QGstElement s(source, QGstElement::NeedsRef);
        int latency{40};
        bool ok{false};
        int v = qEnvironmentVariableIntValue("QT_MEDIA_RTSP_LATENCY", &ok);
        if (ok)
            latency = v;
        qCDebug(qLcMediaPlayer) << " -> setting source latency to:" << latency << "ms";
        s.set("latency", latency);

        bool drop{true};
        v = qEnvironmentVariableIntValue("QT_MEDIA_RTSP_DROP_ON_LATENCY", &ok);
        if (ok && v == 0)
            drop = false;
        qCDebug(qLcMediaPlayer) << " -> setting drop-on-latency to:" << drop;
        s.set("drop-on-latency", drop);

        bool retrans{false};
        v = qEnvironmentVariableIntValue("QT_MEDIA_RTSP_DO_RETRANSMISSION", &ok);
        if (ok && v != 0)
            retrans = true;
        qCDebug(qLcMediaPlayer) << " -> setting do-retransmission to:" << retrans;
        s.set("do-retransmission", retrans);
    }
}

void QGstreamerMediaPlayer::setMedia(const QUrl &content, QIODevice *stream)
{
    using namespace Qt::Literals;
    using namespace std::chrono;
    using namespace std::chrono_literals;

    if (customPipeline)
        cleanupCustomPipeline();

    m_resourceErrorState = ResourceErrorState::NoError;
    m_url = content;
    m_stream = stream;
    QUrl streamURL;
    if (stream)
        streamURL = qGstRegisterQIODevice(stream);

    if (content.isEmpty() && !stream) {
        mediaStatusChanged(QMediaPlayer::NoMedia);
        resetStateForEmptyOrInvalidMedia();
        return;
    }

    if (isCustomSource()) {
        setMediaCustomSource(content);
    } else {
        mediaStatusChanged(QMediaPlayer::LoadingMedia);
        const QUrl &playUrl = stream ? streamURL : content;

        // LATER: discover() is synchronous; it would be friendlier to make it asynchronous.
        bool mediaDiscovered = discover(playUrl);
        if (!mediaDiscovered) {
            m_resourceErrorState = ResourceErrorState::ErrorOccurred;
            error(QMediaPlayer::Error::ResourceError, u"Resource cannot be discovered"_s);
            mediaStatusChanged(QMediaPlayer::InvalidMedia);
            resetStateForEmptyOrInvalidMedia();
            return;
        }

        positionChanged(0ms);
    }
}

void QGstreamerMediaPlayer::setMediaCustomSource(const QUrl &content)
{
    using namespace Qt::Literals;
    using namespace std::chrono;
    using namespace std::chrono_literals;

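    // A custom source is described by a gst-launch style pipeline string, e.g. a source URL
    // such as "gst-pipeline: videotestsrc ! videoconvert" (illustrative only); the scheme is
    // stripped below and the remainder is parsed as a pipeline description.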
    {
        // FIXME: claim sinks
        // TODO: move ownership of sinks to gst_play after using them
        m_playbin.set("video-sink", QGstElement::createFromPipelineDescription("fakesink"));
        m_playbin.set("text-sink", QGstElement::createFromPipelineDescription("fakesink"));
        m_playbin.set("audio-sink", QGstElement::createFromPipelineDescription("fakesink"));

        if (gstVideoOutput->gstreamerVideoSink()) {
            if (QGstElement sink = gstVideoOutput->gstreamerVideoSink()->gstSink())
                sink.removeFromParent();
        }
    }

    customPipeline = QGstPipeline::create("customPipeline");
    customPipeline.installMessageFilter(this);
    positionUpdateTimer = std::make_unique<QTimer>();

    QObject::connect(positionUpdateTimer.get(), &QTimer::timeout, this, [this] {
        Q_ASSERT(customPipeline);
        auto position = customPipeline.position();

        positionChanged(round<milliseconds>(position));
    });

    positionUpdateTimer->start(100ms);

    QByteArray gstLaunchString =
            content.toString(QUrl::RemoveScheme | QUrl::PrettyDecoded).toLatin1();
    qCDebug(qLcMediaPlayer) << "generating" << gstLaunchString;
    QGstElement element = QGstElement::createFromPipelineDescription(gstLaunchString);
    if (!element) {
        emit error(QMediaPlayer::ResourceError, u"Could not create custom pipeline"_s);
        return;
    }

    decoder = element;
    customPipeline.add(decoder);

    QGstBin elementBin{
        qGstSafeCast<GstBin>(element.element()),
        QGstBin::NeedsRef,
    };
    if (elementBin) // bins are expected to provide unconnected src pads
        elementBin.addUnlinkedGhostPads(GstPadDirection::GST_PAD_SRC);

    // for all other elements
    padAdded = decoder.onPadAdded<&QGstreamerMediaPlayer::decoderPadAddedCustomSource>(this);
    padRemoved = decoder.onPadRemoved<&QGstreamerMediaPlayer::decoderPadRemovedCustomSource>(this);

    customPipeline.setStateSync(GstState::GST_STATE_PAUSED);

    auto srcPadVisitor = [](GstElement *element, GstPad *pad, void *self) -> gboolean {
        reinterpret_cast<QGstreamerMediaPlayer *>(self)->decoderPadAddedCustomSource(
                QGstElement{ element, QGstElement::NeedsRef }, QGstPad{ pad, QGstPad::NeedsRef });
        return true;
    };

    gst_element_foreach_pad(element.element(), srcPadVisitor, this);

    mediaStatusChanged(QMediaPlayer::LoadedMedia);

    customPipeline.dumpGraph("setMediaCustomPipeline");
}

void QGstreamerMediaPlayer::cleanupCustomPipeline()
{
    customPipeline.setStateSync(GST_STATE_NULL);
    customPipeline.removeMessageFilter(this);

    for (QGstElement &sink : customPipelineSinks)
        if (sink)
            customPipeline.remove(sink);

    positionUpdateTimer = {};
    customPipeline = {};
}

void QGstreamerMediaPlayer::setAudioOutput(QPlatformAudioOutput *output)
{
    if (isCustomSource()) {
        qWarning() << "QMediaPlayer::setAudioOutput not supported when using custom sources";
        return;
    }

    if (gstAudioOutput == output)
        return;

    auto *gstOutput = static_cast<QGstreamerAudioOutput *>(output);
    if (gstOutput)
        gstOutput->setAsync(true);

    gstAudioOutput = static_cast<QGstreamerAudioOutput *>(output);
    if (gstAudioOutput)
        m_playbin.set("audio-sink", gstAudioOutput->gstElement());
    else
        m_playbin.set("audio-sink", QGstElement::createFromPipelineDescription("fakesink"));
    updateAudioTrackEnabled();

    // FIXME: we need a gst_play API to change the sinks on the fly.
    // finishStateChange is a hack to avoid assertion failures in gstreamer
    m_playbin.finishStateChange();
}

QMediaMetaData QGstreamerMediaPlayer::metaData() const
{
    return m_metaData;
}

void QGstreamerMediaPlayer::setVideoSink(QVideoSink *sink)
{
    if (isCustomSource()) {
        qWarning() << "QMediaPlayer::setVideoSink not supported when using custom sources";
        return;
    }

    auto *gstSink = sink ? static_cast<QGstreamerVideoSink *>(sink->platformVideoSink()) : nullptr;
    if (gstSink)
        gstSink->setAsync(false);

    gstVideoOutput->setVideoSink(sink);
    updateVideoTrackEnabled();

    if (sink && state() == QMediaPlayer::PausedState) {
        // FIXME: we want to get the existing frame, but gst_play does not have such a capability.
        // Seeking to the current position is a rather bad hack, but it's the best we can do for now.
        seekToCurrentPosition();
    }
}

int QGstreamerMediaPlayer::trackCount(QPlatformMediaPlayer::TrackType type)
{
    QSpan<const QMediaMetaData> tracks = m_trackMetaData[type];
    return tracks.size();
}

QMediaMetaData QGstreamerMediaPlayer::trackMetaData(QPlatformMediaPlayer::TrackType type, int index)
{
    QSpan<const QMediaMetaData> tracks = m_trackMetaData[type];
    if (index < tracks.size())
        return tracks[index];
    return {};
}

int QGstreamerMediaPlayer::activeTrack(TrackType type)
{
    return m_activeTrack[type];
}

void QGstreamerMediaPlayer::setActiveTrack(TrackType type, int index)
{
    if (m_activeTrack[type] == index)
        return;

    int formerTrack = m_activeTrack[type];
    m_activeTrack[type] = index;

    switch (type) {
    case TrackType::VideoStream: {
        if (index != -1)
            gst_play_set_video_track(m_gstPlay.get(), index);
        updateVideoTrackEnabled();
        updateNativeSizeOnVideoOutput();
        break;
    }
    case TrackType::AudioStream: {
        if (index != -1)
            gst_play_set_audio_track(m_gstPlay.get(), index);
        updateAudioTrackEnabled();
        break;
    }
    case TrackType::SubtitleStream: {
        if (index != -1)
            gst_play_set_subtitle_track(m_gstPlay.get(), index);
        gst_play_set_subtitle_track_enabled(m_gstPlay.get(), index != -1);
        break;
    }
    default:
        Q_UNREACHABLE();
    }

    if (formerTrack != -1 && index != -1)
        // it can take several seconds for gstreamer to switch the track, so we seek to the
        // current position
        seekToCurrentPosition();
}

QT_END_NAMESPACE