// Copyright (C) 2016 The Qt Company Ltd.
// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only

#include <common/qgstreamermediaplayer_p.h>

#include <audio/qgstreameraudiodevice_p.h>
#include <common/qglist_helper_p.h>
#include <common/qgst_debug_p.h>
#include <common/qgst_discoverer_p.h>
#include <common/qgst_play_p.h>
#include <common/qgstpipeline_p.h>
#include <common/qgstreameraudiooutput_p.h>
#include <common/qgstreamermessage_p.h>
#include <common/qgstreamermetadata_p.h>
#include <common/qgstreamervideooutput_p.h>
#include <common/qgstreamervideosink_p.h>
#include <uri_handler/qgstreamer_qiodevice_handler_p.h>
#include <qgstreamerformatinfo_p.h>

#include <QtMultimedia/qaudiodevice.h>
#include <QtCore/qdebug.h>
#include <QtCore/qiodevice.h>
#include <QtCore/qloggingcategory.h>
#include <QtCore/qthread.h>
#include <QtCore/qurl.h>
#include <QtCore/private/quniquehandle_p.h>

// NOLINTBEGIN(readability-convert-member-functions-to-static)

static Q_LOGGING_CATEGORY(qLcMediaPlayer, "qt.multimedia.player")

QT_BEGIN_NAMESPACE

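// Synchronously probes the media with QGstDiscoverer and, on success, populates the cached
// duration, seekability, per-track metadata and native video sizes before playback starts.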
bool QGstreamerMediaPlayer::discover(const QUrl &url)
{
    QGst::QGstDiscoverer discoverer;

    using namespace std::chrono;
    using namespace std::chrono_literals;

    auto discoveryResult = discoverer.discover(url);
    if (discoveryResult) {
        m_trackMetaData.fill({});
        seekableChanged(discoveryResult->isSeekable);
        if (discoveryResult->duration)
            m_duration = round<milliseconds>(*discoveryResult->duration);
        else
            m_duration = 0ms;
        durationChanged(m_duration);

        m_metaData = QGst::toContainerMetadata(*discoveryResult);

        videoAvailableChanged(!discoveryResult->videoStreams.empty());
        audioAvailableChanged(!discoveryResult->audioStreams.empty());

        m_nativeSize.clear();
        for (const auto &videoInfo : discoveryResult->videoStreams) {
            m_trackMetaData[0].emplace_back(QGst::toStreamMetadata(videoInfo));
            QGstStructureView structure = videoInfo.caps.at(0);
            m_nativeSize.emplace_back(structure.nativeSize());
        }
        for (const auto &audioInfo : discoveryResult->audioStreams)
            m_trackMetaData[1].emplace_back(QGst::toStreamMetadata(audioInfo));
        for (const auto &subtitleInfo : discoveryResult->subtitleStreams)
            m_trackMetaData[2].emplace_back(QGst::toStreamMetadata(subtitleInfo));

        using Key = QMediaMetaData::Key;
        auto copyKeysToRootMetadata = [&](const QMediaMetaData &reference, QSpan<const Key> keys) {
            for (QMediaMetaData::Key key : keys) {
                QVariant referenceValue = reference.value(key);
                if (referenceValue.isValid())
                    m_metaData.insert(key, referenceValue);
            }
        };

        // FIXME: we duplicate some metadata for the first audio / video track.
        // In the future we will want to use e.g. the currently selected track.
        if (!m_trackMetaData[0].empty())
            copyKeysToRootMetadata(m_trackMetaData[0].front(),
                                   {
                                           Key::HasHdrContent,
                                           Key::Orientation,
                                           Key::Resolution,
                                           Key::VideoBitRate,
                                           Key::VideoCodec,
                                           Key::VideoFrameRate,
                                   });

        if (!m_trackMetaData[1].empty())
            copyKeysToRootMetadata(m_trackMetaData[1].front(),
                                   {
                                           Key::AudioBitRate,
                                           Key::AudioCodec,
                                   });

        if (!m_url.isEmpty())
            m_metaData.insert(QMediaMetaData::Key::Url, m_url);

        qCDebug(qLcMediaPlayer) << "metadata:" << m_metaData;
        qCDebug(qLcMediaPlayer) << "video metadata:" << m_trackMetaData[0];
        qCDebug(qLcMediaPlayer) << "audio metadata:" << m_trackMetaData[1];
        qCDebug(qLcMediaPlayer) << "subtitle metadata:" << m_trackMetaData[2];

        metaDataChanged();
        tracksChanged();
        m_activeTrack = {
            isVideoAvailable() ? 0 : -1,
            isAudioAvailable() ? 0 : -1,
            -1,
        };
        updateVideoTrackEnabled();
        updateAudioTrackEnabled();
        updateNativeSizeOnVideoOutput();
    }

    return bool(discoveryResult);
}

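// Drops all cached media information and emits the metadata/track change signals only for
// state that was actually populated, so observers are not notified redundantly.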
void QGstreamerMediaPlayer::resetStateForEmptyOrInvalidMedia()
{
    using namespace std::chrono_literals;
    m_nativeSize.clear();

    bool metadataNeedsSignal = !m_metaData.isEmpty();
    bool tracksNeedsSignal =
            std::any_of(m_trackMetaData.begin(), m_trackMetaData.end(), [](const auto &container) {
                return !container.empty();
            });

    m_metaData.clear();
    m_trackMetaData.fill({});
    m_duration = 0ms;
    seekableChanged(false);

    videoAvailableChanged(false);
    audioAvailableChanged(false);

    m_activeTrack.fill(-1);

    if (metadataNeedsSignal)
        metaDataChanged();
    if (tracksNeedsSignal)
        tracksChanged();
}

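// Pushes the active video track's native size and orientation metadata down to the video
// output; without an active video track the native size is reset to an empty QSize.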
void QGstreamerMediaPlayer::updateNativeSizeOnVideoOutput()
{
    int activeVideoTrack = activeTrack(TrackType::VideoStream);
    bool hasVideoTrack = activeVideoTrack != -1;

    QSize nativeSize = hasVideoTrack ? m_nativeSize[activeVideoTrack] : QSize{};

    QVariant orientation = hasVideoTrack
            ? m_trackMetaData[TrackType::VideoStream][activeVideoTrack].value(
                      QMediaMetaData::Key::Orientation)
            : QVariant{};

    if (orientation.isValid()) {
        auto rotation = orientation.value<QtVideo::Rotation>();
        gstVideoOutput->setRotation(rotation);
    }
    gstVideoOutput->setNativeSize(nativeSize);
}

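// Seeks to the current playback position; used as a nudge to make gst_play refresh its output
// after the video sink or the selected track changes (see setVideoSink() and setActiveTrack()).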
void QGstreamerMediaPlayer::seekToCurrentPosition()
{
    gst_play_seek(m_gstPlay.get(), gst_play_get_position(m_gstPlay.get()));
}

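// Video decoding in gst_play stays enabled only while there is both an active video track and
// a video sink attached; the Qt-side video output is toggled on the track alone.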
void QGstreamerMediaPlayer::updateVideoTrackEnabled()
{
    bool hasTrack = m_activeTrack[TrackType::VideoStream] != -1;
    bool hasSink = gstVideoOutput->gstreamerVideoSink() != nullptr;

    gstVideoOutput->setActive(hasTrack);
    gst_play_set_video_track_enabled(m_gstPlay.get(), hasTrack && hasSink);
}

void QGstreamerMediaPlayer::updateAudioTrackEnabled()
{
    bool hasTrack = m_activeTrack[TrackType::AudioStream] != -1;
    bool hasAudioOut = gstAudioOutput;

    gst_play_set_audio_track_enabled(m_gstPlay.get(), hasTrack && hasAudioOut);
}

void QGstreamerMediaPlayer::updateBufferProgress(float newProgress)
{
    if (qFuzzyIsNull(newProgress - m_bufferProgress))
        return;

    m_bufferProgress = newProgress;
    bufferProgressChanged(m_bufferProgress);
}

void QGstreamerMediaPlayer::disconnectDecoderHandlers()
{
    auto handlers = std::initializer_list<QGObjectHandlerScopedConnection *>{ &sourceSetup };
    for (QGObjectHandlerScopedConnection *handler : handlers)
        handler->disconnect();
}

QMaybe<QPlatformMediaPlayer *> QGstreamerMediaPlayer::create(QMediaPlayer *parent)
{
    auto videoOutput = QGstreamerVideoOutput::create();
    if (!videoOutput)
        return videoOutput.error();

    return new QGstreamerMediaPlayer(videoOutput.value(), parent);
}

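// Forwarding helper for gst_play_config_set_seek_accurate(); keeping the first argument type
// generic lets the constructor below call it with either a GstStructure * or a GstPlay *,
// depending on which gst_play variant is installed (see the QTBUG-131300 workaround).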
template <typename T>
void setSeekAccurate(T *config, gboolean accurate)
{
    gst_play_config_set_seek_accurate(config, accurate);
}

QGstreamerMediaPlayer::QGstreamerMediaPlayer(QGstreamerVideoOutput *videoOutput,
                                             QMediaPlayer *parent)
    : QObject(parent),
      QPlatformMediaPlayer(parent),
      gstVideoOutput(videoOutput),
      m_gstPlay{
          gst_play_new(nullptr),
      },
      m_playbin{
          GST_PIPELINE_CAST(gst_play_get_pipeline(m_gstPlay.get())),
          QGstPipeline::HasRef,
      },
      m_gstPlayBus{
          QGstBusHandle{ gst_play_get_message_bus(m_gstPlay.get()), QGstBusHandle::HasRef },
      }
{
#if 1
    // LATER: remove this hack after meta-freescale decides not to pull in outdated APIs

    // QTBUG-131300: nxp deliberately reverted to an old gst-play API before the gst-play API
    // stabilized. compare:
    // https://github.com/nxp-imx/gst-plugins-bad/commit/ff04fa9ca1b79c98e836d8cdb26ac3502dafba41
    constexpr bool useNxpWorkaround = std::is_same_v<decltype(&gst_play_config_set_seek_accurate),
                                                     void (*)(GstPlay *, gboolean)>;

    QUniqueGstStructureHandle config{
        gst_play_get_config(m_gstPlay.get()),
    };

    if constexpr (useNxpWorkaround)
        setSeekAccurate(m_gstPlay.get(), true);
    else
        setSeekAccurate(config.get(), true);

    gst_play_set_config(m_gstPlay.get(), config.release());
#else
    QUniqueGstStructureHandle config{
        gst_play_get_config(m_gstPlay.get()),
    };
    gst_play_config_set_seek_accurate(config.get(), true);
    gst_play_set_config(m_gstPlay.get(), config.release());
#endif

    gstVideoOutput->setParent(this);

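    // Wire the playbin sinks to the Qt-side outputs. The audio sink starts out as a fakesink
    // placeholder until setAudioOutput() installs a real output.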
    m_playbin.set("video-sink", gstVideoOutput->gstElement());
    m_playbin.set("text-sink", gstVideoOutput->gstSubtitleElement());

    m_playbin.set("audio-sink", QGstElement::createFromPipelineDescription("fakesink"));

    m_gstPlayBus.installMessageFilter(this);

    // we start without subtitles
    gst_play_set_subtitle_track_enabled(m_gstPlay.get(), false);

    sourceSetup = m_playbin.connect("source-setup", GCallback(sourceSetupCallback), this);

    m_activeTrack.fill(-1);

    // TODO: how to detect stalled media?
}

QGstreamerMediaPlayer::~QGstreamerMediaPlayer()
{
    m_gstPlayBus.removeMessageFilter(static_cast<QGstreamerBusMessageFilter *>(this));
    gst_bus_set_flushing(m_gstPlayBus.get(), TRUE);
    gst_play_stop(m_gstPlay.get());

    // NOTE: gst_play_stop is not sufficient, un-reffing m_gstPlay can deadlock
    m_playbin.setStateSync(GST_STATE_NULL);

    m_playbin.set("video-sink", QGstElement::createFromPipelineDescription("fakesink"));
    m_playbin.set("text-sink", QGstElement::createFromPipelineDescription("fakesink"));
    m_playbin.set("audio-sink", QGstElement::createFromPipelineDescription("fakesink"));
}

void QGstreamerMediaPlayer::updatePositionFromPipeline()
{
    using namespace std::chrono;

    positionChanged(round<milliseconds>(nanoseconds{
            gst_play_get_position(m_gstPlay.get()),
    }));
}

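// Bus handler for the gst_play message bus: only GST_MESSAGE_APPLICATION messages that wrap a
// gst_play message are processed, everything else is merely logged.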
bool QGstreamerMediaPlayer::processBusMessage(const QGstreamerMessage &message)
{
    switch (message.type()) {
    case GST_MESSAGE_APPLICATION:
        if (gst_play_is_play_message(message.message()))
            return processBusMessageApplication(message);
        return false;

    default:
        qCDebug(qLcMediaPlayer) << message;

        return false;
    }

    return false;
}

bool QGstreamerMediaPlayer::processBusMessageApplication(const QGstreamerMessage &message)
{
    using namespace std::chrono;
    GstPlayMessage type;
    gst_play_message_parse_type(message.message(), &type);
    qCDebug(qLcMediaPlayer) << QGstPlayMessageAdaptor{ message };

    switch (type) {
    case GST_PLAY_MESSAGE_URI_LOADED: {
        mediaStatusChanged(QMediaPlayer::LoadedMedia);
        return false;
    }

    case GST_PLAY_MESSAGE_POSITION_UPDATED: {
        if (state() == QMediaPlayer::PlaybackState::PlayingState) {

            constexpr bool usePayload = false;
            if constexpr (usePayload) {
                GstClockTime position;
                gst_play_message_parse_position_updated(message.message(), &position);
                positionChanged(round<milliseconds>(nanoseconds{ position }));
            } else {
                GstClockTime position = gst_play_get_position(m_gstPlay.get());
                positionChanged(round<milliseconds>(nanoseconds{ position }));
            }
        }
        return false;
    }
    case GST_PLAY_MESSAGE_DURATION_CHANGED: {
        GstClockTime duration;
        gst_play_message_parse_duration_updated(message.message(), &duration);
        milliseconds durationInMs = round<milliseconds>(nanoseconds{ duration });
        durationChanged(durationInMs);

        m_metaData.insert(QMediaMetaData::Duration, int(durationInMs.count()));
        metaDataChanged();

        return false;
    }
    case GST_PLAY_MESSAGE_BUFFERING: {
        guint percent;
        gst_play_message_parse_buffering_percent(message.message(), &percent);
        updateBufferProgress(percent * 0.01f);
        return false;
    }
    case GST_PLAY_MESSAGE_STATE_CHANGED: {
        GstPlayState state;
        gst_play_message_parse_state_changed(message.message(), &state);

        switch (state) {
        case GstPlayState::GST_PLAY_STATE_STOPPED:
            if (stateChangeToSkip) {
                qCDebug(qLcMediaPlayer) << " skipping StoppedState transition";

                stateChangeToSkip -= 1;
                return false;
            }
            stateChanged(QMediaPlayer::StoppedState);
            updateBufferProgress(0);
            return false;

        case GstPlayState::GST_PLAY_STATE_PAUSED:
            stateChanged(QMediaPlayer::PausedState);
            mediaStatusChanged(QMediaPlayer::BufferedMedia);
            gstVideoOutput->setActive(true);
            updateBufferProgress(1);
            return false;
        case GstPlayState::GST_PLAY_STATE_BUFFERING:
            mediaStatusChanged(QMediaPlayer::BufferingMedia);
            return false;
        case GstPlayState::GST_PLAY_STATE_PLAYING:
            stateChanged(QMediaPlayer::PlayingState);
            mediaStatusChanged(QMediaPlayer::BufferedMedia);
            gstVideoOutput->setActive(true);
            updateBufferProgress(1);

            return false;
        default:
            return false;
        }
    }
    case GST_PLAY_MESSAGE_MEDIA_INFO_UPDATED: {
        using namespace QGstPlaySupport;

        QUniqueGstPlayMediaInfoHandle info{};
        gst_play_message_parse_media_info_updated(message.message(), &info);

        seekableChanged(gst_play_media_info_is_seekable(info.get()));

        const gchar *title = gst_play_media_info_get_title(info.get());
        m_metaData.insert(QMediaMetaData::Title, QString::fromUtf8(title));

        metaDataChanged();
        tracksChanged();

        return false;
    }
    case GST_PLAY_MESSAGE_END_OF_STREAM: {
        if (doLoop()) {
            positionChanged(m_duration);
            qCDebug(qLcMediaPlayer) << "EOS: restarting loop";
            gst_play_play(m_gstPlay.get());
            positionChanged(0ms);

            // we will still get a GST_PLAY_MESSAGE_STATE_CHANGED message, which we will just
            // ignore for now
            stateChangeToSkip += 1;
        } else {
            qCDebug(qLcMediaPlayer) << "EOS: done";
            positionChanged(m_duration);
            mediaStatusChanged(QMediaPlayer::EndOfMedia);
            stateChanged(QMediaPlayer::StoppedState);
            gstVideoOutput->setActive(false);
        }

        return false;
    }
    case GST_PLAY_MESSAGE_ERROR:
    case GST_PLAY_MESSAGE_WARNING:
    case GST_PLAY_MESSAGE_VIDEO_DIMENSIONS_CHANGED:
    case GST_PLAY_MESSAGE_VOLUME_CHANGED:
    case GST_PLAY_MESSAGE_MUTE_CHANGED:
    case GST_PLAY_MESSAGE_SEEK_DONE:
        return false;

    default:
        Q_UNREACHABLE_RETURN(false);
    }
}

qint64 QGstreamerMediaPlayer::duration() const
{
    return m_duration.count();
}

bool QGstreamerMediaPlayer::hasMedia() const
{
    return !m_url.isEmpty() || m_stream;
}

bool QGstreamerMediaPlayer::hasValidMedia() const
{
    if (!hasMedia())
        return false;

    switch (mediaStatus()) {
    case QMediaPlayer::MediaStatus::NoMedia:
    case QMediaPlayer::MediaStatus::InvalidMedia:
        return false;

    default:
        return true;
    }
}

float QGstreamerMediaPlayer::bufferProgress() const
{
    return m_bufferProgress;
}

QMediaTimeRange QGstreamerMediaPlayer::availablePlaybackRanges() const
{
    return QMediaTimeRange();
}

qreal QGstreamerMediaPlayer::playbackRate() const
{
    return gst_play_get_rate(m_gstPlay.get());
}

void QGstreamerMediaPlayer::setPlaybackRate(qreal rate)
{
    if (rate == playbackRate())
        return;

    qCDebug(qLcMediaPlayer) << "gst_play_set_rate" << rate;
    gst_play_set_rate(m_gstPlay.get(), rate);
    playbackRateChanged(rate);
}

void QGstreamerMediaPlayer::setPosition(qint64 pos)
{
    std::chrono::milliseconds posInMs{ pos };

    setPosition(posInMs);
}

void QGstreamerMediaPlayer::setPosition(std::chrono::milliseconds pos)
{
    using namespace std::chrono;

    qCDebug(qLcMediaPlayer) << "gst_play_seek" << pos;
    gst_play_seek(m_gstPlay.get(), nanoseconds(pos).count());

    if (mediaStatus() == QMediaPlayer::EndOfMedia)
        mediaStatusChanged(QMediaPlayer::LoadedMedia);
    positionChanged(pos);
}

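// Starting playback applies any seek that was recorded while stopped (see stop()) before the
// pipeline goes to playing, and resets the loop counter unless we are resuming from pause.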
void QGstreamerMediaPlayer::play()
{
    QMediaPlayer::PlaybackState currentState = state();
    if (currentState == QMediaPlayer::PlayingState || !hasValidMedia())
        return;

    if (currentState != QMediaPlayer::PausedState)
        resetCurrentLoop();

    if (mediaStatus() == QMediaPlayer::EndOfMedia) {
        positionChanged(0);
        mediaStatusChanged(QMediaPlayer::LoadedMedia);
    }

    if (m_pendingSeek) {
        gst_play_seek(m_gstPlay.get(), m_pendingSeek->count());
        m_pendingSeek = std::nullopt;
    }

    qCDebug(qLcMediaPlayer) << "gst_play_play";
    gstVideoOutput->setActive(true);
    gst_play_play(m_gstPlay.get());
    stateChanged(QMediaPlayer::PlayingState);
}

void QGstreamerMediaPlayer::pause()
{
    if (state() == QMediaPlayer::PausedState || !hasMedia()
        || m_resourceErrorState != ResourceErrorState::NoError)
        return;

    gstVideoOutput->setActive(true);

    qCDebug(qLcMediaPlayer) << "gst_play_pause";
    gst_play_pause(m_gstPlay.get());

    mediaStatusChanged(QMediaPlayer::BufferedMedia);
    stateChanged(QMediaPlayer::PausedState);
}

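// Calling stop() while already stopped only rewinds: the seek to 0 is remembered in
// m_pendingSeek and applied by the next play() call.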
void QGstreamerMediaPlayer::stop()
{
    using namespace std::chrono_literals;
    if (state() == QMediaPlayer::StoppedState) {
        if (position() != 0) {
            m_pendingSeek = 0ms;
            positionChanged(0ms);
            mediaStatusChanged(QMediaPlayer::LoadedMedia);
        }
        return;
    }

    qCDebug(qLcMediaPlayer) << "gst_play_stop";
    gstVideoOutput->setActive(false);
    gst_play_stop(m_gstPlay.get());

    stateChanged(QMediaPlayer::StoppedState);

    mediaStatusChanged(QMediaPlayer::LoadedMedia);
    positionChanged(0ms);
}

const QGstPipeline &QGstreamerMediaPlayer::pipeline() const
{
    return m_playbin;
}

bool QGstreamerMediaPlayer::canPlayQrc() const
{
    return true;
}

QUrl QGstreamerMediaPlayer::media() const
{
    return m_url;
}

const QIODevice *QGstreamerMediaPlayer::mediaStream() const
{
    return m_stream;
}

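// For RTSP sources the latency, drop-on-latency and do-retransmission properties are tuned
// here; each default can be overridden through the QT_MEDIA_RTSP_LATENCY,
// QT_MEDIA_RTSP_DROP_ON_LATENCY and QT_MEDIA_RTSP_DO_RETRANSMISSION environment variables.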
void QGstreamerMediaPlayer::sourceSetupCallback([[maybe_unused]] GstElement *playbin,
                                                GstElement *source, QGstreamerMediaPlayer *)
{
    // gst_play thread

    const gchar *typeName = g_type_name_from_instance((GTypeInstance *)source);
    qCDebug(qLcMediaPlayer) << "Setting up source:" << typeName;

    if (typeName == std::string_view("GstRTSPSrc")) {
        QGstElement s(source, QGstElement::NeedsRef);
        int latency{40};
        bool ok{false};
        int v = qEnvironmentVariableIntValue("QT_MEDIA_RTSP_LATENCY", &ok);
        if (ok)
            latency = v;
        qCDebug(qLcMediaPlayer) << " -> setting source latency to:" << latency << "ms";
        s.set("latency", latency);

        bool drop{true};
        v = qEnvironmentVariableIntValue("QT_MEDIA_RTSP_DROP_ON_LATENCY", &ok);
        if (ok && v == 0)
            drop = false;
        qCDebug(qLcMediaPlayer) << " -> setting drop-on-latency to:" << drop;
        s.set("drop-on-latency", drop);

        bool retrans{false};
        v = qEnvironmentVariableIntValue("QT_MEDIA_RTSP_DO_RETRANSMISSION", &ok);
        if (ok && v != 0)
            retrans = true;
        qCDebug(qLcMediaPlayer) << " -> setting do-retransmission to:" << retrans;
        s.set("do-retransmission", retrans);
    }
}

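// setMedia flow: a QIODevice stream is first registered as a GStreamer URI, the media is then
// probed synchronously via discover(), and only on success is the URI handed to gst_play.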
void QGstreamerMediaPlayer::setMedia(const QUrl &content, QIODevice *stream)
{
    using namespace Qt::Literals;
    using namespace std::chrono;
    using namespace std::chrono_literals;

    m_resourceErrorState = ResourceErrorState::NoError;
    m_url = content;
    m_stream = stream;
    QUrl streamURL;
    if (stream)
        streamURL = qGstRegisterQIODevice(stream);

    const QUrl &playUrl = stream ? streamURL : content;

    if (content.isEmpty() && !stream) {
        mediaStatusChanged(QMediaPlayer::NoMedia);
        resetStateForEmptyOrInvalidMedia();
        return;
    }

    mediaStatusChanged(QMediaPlayer::LoadingMedia);

    // LATER: discover is synchronous; it would be friendlier to make it asynchronous.
    bool mediaDiscovered = discover(playUrl);
    if (!mediaDiscovered) {
        m_resourceErrorState = ResourceErrorState::ErrorOccurred;
        error(QMediaPlayer::Error::ResourceError, u"Resource cannot be discovered"_s);
        mediaStatusChanged(QMediaPlayer::InvalidMedia);
        resetStateForEmptyOrInvalidMedia();
        return;
    }

    positionChanged(0ms);

    gst_play_set_uri(m_gstPlay.get(), playUrl.toEncoded().constData());
}

void QGstreamerMediaPlayer::setAudioOutput(QPlatformAudioOutput *output)
{
    if (gstAudioOutput == output)
        return;

    auto *gstOutput = static_cast<QGstreamerAudioOutput *>(output);
    if (gstOutput)
        gstOutput->setAsync(true);

    gstAudioOutput = static_cast<QGstreamerAudioOutput *>(output);
    if (gstAudioOutput)
        m_playbin.set("audio-sink", gstAudioOutput->gstElement());
    else
        m_playbin.set("audio-sink", QGstElement::createFromPipelineDescription("fakesink"));
    updateAudioTrackEnabled();

    // FIXME: we need to have a gst_play API to change the sinks on the fly.
    // finishStateChange is a hack to avoid assertion failures in gstreamer
    m_playbin.finishStateChange();
}

QMediaMetaData QGstreamerMediaPlayer::metaData() const
{
    return m_metaData;
}

void QGstreamerMediaPlayer::setVideoSink(QVideoSink *sink)
{
    auto *gstSink = sink ? static_cast<QGstreamerVideoSink *>(sink->platformVideoSink()) : nullptr;
    if (gstSink)
        gstSink->setAsync(false);

    gstVideoOutput->setVideoSink(sink);
    updateVideoTrackEnabled();

    if (sink && state() == QMediaPlayer::PausedState) {
        // FIXME: we want to show the existing frame, but gst_play does not have such capabilities.
        // seeking to the current position is a rather bad hack, but it's the best we can do for now
        seekToCurrentPosition();
    }
}

int QGstreamerMediaPlayer::trackCount(QPlatformMediaPlayer::TrackType type)
{
    QSpan<const QMediaMetaData> tracks = m_trackMetaData[type];
    return tracks.size();
}

QMediaMetaData QGstreamerMediaPlayer::trackMetaData(QPlatformMediaPlayer::TrackType type, int index)
{
    QSpan<const QMediaMetaData> tracks = m_trackMetaData[type];
    if (index < tracks.size())
        return tracks[index];
    return {};
}

int QGstreamerMediaPlayer::activeTrack(TrackType type)
{
    return m_activeTrack[type];
}

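// An index of -1 deselects the given track type: video and audio are toggled through
// updateVideoTrackEnabled() / updateAudioTrackEnabled(), subtitles directly via
// gst_play_set_subtitle_track_enabled().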
void QGstreamerMediaPlayer::setActiveTrack(TrackType type, int index)
{
    if (m_activeTrack[type] == index)
        return;

    int formerTrack = m_activeTrack[type];
    m_activeTrack[type] = index;

    switch (type) {
    case TrackType::VideoStream: {
        if (index != -1)
            gst_play_set_video_track(m_gstPlay.get(), index);
        updateVideoTrackEnabled();
        updateNativeSizeOnVideoOutput();
        break;
    }
    case TrackType::AudioStream: {
        if (index != -1)
            gst_play_set_audio_track(m_gstPlay.get(), index);
        updateAudioTrackEnabled();
        break;
    }
    case TrackType::SubtitleStream: {
        if (index != -1)
            gst_play_set_subtitle_track(m_gstPlay.get(), index);
        gst_play_set_subtitle_track_enabled(m_gstPlay.get(), index != -1);
        break;
    }
    default:
        Q_UNREACHABLE();
    };

    if (formerTrack != -1 && index != -1)
        // it can take several seconds for GStreamer to switch the track, so we seek to the
        // current position
        seekToCurrentPosition();
}

QT_END_NAMESPACE
