1 | // Copyright (C) 2016 The Qt Company Ltd. |
2 | // SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only |
3 | |
4 | #include <qgstreamermediaplayer_p.h> |
5 | #include <qgstpipeline_p.h> |
6 | #include <qgstreamermetadata_p.h> |
7 | #include <qgstreamerformatinfo_p.h> |
8 | #include <qgstreameraudiooutput_p.h> |
9 | #include <qgstreamervideooutput_p.h> |
10 | #include <qgstreamervideosink_p.h> |
11 | #include "qgstreamermessage_p.h" |
12 | #include <qgstreameraudiodevice_p.h> |
13 | #include <qgstappsrc_p.h> |
14 | #include <qaudiodevice.h> |
15 | |
16 | #include <QtCore/qdir.h> |
17 | #include <QtCore/qsocketnotifier.h> |
18 | #include <QtCore/qurl.h> |
19 | #include <QtCore/qdebug.h> |
20 | #include <QtCore/qloggingcategory.h> |
21 | #include <QtNetwork/qnetworkaccessmanager.h> |
22 | #include <QtNetwork/qnetworkreply.h> |
23 | |
24 | #include <sys/types.h> |
25 | #include <sys/stat.h> |
26 | #include <fcntl.h> |
27 | |
28 | static Q_LOGGING_CATEGORY(qLcMediaPlayer, "qt.multimedia.player" ) |
29 | |
30 | QT_BEGIN_NAMESPACE |
31 | |
32 | QGstreamerMediaPlayer::TrackSelector::TrackSelector(TrackType type, QGstElement selector) |
33 | : selector(selector), type(type) |
34 | { |
35 | selector.set(property: "sync-streams" , b: true); |
36 | selector.set(property: "sync-mode" , i: 1 /*clock*/); |
37 | |
38 | if (type == SubtitleStream) |
39 | selector.set(property: "cache-buffers" , b: true); |
40 | } |
41 | |
42 | QGstPad QGstreamerMediaPlayer::TrackSelector::createInputPad() |
43 | { |
44 | auto pad = selector.getRequestPad(name: "sink_%u" ); |
45 | tracks.append(t: pad); |
46 | return pad; |
47 | } |
48 | |
49 | void QGstreamerMediaPlayer::TrackSelector::removeAllInputPads() |
50 | { |
51 | for (auto &pad : tracks) |
52 | selector.releaseRequestPad(pad); |
53 | tracks.clear(); |
54 | } |
55 | |
56 | void QGstreamerMediaPlayer::TrackSelector::removeInputPad(QGstPad pad) |
57 | { |
58 | selector.releaseRequestPad(pad); |
59 | tracks.removeOne(t: pad); |
60 | } |
61 | |
62 | QGstPad QGstreamerMediaPlayer::TrackSelector::inputPad(int index) |
63 | { |
64 | if (index >= 0 && index < tracks.count()) |
65 | return tracks[index]; |
66 | return {}; |
67 | } |
68 | |
69 | QGstreamerMediaPlayer::TrackSelector &QGstreamerMediaPlayer::trackSelector(TrackType type) |
70 | { |
71 | auto &ts = trackSelectors[type]; |
72 | Q_ASSERT(ts.type == type); |
73 | return ts; |
74 | } |
75 | |
76 | QMaybe<QPlatformMediaPlayer *> QGstreamerMediaPlayer::create(QMediaPlayer *parent) |
77 | { |
78 | auto videoOutput = QGstreamerVideoOutput::create(); |
79 | if (!videoOutput) |
80 | return videoOutput.error(); |
81 | |
82 | QGstElement decodebin("decodebin" , nullptr); |
83 | if (!decodebin) |
84 | return errorMessageCannotFindElement(element: "decodebin" ); |
85 | |
86 | QGstElement videoInputSelector("input-selector" , "videoInputSelector" ); |
87 | if (!videoInputSelector) |
88 | return errorMessageCannotFindElement(element: "input-selector" ); |
89 | |
90 | QGstElement audioInputSelector("input-selector" , "audioInputSelector" ); |
91 | if (!audioInputSelector) |
92 | return errorMessageCannotFindElement(element: "input-selector" ); |
93 | |
94 | QGstElement subTitleInputSelector("input-selector" , "subTitleInputSelector" ); |
95 | if (!subTitleInputSelector) |
96 | return errorMessageCannotFindElement(element: "input-selector" ); |
97 | |
98 | return new QGstreamerMediaPlayer(videoOutput.value(), decodebin, videoInputSelector, |
99 | audioInputSelector, subTitleInputSelector, parent); |
100 | } |
101 | |
// Takes ownership of the already-created elements; wires them into the
// (still source-less) playback pipeline. The decoder itself is created later,
// in setMedia(), once we know whether we play from a URL or a QIODevice.
QGstreamerMediaPlayer::QGstreamerMediaPlayer(QGstreamerVideoOutput *videoOutput,
                                             QGstElement decodebin,
                                             QGstElement videoInputSelector,
                                             QGstElement audioInputSelector,
                                             QGstElement subTitleInputSelector,
                                             QMediaPlayer *parent)
    : QObject(parent),
      QPlatformMediaPlayer(parent),
      trackSelectors{ { { VideoStream, videoInputSelector },
                        { AudioStream, audioInputSelector },
                        { SubtitleStream, subTitleInputSelector } } },
      playerPipeline("playerPipeline"),
      gstVideoOutput(videoOutput)
{
    playerPipeline.setFlushOnConfigChanges(true);

    gstVideoOutput->setParent(this);
    gstVideoOutput->setPipeline(playerPipeline);

    // One input-selector per track type lives in the pipeline permanently;
    // decoder pads get linked to them as streams appear.
    for (auto &ts : trackSelectors)
        playerPipeline.add(ts.selector);

    playerPipeline.setState(GST_STATE_NULL);
    // Register for both async bus messages and sync messages (the latter is
    // needed for GL context negotiation, see processSyncMessage()).
    playerPipeline.installMessageFilter(static_cast<QGstreamerBusMessageFilter *>(this));
    playerPipeline.installMessageFilter(static_cast<QGstreamerSyncMessageFilter *>(this));

    // Force the system clock so rate changes/seeks behave predictably.
    // NOTE(review): gst_system_clock_obtain() returns a new ref and
    // gst_pipeline_use_clock() takes its own; the obtained ref appears to be
    // dropped on the floor — confirm against GStreamer ownership rules.
    gst_pipeline_use_clock(playerPipeline.pipeline(), gst_system_clock_obtain());

    /* Taken from gstdicoverer.c:
     * This is ugly. We get the GType of decodebin so we can quickly detect
     * when a decodebin is added to uridecodebin so we can set the
     * post-stream-topology setting to TRUE */
    decodebinType = G_OBJECT_TYPE(decodebin.element());
    connect(&positionUpdateTimer, &QTimer::timeout, this, &QGstreamerMediaPlayer::updatePosition);
}
137 | |
QGstreamerMediaPlayer::~QGstreamerMediaPlayer()
{
    // Detach the message filters before tearing the pipeline down so no bus
    // callback fires into a half-destroyed object.
    playerPipeline.removeMessageFilter(static_cast<QGstreamerBusMessageFilter *>(this));
    playerPipeline.removeMessageFilter(static_cast<QGstreamerSyncMessageFilter *>(this));
    playerPipeline.setStateSync(GST_STATE_NULL);
    // topology is a copied GstStructure (see GST_MESSAGE_ELEMENT handling);
    // it must be freed explicitly.
    topology.free();
}
145 | |
146 | qint64 QGstreamerMediaPlayer::position() const |
147 | { |
148 | if (playerPipeline.isNull() || m_url.isEmpty()) |
149 | return 0; |
150 | |
151 | return playerPipeline.position()/1e6; |
152 | } |
153 | |
qint64 QGstreamerMediaPlayer::duration() const
{
    // Cached in milliseconds; updated on preroll and on
    // GST_MESSAGE_DURATION_CHANGED in processBusMessage().
    return m_duration;
}
158 | |
float QGstreamerMediaPlayer::bufferProgress() const
{
    // m_bufferProgress holds the GStreamer buffering percentage (0..100);
    // the platform API expects a 0..1 fraction.
    return m_bufferProgress/100.;
}
163 | |
164 | QMediaTimeRange QGstreamerMediaPlayer::availablePlaybackRanges() const |
165 | { |
166 | return QMediaTimeRange(); |
167 | } |
168 | |
qreal QGstreamerMediaPlayer::playbackRate() const
{
    // The rate is owned by the pipeline (applied through seeks); forward it.
    return playerPipeline.playbackRate();
}
173 | |
174 | void QGstreamerMediaPlayer::setPlaybackRate(qreal rate) |
175 | { |
176 | if (playerPipeline.setPlaybackRate(rate)) |
177 | playbackRateChanged(rate); |
178 | } |
179 | |
180 | void QGstreamerMediaPlayer::setPosition(qint64 pos) |
181 | { |
182 | qint64 currentPos = playerPipeline.position()/1e6; |
183 | if (pos == currentPos) |
184 | return; |
185 | playerPipeline.finishStateChange(); |
186 | playerPipeline.setPosition(pos*1e6); |
187 | qCDebug(qLcMediaPlayer) << Q_FUNC_INFO << pos << playerPipeline.position()/1e6; |
188 | if (mediaStatus() == QMediaPlayer::EndOfMedia) |
189 | mediaStatusChanged(status: QMediaPlayer::LoadedMedia); |
190 | positionChanged(position: pos); |
191 | } |
192 | |
void QGstreamerMediaPlayer::play()
{
    // Already playing, or nothing loaded: nothing to do.
    if (state() == QMediaPlayer::PlayingState || m_url.isEmpty())
        return;
    resetCurrentLoop();

    playerPipeline.setInStoppedState(false);
    // Restart from the beginning if playback previously ran to the end.
    if (mediaStatus() == QMediaPlayer::EndOfMedia) {
        playerPipeline.setPosition(0);
        updatePosition();
    }

    qCDebug(qLcMediaPlayer) << "play().";
    int ret = playerPipeline.setState(GST_STATE_PLAYING);
    if (m_requiresSeekOnPlay) {
        // Flushing the pipeline is required to get track changes
        // immediately, when they happen while paused.
        playerPipeline.flush();
        m_requiresSeekOnPlay = false;
    }
    if (ret == GST_STATE_CHANGE_FAILURE)
        qCDebug(qLcMediaPlayer) << "Unable to set the pipeline to the playing state.";
    if (mediaStatus() == QMediaPlayer::LoadedMedia)
        mediaStatusChanged(QMediaPlayer::BufferedMedia);
    emit stateChanged(QMediaPlayer::PlayingState);
    // Poll the pipeline position every 100 ms while playing.
    positionUpdateTimer.start(100);
}
220 | |
void QGstreamerMediaPlayer::pause()
{
    // Already paused, or nothing loaded: nothing to do.
    if (state() == QMediaPlayer::PausedState || m_url.isEmpty())
        return;

    positionUpdateTimer.stop();
    // Coming out of the stopped state requires a flush so the pipeline
    // picks up the rewound position.
    if (playerPipeline.inStoppedState()) {
        playerPipeline.setInStoppedState(false);
        playerPipeline.flush();
    }
    int ret = playerPipeline.setState(GST_STATE_PAUSED);
    if (ret == GST_STATE_CHANGE_FAILURE)
        qCDebug(qLcMediaPlayer) << "Unable to set the pipeline to the paused state.";
    // Pausing at EOS rewinds to the start and revives the media.
    if (mediaStatus() == QMediaPlayer::EndOfMedia) {
        playerPipeline.setPosition(0);
        mediaStatusChanged(QMediaPlayer::BufferedMedia);
    }
    updatePosition();
    emit stateChanged(QMediaPlayer::PausedState);
}
241 | |
242 | void QGstreamerMediaPlayer::stop() |
243 | { |
244 | if (state() == QMediaPlayer::StoppedState) |
245 | return; |
246 | stopOrEOS(eos: false); |
247 | } |
248 | |
void *QGstreamerMediaPlayer::nativePipeline()
{
    // Expose the underlying GstPipeline* for the native-interface API.
    return playerPipeline.pipeline();
}
253 | |
// Shared implementation for a user-initiated stop() (eos == false) and for
// hitting end-of-stream (eos == true). Only the former rewinds the pipeline;
// the status reported afterwards differs accordingly.
void QGstreamerMediaPlayer::stopOrEOS(bool eos)
{
    positionUpdateTimer.stop();
    playerPipeline.setInStoppedState(true);
    // "Stopped" is modelled as a paused pipeline; setStateSync blocks until
    // the state change completed.
    bool ret = playerPipeline.setStateSync(GST_STATE_PAUSED);
    if (!ret)
        qCDebug(qLcMediaPlayer) << "Unable to set the pipeline to the stopped state.";
    if (!eos)
        playerPipeline.setPosition(0);
    updatePosition();
    emit stateChanged(QMediaPlayer::StoppedState);
    mediaStatusChanged(eos ? QMediaPlayer::EndOfMedia : QMediaPlayer::LoadedMedia);
}
267 | |
// Asynchronous GStreamer bus handler. Returns true only when the message has
// been fully consumed and must not be forwarded to other filters; most cases
// deliberately fall through to "return false".
bool QGstreamerMediaPlayer::processBusMessage(const QGstreamerMessage &message)
{
    if (message.isNull())
        return false;

    // qCDebug(qLcMediaPlayer) << "received bus message from" << message.source().name() << message.type() << (message.type() == GST_MESSAGE_TAG);

    GstMessage* gm = message.rawMessage();
    switch (message.type()) {
    case GST_MESSAGE_TAG: {
        // #### This isn't ideal. We shouldn't catch stream specific tags here, rather the global ones
        GstTagList *tag_list;
        gst_message_parse_tag(gm, &tag_list);
        //qCDebug(qLcMediaPlayer) << "Got tags: " << message.source().name() << gst_tag_list_to_string(tag_list);
        // Merge the parsed tags into the accumulated metadata.
        // NOTE(review): gst_message_parse_tag() hands us a tag list the caller
        // must unref; it looks like it is leaked here — confirm against the
        // GStreamer docs and fromGstTagList()'s ownership contract.
        auto metaData = QGstreamerMetaData::fromGstTagList(tag_list);
        for (auto k : metaData.keys())
            m_metaData.insert(k, metaData.value(k));
        break;
    }
    case GST_MESSAGE_DURATION_CHANGED: {
        // Pipeline duration is in nanoseconds; we cache milliseconds.
        qint64 d = playerPipeline.duration()/1e6;
        qCDebug(qLcMediaPlayer) << "    duration changed message" << d;
        if (d != m_duration) {
            m_duration = d;
            emit durationChanged(duration());
        }
        return false;
    }
    case GST_MESSAGE_EOS:
        // Honor QMediaPlayer::loops before reporting EndOfMedia.
        if (doLoop()) {
            setPosition(0);
            break;
        }
        stopOrEOS(true);
        break;
    case GST_MESSAGE_BUFFERING: {
        qCDebug(qLcMediaPlayer) << "    buffering message";
        int progress = 0;
        gst_message_parse_buffering(gm, &progress);
        m_bufferProgress = progress;
        mediaStatusChanged(m_bufferProgress == 100 ? QMediaPlayer::BufferedMedia : QMediaPlayer::BufferingMedia);
        emit bufferProgressChanged(m_bufferProgress/100.);
        break;
    }
    case GST_MESSAGE_STATE_CHANGED: {
        // Only the top-level pipeline's state changes matter; every element
        // posts its own state-changed messages.
        if (message.source() != playerPipeline)
            return false;

        GstState    oldState;
        GstState    newState;
        GstState    pending;

        gst_message_parse_state_changed(gm, &oldState, &newState, &pending);
        qCDebug(qLcMediaPlayer) << "    state changed message" << oldState << newState << pending;

#ifdef DEBUG_PLAYBIN
        static QStringList states = {
                  QStringLiteral("GST_STATE_VOID_PENDING"),  QStringLiteral("GST_STATE_NULL"),
                  QStringLiteral("GST_STATE_READY"), QStringLiteral("GST_STATE_PAUSED"),
                  QStringLiteral("GST_STATE_PLAYING") };

        qCDebug(qLcMediaPlayer) << QStringLiteral("state changed: old: %1  new: %2  pending: %3") \
                .arg(states[oldState]) \
                .arg(states[newState]) \
                .arg(states[pending]);
#endif

        switch (newState) {
        case GST_STATE_VOID_PENDING:
        case GST_STATE_NULL:
        case GST_STATE_READY:
            break;
        case GST_STATE_PAUSED:
        {
            // Reaching PAUSED for the first time means preroll finished:
            // duration, streams and seekability are now known.
            if (prerolling) {
                qCDebug(qLcMediaPlayer) << "Preroll done, setting status to Loaded";
                prerolling = false;
                GST_DEBUG_BIN_TO_DOT_FILE(playerPipeline.bin(), GST_DEBUG_GRAPH_SHOW_ALL, "playerPipeline");

                qint64 d = playerPipeline.duration()/1e6;
                if (d != m_duration) {
                    m_duration = d;
                    qCDebug(qLcMediaPlayer) << "    duration changed" << d;
                    emit durationChanged(duration());
                }

                parseStreamsAndMetadata();

                emit tracksChanged();
                mediaStatusChanged(QMediaPlayer::LoadedMedia);

                // Ask the pipeline whether time-based seeking is supported.
                GstQuery *query = gst_query_new_seeking(GST_FORMAT_TIME);
                gboolean canSeek = false;
                if (gst_element_query(playerPipeline.element(), query)) {
                    gst_query_parse_seeking(query, NULL, &canSeek, nullptr, nullptr);
                    qCDebug(qLcMediaPlayer) << "    pipeline is seekable:" << canSeek;
                } else {
                    qCDebug(qLcMediaPlayer) << "    query for seekable failed.";
                }
                gst_query_unref(query);
                seekableChanged(canSeek);
            }

            break;
        }
        case GST_STATE_PLAYING:
            mediaStatusChanged(QMediaPlayer::BufferedMedia);
            break;
        }
        break;
    }
    case GST_MESSAGE_ERROR: {
        GError *err;
        gchar *debug;
        gst_message_parse_error(gm, &err, &debug);
        // Missing-codec errors map to FormatError; everything else is treated
        // as a resource problem.
        if (err->domain == GST_STREAM_ERROR && err->code == GST_STREAM_ERROR_CODEC_NOT_FOUND)
            emit error(QMediaPlayer::FormatError, tr("Cannot play stream of type: <unknown>"));
        else
            emit error(QMediaPlayer::ResourceError, QString::fromUtf8(err->message));
        playerPipeline.dumpGraph("error");
        mediaStatusChanged(QMediaPlayer::InvalidMedia);
        g_error_free(err);
        g_free(debug);
        break;
    }
    case GST_MESSAGE_WARNING: {
        GError *err;
        gchar *debug;
        gst_message_parse_warning (gm, &err, &debug);
        qCWarning(qLcMediaPlayer) << "Warning:" << QString::fromUtf8(err->message);
        playerPipeline.dumpGraph("warning");
        g_error_free (err);
        g_free (debug);
        break;
    }
    case GST_MESSAGE_INFO: {
        if (qLcMediaPlayer().isDebugEnabled()) {
            GError *err;
            gchar *debug;
            gst_message_parse_info (gm, &err, &debug);
            qCDebug(qLcMediaPlayer) << "Info:" << QString::fromUtf8(err->message);
            g_error_free (err);
            g_free (debug);
        }
        break;
    }
    case GST_MESSAGE_SEGMENT_START: {
        // Posted after a segment seek; its "position" field (ns) is the new
        // playback position.
        qCDebug(qLcMediaPlayer) << "    segment start message, updating position";
        QGstStructure structure(gst_message_get_structure(gm));
        auto p = structure["position"].toInt64();
        if (p) {
            qint64 position = (*p)/1000000;
            emit positionChanged(position);
        }
        break;
    }
    case GST_MESSAGE_ELEMENT: {
        // decodebin posts "stream-topology" (enabled via post-stream-topology);
        // keep a deep copy — the message owns the original structure.
        QGstStructure structure(gst_message_get_structure(gm));
        auto type = structure.name();
        if (type == "stream-topology") {
            topology.free();
            topology = structure.copy();
        }
        break;
    }

    default:
//        qCDebug(qLcMediaPlayer) << "    default message handler, doing nothing";

        break;
    }

    return false;
}
442 | |
// Synchronous bus handler (runs on the streaming thread). Only used to answer
// GL-display context requests from elements; everything else is ignored.
bool QGstreamerMediaPlayer::processSyncMessage(const QGstreamerMessage &message)
{
#if QT_CONFIG(gstreamer_gl)
    if (message.type() != GST_MESSAGE_NEED_CONTEXT)
        return false;
    const gchar *type = nullptr;
    gst_message_parse_context_type (message.rawMessage(), &type);
    // strcmp() == 0 means the requested context IS the GL display context.
    if (strcmp(type, GST_GL_DISPLAY_CONTEXT_TYPE))
        return false;
    if (!gstVideoOutput || !gstVideoOutput->gstreamerVideoSink())
        return false;
    auto *context = gstVideoOutput->gstreamerVideoSink()->gstGlDisplayContext();
    if (!context)
        return false;
    // Hand the sink's GL display context to the requesting element.
    gst_element_set_context(GST_ELEMENT(GST_MESSAGE_SRC(message.rawMessage())), context);
    playerPipeline.dumpGraph("need_context");
    // Consumed: don't forward this message to other filters.
    return true;
#else
    Q_UNUSED(message);
    return false;
#endif
}
465 | |
QUrl QGstreamerMediaPlayer::media() const
{
    // The URL last passed to setMedia(); empty when no media is set.
    return m_url;
}
470 | |
const QIODevice *QGstreamerMediaPlayer::mediaStream() const
{
    // The QIODevice last passed to setMedia(); null when playing from a URL.
    return m_stream;
}
475 | |
// Called whenever the decoder exposes a new source pad (i.e. a new elementary
// stream was found). Classifies the stream by caps, links it to the matching
// input-selector and activates it if it is the first track of its kind.
void QGstreamerMediaPlayer::decoderPadAdded(const QGstElement &src, const QGstPad &pad)
{
    if (src != decoder)
        return;

    auto caps = pad.currentCaps();
    auto type = caps.at(0).name();
    qCDebug(qLcMediaPlayer) << "Received new pad" << pad.name() << "from" << src.name() << "type" << type;
    qCDebug(qLcMediaPlayer) << "    " << caps.toString();

    // Map the caps media type onto our track categories.
    TrackType streamType = NTrackTypes;
    if (type.startsWith("video/x-raw")) {
        streamType = VideoStream;
    } else if (type.startsWith("audio/x-raw")) {
        streamType = AudioStream;
    } else if (type.startsWith("text/")) {
        streamType = SubtitleStream;
    } else {
        qCWarning(qLcMediaPlayer) << "Ignoring unknown media stream:" << pad.name() << type;
        return;
    }

    auto &ts = trackSelector(streamType);
    QGstPad sinkPad = ts.createInputPad();
    if (!pad.link(sinkPad)) {
        qCWarning(qLcMediaPlayer) << "Failed to add track, cannot link pads";
        return;
    }
    qCDebug(qLcMediaPlayer) << "Adding track";

    // First audio/video track: connect the output chain and make it active.
    if (ts.trackCount() == 1) {
        if (streamType == VideoStream) {
            connectOutput(ts);
            ts.setActiveInputPad(sinkPad);
            emit videoAvailableChanged(true);
        }
        else if (streamType == AudioStream) {
            connectOutput(ts);
            ts.setActiveInputPad(sinkPad);
            emit audioAvailableChanged(true);
        }
    }

    // During preroll a single tracksChanged() is emitted once everything is
    // known (see processBusMessage); afterwards notify per pad.
    if (!prerolling)
        emit tracksChanged();

    // Remember which selector pad belongs to this decoder pad so
    // decoderPadRemoved() can undo the link.
    decoderOutputMap.insert(pad.name(), sinkPad);
}
524 | |
// Called when the decoder removes a source pad. Releases the corresponding
// selector request pad and, if that was the last track of its kind, tears the
// output chain down again.
void QGstreamerMediaPlayer::decoderPadRemoved(const QGstElement &src, const QGstPad &pad)
{
    if (src != decoder)
        return;

    qCDebug(qLcMediaPlayer) << "Removed pad" << pad.name() << "from" << src.name();
    auto track = decoderOutputMap.value(pad.name());
    if (track.isNull())
        return;

    // Find the selector this pad was linked to by walking our three selectors.
    auto ts = std::find_if(std::begin(trackSelectors), std::end(trackSelectors),
                           [&](TrackSelector &ts){ return ts.selector == track.parent(); });
    if (ts == std::end(trackSelectors))
        return;

    qCDebug(qLcMediaPlayer) << "   was linked to pad" << track.name() << "from" << ts->selector.name();
    ts->removeInputPad(track);

    if (ts->trackCount() == 0) {
        removeOutput(*ts);
        if (ts->type == AudioStream)
            audioAvailableChanged(false);
        else if (ts->type == VideoStream)
            videoAvailableChanged(false);
    }

    if (!prerolling)
        tracksChanged();
}
554 | |
555 | void QGstreamerMediaPlayer::removeAllOutputs() |
556 | { |
557 | for (auto &ts : trackSelectors) { |
558 | removeOutput(ts); |
559 | ts.removeAllInputPads(); |
560 | } |
561 | audioAvailableChanged(audioAvailable: false); |
562 | videoAvailableChanged(videoAvailable: false); |
563 | } |
564 | |
// Links the output chain (audio sink / video sink / subtitle overlay) for the
// given track type into the pipeline. Idempotent via ts.isConnected.
void QGstreamerMediaPlayer::connectOutput(TrackSelector &ts)
{
    if (ts.isConnected)
        return;

    QGstElement e;
    switch (ts.type) {
    case AudioStream:
        e = gstAudioOutput ? gstAudioOutput->gstElement() : QGstElement{};
        break;
    case VideoStream:
        e = gstVideoOutput ? gstVideoOutput->gstElement() : QGstElement{};
        break;
    case SubtitleStream:
        // Subtitles are handled by the video output directly; e stays null
        // on purpose so no extra element is added below.
        if (gstVideoOutput)
            gstVideoOutput->linkSubtitleStream(ts.selector);
        break;
    default:
        return;
    }

    if (!e.isNull()) {
        qCDebug(qLcMediaPlayer) << "connecting output for track type" << ts.type;
        playerPipeline.add(e);
        ts.selector.link(e);
        // Bring the freshly added element up to the (at least) paused state.
        e.setState(GST_STATE_PAUSED);
    }

    ts.isConnected = true;
}
595 | |
// Inverse of connectOutput(): unlinks and removes the output chain for the
// given track type. Idempotent via ts.isConnected.
void QGstreamerMediaPlayer::removeOutput(TrackSelector &ts)
{
    if (!ts.isConnected)
        return;

    QGstElement e;
    switch (ts.type) {
    case AudioStream:
        e = gstAudioOutput ? gstAudioOutput->gstElement() : QGstElement{};
        break;
    case VideoStream:
        e = gstVideoOutput ? gstVideoOutput->gstElement() : QGstElement{};
        break;
    case SubtitleStream:
        // Subtitles never added an element; just detach from the video output.
        if (gstVideoOutput)
            gstVideoOutput->unlinkSubtitleStream();
        break;
    default:
        break;
    }

    if (!e.isNull()) {
        qCDebug(qLcMediaPlayer) << "removing output for track type" << ts.type;
        playerPipeline.remove(e);
        // Synchronously drop the element to NULL so it releases its resources
        // before we lose the reference.
        e.setStateSync(GST_STATE_NULL);
    }

    ts.isConnected = false;
}
625 | |
626 | void QGstreamerMediaPlayer::uridecodebinElementAddedCallback(GstElement */*uridecodebin*/, GstElement *child, QGstreamerMediaPlayer *that) |
627 | { |
628 | QGstElement c(child); |
629 | qCDebug(qLcMediaPlayer) << "New element added to uridecodebin:" << c.name(); |
630 | |
631 | if (G_OBJECT_TYPE(child) == that->decodebinType) { |
632 | qCDebug(qLcMediaPlayer) << " -> setting post-stream-topology property" ; |
633 | c.set(property: "post-stream-topology" , b: true); |
634 | } |
635 | } |
636 | |
637 | void QGstreamerMediaPlayer::sourceSetupCallback(GstElement *uridecodebin, GstElement *source, QGstreamerMediaPlayer *that) |
638 | { |
639 | Q_UNUSED(uridecodebin) |
640 | Q_UNUSED(that) |
641 | |
642 | qCDebug(qLcMediaPlayer) << "Setting up source:" << g_type_name_from_instance(instance: (GTypeInstance*)source); |
643 | |
644 | if (QLatin1String("GstRTSPSrc" ) == QString::fromUtf8(utf8: g_type_name_from_instance(instance: (GTypeInstance*)source))) { |
645 | QGstElement s(source); |
646 | int latency{40}; |
647 | bool ok{false}; |
648 | int v = QString::fromLocal8Bit(ba: qgetenv(varName: "QT_MEDIA_RTSP_LATENCY" )).toUInt(ok: &ok); |
649 | if (ok) |
650 | latency = v; |
651 | qCDebug(qLcMediaPlayer) << " -> setting source latency to:" << latency << "ms" ; |
652 | s.set(property: "latency" , i: latency); |
653 | |
654 | bool drop{true}; |
655 | v = QString::fromLocal8Bit(ba: qgetenv(varName: "QT_MEDIA_RTSP_DROP_ON_LATENCY" )).toUInt(ok: &ok); |
656 | if (ok && v == 0) |
657 | drop = false; |
658 | qCDebug(qLcMediaPlayer) << " -> setting drop-on-latency to:" << drop; |
659 | s.set(property: "drop-on-latency" , b: drop); |
660 | |
661 | bool retrans{false}; |
662 | v = QString::fromLocal8Bit(ba: qgetenv(varName: "QT_MEDIA_RTSP_DO_RETRANSMISSION" )).toUInt(ok: &ok); |
663 | if (ok && v not_eq 0) |
664 | retrans = true; |
665 | qCDebug(qLcMediaPlayer) << " -> setting do-retransmission to:" << retrans; |
666 | s.set(property: "do-retransmission" , b: retrans); |
667 | } |
668 | } |
669 | |
// Loads new media. Tears down the previous source/decoder, resets all cached
// state, then builds either an appsrc+decodebin chain (QIODevice playback) or
// a uridecodebin (URL playback) and starts preroll.
void QGstreamerMediaPlayer::setMedia(const QUrl &content, QIODevice *stream)
{
    qCDebug(qLcMediaPlayer) << Q_FUNC_INFO << "setting location to" << content;

    prerolling = true;

    bool ret = playerPipeline.setStateSync(GST_STATE_NULL);
    if (!ret)
        qCDebug(qLcMediaPlayer) << "Unable to set the pipeline to the stopped state.";

    m_url = content;
    m_stream = stream;

    // Remove the previous source/decoder elements and all connected outputs.
    if (!src.isNull())
        playerPipeline.remove(src);
    if (!decoder.isNull())
        playerPipeline.remove(decoder);
    src = QGstElement();
    decoder = QGstElement();
    removeAllOutputs();
    seekableChanged(false);
    playerPipeline.setInStoppedState(true);

    // Reset duration/position/metadata and notify watchers.
    if (m_duration != 0) {
        m_duration = 0;
        durationChanged(0);
    }
    stateChanged(QMediaPlayer::StoppedState);
    if (position() != 0)
        positionChanged(0);
    mediaStatusChanged(QMediaPlayer::NoMedia);
    if (!m_metaData.isEmpty()) {
        m_metaData.clear();
        metaDataChanged();
    }

    // Empty URL means "unload media"; we are done after the reset above.
    if (content.isEmpty())
        return;

    if (m_stream) {
        // QIODevice playback: feed the stream through an appsrc into decodebin.
        if (!m_appSrc) {
            auto maybeAppSrc = QGstAppSrc::create(this);
            if (maybeAppSrc) {
                m_appSrc = maybeAppSrc.value();
            } else {
                emit error(QMediaPlayer::ResourceError, maybeAppSrc.error());
                return;
            }
        }
        src = m_appSrc->element();
        decoder = QGstElement("decodebin", "decoder");
        if (!decoder) {
            emit error(QMediaPlayer::ResourceError, errorMessageCannotFindElement("decodebin"));
            return;
        }
        // Needed so decodebin posts the stream-topology element message.
        decoder.set("post-stream-topology", true);
        playerPipeline.add(src, decoder);
        src.link(decoder);

        m_appSrc->setup(m_stream);
        // Sequential devices (sockets, pipes) cannot seek.
        seekableChanged(!stream->isSequential());
    } else {
        // use uridecodebin
        decoder = QGstElement("uridecodebin", "uridecoder");
        if (!decoder) {
            emit error(QMediaPlayer::ResourceError, errorMessageCannotFindElement("uridecodebin"));
            return;
        }
        playerPipeline.add(decoder);
        // can't set post-stream-topology to true, as uridecodebin doesn't have the property.
        // Use a hack, and connect to the element-added signal, so we can set the property
        // directly on the decodebin.
        decoder.connect("element-added", GCallback(QGstreamerMediaPlayer::uridecodebinElementAddedCallback), this);
        decoder.connect("source-setup", GCallback(QGstreamerMediaPlayer::sourceSetupCallback), this);

        decoder.set("uri", content.toEncoded().constData());
        if (m_bufferProgress != 0) {
            m_bufferProgress = 0;
            emit bufferProgressChanged(0.);
        }
    }
    decoder.onPadAdded<&QGstreamerMediaPlayer::decoderPadAdded>(this);
    decoder.onPadRemoved<&QGstreamerMediaPlayer::decoderPadRemoved>(this);

    mediaStatusChanged(QMediaPlayer::LoadingMedia);

    if (state() == QMediaPlayer::PlayingState) {
            int ret = playerPipeline.setState(GST_STATE_PLAYING);
            if (ret == GST_STATE_CHANGE_FAILURE)
                qCWarning(qLcMediaPlayer) << "Unable to set the pipeline to the playing state.";
    } else {
        // Preroll in PAUSED even when stopped so tracks/duration become known.
        // NOTE(review): this branch checks "!ret" while the branch above checks
        // "== GST_STATE_CHANGE_FAILURE"; both behave the same only because
        // GST_STATE_CHANGE_FAILURE == 0 — consider unifying the style.
        int ret = playerPipeline.setState(GST_STATE_PAUSED);
        if (!ret)
            qCWarning(qLcMediaPlayer) << "Unable to set the pipeline to the paused state.";
    }

    playerPipeline.setPosition(0);
    positionChanged(0);
}
767 | |
// Swaps the audio output device. The element exchange happens inside a
// beginConfig()/endConfig() transaction so the pipeline is flushed and
// re-configured consistently.
void QGstreamerMediaPlayer::setAudioOutput(QPlatformAudioOutput *output)
{
    if (gstAudioOutput == output)
        return;

    auto &ts = trackSelector(AudioStream);

    playerPipeline.beginConfig();
    if (gstAudioOutput) {
        // Detach the old output chain before letting go of the object.
        removeOutput(ts);
        gstAudioOutput->setPipeline({});
    }
    gstAudioOutput = static_cast<QGstreamerAudioOutput *>(output);
    if (gstAudioOutput) {
        gstAudioOutput->setPipeline(playerPipeline);
        connectOutput(ts);
    }
    playerPipeline.endConfig();
}
787 | |
QMediaMetaData QGstreamerMediaPlayer::metaData() const
{
    // Accumulated from bus tag messages and the stream topology; see
    // processBusMessage() and parseStreamsAndMetadata().
    return m_metaData;
}
792 | |
void QGstreamerMediaPlayer::setVideoSink(QVideoSink *sink)
{
    // The video output object owns sink handling; just forward.
    gstVideoOutput->setVideoSink(sink);
}
797 | |
798 | static QGstStructure endOfChain(const QGstStructure &s) |
799 | { |
800 | QGstStructure e = s; |
801 | while (1) { |
802 | auto next = e["next" ].toStructure(); |
803 | if (!next.isNull()) |
804 | e = next; |
805 | else |
806 | break; |
807 | } |
808 | return e; |
809 | } |
810 | |
// Walks the cached stream-topology structure (captured from the bus in
// processBusMessage) and fills m_metaData: container format, global tags,
// and per-stream codec/framerate/resolution information.
void QGstreamerMediaPlayer::parseStreamsAndMetadata()
{
    qCDebug(qLcMediaPlayer) << "============== parse topology ============";
    if (topology.isNull()) {
        qCDebug(qLcMediaPlayer) << "    null topology";
        return;
    }
    // The topology root's caps describe the container.
    auto caps = topology["caps"].toCaps();
    auto structure = caps.at(0);
    auto fileFormat = QGstreamerFormatInfo::fileFormatForCaps(structure);
    qCDebug(qLcMediaPlayer) << caps.toString() << fileFormat;
    m_metaData.insert(QMediaMetaData::FileFormat, QVariant::fromValue(fileFormat));
    m_metaData.insert(QMediaMetaData::Duration, duration());
    m_metaData.insert(QMediaMetaData::Url, m_url);
    QGValue tags = topology["tags"];
    if (!tags.isNull()) {
        GstTagList *tagList = nullptr;
        // NOTE(review): gst_structure_get() with GST_TYPE_TAG_LIST returns a
        // copy the caller owns; it looks like tagList is never unref'd here —
        // confirm against the GStreamer docs.
        gst_structure_get(topology.structure, "tags", GST_TYPE_TAG_LIST, &tagList, nullptr);
        const auto metaData = QGstreamerMetaData::fromGstTagList(tagList);
        for (auto k : metaData.keys())
            m_metaData.insert(k, metaData.value(k));
    }

    // Skip over the demuxer chain; its "next" list holds the elementary streams.
    auto demux = endOfChain(topology);
    auto next = demux["next"];
    if (!next.isList()) {
        qCDebug(qLcMediaPlayer) << "    no additional streams";
        emit metaDataChanged();
        return;
    }

    // collect stream info
    int size = next.listSize();
    for (int i = 0; i < size; ++i) {
        auto val = next.at(i);
        caps = val.toStructure()["caps"].toCaps();
        structure = caps.at(0);
        if (structure.name().startsWith("audio/")) {
            auto codec = QGstreamerFormatInfo::audioCodecForCaps(structure);
            m_metaData.insert(QMediaMetaData::AudioCodec, QVariant::fromValue(codec));
            qCDebug(qLcMediaPlayer) << "    audio" << caps.toString() << (int)codec;
        } else if (structure.name().startsWith("video/")) {
            auto codec = QGstreamerFormatInfo::videoCodecForCaps(structure);
            m_metaData.insert(QMediaMetaData::VideoCodec, QVariant::fromValue(codec));
            qCDebug(qLcMediaPlayer) << "    video" << caps.toString() << (int)codec;
            auto framerate = structure["framerate"].getFraction();
            if (framerate)
                m_metaData.insert(QMediaMetaData::VideoFrameRate, *framerate);
            auto width = structure["width"].toInt();
            auto height = structure["height"].toInt();
            if (width && height)
                m_metaData.insert(QMediaMetaData::Resolution, QSize(*width, *height));
        }
    }

    // Debug-only: dump the tags attached to the active video pad.
    // NOTE(review): both the tag list from g_object_get() and the string from
    // gst_tag_list_to_string() appear to be leaked — debug path only.
    auto sinkPad = trackSelector(VideoStream).activeInputPad();
    if (!sinkPad.isNull()) {
        bool hasTags = g_object_class_find_property(G_OBJECT_GET_CLASS(sinkPad.object()), "tags") != NULL;

        GstTagList *tl = nullptr;
        g_object_get(sinkPad.object(), "tags", &tl, nullptr);
        qCDebug(qLcMediaPlayer) << "    tags=" << hasTags << (tl ? gst_tag_list_to_string(tl) : "(null)");
    }


    qCDebug(qLcMediaPlayer) << "============== end parse topology ============";
    emit metaDataChanged();
    playerPipeline.dumpGraph("playback");
}
880 | |
881 | int QGstreamerMediaPlayer::trackCount(QPlatformMediaPlayer::TrackType type) |
882 | { |
883 | return trackSelector(type).trackCount(); |
884 | } |
885 | |
886 | QMediaMetaData QGstreamerMediaPlayer::trackMetaData(QPlatformMediaPlayer::TrackType type, int index) |
887 | { |
888 | auto track = trackSelector(type).inputPad(index); |
889 | if (track.isNull()) |
890 | return {}; |
891 | |
892 | GstTagList *tagList = nullptr; |
893 | g_object_get(object: track.object(), first_property_name: "tags" , &tagList, nullptr); |
894 | |
895 | return tagList ? QGstreamerMetaData::fromGstTagList(tags: tagList) : QMediaMetaData{}; |
896 | } |
897 | |
898 | int QGstreamerMediaPlayer::activeTrack(TrackType type) |
899 | { |
900 | return trackSelector(type).activeInputIndex(); |
901 | } |
902 | |
// Activates track `index` of the given type; index == -1 disables the track
// type entirely (its output chain is removed).
void QGstreamerMediaPlayer::setActiveTrack(TrackType type, int index)
{
    auto &ts = trackSelector(type);
    auto track = ts.inputPad(index);
    // inputPad() returns null both for -1 (valid: "disable") and for genuinely
    // out-of-range indices; only the latter is an error.
    if (track.isNull() && index != -1) {
        qCWarning(qLcMediaPlayer) << "Attempt to set an incorrect index" << index
                                  << "for the track type" << type;
        return;
    }

    qCDebug(qLcMediaPlayer) << "Setting the index" << index << "for the track type" << type;
    // Discard cues queued for the previous subtitle track.
    if (type == QPlatformMediaPlayer::SubtitleStream)
        gstVideoOutput->flushSubtitles();

    // Swap the active pad (or drop the output) inside a config transaction.
    playerPipeline.beginConfig();
    if (track.isNull()) {
        removeOutput(ts);
    } else {
        ts.setActiveInputPad(track);
        connectOutput(ts);
    }
    playerPipeline.endConfig();

    // seek to force an immediate change of the stream
    if (playerPipeline.state() == GST_STATE_PLAYING)
        playerPipeline.flush();
    else
        m_requiresSeekOnPlay = true;
}
932 | |
933 | QT_END_NAMESPACE |
934 | |
935 | #include "moc_qgstreamermediaplayer_p.cpp" |
936 | |