1 | // Copyright (C) 2021 The Qt Company Ltd. |
2 | // SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only |
3 | #include "qffmpegplaybackengine_p.h" |
4 | |
5 | #include "qvideosink.h" |
6 | #include "qaudiooutput.h" |
7 | #include "private/qplatformaudiooutput_p.h" |
8 | #include "private/qplatformvideosink_p.h" |
9 | #include "private/qaudiobufferoutput_p.h" |
10 | #include "qiodevice.h" |
11 | #include "playbackengine/qffmpegdemuxer_p.h" |
12 | #include "playbackengine/qffmpegstreamdecoder_p.h" |
13 | #include "playbackengine/qffmpegsubtitlerenderer_p.h" |
14 | #include "playbackengine/qffmpegvideorenderer_p.h" |
15 | #include "playbackengine/qffmpegaudiorenderer_p.h" |
16 | |
17 | #include <qloggingcategory.h> |
18 | |
19 | QT_BEGIN_NAMESPACE |
20 | |
21 | namespace QFFmpeg { |
22 | |
23 | static Q_LOGGING_CATEGORY(qLcPlaybackEngine, "qt.multimedia.ffmpeg.playbackengine" ); |
24 | |
// Helper: on some compilers std::unique_ptr has no default constructor when
// sizeof(CustomDeleter) > 0, so every element is built from explicit empty braces.
template<typename Array>
inline Array defaultObjectsArray()
{
    using Element = typename Array::value_type;
    return { Element{ {}, {} }, Element{ {}, {} }, Element{ {}, {} } };
}
33 | |
// TODO: investigate what's better: profile and try network case
// Most likely, shouldPauseStreams = false is better because of:
// - packet and frame buffers are not big; their saturation is pretty fast.
// - after any pause a user has some preloaded buffers, so the playback is
//   supposed to be more stable in cases with a weak processor or bad internet.
// - the code is simpler, usage is more convenient.
//
static constexpr bool shouldPauseStreams = false;
42 | |
43 | PlaybackEngine::PlaybackEngine() |
44 | : m_demuxer({}, {}), |
45 | m_streams(defaultObjectsArray<decltype(m_streams)>()), |
46 | m_renderers(defaultObjectsArray<decltype(m_renderers)>()) |
47 | { |
48 | qCDebug(qLcPlaybackEngine) << "Create PlaybackEngine" ; |
49 | qRegisterMetaType<QFFmpeg::Packet>(); |
50 | qRegisterMetaType<QFFmpeg::Frame>(); |
51 | } |
52 | |
53 | PlaybackEngine::~PlaybackEngine() { |
54 | qCDebug(qLcPlaybackEngine) << "Delete PlaybackEngine" ; |
55 | |
56 | finalizeOutputs(); |
57 | forEachExistingObject(action: [](auto &object) { object.reset(); }); |
58 | deleteFreeThreads(); |
59 | } |
60 | |
61 | void PlaybackEngine::onRendererFinished() |
62 | { |
63 | auto isAtEnd = [this](auto trackType) { |
64 | return !m_renderers[trackType] || m_renderers[trackType]->isAtEnd(); |
65 | }; |
66 | |
67 | if (!isAtEnd(QPlatformMediaPlayer::VideoStream)) |
68 | return; |
69 | |
70 | if (!isAtEnd(QPlatformMediaPlayer::AudioStream)) |
71 | return; |
72 | |
73 | if (!isAtEnd(QPlatformMediaPlayer::SubtitleStream) && !hasMediaStream()) |
74 | return; |
75 | |
76 | if (std::exchange(obj&: m_state, new_val: QMediaPlayer::StoppedState) == QMediaPlayer::StoppedState) |
77 | return; |
78 | |
79 | finilizeTime(pos: duration()); |
80 | |
81 | forceUpdate(); |
82 | |
83 | qCDebug(qLcPlaybackEngine) << "Playback engine end of stream" ; |
84 | |
85 | emit endOfStream(); |
86 | } |
87 | |
88 | void PlaybackEngine::onRendererLoopChanged(quint64 id, qint64 offset, int loopIndex) |
89 | { |
90 | if (!hasRenderer(id)) |
91 | return; |
92 | |
93 | if (loopIndex > m_currentLoopOffset.index) { |
94 | m_currentLoopOffset = { .pos: offset, .index: loopIndex }; |
95 | emit loopChanged(); |
96 | } else if (loopIndex == m_currentLoopOffset.index && offset != m_currentLoopOffset.pos) { |
97 | qWarning() << "Unexpected offset for loop" << loopIndex << ":" << offset << "vs" |
98 | << m_currentLoopOffset.pos; |
99 | m_currentLoopOffset.pos = offset; |
100 | } |
101 | } |
102 | |
103 | void PlaybackEngine::onRendererSynchronized(quint64 id, std::chrono::steady_clock::time_point tp, |
104 | qint64 pos) |
105 | { |
106 | if (!hasRenderer(id)) |
107 | return; |
108 | |
109 | Q_ASSERT(m_renderers[QPlatformMediaPlayer::AudioStream] |
110 | && m_renderers[QPlatformMediaPlayer::AudioStream]->id() == id); |
111 | |
112 | m_timeController.sync(tp, pos); |
113 | |
114 | forEachExistingObject<Renderer>(action: [&](auto &renderer) { |
115 | if (id != renderer->id()) |
116 | renderer->syncSoft(tp, pos); |
117 | }); |
118 | } |
119 | |
120 | void PlaybackEngine::setState(QMediaPlayer::PlaybackState state) { |
121 | if (!m_media.avContext()) |
122 | return; |
123 | |
124 | if (state == m_state) |
125 | return; |
126 | |
127 | const auto prevState = std::exchange(obj&: m_state, new_val&: state); |
128 | |
129 | if (m_state == QMediaPlayer::StoppedState) { |
130 | finalizeOutputs(); |
131 | finilizeTime(pos: 0); |
132 | } |
133 | |
134 | if (prevState == QMediaPlayer::StoppedState || m_state == QMediaPlayer::StoppedState) |
135 | recreateObjects(); |
136 | |
137 | if (prevState == QMediaPlayer::StoppedState) |
138 | triggerStepIfNeeded(); |
139 | |
140 | updateObjectsPausedState(); |
141 | } |
142 | |
143 | void PlaybackEngine::updateObjectsPausedState() |
144 | { |
145 | const auto paused = m_state != QMediaPlayer::PlayingState; |
146 | m_timeController.setPaused(paused); |
147 | |
148 | forEachExistingObject(action: [&](auto &object) { |
149 | bool objectPaused = false; |
150 | |
151 | if constexpr (std::is_same_v<decltype(*object), Renderer &>) |
152 | objectPaused = paused; |
153 | else if constexpr (shouldPauseStreams) { |
154 | auto streamPaused = [](bool p, auto &r) { |
155 | const auto needMoreFrames = r && r->stepInProgress(); |
156 | return p && !needMoreFrames; |
157 | }; |
158 | |
159 | if constexpr (std::is_same_v<decltype(*object), StreamDecoder &>) |
160 | objectPaused = streamPaused(paused, renderer(object->trackType())); |
161 | else |
162 | objectPaused = std::accumulate(m_renderers.begin(), m_renderers.end(), paused, |
163 | streamPaused); |
164 | } |
165 | |
166 | object->setPaused(objectPaused); |
167 | }); |
168 | } |
169 | |
170 | void PlaybackEngine::ObjectDeleter::operator()(PlaybackEngineObject *object) const |
171 | { |
172 | Q_ASSERT(engine); |
173 | if (!std::exchange(obj&: engine->m_threadsDirty, new_val: true)) |
174 | QMetaObject::invokeMethod(object: engine, function: &PlaybackEngine::deleteFreeThreads, type: Qt::QueuedConnection); |
175 | |
176 | object->kill(); |
177 | } |
178 | |
179 | void PlaybackEngine::registerObject(PlaybackEngineObject &object) |
180 | { |
181 | connect(sender: &object, signal: &PlaybackEngineObject::error, context: this, slot: &PlaybackEngine::errorOccured); |
182 | |
183 | auto threadName = objectThreadName(object); |
184 | auto &thread = m_threads[threadName]; |
185 | if (!thread) { |
186 | thread = std::make_unique<QThread>(); |
187 | thread->setObjectName(threadName); |
188 | thread->start(); |
189 | } |
190 | |
191 | Q_ASSERT(object.thread() != thread.get()); |
192 | object.moveToThread(thread: thread.get()); |
193 | } |
194 | |
195 | PlaybackEngine::RendererPtr |
196 | PlaybackEngine::createRenderer(QPlatformMediaPlayer::TrackType trackType) |
197 | { |
198 | switch (trackType) { |
199 | case QPlatformMediaPlayer::VideoStream: |
200 | return m_videoSink ? createPlaybackEngineObject<VideoRenderer>( |
201 | args&: m_timeController, args&: m_videoSink, args: m_media.transformation()) |
202 | : RendererPtr{ {}, {} }; |
203 | case QPlatformMediaPlayer::AudioStream: |
204 | return m_audioOutput || m_audioBufferOutput |
205 | ? createPlaybackEngineObject<AudioRenderer>(args&: m_timeController, args&: m_audioOutput, args&: m_audioBufferOutput) |
206 | : RendererPtr{ {}, {} }; |
207 | case QPlatformMediaPlayer::SubtitleStream: |
208 | return m_videoSink |
209 | ? createPlaybackEngineObject<SubtitleRenderer>(args&: m_timeController, args&: m_videoSink) |
210 | : RendererPtr{ {}, {} }; |
211 | default: |
212 | return { {}, {} }; |
213 | } |
214 | } |
215 | |
216 | template<typename C, typename Action> |
217 | void PlaybackEngine::forEachExistingObject(Action &&action) |
218 | { |
219 | auto handleNotNullObject = [&](auto &object) { |
220 | if constexpr (std::is_base_of_v<C, std::remove_reference_t<decltype(*object)>>) |
221 | if (object) |
222 | action(object); |
223 | }; |
224 | |
225 | handleNotNullObject(m_demuxer); |
226 | std::for_each(m_streams.begin(), m_streams.end(), handleNotNullObject); |
227 | std::for_each(m_renderers.begin(), m_renderers.end(), handleNotNullObject); |
228 | } |
229 | |
230 | template<typename Action> |
231 | void PlaybackEngine::forEachExistingObject(Action &&action) |
232 | { |
233 | forEachExistingObject<PlaybackEngineObject>(std::forward<Action>(action)); |
234 | } |
235 | |
236 | void PlaybackEngine::seek(qint64 pos) |
237 | { |
238 | pos = boundPosition(position: pos); |
239 | |
240 | m_timeController.setPaused(true); |
241 | m_timeController.sync(trackPos: m_currentLoopOffset.pos + pos); |
242 | |
243 | forceUpdate(); |
244 | } |
245 | |
246 | void PlaybackEngine::setLoops(int loops) |
247 | { |
248 | if (!isSeekable()) { |
249 | qWarning() << "Cannot set loops for non-seekable source" ; |
250 | return; |
251 | } |
252 | |
253 | if (std::exchange(obj&: m_loops, new_val&: loops) == loops) |
254 | return; |
255 | |
256 | qCDebug(qLcPlaybackEngine) << "set playback engine loops:" << loops << "prev loops:" << m_loops |
257 | << "index:" << m_currentLoopOffset.index; |
258 | |
259 | if (m_demuxer) |
260 | m_demuxer->setLoops(loops); |
261 | } |
262 | |
263 | void PlaybackEngine::triggerStepIfNeeded() |
264 | { |
265 | if (m_state != QMediaPlayer::PausedState) |
266 | return; |
267 | |
268 | if (m_renderers[QPlatformMediaPlayer::VideoStream]) |
269 | m_renderers[QPlatformMediaPlayer::VideoStream]->doForceStep(); |
270 | |
271 | // TODO: maybe trigger SubtitleStream. |
272 | // If trigger it, we have to make seeking for the current subtitle frame more stable. |
273 | // Or set some timeout for seeking. |
274 | } |
275 | |
276 | QString PlaybackEngine::objectThreadName(const PlaybackEngineObject &object) |
277 | { |
278 | QString result = object.metaObject()->className(); |
279 | if (auto stream = qobject_cast<const StreamDecoder *>(object: &object)) |
280 | result += QString::number(stream->trackType()); |
281 | |
282 | return result; |
283 | } |
284 | |
285 | void PlaybackEngine::setPlaybackRate(float rate) { |
286 | if (rate == playbackRate()) |
287 | return; |
288 | |
289 | m_timeController.setPlaybackRate(rate); |
290 | forEachExistingObject<Renderer>(action: [rate](auto &renderer) { renderer->setPlaybackRate(rate); }); |
291 | } |
292 | |
293 | float PlaybackEngine::playbackRate() const { |
294 | return m_timeController.playbackRate(); |
295 | } |
296 | |
297 | void PlaybackEngine::recreateObjects() |
298 | { |
299 | m_timeController.setPaused(true); |
300 | |
301 | forEachExistingObject(action: [](auto &object) { object.reset(); }); |
302 | |
303 | createObjectsIfNeeded(); |
304 | } |
305 | |
306 | void PlaybackEngine::createObjectsIfNeeded() |
307 | { |
308 | if (m_state == QMediaPlayer::StoppedState || !m_media.avContext()) |
309 | return; |
310 | |
311 | for (int i = 0; i < QPlatformMediaPlayer::NTrackTypes; ++i) |
312 | createStreamAndRenderer(trackType: static_cast<QPlatformMediaPlayer::TrackType>(i)); |
313 | |
314 | createDemuxer(); |
315 | } |
316 | |
317 | void PlaybackEngine::forceUpdate() |
318 | { |
319 | recreateObjects(); |
320 | triggerStepIfNeeded(); |
321 | updateObjectsPausedState(); |
322 | } |
323 | |
324 | void PlaybackEngine::createStreamAndRenderer(QPlatformMediaPlayer::TrackType trackType) |
325 | { |
326 | auto codec = codecForTrack(trackType); |
327 | |
328 | auto &renderer = m_renderers[trackType]; |
329 | |
330 | if (!codec) |
331 | return; |
332 | |
333 | if (!renderer) { |
334 | renderer = createRenderer(trackType); |
335 | |
336 | if (!renderer) |
337 | return; |
338 | |
339 | connect(sender: renderer.get(), signal: &Renderer::synchronized, context: this, |
340 | slot: &PlaybackEngine::onRendererSynchronized); |
341 | |
342 | connect(sender: renderer.get(), signal: &Renderer::loopChanged, context: this, |
343 | slot: &PlaybackEngine::onRendererLoopChanged); |
344 | |
345 | if constexpr (shouldPauseStreams) |
346 | connect(sender: renderer.get(), signal: &Renderer::forceStepDone, context: this, |
347 | slot: &PlaybackEngine::updateObjectsPausedState); |
348 | |
349 | connect(sender: renderer.get(), signal: &PlaybackEngineObject::atEnd, context: this, |
350 | slot: &PlaybackEngine::onRendererFinished); |
351 | } |
352 | |
353 | auto &stream = m_streams[trackType] = |
354 | createPlaybackEngineObject<StreamDecoder>(args&: *codec, args: renderer->seekPosition()); |
355 | |
356 | Q_ASSERT(trackType == stream->trackType()); |
357 | |
358 | connect(sender: stream.get(), signal: &StreamDecoder::requestHandleFrame, context: renderer.get(), slot: &Renderer::render); |
359 | connect(sender: stream.get(), signal: &PlaybackEngineObject::atEnd, context: renderer.get(), |
360 | slot: &Renderer::onFinalFrameReceived); |
361 | connect(sender: renderer.get(), signal: &Renderer::frameProcessed, context: stream.get(), |
362 | slot: &StreamDecoder::onFrameProcessed); |
363 | } |
364 | |
365 | std::optional<Codec> PlaybackEngine::codecForTrack(QPlatformMediaPlayer::TrackType trackType) |
366 | { |
367 | const auto streamIndex = m_media.currentStreamIndex(trackType); |
368 | if (streamIndex < 0) |
369 | return {}; |
370 | |
371 | auto &result = m_codecs[trackType]; |
372 | |
373 | if (!result) { |
374 | qCDebug(qLcPlaybackEngine) |
375 | << "Create codec for stream:" << streamIndex << "trackType:" << trackType; |
376 | auto maybeCodec = |
377 | Codec::create(stream: m_media.avContext()->streams[streamIndex], formatContext: m_media.avContext()); |
378 | |
379 | if (!maybeCodec) { |
380 | emit errorOccured(QMediaPlayer::FormatError, |
381 | "Cannot create codec," + maybeCodec.error()); |
382 | return {}; |
383 | } |
384 | |
385 | result = maybeCodec.value(); |
386 | } |
387 | |
388 | return result; |
389 | } |
390 | |
391 | bool PlaybackEngine::hasMediaStream() const |
392 | { |
393 | return m_renderers[QPlatformMediaPlayer::AudioStream] |
394 | || m_renderers[QPlatformMediaPlayer::VideoStream]; |
395 | } |
396 | |
397 | void PlaybackEngine::createDemuxer() |
398 | { |
399 | std::array<int, QPlatformMediaPlayer::NTrackTypes> streamIndexes = { -1, -1, -1 }; |
400 | |
401 | bool hasStreams = false; |
402 | forEachExistingObject<StreamDecoder>(action: [&](auto &stream) { |
403 | hasStreams = true; |
404 | const auto trackType = stream->trackType(); |
405 | streamIndexes[trackType] = m_media.currentStreamIndex(trackType); |
406 | }); |
407 | |
408 | if (!hasStreams) |
409 | return; |
410 | |
411 | const PositionWithOffset positionWithOffset{ .pos: currentPosition(topPos: false), .offset: m_currentLoopOffset }; |
412 | |
413 | m_demuxer = createPlaybackEngineObject<Demuxer>(args: m_media.avContext(), args: positionWithOffset, |
414 | args&: streamIndexes, args&: m_loops); |
415 | |
416 | connect(sender: m_demuxer.get(), signal: &Demuxer::packetsBuffered, context: this, slot: &PlaybackEngine::buffered); |
417 | |
418 | forEachExistingObject<StreamDecoder>(action: [&](auto &stream) { |
419 | connect(m_demuxer.get(), Demuxer::signalByTrackType(trackType: stream->trackType()), stream.get(), |
420 | &StreamDecoder::decode); |
421 | connect(m_demuxer.get(), &PlaybackEngineObject::atEnd, stream.get(), |
422 | &StreamDecoder::onFinalPacketReceived); |
423 | connect(stream.get(), &StreamDecoder::packetProcessed, m_demuxer.get(), |
424 | &Demuxer::onPacketProcessed); |
425 | }); |
426 | |
427 | if (!isSeekable() || duration() <= 0) { |
428 | // We need initial synchronization for such streams |
429 | forEachExistingObject(action: [&](auto &object) { |
430 | using Type = std::remove_reference_t<decltype(*object)>; |
431 | if constexpr (!std::is_same_v<Type, Demuxer>) |
432 | connect(m_demuxer.get(), &Demuxer::firstPacketFound, object.get(), |
433 | &Type::setInitialPosition); |
434 | }); |
435 | |
436 | auto updateTimeController = [this](TimeController::TimePoint tp, qint64 pos) { |
437 | m_timeController.sync(tp, pos); |
438 | }; |
439 | |
440 | connect(sender: m_demuxer.get(), signal: &Demuxer::firstPacketFound, context: this, slot&: updateTimeController); |
441 | } |
442 | } |
443 | |
444 | void PlaybackEngine::deleteFreeThreads() { |
445 | m_threadsDirty = false; |
446 | auto freeThreads = std::move(m_threads); |
447 | |
448 | forEachExistingObject(action: [&](auto &object) { |
449 | m_threads.insert(freeThreads.extract(objectThreadName(object: *object))); |
450 | }); |
451 | |
452 | for (auto &[name, thr] : freeThreads) |
453 | thr->quit(); |
454 | |
455 | for (auto &[name, thr] : freeThreads) |
456 | thr->wait(); |
457 | } |
458 | |
459 | void PlaybackEngine::setMedia(MediaDataHolder media) |
460 | { |
461 | Q_ASSERT(!m_media.avContext()); // Playback engine does not support reloading media |
462 | Q_ASSERT(m_state == QMediaPlayer::StoppedState); |
463 | Q_ASSERT(m_threads.empty()); |
464 | |
465 | m_media = std::move(media); |
466 | updateVideoSinkSize(); |
467 | } |
468 | |
469 | void PlaybackEngine::setVideoSink(QVideoSink *sink) |
470 | { |
471 | auto prev = std::exchange(obj&: m_videoSink, new_val&: sink); |
472 | if (prev == sink) |
473 | return; |
474 | |
475 | updateVideoSinkSize(prevSink: prev); |
476 | updateActiveVideoOutput(sink); |
477 | |
478 | if (!sink || !prev) { |
479 | // might need some improvements |
480 | forceUpdate(); |
481 | } |
482 | } |
483 | |
484 | void PlaybackEngine::setAudioSink(QPlatformAudioOutput *output) { |
485 | setAudioSink(output ? output->q : nullptr); |
486 | } |
487 | |
488 | void PlaybackEngine::setAudioSink(QAudioOutput *output) |
489 | { |
490 | QAudioOutput *prev = std::exchange(obj&: m_audioOutput, new_val&: output); |
491 | if (prev == output) |
492 | return; |
493 | |
494 | updateActiveAudioOutput(output); |
495 | |
496 | if (!output || !prev) { |
497 | // might need some improvements |
498 | forceUpdate(); |
499 | } |
500 | } |
501 | |
502 | void PlaybackEngine::setAudioBufferOutput(QAudioBufferOutput *output) |
503 | { |
504 | QAudioBufferOutput *prev = std::exchange(obj&: m_audioBufferOutput, new_val&: output); |
505 | if (prev == output) |
506 | return; |
507 | updateActiveAudioOutput(output); |
508 | } |
509 | |
510 | qint64 PlaybackEngine::currentPosition(bool topPos) const { |
511 | std::optional<qint64> pos; |
512 | |
513 | for (size_t i = 0; i < m_renderers.size(); ++i) { |
514 | const auto &renderer = m_renderers[i]; |
515 | if (!renderer) |
516 | continue; |
517 | |
518 | // skip subtitle stream for finding lower rendering position |
519 | if (!topPos && i == QPlatformMediaPlayer::SubtitleStream && hasMediaStream()) |
520 | continue; |
521 | |
522 | const auto rendererPos = renderer->lastPosition(); |
523 | pos = !pos ? rendererPos |
524 | : topPos ? std::max(a: *pos, b: rendererPos) |
525 | : std::min(a: *pos, b: rendererPos); |
526 | } |
527 | |
528 | if (!pos) |
529 | pos = m_timeController.currentPosition(); |
530 | |
531 | return boundPosition(position: *pos - m_currentLoopOffset.pos); |
532 | } |
533 | |
534 | qint64 PlaybackEngine::duration() const |
535 | { |
536 | return m_media.duration(); |
537 | } |
538 | |
539 | bool PlaybackEngine::isSeekable() const { return m_media.isSeekable(); } |
540 | |
541 | const QList<MediaDataHolder::StreamInfo> & |
542 | PlaybackEngine::streamInfo(QPlatformMediaPlayer::TrackType trackType) const |
543 | { |
544 | return m_media.streamInfo(trackType); |
545 | } |
546 | |
547 | const QMediaMetaData &PlaybackEngine::metaData() const |
548 | { |
549 | return m_media.metaData(); |
550 | } |
551 | |
552 | int PlaybackEngine::activeTrack(QPlatformMediaPlayer::TrackType type) const |
553 | { |
554 | return m_media.activeTrack(type); |
555 | } |
556 | |
557 | void PlaybackEngine::setActiveTrack(QPlatformMediaPlayer::TrackType trackType, int streamNumber) |
558 | { |
559 | if (!m_media.setActiveTrack(type: trackType, streamNumber)) |
560 | return; |
561 | |
562 | m_codecs[trackType] = {}; |
563 | |
564 | m_renderers[trackType].reset(); |
565 | m_streams = defaultObjectsArray<decltype(m_streams)>(); |
566 | m_demuxer.reset(); |
567 | |
568 | updateVideoSinkSize(); |
569 | createObjectsIfNeeded(); |
570 | updateObjectsPausedState(); |
571 | } |
572 | |
573 | void PlaybackEngine::finilizeTime(qint64 pos) |
574 | { |
575 | Q_ASSERT(pos >= 0 && pos <= duration()); |
576 | |
577 | m_timeController.setPaused(true); |
578 | m_timeController.sync(trackPos: pos); |
579 | m_currentLoopOffset = {}; |
580 | } |
581 | |
582 | void PlaybackEngine::finalizeOutputs() |
583 | { |
584 | if (m_audioBufferOutput) |
585 | updateActiveAudioOutput(output: static_cast<QAudioBufferOutput *>(nullptr)); |
586 | if (m_audioOutput) |
587 | updateActiveAudioOutput(output: static_cast<QAudioOutput *>(nullptr)); |
588 | updateActiveVideoOutput(sink: nullptr, cleanOutput: true); |
589 | } |
590 | |
591 | bool PlaybackEngine::hasRenderer(quint64 id) const |
592 | { |
593 | return std::any_of(first: m_renderers.begin(), last: m_renderers.end(), |
594 | pred: [id](auto &renderer) { return renderer && renderer->id() == id; }); |
595 | } |
596 | |
597 | template <typename AudioOutput> |
598 | void PlaybackEngine::updateActiveAudioOutput(AudioOutput *output) |
599 | { |
600 | if (auto renderer = |
601 | qobject_cast<AudioRenderer *>(object: m_renderers[QPlatformMediaPlayer::AudioStream].get())) |
602 | renderer->setOutput(output); |
603 | } |
604 | |
605 | void PlaybackEngine::updateActiveVideoOutput(QVideoSink *sink, bool cleanOutput) |
606 | { |
607 | if (auto renderer = qobject_cast<SubtitleRenderer *>( |
608 | object: m_renderers[QPlatformMediaPlayer::SubtitleStream].get())) |
609 | renderer->setOutput(sink, cleanPrevSink: cleanOutput); |
610 | if (auto renderer = |
611 | qobject_cast<VideoRenderer *>(object: m_renderers[QPlatformMediaPlayer::VideoStream].get())) |
612 | renderer->setOutput(sink, cleanPrevSink: cleanOutput); |
613 | } |
614 | |
615 | void PlaybackEngine::updateVideoSinkSize(QVideoSink *prevSink) |
616 | { |
617 | auto platformVideoSink = m_videoSink ? m_videoSink->platformVideoSink() : nullptr; |
618 | if (!platformVideoSink) |
619 | return; |
620 | |
621 | if (prevSink && prevSink->platformVideoSink()) |
622 | platformVideoSink->setNativeSize(prevSink->platformVideoSink()->nativeSize()); |
623 | else { |
624 | const auto streamIndex = m_media.currentStreamIndex(trackType: QPlatformMediaPlayer::VideoStream); |
625 | if (streamIndex >= 0) { |
626 | const auto context = m_media.avContext(); |
627 | const auto stream = context->streams[streamIndex]; |
628 | const AVRational pixelAspectRatio = |
629 | av_guess_sample_aspect_ratio(format: context, stream, frame: nullptr); |
630 | // auto size = metaData().value(QMediaMetaData::Resolution) |
631 | const QSize size = |
632 | qCalculateFrameSize(resolution: { stream->codecpar->width, stream->codecpar->height }, |
633 | pixelAspectRatio: { .numerator: pixelAspectRatio.num, .denominator: pixelAspectRatio.den }); |
634 | |
635 | platformVideoSink->setNativeSize( |
636 | qRotatedFrameSize(size, rotation: m_media.transformation().rotation)); |
637 | } |
638 | } |
639 | } |
640 | |
641 | qint64 PlaybackEngine::boundPosition(qint64 position) const |
642 | { |
643 | position = qMax(a: position, b: 0); |
644 | return duration() > 0 ? qMin(a: position, b: duration()) : position; |
645 | } |
646 | } |
647 | |
648 | QT_END_NAMESPACE |
649 | |
650 | #include "moc_qffmpegplaybackengine_p.cpp" |
651 | |