1// Copyright (C) 2021 The Qt Company Ltd.
2// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
#include "qffmpegplaybackengine_p.h"

#include "qvideosink.h"
#include "qaudiooutput.h"
#include "private/qplatformaudiooutput_p.h"
#include "private/qplatformvideosink_p.h"
#include "private/qaudiobufferoutput_p.h"
#include "qiodevice.h"
#include "playbackengine/qffmpegdemuxer_p.h"
#include "playbackengine/qffmpegstreamdecoder_p.h"
#include "playbackengine/qffmpegsubtitlerenderer_p.h"
#include "playbackengine/qffmpegvideorenderer_p.h"
#include "playbackengine/qffmpegaudiorenderer_p.h"

#include <qloggingcategory.h>

#include <cstddef>
#include <tuple>
#include <utility>
18
19QT_BEGIN_NAMESPACE
20
21namespace QFFmpeg {
22
// File-local logging category used by the qCDebug()/qWarning() output below.
static Q_LOGGING_CATEGORY(qLcPlaybackEngine, "qt.multimedia.ffmpeg.playbackengine");
24
25// The helper is needed since on some compilers std::unique_ptr
26// doesn't have a default constructor in the case of sizeof(CustomDeleter) > 0
// The helper is needed since on some compilers std::unique_ptr
// doesn't have a default constructor in the case of sizeof(CustomDeleter) > 0.
//
// Each element is constructed as T{ {}, {} } (null pointer, default-built
// deleter). Generalized via an index pack so it works for std::array of any
// size; the previous version hard-coded exactly three elements.
template <typename Array, std::size_t... Indexes>
inline static Array defaultObjectsArrayImpl(std::index_sequence<Indexes...>)
{
    using T = typename Array::value_type;
    // The comma operator discards the index; each expansion yields one T{ {}, {} }.
    return { ((void)Indexes, T{ {}, {} })... };
}

template <typename Array>
inline static Array defaultObjectsArray()
{
    return defaultObjectsArrayImpl<Array>(
            std::make_index_sequence<std::tuple_size_v<Array>>());
}
33
// When true, stream decoders and the demuxer are paused together with the
// renderers (see updateObjectsPausedState()); when false, they keep filling
// their buffers while playback is paused.
//
// TODO: investigate what's better: profile and try network case
// Most likely, shouldPauseStreams = false is better because of:
// - packet and frame buffers are not big, their saturation is pretty fast.
// - after any pause a user has some preloaded buffers, so the playback is
//   supposed to be more stable in cases with a weak processor or bad internet.
// - the code is simpler, usage is more convenient.
//
static constexpr bool shouldPauseStreams = false;
42
// Constructs an idle engine: the demuxer and all per-track stream/renderer
// slots start out null (see defaultObjectsArray for why explicit init is
// needed with stateful deleters).
PlaybackEngine::PlaybackEngine()
    : m_demuxer({}, {}),
      m_streams(defaultObjectsArray<decltype(m_streams)>()),
      m_renderers(defaultObjectsArray<decltype(m_renderers)>())
{
    qCDebug(qLcPlaybackEngine) << "Create PlaybackEngine";
    // Register the types carried through queued cross-thread signal/slot
    // connections between the engine and its worker objects.
    qRegisterMetaType<QFFmpeg::Packet>();
    qRegisterMetaType<QFFmpeg::Frame>();
    qRegisterMetaType<QFFmpeg::TrackPosition>();
    qRegisterMetaType<QFFmpeg::TrackDuration>();
}
54
// Teardown order matters: detach outputs first so renderers stop touching
// sinks, then destroy every engine object, then join the worker threads.
PlaybackEngine::~PlaybackEngine() {
    qCDebug(qLcPlaybackEngine) << "Delete PlaybackEngine";

    finalizeOutputs();
    forEachExistingObject([](auto &object) { object.reset(); });
    deleteFreeThreads();
}
62
// Invoked when a renderer reports reaching the end of its stream. Once all
// relevant renderers are done, stops the engine and emits endOfStream().
void PlaybackEngine::onRendererFinished()
{
    // A missing renderer counts as "at end" for its track type.
    auto isAtEnd = [this](auto trackType) {
        return !m_renderers[trackType] || m_renderers[trackType]->isAtEnd();
    };

    if (!isAtEnd(QPlatformMediaPlayer::VideoStream))
        return;

    if (!isAtEnd(QPlatformMediaPlayer::AudioStream))
        return;

    // A still-running subtitle renderer only blocks the end of stream when
    // there is no audio/video renderer at all.
    if (!isAtEnd(QPlatformMediaPlayer::SubtitleStream) && !hasMediaStream())
        return;

    // Already stopped: nothing to do (std::exchange also records the new state).
    if (std::exchange(m_state, QMediaPlayer::StoppedState) == QMediaPlayer::StoppedState)
        return;

    // Snap the clock to the full media duration before rebuilding objects.
    finilizeTime(duration().asTimePoint());

    forceUpdate();

    qCDebug(qLcPlaybackEngine) << "Playback engine end of stream";

    emit endOfStream();
}
89
// Invoked when a renderer crosses a loop boundary. Tracks the active loop's
// start offset so positions can be reported relative to the current loop.
void PlaybackEngine::onRendererLoopChanged(quint64 id, TrackPosition offset, int loopIndex)
{
    // Ignore stale signals from renderers that have been destroyed/replaced.
    if (!hasRenderer(id))
        return;

    if (loopIndex > m_currentLoopOffset.loopIndex) {
        // First renderer entering the next loop: adopt its offset.
        m_currentLoopOffset = { offset, loopIndex };
        emit loopChanged();
    } else if (loopIndex == m_currentLoopOffset.loopIndex && offset != m_currentLoopOffset.loopStartTimeUs) {
        // Renderers in the same loop iteration should agree on the offset;
        // warn and take the newly reported value.
        qWarning() << "Unexpected offset for loop" << loopIndex << ":" << offset.get() << "vs"
                   << m_currentLoopOffset.loopStartTimeUs.get();
        m_currentLoopOffset.loopStartTimeUs = offset;
    }
}
104
// Invoked when the demuxer delivers its first packet after (re)creation.
// Compensates the clock for the demuxer's startup delay, then starts the
// renderers. (The misspelled name matches the declaration in the header.)
void PlaybackEngine::onFirsPacketFound(quint64 id, TrackPosition absSeekPos)
{
    // Ignore stale signals from a previous demuxer instance.
    if (!m_demuxer || m_demuxer->id() != id)
        return;

    if (m_shouldUpdateTimeOnFirstPacket) {
        const auto timePoint = RealClock::now();
        const RealClock::time_point expectedTimePoint =
                m_timeController.timeFromPosition(absSeekPos);
        const auto delay = std::chrono::duration_cast<std::chrono::microseconds>(
                timePoint - expectedTimePoint);
        qCDebug(qLcPlaybackEngine) << "Delay of demuxer initialization:" << delay;
        // Re-anchor the clock so the startup delay is not perceived as
        // skipped playback time.
        m_timeController.sync(timePoint, absSeekPos);

        m_shouldUpdateTimeOnFirstPacket = false; // turn the flag back to ensure the consistency.
    }

    forEachExistingObject<Renderer>([&](auto &renderer) { renderer->start(m_timeController); });
}
124
// Invoked when the master renderer publishes a clock sample (time point +
// track position). Updates the shared clock and softly re-syncs the others.
void PlaybackEngine::onRendererSynchronized(quint64 id, RealClock::time_point tp, TrackPosition pos)
{
    if (!hasRenderer(id))
        return;

    // Only the audio renderer is expected to drive synchronization.
    Q_ASSERT(m_renderers[QPlatformMediaPlayer::AudioStream]
             && m_renderers[QPlatformMediaPlayer::AudioStream]->id() == id);

    m_timeController.sync(tp, pos);

    forEachExistingObject<Renderer>([&](auto &renderer) {
        if (id != renderer->id())
            renderer->syncSoft(tp, pos);
    });
}
140
// Applies a new playback state, rebuilding the pipeline objects whenever
// StoppedState is entered or left.
void PlaybackEngine::setState(QMediaPlayer::PlaybackState state) {
    // No media loaded: state changes are meaningless.
    if (!m_media.avContext())
        return;

    if (state == m_state)
        return;

    const auto prevState = std::exchange(m_state, state);

    if (m_state == QMediaPlayer::StoppedState) {
        // Stopping: detach outputs and rewind the clock to position 0.
        finalizeOutputs();
        finilizeTime(TrackPosition(0));
    }

    // Entering or leaving StoppedState requires a fresh object pipeline.
    if (prevState == QMediaPlayer::StoppedState || m_state == QMediaPlayer::StoppedState)
        recreateObjects();

    // Leaving Stopped directly into Paused: force one video step so a frame
    // gets presented (see triggerStepIfNeeded()).
    if (prevState == QMediaPlayer::StoppedState)
        triggerStepIfNeeded();

    updateObjectsPausedState();
}
163
// Propagates the engine's playing/paused state to every existing object.
// Renderers pause whenever the engine isn't playing; the demuxer and stream
// decoders only pause (when shouldPauseStreams is enabled) if no renderer
// still needs frames to complete a forced step.
void PlaybackEngine::updateObjectsPausedState()
{
    const auto paused = m_state != QMediaPlayer::PlayingState;
    m_timeController.setPaused(paused);

    forEachExistingObject([&](auto &object) {
        bool objectPaused = false;

        if constexpr (std::is_same_v<decltype(*object), Renderer &>)
            objectPaused = paused;
        else if constexpr (shouldPauseStreams) {
            // A stream must keep running while its renderer performs a
            // forced step and therefore needs more frames.
            auto streamPaused = [](bool p, auto &r) {
                const auto needMoreFrames = r && r->stepInProgress();
                return p && !needMoreFrames;
            };

            if constexpr (std::is_same_v<decltype(*object), StreamDecoder &>)
                objectPaused = streamPaused(paused, renderer(object->trackType()));
            else
                // The demuxer may pause only if every renderer allows it.
                objectPaused = std::accumulate(m_renderers.begin(), m_renderers.end(), paused,
                                               streamPaused);
        }

        object->setPaused(objectPaused);
    });
}
190
// Custom deleter for engine objects: asynchronously kills the object and
// schedules a cleanup pass for worker threads that become unused.
void PlaybackEngine::ObjectDeleter::operator()(PlaybackEngineObject *object) const
{
    Q_ASSERT(engine);
    // Queue deleteFreeThreads() only once per batch of deletions; the flag
    // is cleared again inside deleteFreeThreads().
    if (!std::exchange(engine->m_threadsDirty, true))
        QMetaObject::invokeMethod(engine, &PlaybackEngine::deleteFreeThreads, Qt::QueuedConnection);

    object->kill();
}
199
200void PlaybackEngine::registerObject(PlaybackEngineObject &object)
201{
202 connect(sender: &object, signal: &PlaybackEngineObject::error, context: this, slot: &PlaybackEngine::errorOccured);
203
204 auto threadName = objectThreadName(object);
205 auto &thread = m_threads[threadName];
206 if (!thread) {
207 thread = std::make_unique<QThread>();
208 thread->setObjectName(threadName);
209 thread->start();
210 }
211
212 Q_ASSERT(object.thread() != thread.get());
213 object.moveToThread(thread: thread.get());
214}
215
216PlaybackEngine::RendererPtr
217PlaybackEngine::createRenderer(QPlatformMediaPlayer::TrackType trackType)
218{
219 switch (trackType) {
220 case QPlatformMediaPlayer::VideoStream:
221 return m_videoSink ? createPlaybackEngineObject<VideoRenderer>(
222 args&: m_timeController, args&: m_videoSink, args: m_media.transformation())
223 : RendererPtr{ {}, {} };
224 case QPlatformMediaPlayer::AudioStream:
225 return m_audioOutput || m_audioBufferOutput
226 ? createPlaybackEngineObject<AudioRenderer>(args&: m_timeController, args&: m_audioOutput, args&: m_audioBufferOutput)
227 : RendererPtr{ {}, {} };
228 case QPlatformMediaPlayer::SubtitleStream:
229 return m_videoSink
230 ? createPlaybackEngineObject<SubtitleRenderer>(args&: m_timeController, args&: m_videoSink)
231 : RendererPtr{ {}, {} };
232 default:
233 return { {}, {} };
234 }
235}
236
237template<typename C, typename Action>
238void PlaybackEngine::forEachExistingObject(Action &&action)
239{
240 auto handleNotNullObject = [&](auto &object) {
241 if constexpr (std::is_base_of_v<C, std::remove_reference_t<decltype(*object)>>)
242 if (object)
243 action(object);
244 };
245
246 handleNotNullObject(m_demuxer);
247 std::for_each(m_streams.begin(), m_streams.end(), handleNotNullObject);
248 std::for_each(m_renderers.begin(), m_renderers.end(), handleNotNullObject);
249}
250
// Convenience overload: visit every existing engine object regardless of
// its concrete type.
template<typename Action>
void PlaybackEngine::forEachExistingObject(Action &&action)
{
    forEachExistingObject<PlaybackEngineObject>(std::forward<Action>(action));
}
256
// Seeks to a position inside the current loop. The clock is paused and
// synced to the absolute target (loop offset + pos); the pending-seek flag
// is consumed by the demuxer recreated in forceUpdate().
void PlaybackEngine::seek(TrackPosition pos)
{
    // Clamp the target to [0, duration].
    pos = boundPosition(pos);

    m_timeController.setPaused(true);
    m_timeController.sync(m_currentLoopOffset.loopStartTimeUs.asDuration() + pos);
    m_seekPending = true;

    forceUpdate();
}
267
268void PlaybackEngine::setLoops(int loops)
269{
270 if (!isSeekable()) {
271 qWarning() << "Cannot set loops for non-seekable source";
272 return;
273 }
274
275 if (std::exchange(obj&: m_loops, new_val&: loops) == loops)
276 return;
277
278 qCDebug(qLcPlaybackEngine) << "set playback engine loops:" << loops << "prev loops:" << m_loops
279 << "index:" << m_currentLoopOffset.loopIndex;
280
281 if (m_demuxer)
282 m_demuxer->setLoops(loops);
283}
284
285void PlaybackEngine::triggerStepIfNeeded()
286{
287 if (m_state != QMediaPlayer::PausedState)
288 return;
289
290 if (m_renderers[QPlatformMediaPlayer::VideoStream])
291 m_renderers[QPlatformMediaPlayer::VideoStream]->doForceStep();
292
293 // TODO: maybe trigger SubtitleStream.
294 // If trigger it, we have to make seeking for the current subtitle frame more stable.
295 // Or set some timeout for seeking.
296}
297
298QString PlaybackEngine::objectThreadName(const PlaybackEngineObject &object)
299{
300 QString result = QString::fromLatin1(ba: object.metaObject()->className());
301 if (auto stream = qobject_cast<const StreamDecoder *>(object: &object))
302 result += QString::number(stream->trackType());
303
304 return result;
305}
306
307void PlaybackEngine::setPlaybackRate(float rate) {
308 if (rate == playbackRate())
309 return;
310
311 m_timeController.setPlaybackRate(rate);
312 forEachExistingObject<Renderer>(action: [rate](auto &renderer) { renderer->setPlaybackRate(rate); });
313}
314
// Current playback rate as tracked by the time controller.
float PlaybackEngine::playbackRate() const {
    return m_timeController.playbackRate();
}
318
// Destroys all existing pipeline objects and builds fresh ones (unless the
// engine is stopped). The clock stays paused during the rebuild.
void PlaybackEngine::recreateObjects()
{
    m_timeController.setPaused(true);

    forEachExistingObject([](auto &object) { object.reset(); });

    createObjectsIfNeeded();
}
327
328void PlaybackEngine::createObjectsIfNeeded()
329{
330 if (m_state == QMediaPlayer::StoppedState || !m_media.avContext())
331 return;
332
333 for (int i = 0; i < QPlatformMediaPlayer::NTrackTypes; ++i)
334 createStreamAndRenderer(trackType: static_cast<QPlatformMediaPlayer::TrackType>(i));
335
336 createDemuxer();
337}
338
// Rebuilds the pipeline and re-applies the paused/forced-step state.
// Used after seeks, output changes and end-of-stream handling.
void PlaybackEngine::forceUpdate()
{
    recreateObjects();
    triggerStepIfNeeded();
    updateObjectsPausedState();
}
345
// Creates (or reuses) the renderer for a track and a fresh stream decoder
// feeding it, wiring up all cross-object connections.
void PlaybackEngine::createStreamAndRenderer(QPlatformMediaPlayer::TrackType trackType)
{
    auto codecContext = codecContextForTrack(trackType);

    auto &renderer = m_renderers[trackType];

    // No decodable stream for this track type.
    if (!codecContext)
        return;

    if (!renderer) {
        renderer = createRenderer(trackType);

        // createRenderer() returns null when the matching output isn't set.
        if (!renderer)
            return;

        connect(renderer.get(), &Renderer::synchronized, this,
                &PlaybackEngine::onRendererSynchronized);

        connect(renderer.get(), &Renderer::loopChanged, this,
                &PlaybackEngine::onRendererLoopChanged);

        if constexpr (shouldPauseStreams)
            connect(renderer.get(), &Renderer::forceStepDone, this,
                    &PlaybackEngine::updateObjectsPausedState);

        connect(renderer.get(), &PlaybackEngineObject::atEnd, this,
                &PlaybackEngine::onRendererFinished);
    }

    // The decoder is always recreated; it resumes from the renderer's
    // current seek position.
    auto &stream = m_streams[trackType] =
            createPlaybackEngineObject<StreamDecoder>(*codecContext, renderer->seekPosition());

    Q_ASSERT(trackType == stream->trackType());

    // Frame flow: decoder -> renderer; back-pressure: renderer -> decoder.
    connect(stream.get(), &StreamDecoder::requestHandleFrame, renderer.get(), &Renderer::render);
    connect(stream.get(), &PlaybackEngineObject::atEnd, renderer.get(),
            &Renderer::onFinalFrameReceived);
    connect(renderer.get(), &Renderer::frameProcessed, stream.get(),
            &StreamDecoder::onFrameProcessed);
}
386
// Returns the (lazily created, cached) codec context for the current stream
// of the given track type. Returns std::nullopt when there is no stream, or
// when codec creation fails — in which case errorOccured is emitted.
std::optional<CodecContext> PlaybackEngine::codecContextForTrack(QPlatformMediaPlayer::TrackType trackType)
{
    const auto streamIndex = m_media.currentStreamIndex(trackType);
    if (streamIndex < 0)
        return {};

    auto &codecContext = m_codecContexts[trackType];

    if (!codecContext) {
        qCDebug(qLcPlaybackEngine)
                << "Create codec for stream:" << streamIndex << "trackType:" << trackType;
        auto maybeCodecContext = CodecContext::create(m_media.avContext()->streams[streamIndex],
                                                      m_media.avContext());

        if (!maybeCodecContext) {
            emit errorOccured(QMediaPlayer::FormatError,
                              u"Cannot create codec," + maybeCodecContext.error());
            return {};
        }

        codecContext = maybeCodecContext.value();
    }

    return codecContext;
}
412
413bool PlaybackEngine::hasMediaStream() const
414{
415 return m_renderers[QPlatformMediaPlayer::AudioStream]
416 || m_renderers[QPlatformMediaPlayer::VideoStream];
417}
418
// Creates the demuxer for every track that currently has a stream decoder
// and connects the packet/back-pressure signals between them.
void PlaybackEngine::createDemuxer()
{
    // -1 marks a track type without an active stream.
    std::array<int, QPlatformMediaPlayer::NTrackTypes> streamIndexes = { -1, -1, -1 };

    bool hasStreams = false;
    forEachExistingObject<StreamDecoder>([&](auto &stream) {
        hasStreams = true;
        const auto trackType = stream->trackType();
        streamIndexes[trackType] = m_media.currentStreamIndex(trackType);
    });

    // No decoder was created: nothing to demux.
    if (!hasStreams)
        return;

    // Start demuxing from the lowest current renderer position (in-loop);
    // any pending seek is consumed by the new demuxer instance.
    const TrackPosition currentLoopPosUs = currentPosition(false);

    m_demuxer = createPlaybackEngineObject<Demuxer>(m_media.avContext(), currentLoopPosUs,
                                                    m_seekPending, m_currentLoopOffset,
                                                    streamIndexes, m_loops);

    m_seekPending = false;

    connect(m_demuxer.get(), &Demuxer::packetsBuffered, this, &PlaybackEngine::buffered);

    forEachExistingObject<StreamDecoder>([&](auto &stream) {
        // Packet flow: demuxer -> decoder; back-pressure: decoder -> demuxer.
        connect(m_demuxer.get(), Demuxer::signalByTrackType(stream->trackType()), stream.get(),
                &StreamDecoder::decode);
        connect(m_demuxer.get(), &PlaybackEngineObject::atEnd, stream.get(),
                &StreamDecoder::onFinalPacketReceived);
        connect(stream.get(), &StreamDecoder::packetProcessed, m_demuxer.get(),
                &Demuxer::onPacketProcessed);
    });

    // Re-anchor the clock when the first packet arrives (see onFirsPacketFound).
    m_shouldUpdateTimeOnFirstPacket = true;
    connect(m_demuxer.get(), &Demuxer::firstPacketFound, this, &PlaybackEngine::onFirsPacketFound);
}
455
// Stops and joins worker threads that no longer host any engine object.
// Scheduled (queued) by ObjectDeleter after object deletions.
void PlaybackEngine::deleteFreeThreads() {
    m_threadsDirty = false;
    // Assume every thread is free, then reclaim the ones still in use by
    // moving their map nodes back into m_threads.
    auto freeThreads = std::move(m_threads);

    forEachExistingObject([&](auto &object) {
        m_threads.insert(freeThreads.extract(objectThreadName(*object)));
    });

    // Request all unused threads to quit first, then wait for each — quit()
    // is asynchronous, so this overlaps the shutdowns instead of
    // serializing them.
    for (auto &[name, thr] : freeThreads)
        thr->quit();

    for (auto &[name, thr] : freeThreads)
        thr->wait();
}
470
// Installs the demuxed media source. May only be called once, while the
// engine is stopped and before any worker threads exist.
void PlaybackEngine::setMedia(MediaDataHolder media)
{
    Q_ASSERT(!m_media.avContext()); // Playback engine does not support reloading media
    Q_ASSERT(m_state == QMediaPlayer::StoppedState);
    Q_ASSERT(m_threads.empty());

    m_media = std::move(media);
    updateVideoSinkSize();
}
480
481void PlaybackEngine::setVideoSink(QVideoSink *sink)
482{
483 auto prev = std::exchange(obj&: m_videoSink, new_val&: sink);
484 if (prev == sink)
485 return;
486
487 updateVideoSinkSize(prevSink: prev);
488 updateActiveVideoOutput(sink);
489
490 if (!sink || !prev) {
491 // might need some improvements
492 forceUpdate();
493 }
494}
495
496void PlaybackEngine::setAudioSink(QPlatformAudioOutput *output) {
497 setAudioSink(output ? output->q : nullptr);
498}
499
500void PlaybackEngine::setAudioSink(QAudioOutput *output)
501{
502 QAudioOutput *prev = std::exchange(obj&: m_audioOutput, new_val&: output);
503 if (prev == output)
504 return;
505
506 updateActiveAudioOutput(output);
507
508 if (!output || !prev) {
509 // might need some improvements
510 forceUpdate();
511 }
512}
513
514void PlaybackEngine::setAudioBufferOutput(QAudioBufferOutput *output)
515{
516 QAudioBufferOutput *prev = std::exchange(obj&: m_audioBufferOutput, new_val&: output);
517 if (prev == output)
518 return;
519 updateActiveAudioOutput(output);
520}
521
// Returns the playback position relative to the active loop. topPos selects
// the most advanced renderer position; otherwise the least advanced one is
// used (e.g. when restarting the demuxer, see createDemuxer()).
TrackPosition PlaybackEngine::currentPosition(bool topPos) const
{
    std::optional<TrackPosition> pos;

    for (size_t i = 0; i < m_renderers.size(); ++i) {
        const auto &renderer = m_renderers[i];
        if (!renderer)
            continue;

        // skip subtitle stream for finding lower rendering position
        if (!topPos && i == QPlatformMediaPlayer::SubtitleStream && hasMediaStream())
            continue;

        const auto rendererPos = renderer->lastPosition();
        pos = !pos ? rendererPos
                   : topPos ? std::max(*pos, rendererPos)
                            : std::min(*pos, rendererPos);
    }

    // No renderer exists: fall back to the shared clock.
    if (!pos)
        pos = m_timeController.currentPosition();

    // Convert the absolute position into the current loop's frame of
    // reference and clamp to [0, duration].
    return boundPosition(*pos - m_currentLoopOffset.loopStartTimeUs.asDuration());
}
546
// Total duration of the loaded media (delegates to MediaDataHolder).
TrackDuration PlaybackEngine::duration() const
{
    return m_media.duration();
}
551
// Whether the loaded media supports seeking (delegates to MediaDataHolder).
bool PlaybackEngine::isSeekable() const { return m_media.isSeekable(); }
553
// Stream descriptions for the given track type (delegates to MediaDataHolder).
const QList<MediaDataHolder::StreamInfo> &
PlaybackEngine::streamInfo(QPlatformMediaPlayer::TrackType trackType) const
{
    return m_media.streamInfo(trackType);
}
559
// Metadata of the loaded media (delegates to MediaDataHolder).
const QMediaMetaData &PlaybackEngine::metaData() const
{
    return m_media.metaData();
}
564
// Index of the currently active stream for the track type, as tracked by
// MediaDataHolder.
int PlaybackEngine::activeTrack(QPlatformMediaPlayer::TrackType type) const
{
    return m_media.activeTrack(type);
}
569
// Switches the active stream for a track type and rebuilds the affected
// parts of the pipeline.
void PlaybackEngine::setActiveTrack(QPlatformMediaPlayer::TrackType trackType, int streamNumber)
{
    if (!m_media.setActiveTrack(trackType, streamNumber))
        return;

    // Drop the cached codec context of the previous stream.
    m_codecContexts[trackType] = {};

    // Only the affected renderer is reset, but all decoders and the demuxer
    // must be recreated since the demuxer's stream selection changed.
    m_renderers[trackType].reset();
    m_streams = defaultObjectsArray<decltype(m_streams)>();
    m_demuxer.reset();

    updateVideoSinkSize();
    createObjectsIfNeeded();
    updateObjectsPausedState();

    // We strive to have a smooth playback if we change the active track. It means that
    // we don't want to do any time shiftings. Instead, we rely on the fact that
    // buffers in renderers are not empty to compensate the demuxer's lag.
    m_shouldUpdateTimeOnFirstPacket = false;
}
590
// Pauses the clock at the given position and resets the loop offset.
// (The misspelled name matches the declaration in the header.)
void PlaybackEngine::finilizeTime(TrackPosition pos)
{
    Q_ASSERT(pos >= TrackPosition(0) && pos <= duration().asTimePoint());

    m_timeController.setPaused(true);
    m_timeController.sync(pos);
    m_currentLoopOffset = {};
}
599
// Detaches all audio/video outputs from the renderers; the video output is
// detached with cleanOutput = true. Used on stop and in the destructor.
void PlaybackEngine::finalizeOutputs()
{
    if (m_audioBufferOutput)
        updateActiveAudioOutput(static_cast<QAudioBufferOutput *>(nullptr));
    if (m_audioOutput)
        updateActiveAudioOutput(static_cast<QAudioOutput *>(nullptr));
    updateActiveVideoOutput(nullptr, true);
}
608
609bool PlaybackEngine::hasRenderer(quint64 id) const
610{
611 return std::any_of(first: m_renderers.begin(), last: m_renderers.end(),
612 pred: [id](auto &renderer) { return renderer && renderer->id() == id; });
613}
614
// Forwards the new output (QAudioOutput or QAudioBufferOutput) to the audio
// renderer, if one exists.
template <typename AudioOutput>
void PlaybackEngine::updateActiveAudioOutput(AudioOutput *output)
{
    if (auto renderer =
                qobject_cast<AudioRenderer *>(m_renderers[QPlatformMediaPlayer::AudioStream].get()))
        renderer->setOutput(output);
}
622
// Forwards the new video sink to the subtitle and video renderers, if they
// exist; cleanOutput asks the renderers to clean up the previous sink.
void PlaybackEngine::updateActiveVideoOutput(QVideoSink *sink, bool cleanOutput)
{
    if (auto renderer = qobject_cast<SubtitleRenderer *>(
                m_renderers[QPlatformMediaPlayer::SubtitleStream].get()))
        renderer->setOutput(sink, cleanOutput);
    if (auto renderer =
                qobject_cast<VideoRenderer *>(m_renderers[QPlatformMediaPlayer::VideoStream].get()))
        renderer->setOutput(sink, cleanOutput);
}
632
// Sets the video sink's native size, either copying it from the previous
// sink (when switching sinks) or deriving it from the current video
// stream's dimensions, pixel aspect ratio and rotation.
void PlaybackEngine::updateVideoSinkSize(QVideoSink *prevSink)
{
    auto platformVideoSink = m_videoSink ? m_videoSink->platformVideoSink() : nullptr;
    if (!platformVideoSink)
        return;

    if (prevSink && prevSink->platformVideoSink())
        platformVideoSink->setNativeSize(prevSink->platformVideoSink()->nativeSize());
    else {
        const auto streamIndex = m_media.currentStreamIndex(QPlatformMediaPlayer::VideoStream);
        if (streamIndex >= 0) {
            const auto context = m_media.avContext();
            const auto stream = context->streams[streamIndex];
            const AVRational pixelAspectRatio =
                    av_guess_sample_aspect_ratio(context, stream, nullptr);
            // auto size = metaData().value(QMediaMetaData::Resolution)
            const QSize size =
                    qCalculateFrameSize({ stream->codecpar->width, stream->codecpar->height },
                                        { pixelAspectRatio.num, pixelAspectRatio.den });

            platformVideoSink->setNativeSize(
                    qRotatedFrameSize(size, m_media.transformation().rotation));
        }
    }
}
658
659TrackPosition PlaybackEngine::boundPosition(TrackPosition position) const
660{
661 position = qMax(a: position, b: TrackPosition(0));
662 return duration() > TrackDuration(0) ? qMin(a: position, b: duration().asTimePoint()) : position;
663}
664} // namespace QFFmpeg
665
666QT_END_NAMESPACE
667
668#include "moc_qffmpegplaybackengine_p.cpp"
669

source code of qtmultimedia/src/plugins/multimedia/ffmpeg/qffmpegplaybackengine.cpp