1 | // Copyright (C) 2021 The Qt Company Ltd. |
2 | // SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only |
3 | |
4 | #include "playbackengine/qffmpegmediadataholder_p.h" |
5 | |
6 | #include "qffmpegmediametadata_p.h" |
7 | #include "qffmpegmediaformatinfo_p.h" |
8 | #include "qffmpegioutils_p.h" |
9 | #include "qiodevice.h" |
10 | #include "qdatetime.h" |
11 | #include "qloggingcategory.h" |
12 | |
13 | #include <math.h> |
14 | #include <optional> |
15 | |
16 | extern "C" { |
17 | #include "libavutil/display.h" |
18 | } |
19 | |
20 | QT_BEGIN_NAMESPACE |
21 | |
22 | static Q_LOGGING_CATEGORY(qLcMediaDataHolder, "qt.multimedia.ffmpeg.mediadataholder" ) |
23 | |
24 | namespace QFFmpeg { |
25 | |
26 | static std::optional<qint64> streamDuration(const AVStream &stream) |
27 | { |
28 | const auto &factor = stream.time_base; |
29 | |
30 | if (stream.duration > 0 && factor.num > 0 && factor.den > 0) { |
31 | return qint64(1000000) * stream.duration * factor.num / factor.den; |
32 | } |
33 | |
34 | // In some cases ffmpeg reports negative duration that is definitely invalid. |
35 | // However, the correct duration may be read from the metadata. |
36 | |
37 | if (stream.duration < 0) { |
38 | qCWarning(qLcMediaDataHolder) << "AVStream duration" << stream.duration |
39 | << "is invalid. Taking it from the metadata" ; |
40 | } |
41 | |
42 | if (const auto duration = av_dict_get(m: stream.metadata, key: "DURATION" , prev: nullptr, flags: 0)) { |
43 | const auto time = QTime::fromString(string: QString::fromUtf8(utf8: duration->value)); |
44 | return qint64(1000) * time.msecsSinceStartOfDay(); |
45 | } |
46 | |
47 | return {}; |
48 | } |
49 | |
50 | static QTransform displayMatrixToTransform(const int32_t *displayMatrix) |
51 | { |
52 | // displayMatrix is stored as |
53 | // |
54 | // . -- X axis |
55 | // | |
56 | // | | a b u | |
57 | // Y | c d v | |
58 | // axis | x y w | |
59 | // |
60 | // where a, b, c, d, x, y are 16.16 fixed-point values, |
61 | // and u, v, w are 30.2 point values. |
62 | // Only a, b, c, d impacts on mirroring and rotation, |
63 | // so it's enough to propagate them to QTransform. |
64 | // |
65 | // If we were interested in getting proper XY scales, |
66 | // we would divide a,b,c,d by 2^16. The whole scale doesn't |
67 | // impact mirroring and rotation, so we don't do so. |
68 | |
69 | auto toRotateMirrorValue = [displayMatrix](int index) { |
70 | // toRotateScaleValue would be: |
71 | // return displayMatrix[index] / qreal(1 << 16); |
72 | return displayMatrix[index]; |
73 | }; |
74 | |
75 | return QTransform(toRotateMirrorValue(0), toRotateMirrorValue(1), |
76 | toRotateMirrorValue(3), toRotateMirrorValue(4), |
77 | 0, 0); |
78 | } |
79 | |
80 | static VideoTransformation streamTransformation(const AVStream *stream) |
81 | { |
82 | Q_ASSERT(stream); |
83 | |
84 | using SideDataSize = decltype(AVPacketSideData::size); |
85 | constexpr SideDataSize displayMatrixSize = sizeof(int32_t) * 9; |
86 | const AVPacketSideData *sideData = streamSideData(stream, type: AV_PKT_DATA_DISPLAYMATRIX); |
87 | if (!sideData || sideData->size < displayMatrixSize) |
88 | return {}; |
89 | |
90 | const auto displayMatrix = reinterpret_cast<const int32_t *>(sideData->data); |
91 | const QTransform transform = displayMatrixToTransform(displayMatrix); |
92 | const VideoTransformationOpt result = qVideoTransformationFromMatrix(matrix: transform); |
93 | if (!result) { |
94 | qCWarning(qLcMediaDataHolder) |
95 | << "Video stream contains malformed display matrix" << transform; |
96 | return {}; |
97 | } |
98 | return *result; |
99 | } |
100 | |
101 | static bool colorTransferSupportsHdr(const AVStream *stream) |
102 | { |
103 | if (!stream) |
104 | return false; |
105 | |
106 | const AVCodecParameters *codecPar = stream->codecpar; |
107 | if (!codecPar) |
108 | return false; |
109 | |
110 | const QVideoFrameFormat::ColorTransfer colorTransfer = fromAvColorTransfer(colorTrc: codecPar->color_trc); |
111 | |
112 | // Assume that content is using HDR if the color transfer supports high |
113 | // dynamic range. The video may still not utilize the extended range, |
114 | // but we can't determine the actual range without decoding frames. |
115 | return colorTransfer == QVideoFrameFormat::ColorTransfer_ST2084 |
116 | || colorTransfer == QVideoFrameFormat::ColorTransfer_STD_B67; |
117 | } |
118 | |
119 | VideoTransformation MediaDataHolder::transformation() const |
120 | { |
121 | // TODO: Add QMediaMetaData::Mirrored and take from it and QMediaMetaData::Orientation: |
122 | // int orientation = m_metaData.value(QMediaMetaData::Orientation).toInt(); |
123 | // return static_cast<QtVideo::Rotation>(orientation); |
124 | |
125 | const int streamIndex = m_currentAVStreamIndex[QPlatformMediaPlayer::VideoStream]; |
126 | if (streamIndex < 0) |
127 | return {}; |
128 | |
129 | return streamTransformation(stream: m_context->streams[streamIndex]); |
130 | } |
131 | |
// Returns the non-owning raw pointer to the demuxer context; ownership
// stays with this MediaDataHolder.
AVFormatContext *MediaDataHolder::avContext()
{
    return m_context.get();
}
136 | |
// Returns the AVFormatContext stream index currently selected for the given
// track type, or -1 when no stream of that type is active.
int MediaDataHolder::currentStreamIndex(QPlatformMediaPlayer::TrackType trackType) const
{
    return m_currentAVStreamIndex[trackType];
}
141 | |
142 | static void insertMediaData(QMediaMetaData &metaData, QPlatformMediaPlayer::TrackType trackType, |
143 | const AVStream *stream) |
144 | { |
145 | Q_ASSERT(stream); |
146 | const auto *codecPar = stream->codecpar; |
147 | |
148 | switch (trackType) { |
149 | case QPlatformMediaPlayer::VideoStream: |
150 | metaData.insert(k: QMediaMetaData::VideoBitRate, value: (int)codecPar->bit_rate); |
151 | metaData.insert(k: QMediaMetaData::VideoCodec, |
152 | value: QVariant::fromValue(value: QFFmpegMediaFormatInfo::videoCodecForAVCodecId( |
153 | id: codecPar->codec_id))); |
154 | metaData.insert(k: QMediaMetaData::Resolution, value: QSize(codecPar->width, codecPar->height)); |
155 | metaData.insert(k: QMediaMetaData::VideoFrameRate, |
156 | value: qreal(stream->avg_frame_rate.num) / qreal(stream->avg_frame_rate.den)); |
157 | metaData.insert(k: QMediaMetaData::Orientation, |
158 | value: QVariant::fromValue(value: streamTransformation(stream).rotation)); |
159 | metaData.insert(k: QMediaMetaData::HasHdrContent, value: colorTransferSupportsHdr(stream)); |
160 | break; |
161 | case QPlatformMediaPlayer::AudioStream: |
162 | metaData.insert(k: QMediaMetaData::AudioBitRate, value: (int)codecPar->bit_rate); |
163 | metaData.insert(k: QMediaMetaData::AudioCodec, |
164 | value: QVariant::fromValue(value: QFFmpegMediaFormatInfo::audioCodecForAVCodecId( |
165 | id: codecPar->codec_id))); |
166 | break; |
167 | default: |
168 | break; |
169 | } |
170 | }; |
171 | |
172 | QPlatformMediaPlayer::TrackType MediaDataHolder::trackTypeFromMediaType(int mediaType) |
173 | { |
174 | switch (mediaType) { |
175 | case AVMEDIA_TYPE_AUDIO: |
176 | return QPlatformMediaPlayer::AudioStream; |
177 | case AVMEDIA_TYPE_VIDEO: |
178 | return QPlatformMediaPlayer::VideoStream; |
179 | case AVMEDIA_TYPE_SUBTITLE: |
180 | return QPlatformMediaPlayer::SubtitleStream; |
181 | default: |
182 | return QPlatformMediaPlayer::NTrackTypes; |
183 | } |
184 | } |
185 | |
186 | namespace { |
187 | QMaybe<AVFormatContextUPtr, MediaDataHolder::ContextError> |
188 | loadMedia(const QUrl &mediaUrl, QIODevice *stream, const std::shared_ptr<ICancelToken> &cancelToken) |
189 | { |
190 | const QByteArray url = mediaUrl.toString(options: QUrl::PreferLocalFile).toUtf8(); |
191 | |
192 | AVFormatContextUPtr context{ avformat_alloc_context() }; |
193 | |
194 | if (stream) { |
195 | if (!stream->isOpen()) { |
196 | if (!stream->open(mode: QIODevice::ReadOnly)) |
197 | return MediaDataHolder::ContextError{ |
198 | .code: QMediaPlayer::ResourceError, .description: QLatin1String("Could not open source device." ) |
199 | }; |
200 | } |
201 | if (!stream->isSequential()) |
202 | stream->seek(pos: 0); |
203 | |
204 | constexpr int bufferSize = 32768; |
205 | unsigned char *buffer = (unsigned char *)av_malloc(size: bufferSize); |
206 | context->pb = avio_alloc_context(buffer, buffer_size: bufferSize, write_flag: false, opaque: stream, read_packet: &readQIODevice, write_packet: nullptr, |
207 | seek: &seekQIODevice); |
208 | } |
209 | |
210 | AVDictionaryHolder dict; |
211 | constexpr auto NetworkTimeoutUs = "5000000" ; |
212 | av_dict_set(pm: dict, key: "timeout" , value: NetworkTimeoutUs, flags: 0); |
213 | |
214 | const QByteArray protocolWhitelist = qgetenv(varName: "QT_FFMPEG_PROTOCOL_WHITELIST" ); |
215 | if (!protocolWhitelist.isNull()) |
216 | av_dict_set(pm: dict, key: "protocol_whitelist" , value: protocolWhitelist.data(), flags: 0); |
217 | |
218 | context->interrupt_callback.opaque = cancelToken.get(); |
219 | context->interrupt_callback.callback = [](void *opaque) { |
220 | const auto *cancelToken = static_cast<const ICancelToken *>(opaque); |
221 | if (cancelToken && cancelToken->isCancelled()) |
222 | return 1; |
223 | return 0; |
224 | }; |
225 | |
226 | int ret = 0; |
227 | { |
228 | AVFormatContext * = context.release(); |
229 | ret = avformat_open_input(ps: &contextRaw, url: url.constData(), fmt: nullptr, options: dict); |
230 | context.reset(p: contextRaw); |
231 | } |
232 | |
233 | if (ret < 0) { |
234 | auto code = QMediaPlayer::ResourceError; |
235 | if (ret == AVERROR(EACCES)) |
236 | code = QMediaPlayer::AccessDeniedError; |
237 | else if (ret == AVERROR(EINVAL) || ret == AVERROR_INVALIDDATA) |
238 | code = QMediaPlayer::FormatError; |
239 | |
240 | return MediaDataHolder::ContextError{ .code: code, .description: QMediaPlayer::tr(s: "Could not open file" ) }; |
241 | } |
242 | |
243 | ret = avformat_find_stream_info(ic: context.get(), options: nullptr); |
244 | if (ret < 0) { |
245 | return MediaDataHolder::ContextError{ |
246 | .code: QMediaPlayer::FormatError, |
247 | .description: QMediaPlayer::tr(s: "Could not find stream information for media file" ) |
248 | }; |
249 | } |
250 | |
251 | #ifndef QT_NO_DEBUG |
252 | av_dump_format(ic: context.get(), index: 0, url: url.constData(), is_output: 0); |
253 | #endif |
254 | return context; |
255 | } |
256 | |
257 | } // namespace |
258 | |
259 | MediaDataHolder::Maybe MediaDataHolder::create(const QUrl &url, QIODevice *stream, |
260 | const std::shared_ptr<ICancelToken> &cancelToken) |
261 | { |
262 | QMaybe context = loadMedia(mediaUrl: url, stream, cancelToken); |
263 | if (context) { |
264 | // MediaDataHolder is wrapped in a shared pointer to interop with signal/slot mechanism |
265 | return QSharedPointer<MediaDataHolder>{ new MediaDataHolder{ std::move(context.value()), cancelToken } }; |
266 | } |
267 | return context.error(); |
268 | } |
269 | |
270 | MediaDataHolder::MediaDataHolder(AVFormatContextUPtr context, |
271 | const std::shared_ptr<ICancelToken> &cancelToken) |
272 | : m_cancelToken{ cancelToken } |
273 | { |
274 | Q_ASSERT(context); |
275 | |
276 | m_context = std::move(context); |
277 | m_isSeekable = !(m_context->ctx_flags & AVFMTCTX_UNSEEKABLE); |
278 | |
279 | for (unsigned int i = 0; i < m_context->nb_streams; ++i) { |
280 | |
281 | const auto *stream = m_context->streams[i]; |
282 | const auto trackType = trackTypeFromMediaType(mediaType: stream->codecpar->codec_type); |
283 | |
284 | if (trackType == QPlatformMediaPlayer::NTrackTypes) |
285 | continue; |
286 | |
287 | if (stream->disposition & AV_DISPOSITION_ATTACHED_PIC) |
288 | continue; // Ignore attached picture streams because we treat them as metadata |
289 | |
290 | auto metaData = QFFmpegMetaData::fromAVMetaData(tags: stream->metadata); |
291 | const bool isDefault = stream->disposition & AV_DISPOSITION_DEFAULT; |
292 | |
293 | if (trackType != QPlatformMediaPlayer::SubtitleStream) { |
294 | insertMediaData(metaData, trackType, stream); |
295 | |
296 | if (isDefault && m_requestedStreams[trackType] < 0) |
297 | m_requestedStreams[trackType] = m_streamMap[trackType].size(); |
298 | } |
299 | |
300 | if (auto duration = streamDuration(stream: *stream)) { |
301 | m_duration = qMax(a: m_duration, b: *duration); |
302 | metaData.insert(k: QMediaMetaData::Duration, value: *duration / qint64(1000)); |
303 | } |
304 | |
305 | m_streamMap[trackType].append(t: { .avStreamIndex: (int)i, .isDefault: isDefault, .metaData: metaData }); |
306 | } |
307 | |
308 | // With some media files, streams may be lacking duration info. Let's |
309 | // get it from ffmpeg's duration estimation instead. |
310 | if (m_duration == 0 && m_context->duration > 0ll) { |
311 | m_duration = m_context->duration; |
312 | } |
313 | |
314 | for (auto trackType : |
315 | { QPlatformMediaPlayer::VideoStream, QPlatformMediaPlayer::AudioStream }) { |
316 | auto &requestedStream = m_requestedStreams[trackType]; |
317 | auto &streamMap = m_streamMap[trackType]; |
318 | |
319 | if (requestedStream < 0 && !streamMap.empty()) |
320 | requestedStream = 0; |
321 | |
322 | if (requestedStream >= 0) |
323 | m_currentAVStreamIndex[trackType] = streamMap[requestedStream].avStreamIndex; |
324 | } |
325 | |
326 | updateMetaData(); |
327 | } |
328 | |
329 | namespace { |
330 | |
331 | /*! |
332 | \internal |
333 | |
334 | Attempt to find an attached picture from the context's streams. |
335 | This will find ID3v2 pictures on audio files, and also pictures |
336 | attached to videos. |
337 | */ |
338 | QImage getAttachedPicture(const AVFormatContext *context) |
339 | { |
340 | if (!context) |
341 | return {}; |
342 | |
343 | for (unsigned int i = 0; i < context->nb_streams; ++i) { |
344 | const AVStream* stream = context->streams[i]; |
345 | if (!stream || !(stream->disposition & AV_DISPOSITION_ATTACHED_PIC)) |
346 | continue; |
347 | |
348 | const AVPacket *compressedImage = &stream->attached_pic; |
349 | if (!compressedImage || !compressedImage->data || compressedImage->size <= 0) |
350 | continue; |
351 | |
352 | // Feed raw compressed data to QImage::fromData, which will decompress it |
353 | // if it is a recognized format. |
354 | QImage image = QImage::fromData(data: { compressedImage->data, compressedImage->size }); |
355 | if (!image.isNull()) |
356 | return image; |
357 | } |
358 | |
359 | return {}; |
360 | } |
361 | |
362 | } |
363 | |
364 | void MediaDataHolder::updateMetaData() |
365 | { |
366 | m_metaData = {}; |
367 | |
368 | if (!m_context) |
369 | return; |
370 | |
371 | m_metaData = QFFmpegMetaData::fromAVMetaData(tags: m_context->metadata); |
372 | m_metaData.insert(k: QMediaMetaData::FileFormat, |
373 | value: QVariant::fromValue(value: QFFmpegMediaFormatInfo::fileFormatForAVInputFormat( |
374 | format: m_context->iformat))); |
375 | m_metaData.insert(k: QMediaMetaData::Duration, value: m_duration / qint64(1000)); |
376 | |
377 | if (!m_cachedThumbnail.has_value()) |
378 | m_cachedThumbnail = getAttachedPicture(context: m_context.get()); |
379 | |
380 | if (!m_cachedThumbnail->isNull()) |
381 | m_metaData.insert(k: QMediaMetaData::ThumbnailImage, value: m_cachedThumbnail.value()); |
382 | |
383 | for (auto trackType : |
384 | { QPlatformMediaPlayer::AudioStream, QPlatformMediaPlayer::VideoStream }) { |
385 | const auto streamIndex = m_currentAVStreamIndex[trackType]; |
386 | if (streamIndex >= 0) |
387 | insertMediaData(metaData&: m_metaData, trackType, stream: m_context->streams[streamIndex]); |
388 | } |
389 | } |
390 | |
391 | bool MediaDataHolder::setActiveTrack(QPlatformMediaPlayer::TrackType type, int streamNumber) |
392 | { |
393 | if (!m_context) |
394 | return false; |
395 | |
396 | if (streamNumber < 0 || streamNumber >= m_streamMap[type].size()) |
397 | streamNumber = -1; |
398 | if (m_requestedStreams[type] == streamNumber) |
399 | return false; |
400 | m_requestedStreams[type] = streamNumber; |
401 | const int avStreamIndex = m_streamMap[type].value(i: streamNumber).avStreamIndex; |
402 | |
403 | const int oldIndex = m_currentAVStreamIndex[type]; |
404 | qCDebug(qLcMediaDataHolder) << ">>>>> change track" << type << "from" << oldIndex << "to" |
405 | << avStreamIndex; |
406 | |
407 | // TODO: maybe add additional verifications |
408 | m_currentAVStreamIndex[type] = avStreamIndex; |
409 | |
410 | updateMetaData(); |
411 | |
412 | return true; |
413 | } |
414 | |
415 | int MediaDataHolder::activeTrack(QPlatformMediaPlayer::TrackType type) const |
416 | { |
417 | return type < QPlatformMediaPlayer::NTrackTypes ? m_requestedStreams[type] : -1; |
418 | } |
419 | |
// Returns the list of streams discovered for the given track type. The
// reference remains valid for the lifetime of this MediaDataHolder.
const QList<MediaDataHolder::StreamInfo> &MediaDataHolder::streamInfo(
        QPlatformMediaPlayer::TrackType trackType) const
{
    Q_ASSERT(trackType < QPlatformMediaPlayer::NTrackTypes);

    return m_streamMap[trackType];
}
427 | |
428 | } // namespace QFFmpeg |
429 | |
430 | QT_END_NAMESPACE |
431 | |