// Copyright (C) 2021 The Qt Company Ltd.
// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only

#include "playbackengine/qffmpegmediadataholder_p.h"

#include "qffmpegmediametadata_p.h"
#include "qffmpegmediaformatinfo_p.h"
#include "qffmpegioutils_p.h"
#include "qiodevice.h"
#include "qdatetime.h"
#include "qloggingcategory.h"

#include <math.h>
#include <optional>

extern "C" {
#include "libavutil/display.h"
}

QT_BEGIN_NAMESPACE

static Q_LOGGING_CATEGORY(qLcMediaDataHolder, "qt.multimedia.ffmpeg.mediadataholder")

namespace QFFmpeg {

static std::optional<TrackDuration> streamDuration(const AVStream &stream)
{
    if (stream.duration > 0)
        return toTrackDuration(AVStreamDuration(stream.duration), &stream);

    // In some cases FFmpeg reports a negative duration, which is definitely
    // invalid. However, the correct duration may still be read from the metadata.

    if (stream.duration < 0 && stream.duration != AV_NOPTS_VALUE) {
        qCWarning(qLcMediaDataHolder) << "AVStream duration" << stream.duration
                                      << "is invalid. Taking it from the metadata";
    }

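    // Containers such as Matroska commonly store a per-stream DURATION tag as
    // a wall-clock string (e.g. "00:04:03.123000000"). Multiplying
    // msecsSinceStartOfDay() by 1000 converts the parsed time to
    // microseconds, TrackDuration's unit.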
    if (const auto duration = av_dict_get(stream.metadata, "DURATION", nullptr, 0)) {
        const auto time = QTime::fromString(QString::fromUtf8(duration->value));
        return TrackDuration(qint64(1000) * time.msecsSinceStartOfDay());
    }

    return {};
}

static QTransform displayMatrixToTransform(const int32_t *displayMatrix)
{
    // displayMatrix is stored as
    //
    //  . -- X axis
    //  |
    //  |    | a b u |
    //  Y    | c d v |
    // axis  | x y w |
    //
    // where a, b, c, d, x, y are 16.16 fixed-point values,
    // and u, v, w are 2.30 fixed-point values.
    // Only a, b, c, d affect mirroring and rotation,
    // so it's enough to propagate them to QTransform.
    //
    // If we were interested in proper XY scales,
    // we would divide a, b, c, d by 2^16. A uniform scale doesn't
    // affect mirroring or rotation, so we don't bother.
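    //
    // For example, av_display_rotation_set(matrix, 90) encodes a pure
    // 90-degree rotation (no mirroring) as a = 0, b = 1 << 16,
    // c = -(1 << 16), d = 0.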

    auto toRotateMirrorValue = [displayMatrix](int index) {
        // toRotateScaleValue would be:
        // return displayMatrix[index] / qreal(1 << 16);
        return displayMatrix[index];
    };

    return QTransform(toRotateMirrorValue(0), toRotateMirrorValue(1),
                      toRotateMirrorValue(3), toRotateMirrorValue(4),
                      0, 0);
}

static VideoTransformation streamTransformation(const AVStream *stream)
{
    Q_ASSERT(stream);

    using SideDataSize = decltype(AVPacketSideData::size);
    constexpr SideDataSize displayMatrixSize = sizeof(int32_t) * 9;
    const AVPacketSideData *sideData = streamSideData(stream, AV_PKT_DATA_DISPLAYMATRIX);
    if (!sideData || sideData->size < displayMatrixSize)
        return {};

    const auto displayMatrix = reinterpret_cast<const int32_t *>(sideData->data);
    const QTransform transform = displayMatrixToTransform(displayMatrix);
    const VideoTransformationOpt result = qVideoTransformationFromMatrix(transform);
    if (!result) {
        qCWarning(qLcMediaDataHolder)
                << "Video stream contains malformed display matrix" << transform;
        return {};
    }
    return *result;
}

static bool colorTransferSupportsHdr(const AVStream *stream)
{
    if (!stream)
        return false;

    const AVCodecParameters *codecPar = stream->codecpar;
    if (!codecPar)
        return false;

    const QVideoFrameFormat::ColorTransfer colorTransfer = fromAvColorTransfer(codecPar->color_trc);

    // Assume that content is using HDR if the color transfer supports high
    // dynamic range. The video may still not utilize the extended range,
    // but we can't determine the actual range without decoding frames.
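    // ST2084 is the PQ transfer function (used by HDR10); STD_B67 is HLG,
    // common in broadcast HDR.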
    return colorTransfer == QVideoFrameFormat::ColorTransfer_ST2084
            || colorTransfer == QVideoFrameFormat::ColorTransfer_STD_B67;
}

VideoTransformation MediaDataHolder::transformation() const
{
    // TODO: Add QMediaMetaData::Mirrored and take the transformation from it
    // together with QMediaMetaData::Orientation:
    // int orientation = m_metaData.value(QMediaMetaData::Orientation).toInt();
    // return static_cast<QtVideo::Rotation>(orientation);

    const int streamIndex = m_currentAVStreamIndex[QPlatformMediaPlayer::VideoStream];
    if (streamIndex < 0)
        return {};

    return streamTransformation(m_context->streams[streamIndex]);
}

AVFormatContext *MediaDataHolder::avContext()
{
    return m_context.get();
}

int MediaDataHolder::currentStreamIndex(QPlatformMediaPlayer::TrackType trackType) const
{
    return m_currentAVStreamIndex[trackType];
}

static void insertMediaData(QMediaMetaData &metaData, QPlatformMediaPlayer::TrackType trackType,
                            const AVStream *stream)
{
    Q_ASSERT(stream);
    const auto *codecPar = stream->codecpar;

    switch (trackType) {
    case QPlatformMediaPlayer::VideoStream:
        metaData.insert(QMediaMetaData::VideoBitRate, (int)codecPar->bit_rate);
        metaData.insert(QMediaMetaData::VideoCodec,
                        QVariant::fromValue(QFFmpegMediaFormatInfo::videoCodecForAVCodecId(
                                codecPar->codec_id)));
        metaData.insert(QMediaMetaData::Resolution, QSize(codecPar->width, codecPar->height));
        metaData.insert(QMediaMetaData::VideoFrameRate,
                        qreal(stream->avg_frame_rate.num) / qreal(stream->avg_frame_rate.den));
        metaData.insert(QMediaMetaData::Orientation,
                        QVariant::fromValue(streamTransformation(stream).rotation));
        metaData.insert(QMediaMetaData::HasHdrContent, colorTransferSupportsHdr(stream));
        break;
    case QPlatformMediaPlayer::AudioStream:
        metaData.insert(QMediaMetaData::AudioBitRate, (int)codecPar->bit_rate);
        metaData.insert(QMediaMetaData::AudioCodec,
                        QVariant::fromValue(QFFmpegMediaFormatInfo::audioCodecForAVCodecId(
                                codecPar->codec_id)));
        break;
    default:
        break;
    }
}

QPlatformMediaPlayer::TrackType MediaDataHolder::trackTypeFromMediaType(int mediaType)
{
    switch (mediaType) {
    case AVMEDIA_TYPE_AUDIO:
        return QPlatformMediaPlayer::AudioStream;
    case AVMEDIA_TYPE_VIDEO:
        return QPlatformMediaPlayer::VideoStream;
    case AVMEDIA_TYPE_SUBTITLE:
        return QPlatformMediaPlayer::SubtitleStream;
    default:
        return QPlatformMediaPlayer::NTrackTypes;
    }
}

namespace {
QMaybe<AVFormatContextUPtr, MediaDataHolder::ContextError>
loadMedia(const QUrl &mediaUrl, QIODevice *stream, const std::shared_ptr<ICancelToken> &cancelToken)
{
    const QByteArray url = mediaUrl.toString(QUrl::PreferLocalFile).toUtf8();

    AVFormatContextUPtr context{ avformat_alloc_context() };

    if (stream) {
        if (!stream->isOpen()) {
            if (!stream->open(QIODevice::ReadOnly))
                return MediaDataHolder::ContextError{
                    QMediaPlayer::ResourceError, QLatin1String("Could not open source device.")
                };
        }

        auto seek = &seekQIODevice;

        if (!stream->isSequential()) {
            stream->seek(0);
        } else {
            context->ctx_flags |= AVFMTCTX_UNSEEKABLE;
            seek = nullptr;
        }

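        // libavformat may free and internally replace the av_malloc()ed
        // buffer handed to avio_alloc_context(); the custom AVIOContext and
        // its buffer are expected to be released by AVFormatContextUPtr's
        // deleter together with the context.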
        constexpr int bufferSize = 32768;
        unsigned char *buffer = (unsigned char *)av_malloc(bufferSize);
        context->pb = avio_alloc_context(buffer, bufferSize, false, stream, &readQIODevice, nullptr,
                                         seek);
    }

    AVDictionaryHolder dict;
    constexpr auto NetworkTimeoutUs = "5000000";
    av_dict_set(dict, "timeout", NetworkTimeoutUs, 0);

    const QByteArray protocolWhitelist = qgetenv("QT_FFMPEG_PROTOCOL_WHITELIST");
    if (!protocolWhitelist.isNull())
        av_dict_set(dict, "protocol_whitelist", protocolWhitelist.data(), 0);

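    // FFmpeg polls this callback during blocking operations (open, read,
    // seek); returning 1 aborts the operation with AVERROR_EXIT, which lets
    // another thread cancel media loading through the cancel token.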
    context->interrupt_callback.opaque = cancelToken.get();
    context->interrupt_callback.callback = [](void *opaque) {
        const auto *cancelToken = static_cast<const ICancelToken *>(opaque);
        if (cancelToken && cancelToken->isCancelled())
            return 1;
        return 0;
    };

    int ret = 0;
    {
        AVFormatContext *contextRaw = context.release();
        ret = avformat_open_input(&contextRaw, url.constData(), nullptr, dict);
        context.reset(contextRaw);
    }

    if (ret < 0) {
        auto code = QMediaPlayer::ResourceError;
        if (ret == AVERROR(EACCES))
            code = QMediaPlayer::AccessDeniedError;
        else if (ret == AVERROR(EINVAL) || ret == AVERROR_INVALIDDATA)
            code = QMediaPlayer::FormatError;

        qCWarning(qLcMediaDataHolder)
                << "Could not open media. FFmpeg error description:" << err2str(ret);

        return MediaDataHolder::ContextError{ code, QMediaPlayer::tr("Could not open file") };
    }

    ret = avformat_find_stream_info(context.get(), nullptr);
    if (ret < 0) {
        return MediaDataHolder::ContextError{
            QMediaPlayer::FormatError,
            QMediaPlayer::tr("Could not find stream information for media file")
        };
    }

    if (qLcMediaDataHolder().isInfoEnabled())
        av_dump_format(context.get(), 0, url.constData(), 0);

    return context;
}

} // namespace

MediaDataHolder::Maybe MediaDataHolder::create(const QUrl &url, QIODevice *stream,
                                               const std::shared_ptr<ICancelToken> &cancelToken)
{
    QMaybe context = loadMedia(url, stream, cancelToken);
    if (context) {
        // MediaDataHolder is wrapped in a shared pointer to interop with the signal/slot mechanism
        return QSharedPointer<MediaDataHolder>{ new MediaDataHolder{ std::move(context.value()),
                                                                     cancelToken } };
    }
    return context.error();
}
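
// A minimal usage sketch (hypothetical caller; helper names outside this
// file are illustrative, not part of the API):
//
//     std::shared_ptr<ICancelToken> token = makeCancelToken(); // hypothetical factory
//     const auto maybeHolder = MediaDataHolder::create(url, /*stream=*/nullptr, token);
//     if (maybeHolder)
//         startPlayback(maybeHolder.value());  // hypothetical consumer
//     else
//         reportError(maybeHolder.error());    // hypothetical error handler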

MediaDataHolder::MediaDataHolder(AVFormatContextUPtr context,
                                 const std::shared_ptr<ICancelToken> &cancelToken)
    : m_cancelToken{ cancelToken }
{
    Q_ASSERT(context);

    m_context = std::move(context);
    m_isSeekable = !(m_context->ctx_flags & AVFMTCTX_UNSEEKABLE);

    for (unsigned int i = 0; i < m_context->nb_streams; ++i) {

        const auto *stream = m_context->streams[i];
        const auto trackType = trackTypeFromMediaType(stream->codecpar->codec_type);

        if (trackType == QPlatformMediaPlayer::NTrackTypes)
            continue;

        if (stream->disposition & AV_DISPOSITION_ATTACHED_PIC)
            continue; // Ignore attached picture streams because we treat them as metadata

        if (stream->time_base.num <= 0 || stream->time_base.den <= 0) {
            // FFmpeg is not expected to report an invalid stream timebase
            qCWarning(qLcMediaDataHolder) << "A stream for the track type" << trackType
                                          << "has an invalid timebase:" << stream->time_base;
            continue;
        }

        auto metaData = QFFmpegMetaData::fromAVMetaData(stream->metadata);
        const bool isDefault = stream->disposition & AV_DISPOSITION_DEFAULT;

        if (trackType != QPlatformMediaPlayer::SubtitleStream) {
            insertMediaData(metaData, trackType, stream);

            if (isDefault && m_requestedStreams[trackType] < 0)
                m_requestedStreams[trackType] = m_streamMap[trackType].size();
        }

        if (auto duration = streamDuration(*stream)) {
            m_duration = qMax(m_duration, *duration);
            metaData.insert(QMediaMetaData::Duration, toUserDuration(*duration).get());
        }

        m_streamMap[trackType].append({ (int)i, isDefault, metaData });
    }

    // With some media files, streams may lack duration info; fall back to
    // FFmpeg's duration estimation for the whole context instead.
    if (m_duration == TrackDuration(0) && m_context->duration > 0ll) {
        m_duration = toTrackDuration(AVContextDuration(m_context->duration));
    }

    for (auto trackType :
         { QPlatformMediaPlayer::VideoStream, QPlatformMediaPlayer::AudioStream }) {
        auto &requestedStream = m_requestedStreams[trackType];
        auto &streamMap = m_streamMap[trackType];

        if (requestedStream < 0 && !streamMap.empty())
            requestedStream = 0;

        if (requestedStream >= 0)
            m_currentAVStreamIndex[trackType] = streamMap[requestedStream].avStreamIndex;
    }

    updateMetaData();
}

namespace {

/*!
    \internal

    Attempt to find an attached picture from the context's streams.
    This will find ID3v2 pictures on audio files, and also pictures
    attached to videos.
 */
QImage getAttachedPicture(const AVFormatContext *context)
{
    if (!context)
        return {};

    for (unsigned int i = 0; i < context->nb_streams; ++i) {
        const AVStream *stream = context->streams[i];
        if (!stream || !(stream->disposition & AV_DISPOSITION_ATTACHED_PIC))
            continue;

        const AVPacket *compressedImage = &stream->attached_pic;
        if (!compressedImage || !compressedImage->data || compressedImage->size <= 0)
            continue;

        // Feed raw compressed data to QImage::fromData, which will decompress it
        // if it is a recognized format.
        QImage image = QImage::fromData({ compressedImage->data, compressedImage->size });
        if (!image.isNull())
            return image;
    }

    return {};
}

} // namespace

void MediaDataHolder::updateMetaData()
{
    m_metaData = {};

    if (!m_context)
        return;

    m_metaData = QFFmpegMetaData::fromAVMetaData(m_context->metadata);
    m_metaData.insert(QMediaMetaData::FileFormat,
                      QVariant::fromValue(QFFmpegMediaFormatInfo::fileFormatForAVInputFormat(
                              *m_context->iformat)));
    m_metaData.insert(QMediaMetaData::Duration, toUserDuration(m_duration).get());

    if (!m_cachedThumbnail.has_value())
        m_cachedThumbnail = getAttachedPicture(m_context.get());

    if (!m_cachedThumbnail->isNull())
        m_metaData.insert(QMediaMetaData::ThumbnailImage, m_cachedThumbnail.value());

    for (auto trackType :
         { QPlatformMediaPlayer::AudioStream, QPlatformMediaPlayer::VideoStream }) {
        const auto streamIndex = m_currentAVStreamIndex[trackType];
        if (streamIndex >= 0)
            insertMediaData(m_metaData, trackType, m_context->streams[streamIndex]);
    }
}

bool MediaDataHolder::setActiveTrack(QPlatformMediaPlayer::TrackType type, int streamNumber)
{
    if (!m_context)
        return false;

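    // Any out-of-range streamNumber (including the conventional -1)
    // deactivates the track.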
    if (streamNumber < 0 || streamNumber >= m_streamMap[type].size())
        streamNumber = -1;
    if (m_requestedStreams[type] == streamNumber)
        return false;
    m_requestedStreams[type] = streamNumber;
    const int avStreamIndex = m_streamMap[type].value(streamNumber).avStreamIndex;

    const int oldIndex = m_currentAVStreamIndex[type];
    qCDebug(qLcMediaDataHolder) << ">>>>> change track" << type << "from" << oldIndex << "to"
                                << avStreamIndex;

    // TODO: maybe add additional verifications
    m_currentAVStreamIndex[type] = avStreamIndex;

    updateMetaData();

    return true;
}

int MediaDataHolder::activeTrack(QPlatformMediaPlayer::TrackType type) const
{
    return type < QPlatformMediaPlayer::NTrackTypes ? m_requestedStreams[type] : -1;
}

const QList<MediaDataHolder::StreamInfo> &MediaDataHolder::streamInfo(
        QPlatformMediaPlayer::TrackType trackType) const
{
    Q_ASSERT(trackType < QPlatformMediaPlayer::NTrackTypes);

    return m_streamMap[trackType];
}

} // namespace QFFmpeg

QT_END_NAMESPACE
| 446 |  |