1// Copyright (C) 2021 The Qt Company Ltd.
2// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
3
4#include "qffmpegvideobuffer_p.h"
5#include "private/qvideotexturehelper_p.h"
6#include "private/qmultimediautils_p.h"
7#include "qffmpeghwaccel_p.h"
8#include "qloggingcategory.h"
9
10extern "C" {
11#include <libavutil/pixdesc.h>
12#include <libavutil/hdr_dynamic_metadata.h>
13#include <libavutil/mastering_display_metadata.h>
14}
15
16QT_BEGIN_NAMESPACE
17
18using namespace QFFmpeg;
19
20static bool isFrameFlipped(const AVFrame& frame) {
21 for (int i = 0; i < AV_NUM_DATA_POINTERS && frame.data[i]; ++i) {
22 if (frame.linesize[i] < 0)
23 return true;
24 }
25
26 return false;
27}
28
// File-local logging category for FFmpeg video-buffer diagnostics.
static Q_LOGGING_CATEGORY(qLcFFmpegVideoBuffer, "qt.multimedia.ffmpeg.videobuffer");
30
31QFFmpegVideoBuffer::QFFmpegVideoBuffer(AVFrameUPtr frame, AVRational pixelAspectRatio)
32 : QHwVideoBuffer(QVideoFrame::NoHandle),
33 m_frame(frame.get()),
34 m_size(qCalculateFrameSize(resolution: { frame->width, frame->height },
35 pixelAspectRatio: { .numerator: pixelAspectRatio.num, .denominator: pixelAspectRatio.den }))
36{
37 if (frame->hw_frames_ctx) {
38 m_hwFrame = std::move(frame);
39 m_pixelFormat = toQtPixelFormat(avPixelFormat: HWAccel::format(frame: m_hwFrame.get()));
40 return;
41 }
42
43 m_swFrame = std::move(frame);
44 m_pixelFormat = toQtPixelFormat(avPixelFormat: AVPixelFormat(m_swFrame->format));
45
46 convertSWFrame();
47}
48
// Defaulted: the AVFrameUPtr members own and release the frames.
QFFmpegVideoBuffer::~QFFmpegVideoBuffer() = default;
50
51void QFFmpegVideoBuffer::convertSWFrame()
52{
53 Q_ASSERT(m_swFrame);
54
55 const auto actualAVPixelFormat = AVPixelFormat(m_swFrame->format);
56 const auto targetAVPixelFormat = toAVPixelFormat(pixelFormat: m_pixelFormat);
57
58 const QSize actualSize(m_swFrame->width, m_swFrame->height);
59 if (actualAVPixelFormat != targetAVPixelFormat || isFrameFlipped(frame: *m_swFrame)
60 || m_size != actualSize) {
61 Q_ASSERT(toQtPixelFormat(targetAVPixelFormat) == m_pixelFormat);
62 // convert the format into something we can handle
63 SwsContextUPtr scaleContext = createSwsContext(srcSize: actualSize, srcPixFmt: actualAVPixelFormat, dstSize: m_size,
64 dstPixFmt: targetAVPixelFormat, SWS_BICUBIC);
65
66 auto newFrame = makeAVFrame();
67 newFrame->width = m_size.width();
68 newFrame->height = m_size.height();
69 newFrame->format = targetAVPixelFormat;
70 av_frame_get_buffer(frame: newFrame.get(), align: 0);
71
72 sws_scale(c: scaleContext.get(), srcSlice: m_swFrame->data, srcStride: m_swFrame->linesize, srcSliceY: 0, srcSliceH: m_swFrame->height,
73 dst: newFrame->data, dstStride: newFrame->linesize);
74 if (m_frame == m_swFrame.get())
75 m_frame = newFrame.get();
76 m_swFrame = std::move(newFrame);
77 }
78}
79
80void QFFmpegVideoBuffer::setTextureConverter(const TextureConverter &converter)
81{
82 m_textureConverter = converter;
83 m_textureConverter.init(frame: m_hwFrame.get());
84 m_type = converter.isNull() ? QVideoFrame::NoHandle : QVideoFrame::RhiTextureHandle;
85}
86
87QVideoFrameFormat::ColorSpace QFFmpegVideoBuffer::colorSpace() const
88{
89 return fromAvColorSpace(colorSpace: m_frame->colorspace);
90}
91
92QVideoFrameFormat::ColorTransfer QFFmpegVideoBuffer::colorTransfer() const
93{
94 return fromAvColorTransfer(colorTrc: m_frame->color_trc);
95}
96
97QVideoFrameFormat::ColorRange QFFmpegVideoBuffer::colorRange() const
98{
99 return fromAvColorRange(colorRange: m_frame->color_range);
100}
101
102float QFFmpegVideoBuffer::maxNits()
103{
104 float maxNits = -1;
105 for (int i = 0; i < m_frame->nb_side_data; ++i) {
106 AVFrameSideData *sd = m_frame->side_data[i];
107 // TODO: Longer term we might want to also support HDR10+ dynamic metadata
108 if (sd->type == AV_FRAME_DATA_MASTERING_DISPLAY_METADATA) {
109 auto *data = reinterpret_cast<AVMasteringDisplayMetadata *>(sd->data);
110 auto maybeLum = QFFmpeg::mul(a: qreal(10'000.), b: data->max_luminance);
111 if (maybeLum)
112 maxNits = float(maybeLum.value());
113 }
114 }
115 return maxNits;
116}
117
118QAbstractVideoBuffer::MapData QFFmpegVideoBuffer::map(QVideoFrame::MapMode mode)
119{
120 if (!m_swFrame) {
121 Q_ASSERT(m_hwFrame && m_hwFrame->hw_frames_ctx);
122 m_swFrame = makeAVFrame();
123 /* retrieve data from GPU to CPU */
124 int ret = av_hwframe_transfer_data(dst: m_swFrame.get(), src: m_hwFrame.get(), flags: 0);
125 if (ret < 0) {
126 qWarning() << "Error transferring the data to system memory:" << ret;
127 return {};
128 }
129 convertSWFrame();
130 }
131
132 m_mode = mode;
133
134 MapData mapData;
135 auto *desc = QVideoTextureHelper::textureDescription(format: pixelFormat());
136 mapData.planeCount = desc->nplanes;
137 for (int i = 0; i < mapData.planeCount; ++i) {
138 Q_ASSERT(m_swFrame->linesize[i] >= 0);
139
140 mapData.data[i] = m_swFrame->data[i];
141 mapData.bytesPerLine[i] = m_swFrame->linesize[i];
142 mapData.dataSize[i] = mapData.bytesPerLine[i]*desc->heightForPlane(height: m_swFrame->height, plane: i);
143 }
144
145 if ((mode & QVideoFrame::WriteOnly) != 0 && m_hwFrame) {
146 m_type = QVideoFrame::NoHandle;
147 m_hwFrame.reset();
148 if (m_textures) {
149 qCDebug(qLcFFmpegVideoBuffer)
150 << "Mapping of FFmpeg video buffer with write mode when "
151 "textures have been created. Visual artifacts might "
152 "happen if the frame is still in the rendering pipeline";
153 m_textures.reset();
154 }
155 }
156
157 return mapData;
158}
159
void QFFmpegVideoBuffer::unmap()
{
    // Nothing to release for SW buffers; just reset the mode so that
    // map()/unmap()/mapMode() stay consistent.
    m_mode = QVideoFrame::NotMapped;
}
166
167std::unique_ptr<QVideoFrameTextures> QFFmpegVideoBuffer::mapTextures(QRhi *)
168{
169 if (m_textures)
170 return {};
171 if (!m_hwFrame)
172 return {};
173 if (m_textureConverter.isNull()) {
174 m_textures = nullptr;
175 return {};
176 }
177
178 m_textures.reset(p: m_textureConverter.getTextures(frame: m_hwFrame.get()));
179 if (!m_textures) {
180 static thread_local int lastFormat = 0;
181 if (std::exchange(obj&: lastFormat, new_val&: m_hwFrame->format) != m_hwFrame->format) // prevent logging spam
182 qWarning() << " failed to get textures for frame; format:" << m_hwFrame->format;
183 }
184 return {};
185}
186
187quint64 QFFmpegVideoBuffer::textureHandle(QRhi *rhi, int plane) const
188{
189 return m_textures ? m_textures->textureHandle(rhi, plane) : 0;
190}
191
// Returns the Qt pixel format the frame data is (or will be) exposed in.
QVideoFrameFormat::PixelFormat QFFmpegVideoBuffer::pixelFormat() const
{
    return m_pixelFormat;
}
196
// Returns the display size (pixel-aspect-ratio corrected, see constructor).
QSize QFFmpegVideoBuffer::size() const
{
    return m_size;
}
201
202QVideoFrameFormat::PixelFormat QFFmpegVideoBuffer::toQtPixelFormat(AVPixelFormat avPixelFormat, bool *needsConversion)
203{
204 if (needsConversion)
205 *needsConversion = false;
206
207 switch (avPixelFormat) {
208 default:
209 break;
210 case AV_PIX_FMT_NONE:
211 Q_ASSERT(!"Invalid avPixelFormat!");
212 return QVideoFrameFormat::Format_Invalid;
213 case AV_PIX_FMT_ARGB:
214 return QVideoFrameFormat::Format_ARGB8888;
215 case AV_PIX_FMT_0RGB:
216 return QVideoFrameFormat::Format_XRGB8888;
217 case AV_PIX_FMT_BGRA:
218 return QVideoFrameFormat::Format_BGRA8888;
219 case AV_PIX_FMT_BGR0:
220 return QVideoFrameFormat::Format_BGRX8888;
221 case AV_PIX_FMT_ABGR:
222 return QVideoFrameFormat::Format_ABGR8888;
223 case AV_PIX_FMT_0BGR:
224 return QVideoFrameFormat::Format_XBGR8888;
225 case AV_PIX_FMT_RGBA:
226 return QVideoFrameFormat::Format_RGBA8888;
227 case AV_PIX_FMT_RGB0:
228 return QVideoFrameFormat::Format_RGBX8888;
229
230 case AV_PIX_FMT_YUV422P:
231 return QVideoFrameFormat::Format_YUV422P;
232 case AV_PIX_FMT_YUV420P:
233 return QVideoFrameFormat::Format_YUV420P;
234 case AV_PIX_FMT_YUV420P10:
235 return QVideoFrameFormat::Format_YUV420P10;
236 case AV_PIX_FMT_UYVY422:
237 return QVideoFrameFormat::Format_UYVY;
238 case AV_PIX_FMT_YUYV422:
239 return QVideoFrameFormat::Format_YUYV;
240 case AV_PIX_FMT_NV12:
241 return QVideoFrameFormat::Format_NV12;
242 case AV_PIX_FMT_NV21:
243 return QVideoFrameFormat::Format_NV21;
244 case AV_PIX_FMT_GRAY8:
245 return QVideoFrameFormat::Format_Y8;
246 case AV_PIX_FMT_GRAY16:
247 return QVideoFrameFormat::Format_Y16;
248
249 case AV_PIX_FMT_P010:
250 return QVideoFrameFormat::Format_P010;
251 case AV_PIX_FMT_P016:
252 return QVideoFrameFormat::Format_P016;
253 case AV_PIX_FMT_MEDIACODEC:
254 return QVideoFrameFormat::Format_SamplerExternalOES;
255 }
256
257 if (needsConversion)
258 *needsConversion = true;
259
260 const AVPixFmtDescriptor *descriptor = av_pix_fmt_desc_get(pix_fmt: avPixelFormat);
261
262 if (descriptor->flags & AV_PIX_FMT_FLAG_RGB)
263 return QVideoFrameFormat::Format_RGBA8888;
264
265 if (descriptor->comp[0].depth > 8)
266 return QVideoFrameFormat::Format_P016;
267 return QVideoFrameFormat::Format_YUV420P;
268}
269
// Maps a Qt pixel format onto the corresponding FFmpeg pixel format.
// Returns AV_PIX_FMT_NONE for Qt formats with no FFmpeg equivalent
// (AYUV variants, YV12, and the IMC planar layouts).
AVPixelFormat QFFmpegVideoBuffer::toAVPixelFormat(QVideoFrameFormat::PixelFormat pixelFormat)
{
    switch (pixelFormat) {
    default:
    case QVideoFrameFormat::Format_Invalid:
    case QVideoFrameFormat::Format_AYUV:
    case QVideoFrameFormat::Format_AYUV_Premultiplied:
    case QVideoFrameFormat::Format_YV12:
    case QVideoFrameFormat::Format_IMC1:
    case QVideoFrameFormat::Format_IMC2:
    case QVideoFrameFormat::Format_IMC3:
    case QVideoFrameFormat::Format_IMC4:
        return AV_PIX_FMT_NONE;
    case QVideoFrameFormat::Format_Jpeg:
        // We're using the data from the converted QImage here, which is in BGRA.
        return AV_PIX_FMT_BGRA;
    case QVideoFrameFormat::Format_ARGB8888:
        return AV_PIX_FMT_ARGB;
    // Premultiplied formats map to their non-alpha FFmpeg counterparts.
    case QVideoFrameFormat::Format_ARGB8888_Premultiplied:
    case QVideoFrameFormat::Format_XRGB8888:
        return AV_PIX_FMT_0RGB;
    case QVideoFrameFormat::Format_BGRA8888:
        return AV_PIX_FMT_BGRA;
    case QVideoFrameFormat::Format_BGRA8888_Premultiplied:
    case QVideoFrameFormat::Format_BGRX8888:
        return AV_PIX_FMT_BGR0;
    case QVideoFrameFormat::Format_ABGR8888:
        return AV_PIX_FMT_ABGR;
    case QVideoFrameFormat::Format_XBGR8888:
        return AV_PIX_FMT_0BGR;
    case QVideoFrameFormat::Format_RGBA8888:
        return AV_PIX_FMT_RGBA;
    // to be added in 6.8:
    // case QVideoFrameFormat::Format_RGBA8888_Premultiplied:
    case QVideoFrameFormat::Format_RGBX8888:
        return AV_PIX_FMT_RGB0;

    case QVideoFrameFormat::Format_YUV422P:
        return AV_PIX_FMT_YUV422P;
    case QVideoFrameFormat::Format_YUV420P:
        return AV_PIX_FMT_YUV420P;
    case QVideoFrameFormat::Format_YUV420P10:
        return AV_PIX_FMT_YUV420P10;
    case QVideoFrameFormat::Format_UYVY:
        return AV_PIX_FMT_UYVY422;
    case QVideoFrameFormat::Format_YUYV:
        return AV_PIX_FMT_YUYV422;
    case QVideoFrameFormat::Format_NV12:
        return AV_PIX_FMT_NV12;
    case QVideoFrameFormat::Format_NV21:
        return AV_PIX_FMT_NV21;
    case QVideoFrameFormat::Format_Y8:
        return AV_PIX_FMT_GRAY8;
    case QVideoFrameFormat::Format_Y16:
        return AV_PIX_FMT_GRAY16;

    case QVideoFrameFormat::Format_P010:
        return AV_PIX_FMT_P010;
    case QVideoFrameFormat::Format_P016:
        return AV_PIX_FMT_P016;

    case QVideoFrameFormat::Format_SamplerExternalOES:
        return AV_PIX_FMT_MEDIACODEC;
    }
}
335
336QT_END_NAMESPACE
337

// source: qtmultimedia/src/plugins/multimedia/ffmpeg/qffmpegvideobuffer.cpp