// Copyright (C) 2016 The Qt Company Ltd.
// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only

#include <QtMultimedia/private/qtmultimediaglobal_p.h>
#include "qgstutils_p.h"

#include <QtCore/qdatetime.h>
#include <QtCore/qdir.h>
#include <QtCore/qbytearray.h>
#include <QtCore/qvariant.h>
#include <QtCore/qregularexpression.h>
#include <QtCore/qsize.h>
#include <QtCore/qset.h>
#include <QtCore/qstringlist.h>
#include <QtGui/qimage.h>
#include <qaudioformat.h>
#include <QtCore/qelapsedtimer.h>
#include <QtMultimedia/qvideoframeformat.h>
#include <private/qmultimediautils_p.h>

#include <gst/audio/audio.h>
#include <gst/video/video.h>

template<typename T, int N> constexpr int lengthOf(const T (&)[N]) { return N; }

QT_BEGIN_NAMESPACE


namespace {

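// GStreamer sample-format strings indexed by QAudioFormat::SampleFormat.
// Entry 0 (Unknown) has no GStreamer counterpart; the remaining names are
// picked to match the host byte order.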
static const char *audioSampleFormatNames[QAudioFormat::NSampleFormats] = {
    nullptr,
#if Q_BYTE_ORDER == Q_LITTLE_ENDIAN
    "U8",
    "S16LE",
    "S32LE",
    "F32LE"
#else
    "U8",
    "S16BE",
    "S32BE",
    "F32BE"
#endif
};

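// Maps a GStreamer format string (e.g. "S16LE") back to the matching
// QAudioFormat::SampleFormat, or Unknown if it isn't in the table above.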
static QAudioFormat::SampleFormat gstSampleFormatToSampleFormat(const char *fmt)
{
    if (fmt) {
        for (int i = 1; i < QAudioFormat::NSampleFormats; ++i) {
            if (strcmp(fmt, audioSampleFormatNames[i]))
                continue;
            return QAudioFormat::SampleFormat(i);
        }
    }
    return QAudioFormat::Unknown;
}

}

/*
  Returns the audio format for \a sample.
  If the sample doesn't carry a valid audio format, an empty QAudioFormat is returned.
*/
QAudioFormat QGstUtils::audioFormatForSample(GstSample *sample)
{
    auto caps = QGstCaps(gst_sample_get_caps(sample), QGstCaps::NeedsRef);
    if (caps.isNull())
        return {};
    return audioFormatForCaps(caps);
}

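// Reads the audio format from the first structure of \a caps. Only
// "audio/x-raw" structures with valid rate, channels and format fields
// produce a valid QAudioFormat; anything else yields a default-constructed one.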
QAudioFormat QGstUtils::audioFormatForCaps(const QGstCaps &caps)
{
    QAudioFormat format;
    QGstStructure s = caps.at(0);
    if (s.name() != "audio/x-raw")
        return format;

    auto rate = s["rate"].toInt();
    auto channels = s["channels"].toInt();
    QAudioFormat::SampleFormat fmt = gstSampleFormatToSampleFormat(s["format"].toString());
    if (!rate || !channels || fmt == QAudioFormat::Unknown)
        return format;

    format.setSampleRate(*rate);
    format.setChannelCount(*channels);
    format.setSampleFormat(fmt);

    return format;
}

/*
  Builds caps for the audio format \a format.
  Returns an empty QGstCaps if the audio format is not valid.

  \note The returned QGstCaps adopts ownership of the created caps.
*/
QGstCaps QGstUtils::capsForAudioFormat(const QAudioFormat &format)
{
    if (!format.isValid())
        return {};

    auto sampleFormat = format.sampleFormat();
    auto caps = gst_caps_new_simple(
        "audio/x-raw",
        "format"  , G_TYPE_STRING, audioSampleFormatNames[sampleFormat],
        "rate"    , G_TYPE_INT   , format.sampleRate(),
        "channels", G_TYPE_INT   , format.channelCount(),
        "layout"  , G_TYPE_STRING, "interleaved",
        nullptr);

    return QGstCaps(caps, QGstCaps::HasRef);
}

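// Collects the sample formats advertised in a GST_TYPE_LIST value (as found
// in the "format" field of audio/x-raw caps), skipping entries Qt cannot represent.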
QList<QAudioFormat::SampleFormat> QGValue::getSampleFormats() const
{
    if (!GST_VALUE_HOLDS_LIST(value))
        return {};

    QList<QAudioFormat::SampleFormat> formats;
    guint nFormats = gst_value_list_get_size(value);
    for (guint f = 0; f < nFormats; ++f) {
        QGValue v = gst_value_list_get_value(value, f);
        auto *name = v.toString();
        QAudioFormat::SampleFormat fmt = gstSampleFormatToSampleFormat(name);
        if (fmt == QAudioFormat::Unknown)
            continue;
        formats.append(fmt);
    }
    return formats;
}

namespace {

struct VideoFormat
{
    QVideoFrameFormat::PixelFormat pixelFormat;
    GstVideoFormat gstFormat;
};

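// Mapping between Qt pixel formats and their GStreamer equivalents.
// As with the audio table above, the 16-bit and 10-bit entries depend on
// the host byte order.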
static const VideoFormat qt_videoFormatLookup[] =
{
    { QVideoFrameFormat::Format_YUV420P , GST_VIDEO_FORMAT_I420  },
    { QVideoFrameFormat::Format_YUV422P , GST_VIDEO_FORMAT_Y42B  },
    { QVideoFrameFormat::Format_YV12    , GST_VIDEO_FORMAT_YV12  },
    { QVideoFrameFormat::Format_UYVY    , GST_VIDEO_FORMAT_UYVY  },
    { QVideoFrameFormat::Format_YUYV    , GST_VIDEO_FORMAT_YUY2  },
    { QVideoFrameFormat::Format_NV12    , GST_VIDEO_FORMAT_NV12  },
    { QVideoFrameFormat::Format_NV21    , GST_VIDEO_FORMAT_NV21  },
    { QVideoFrameFormat::Format_AYUV    , GST_VIDEO_FORMAT_AYUV  },
    { QVideoFrameFormat::Format_Y8      , GST_VIDEO_FORMAT_GRAY8 },
    { QVideoFrameFormat::Format_XRGB8888, GST_VIDEO_FORMAT_xRGB  },
    { QVideoFrameFormat::Format_XBGR8888, GST_VIDEO_FORMAT_xBGR  },
    { QVideoFrameFormat::Format_RGBX8888, GST_VIDEO_FORMAT_RGBx  },
    { QVideoFrameFormat::Format_BGRX8888, GST_VIDEO_FORMAT_BGRx  },
    { QVideoFrameFormat::Format_ARGB8888, GST_VIDEO_FORMAT_ARGB  },
    { QVideoFrameFormat::Format_ABGR8888, GST_VIDEO_FORMAT_ABGR  },
    { QVideoFrameFormat::Format_RGBA8888, GST_VIDEO_FORMAT_RGBA  },
    { QVideoFrameFormat::Format_BGRA8888, GST_VIDEO_FORMAT_BGRA  },
#if Q_BYTE_ORDER == Q_LITTLE_ENDIAN
    { QVideoFrameFormat::Format_Y16     , GST_VIDEO_FORMAT_GRAY16_LE },
    { QVideoFrameFormat::Format_P010    , GST_VIDEO_FORMAT_P010_10LE },
#else
    { QVideoFrameFormat::Format_Y16     , GST_VIDEO_FORMAT_GRAY16_BE },
    { QVideoFrameFormat::Format_P010    , GST_VIDEO_FORMAT_P010_10BE },
#endif
};

static int indexOfVideoFormat(QVideoFrameFormat::PixelFormat format)
{
    for (int i = 0; i < lengthOf(qt_videoFormatLookup); ++i)
        if (qt_videoFormatLookup[i].pixelFormat == format)
            return i;

    return -1;
}

static int indexOfVideoFormat(GstVideoFormat format)
{
    for (int i = 0; i < lengthOf(qt_videoFormatLookup); ++i)
        if (qt_videoFormatLookup[i].gstFormat == format)
            return i;

    return -1;
}

}

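// Builds a QVideoFrameFormat from raw video caps: resolution, pixel format
// and frame rate come from GstVideoInfo, while the colorimetry fields are
// translated to Qt's color range, color space and color transfer enums below.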
QVideoFrameFormat QGstCaps::formatForCaps(GstVideoInfo *info) const
{
    GstVideoInfo vidInfo;
    GstVideoInfo *infoPtr = info ? info : &vidInfo;

    if (gst_video_info_from_caps(infoPtr, caps)) {
        int index = indexOfVideoFormat(infoPtr->finfo->format);

        if (index != -1) {
            QVideoFrameFormat format(
                QSize(infoPtr->width, infoPtr->height),
                qt_videoFormatLookup[index].pixelFormat);

            if (infoPtr->fps_d > 0)
                format.setFrameRate(qreal(infoPtr->fps_n) / infoPtr->fps_d);

            QVideoFrameFormat::ColorRange range = QVideoFrameFormat::ColorRange_Unknown;
            switch (infoPtr->colorimetry.range) {
            case GST_VIDEO_COLOR_RANGE_UNKNOWN:
                break;
            case GST_VIDEO_COLOR_RANGE_0_255:
                range = QVideoFrameFormat::ColorRange_Full;
                break;
            case GST_VIDEO_COLOR_RANGE_16_235:
                range = QVideoFrameFormat::ColorRange_Video;
                break;
            }
            format.setColorRange(range);

            QVideoFrameFormat::ColorSpace colorSpace = QVideoFrameFormat::ColorSpace_Undefined;
            switch (infoPtr->colorimetry.matrix) {
            case GST_VIDEO_COLOR_MATRIX_UNKNOWN:
            case GST_VIDEO_COLOR_MATRIX_RGB:
            case GST_VIDEO_COLOR_MATRIX_FCC:
                break;
            case GST_VIDEO_COLOR_MATRIX_BT709:
                colorSpace = QVideoFrameFormat::ColorSpace_BT709;
                break;
            case GST_VIDEO_COLOR_MATRIX_BT601:
                colorSpace = QVideoFrameFormat::ColorSpace_BT601;
                break;
            case GST_VIDEO_COLOR_MATRIX_SMPTE240M:
                colorSpace = QVideoFrameFormat::ColorSpace_AdobeRgb;
                break;
            case GST_VIDEO_COLOR_MATRIX_BT2020:
                colorSpace = QVideoFrameFormat::ColorSpace_BT2020;
                break;
            }
            format.setColorSpace(colorSpace);

            QVideoFrameFormat::ColorTransfer transfer = QVideoFrameFormat::ColorTransfer_Unknown;
            switch (infoPtr->colorimetry.transfer) {
            case GST_VIDEO_TRANSFER_UNKNOWN:
                break;
            case GST_VIDEO_TRANSFER_GAMMA10:
                transfer = QVideoFrameFormat::ColorTransfer_Linear;
                break;
            case GST_VIDEO_TRANSFER_GAMMA22:
            case GST_VIDEO_TRANSFER_SMPTE240M:
            case GST_VIDEO_TRANSFER_SRGB:
            case GST_VIDEO_TRANSFER_ADOBERGB:
                transfer = QVideoFrameFormat::ColorTransfer_Gamma22;
                break;
            case GST_VIDEO_TRANSFER_GAMMA18:
            case GST_VIDEO_TRANSFER_GAMMA20:
                // not quite, but best fit
            case GST_VIDEO_TRANSFER_BT709:
            case GST_VIDEO_TRANSFER_BT2020_12:
                transfer = QVideoFrameFormat::ColorTransfer_BT709;
                break;
            case GST_VIDEO_TRANSFER_GAMMA28:
                transfer = QVideoFrameFormat::ColorTransfer_Gamma28;
                break;
            case GST_VIDEO_TRANSFER_LOG100:
            case GST_VIDEO_TRANSFER_LOG316:
                break;
#if GST_CHECK_VERSION(1, 18, 0)
            case GST_VIDEO_TRANSFER_SMPTE2084:
                transfer = QVideoFrameFormat::ColorTransfer_ST2084;
                break;
            case GST_VIDEO_TRANSFER_ARIB_STD_B67:
                transfer = QVideoFrameFormat::ColorTransfer_STD_B67;
                break;
            case GST_VIDEO_TRANSFER_BT2020_10:
                transfer = QVideoFrameFormat::ColorTransfer_BT709;
                break;
            case GST_VIDEO_TRANSFER_BT601:
                transfer = QVideoFrameFormat::ColorTransfer_BT601;
                break;
#endif
            }
            format.setColorTransfer(transfer);

            return format;
        }
    }
    return QVideoFrameFormat();
}

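// Appends a "video/x-raw" structure whose "format" field lists the given
// pixel formats (as GStreamer format strings); width, height and framerate
// are left as full ranges. An optional caps feature can be attached via
// \a modifier.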
void QGstCaps::addPixelFormats(const QList<QVideoFrameFormat::PixelFormat> &formats, const char *modifier)
{
    if (!gst_caps_is_writable(caps))
        caps = gst_caps_make_writable(caps);

    GValue list = {};
    g_value_init(&list, GST_TYPE_LIST);

    for (QVideoFrameFormat::PixelFormat format : formats) {
        int index = indexOfVideoFormat(format);
        if (index == -1)
            continue;
        GValue item = {};

        g_value_init(&item, G_TYPE_STRING);
        g_value_set_string(&item, gst_video_format_to_string(qt_videoFormatLookup[index].gstFormat));
        gst_value_list_append_value(&list, &item);
        g_value_unset(&item);
    }

    auto *structure = gst_structure_new("video/x-raw",
                                        "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, INT_MAX, 1,
                                        "width"    , GST_TYPE_INT_RANGE, 1, INT_MAX,
                                        "height"   , GST_TYPE_INT_RANGE, 1, INT_MAX,
                                        nullptr);
    gst_structure_set_value(structure, "format", &list);
    gst_caps_append_structure(caps, structure);
    g_value_unset(&list);

    if (modifier)
        gst_caps_set_features(caps, size() - 1, gst_caps_features_from_string(modifier));
}

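// Translates a QCameraFormat into caps: JPEG becomes an "image/jpeg"
// structure, every other pixel format becomes "video/x-raw" with the
// mapped GStreamer format string and the requested resolution.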
QGstCaps QGstCaps::fromCameraFormat(const QCameraFormat &format)
{
    QSize size = format.resolution();
    GstStructure *structure = nullptr;
    if (format.pixelFormat() == QVideoFrameFormat::Format_Jpeg) {
        structure = gst_structure_new("image/jpeg",
                                      "width" , G_TYPE_INT, size.width(),
                                      "height", G_TYPE_INT, size.height(),
                                      nullptr);
    } else {
        int index = indexOfVideoFormat(format.pixelFormat());
        if (index < 0)
            return {};
        auto gstFormat = qt_videoFormatLookup[index].gstFormat;
        structure = gst_structure_new("video/x-raw",
                                      "format", G_TYPE_STRING, gst_video_format_to_string(gstFormat),
                                      "width" , G_TYPE_INT, size.width(),
                                      "height", G_TYPE_INT, size.height(),
                                      nullptr);
    }
    auto caps = QGstCaps::create();
    gst_caps_append_structure(caps.caps, structure);
    return caps;
}

void QGstUtils::setFrameTimeStamps(QVideoFrame *frame, GstBuffer *buffer)
{
    // GStreamer uses nanoseconds, Qt uses microseconds
    qint64 startTime = GST_BUFFER_TIMESTAMP(buffer);
    if (startTime >= 0) {
        frame->setStartTime(startTime / G_GINT64_CONSTANT(1000));

        qint64 duration = GST_BUFFER_DURATION(buffer);
        if (duration >= 0)
            frame->setEndTime((startTime + duration) / G_GINT64_CONSTANT(1000));
    }
}

QSize QGstStructure::resolution() const
{
    QSize size;

    int w, h;
    if (structure &&
        gst_structure_get_int(structure, "width", &w) &&
        gst_structure_get_int(structure, "height", &h)) {
        size.rwidth() = w;
        size.rheight() = h;
    }

    return size;
}

QVideoFrameFormat::PixelFormat QGstStructure::pixelFormat() const
{
    QVideoFrameFormat::PixelFormat pixelFormat = QVideoFrameFormat::Format_Invalid;

    if (!structure)
        return pixelFormat;

    if (gst_structure_has_name(structure, "video/x-raw")) {
        const gchar *s = gst_structure_get_string(structure, "format");
        if (s) {
            GstVideoFormat format = gst_video_format_from_string(s);
            int index = indexOfVideoFormat(format);

            if (index != -1)
                pixelFormat = qt_videoFormatLookup[index].pixelFormat;
        }
    } else if (gst_structure_has_name(structure, "image/jpeg")) {
        pixelFormat = QVideoFrameFormat::Format_Jpeg;
    }

    return pixelFormat;
}

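// Computes the minimum and maximum frame rate advertised by the structure.
// Handles a plain fraction, a fraction range, a list of either, and the
// separate "min-framerate"/"max-framerate" fields.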
QGRange<float> QGstStructure::frameRateRange() const
{
    float minRate = 0.;
    float maxRate = 0.;

    if (!structure)
        return {0.f, 0.f};

    auto extractFraction = [] (const GValue *v) -> float {
        return (float)gst_value_get_fraction_numerator(v) / (float)gst_value_get_fraction_denominator(v);
    };
    auto extractFrameRate = [&] (const GValue *v) {
        auto insert = [&] (float min, float max) {
            if (max > maxRate)
                maxRate = max;
            if (min < minRate)
                minRate = min;
        };

        if (GST_VALUE_HOLDS_FRACTION(v)) {
            float rate = extractFraction(v);
            insert(rate, rate);
        } else if (GST_VALUE_HOLDS_FRACTION_RANGE(v)) {
            auto *min = gst_value_get_fraction_range_min(v);
            auto *max = gst_value_get_fraction_range_max(v);
            insert(extractFraction(min), extractFraction(max));
        }
    };

    const GValue *gstFrameRates = gst_structure_get_value(structure, "framerate");
    if (gstFrameRates) {
        if (GST_VALUE_HOLDS_LIST(gstFrameRates)) {
            guint nFrameRates = gst_value_list_get_size(gstFrameRates);
            for (guint f = 0; f < nFrameRates; ++f) {
                extractFrameRate(gst_value_list_get_value(gstFrameRates, f));
            }
        } else {
            extractFrameRate(gstFrameRates);
        }
    } else {
        const GValue *min = gst_structure_get_value(structure, "min-framerate");
        const GValue *max = gst_structure_get_value(structure, "max-framerate");
        if (min && max) {
            minRate = extractFraction(min);
            maxRate = extractFraction(max);
        }
    }

    return {minRate, maxRate};
}

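// Returns the registered video sink element factories with at least
// GST_RANK_MARGINAL; the caller is responsible for freeing the returned list
// (gst_plugin_feature_list_free()).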
GList *qt_gst_video_sinks()
{
    return gst_element_factory_list_get_elements(GST_ELEMENT_FACTORY_TYPE_SINK
                                                 | GST_ELEMENT_FACTORY_TYPE_MEDIA_VIDEO,
                                                 GST_RANK_MARGINAL);
}

QT_END_NAMESPACE