/****************************************************************************
**
** Copyright (C) 2016 The Qt Company Ltd.
** Contact: https://www.qt.io/licensing/
**
** This file is part of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:LGPL$
** Commercial License Usage
** Licensees holding valid commercial Qt licenses may use this file in
** accordance with the commercial license agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and The Qt Company. For licensing terms
** and conditions see https://www.qt.io/terms-conditions. For further
** information use the contact form at https://www.qt.io/contact-us.
**
** GNU Lesser General Public License Usage
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 3 as published by the Free Software
** Foundation and appearing in the file LICENSE.LGPL3 included in the
** packaging of this file. Please review the following information to
** ensure the GNU Lesser General Public License version 3 requirements
** will be met: https://www.gnu.org/licenses/lgpl-3.0.html.
**
** GNU General Public License Usage
** Alternatively, this file may be used under the terms of the GNU
** General Public License version 2.0 or (at your option) the GNU General
** Public license version 3 or any later version approved by the KDE Free
** Qt Foundation. The licenses are as published by the Free Software
** Foundation and appearing in the file LICENSE.GPL2 and LICENSE.GPL3
** included in the packaging of this file. Please review the following
** information to ensure the GNU General Public License requirements will
** be met: https://www.gnu.org/licenses/gpl-2.0.html and
** https://www.gnu.org/licenses/gpl-3.0.html.
**
** $QT_END_LICENSE$
**
****************************************************************************/

#include <QtMultimedia/private/qtmultimediaglobal_p.h>
#include "qgstutils_p.h"

#include <QtCore/qdatetime.h>
#include <QtCore/qdir.h>
#include <QtCore/qbytearray.h>
#include <QtCore/qvariant.h>
#include <QtCore/qregularexpression.h>
#include <QtCore/qsize.h>
#include <QtCore/qset.h>
#include <QtCore/qstringlist.h>
#include <QtGui/qimage.h>
#include <qaudioformat.h>
#include <QtCore/qelapsedtimer.h>
#include <QtMultimedia/qvideosurfaceformat.h>
#include <private/qmultimediautils_p.h>

#include <gst/audio/audio.h>
#include <gst/video/video.h>

template<typename T, int N> static int lengthOf(const T (&)[N]) { return N; }

#if QT_CONFIG(linux_v4l)
#  include <private/qcore_unix_p.h>
#  include <linux/videodev2.h>
#endif

#include "qgstreamervideoinputdevicecontrol_p.h"

QT_BEGIN_NAMESPACE

//internal
static void addTagToMap(const GstTagList *list,
                        const gchar *tag,
                        gpointer user_data)
{
    QMap<QByteArray, QVariant> *map = reinterpret_cast<QMap<QByteArray, QVariant>* >(user_data);

    GValue val;
    val.g_type = 0;
    gst_tag_list_copy_value(&val, list, tag);

    switch (G_VALUE_TYPE(&val)) {
        case G_TYPE_STRING:
        {
            const gchar *str_value = g_value_get_string(&val);
            map->insert(QByteArray(tag), QString::fromUtf8(str_value));
            break;
        }
        case G_TYPE_INT:
            map->insert(QByteArray(tag), g_value_get_int(&val));
            break;
        case G_TYPE_UINT:
            map->insert(QByteArray(tag), g_value_get_uint(&val));
            break;
        case G_TYPE_LONG:
            map->insert(QByteArray(tag), qint64(g_value_get_long(&val)));
            break;
        case G_TYPE_BOOLEAN:
            map->insert(QByteArray(tag), g_value_get_boolean(&val));
            break;
        case G_TYPE_CHAR:
#if GLIB_CHECK_VERSION(2,32,0)
            map->insert(QByteArray(tag), g_value_get_schar(&val));
#else
            map->insert(QByteArray(tag), g_value_get_char(&val));
#endif
            break;
        case G_TYPE_DOUBLE:
            map->insert(QByteArray(tag), g_value_get_double(&val));
            break;
        default:
            // GST_TYPE_DATE is a function, not a constant, so pull it out of the switch
#if GST_CHECK_VERSION(1,0,0)
            if (G_VALUE_TYPE(&val) == G_TYPE_DATE) {
                const GDate *date = (const GDate *)g_value_get_boxed(&val);
#else
            if (G_VALUE_TYPE(&val) == GST_TYPE_DATE) {
                const GDate *date = gst_value_get_date(&val);
#endif
                if (g_date_valid(date)) {
                    int year = g_date_get_year(date);
                    int month = g_date_get_month(date);
                    int day = g_date_get_day(date);
                    map->insert(QByteArray(tag), QDate(year, month, day));
                    if (!map->contains("year"))
                        map->insert("year", year);
                }
#if GST_CHECK_VERSION(1,0,0)
            } else if (G_VALUE_TYPE(&val) == GST_TYPE_DATE_TIME) {
                const GstDateTime *dateTime = (const GstDateTime *)g_value_get_boxed(&val);
                int year = gst_date_time_has_year(dateTime) ? gst_date_time_get_year(dateTime) : 0;
                int month = gst_date_time_has_month(dateTime) ? gst_date_time_get_month(dateTime) : 0;
                int day = gst_date_time_has_day(dateTime) ? gst_date_time_get_day(dateTime) : 0;
                if (gst_date_time_has_time(dateTime)) {
                    int hour = gst_date_time_get_hour(dateTime);
                    int minute = gst_date_time_get_minute(dateTime);
                    int second = gst_date_time_get_second(dateTime);
                    float tz = gst_date_time_get_time_zone_offset(dateTime);
                    QDateTime dateTime(QDate(year, month, day), QTime(hour, minute, second),
                                       Qt::OffsetFromUTC, tz * 60 * 60);
                    map->insert(QByteArray(tag), dateTime);
                } else if (year > 0 && month > 0 && day > 0) {
                    map->insert(QByteArray(tag), QDate(year, month, day));
                }
                if (!map->contains("year") && year > 0)
                    map->insert("year", year);
            } else if (G_VALUE_TYPE(&val) == GST_TYPE_SAMPLE) {
                GstSample *sample = (GstSample *)g_value_get_boxed(&val);
                GstCaps* caps = gst_sample_get_caps(sample);
                if (caps && !gst_caps_is_empty(caps)) {
                    GstStructure *structure = gst_caps_get_structure(caps, 0);
                    const gchar *name = gst_structure_get_name(structure);
                    if (QByteArray(name).startsWith("image/")) {
                        GstBuffer *buffer = gst_sample_get_buffer(sample);
                        if (buffer) {
                            GstMapInfo info;
                            gst_buffer_map(buffer, &info, GST_MAP_READ);
                            map->insert(QByteArray(tag), QImage::fromData(info.data, info.size, name));
                            gst_buffer_unmap(buffer, &info);
                        }
                    }
                }
#endif
            } else if (G_VALUE_TYPE(&val) == GST_TYPE_FRACTION) {
                int nom = gst_value_get_fraction_numerator(&val);
                int denom = gst_value_get_fraction_denominator(&val);

                if (denom > 0) {
                    map->insert(QByteArray(tag), double(nom) / denom);
                }
            }
            break;
    }

    g_value_unset(&val);
}

/*!
    \class QGstUtils
    \internal
*/

/*!
    Converts a GstTagList structure to a QMap<QByteArray, QVariant>.

    Mappings to int, bool, char, string, fraction and date values are supported.
    Fraction values are converted to doubles.
*/
QMap<QByteArray, QVariant> QGstUtils::gstTagListToMap(const GstTagList *tags)
{
    QMap<QByteArray, QVariant> res;
    gst_tag_list_foreach(tags, addTagToMap, &res);

    return res;
}
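
/*
    Usage sketch (illustrative only, not part of the original file): a bus
    handler receiving a GST_MESSAGE_TAG might convert the tag list like this.
    The "title" key is just an assumed example tag.

    \code
    GstTagList *tags = nullptr;
    gst_message_parse_tag(message, &tags);
    const QMap<QByteArray, QVariant> map = QGstUtils::gstTagListToMap(tags);
    const QString title = map.value("title").toString();
    gst_tag_list_unref(tags);
    \endcode
*/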

/*!
    Returns the resolution of \a caps.
    If \a caps doesn't have a valid size, an empty QSize is returned.
*/
QSize QGstUtils::capsResolution(const GstCaps *caps)
{
    if (gst_caps_get_size(caps) == 0)
        return QSize();

    return structureResolution(gst_caps_get_structure(caps, 0));
}

/*!
    Returns the aspect-ratio-corrected resolution of \a caps.
    If \a caps doesn't have a valid size, an empty QSize is returned.
*/
QSize QGstUtils::capsCorrectedResolution(const GstCaps *caps)
{
    QSize size;

    if (caps) {
        size = capsResolution(caps);

        gint aspectNum = 0;
        gint aspectDenum = 0;
        if (!size.isEmpty() && gst_structure_get_fraction(
                    gst_caps_get_structure(caps, 0), "pixel-aspect-ratio", &aspectNum, &aspectDenum)) {
            if (aspectDenum > 0)
                size.setWidth(size.width()*aspectNum/aspectDenum);
        }
    }

    return size;
}
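
/*
    Worked example (illustrative, values assumed): for caps of 720x576 with a
    pixel-aspect-ratio of 16/15 the corrected width becomes
    720 * 16 / 15 = 768, i.e. QSize(768, 576); the height is left untouched.
*/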


#if GST_CHECK_VERSION(1,0,0)
namespace {

struct AudioFormat
{
    GstAudioFormat format;
    QAudioFormat::SampleType sampleType;
    QAudioFormat::Endian byteOrder;
    int sampleSize;
};
static const AudioFormat qt_audioLookup[] =
{
    { GST_AUDIO_FORMAT_S8   , QAudioFormat::SignedInt  , QAudioFormat::LittleEndian, 8  },
    { GST_AUDIO_FORMAT_U8   , QAudioFormat::UnSignedInt, QAudioFormat::LittleEndian, 8  },
    { GST_AUDIO_FORMAT_S16LE, QAudioFormat::SignedInt  , QAudioFormat::LittleEndian, 16 },
    { GST_AUDIO_FORMAT_S16BE, QAudioFormat::SignedInt  , QAudioFormat::BigEndian   , 16 },
    { GST_AUDIO_FORMAT_U16LE, QAudioFormat::UnSignedInt, QAudioFormat::LittleEndian, 16 },
    { GST_AUDIO_FORMAT_U16BE, QAudioFormat::UnSignedInt, QAudioFormat::BigEndian   , 16 },
    { GST_AUDIO_FORMAT_S32LE, QAudioFormat::SignedInt  , QAudioFormat::LittleEndian, 32 },
    { GST_AUDIO_FORMAT_S32BE, QAudioFormat::SignedInt  , QAudioFormat::BigEndian   , 32 },
    { GST_AUDIO_FORMAT_U32LE, QAudioFormat::UnSignedInt, QAudioFormat::LittleEndian, 32 },
    { GST_AUDIO_FORMAT_U32BE, QAudioFormat::UnSignedInt, QAudioFormat::BigEndian   , 32 },
    { GST_AUDIO_FORMAT_S24LE, QAudioFormat::SignedInt  , QAudioFormat::LittleEndian, 24 },
    { GST_AUDIO_FORMAT_S24BE, QAudioFormat::SignedInt  , QAudioFormat::BigEndian   , 24 },
    { GST_AUDIO_FORMAT_U24LE, QAudioFormat::UnSignedInt, QAudioFormat::LittleEndian, 24 },
    { GST_AUDIO_FORMAT_U24BE, QAudioFormat::UnSignedInt, QAudioFormat::BigEndian   , 24 },
    { GST_AUDIO_FORMAT_F32LE, QAudioFormat::Float      , QAudioFormat::LittleEndian, 32 },
    { GST_AUDIO_FORMAT_F32BE, QAudioFormat::Float      , QAudioFormat::BigEndian   , 32 },
    { GST_AUDIO_FORMAT_F64LE, QAudioFormat::Float      , QAudioFormat::LittleEndian, 64 },
    { GST_AUDIO_FORMAT_F64BE, QAudioFormat::Float      , QAudioFormat::BigEndian   , 64 }
};

}
#endif

/*!
    Returns the audio format for \a caps.
    If \a caps doesn't have a valid audio format, an empty QAudioFormat is returned.
*/

QAudioFormat QGstUtils::audioFormatForCaps(const GstCaps *caps)
{
    QAudioFormat format;
#if GST_CHECK_VERSION(1,0,0)
    GstAudioInfo info;
    if (gst_audio_info_from_caps(&info, caps)) {
        for (int i = 0; i < lengthOf(qt_audioLookup); ++i) {
            if (qt_audioLookup[i].format != info.finfo->format)
                continue;

            format.setSampleType(qt_audioLookup[i].sampleType);
            format.setByteOrder(qt_audioLookup[i].byteOrder);
            format.setSampleSize(qt_audioLookup[i].sampleSize);
            format.setSampleRate(info.rate);
            format.setChannelCount(info.channels);
            format.setCodec(QStringLiteral("audio/pcm"));

            return format;
        }
    }
#else
    const GstStructure *structure = gst_caps_get_structure(caps, 0);

    if (qstrcmp(gst_structure_get_name(structure), "audio/x-raw-int") == 0) {

        format.setCodec("audio/pcm");

        int endianness = 0;
        gst_structure_get_int(structure, "endianness", &endianness);
        if (endianness == 1234)
            format.setByteOrder(QAudioFormat::LittleEndian);
        else if (endianness == 4321)
            format.setByteOrder(QAudioFormat::BigEndian);

        gboolean isSigned = FALSE;
        gst_structure_get_boolean(structure, "signed", &isSigned);
        if (isSigned)
            format.setSampleType(QAudioFormat::SignedInt);
        else
            format.setSampleType(QAudioFormat::UnSignedInt);

        // Number of bits allocated per sample.
        int width = 0;
        gst_structure_get_int(structure, "width", &width);

        // The number of bits used per sample. This must be less than or equal to the width.
        int depth = 0;
        gst_structure_get_int(structure, "depth", &depth);

        if (width != depth) {
            // Unsupported sample layout.
            return QAudioFormat();
        }
        format.setSampleSize(width);

        int rate = 0;
        gst_structure_get_int(structure, "rate", &rate);
        format.setSampleRate(rate);

        int channels = 0;
        gst_structure_get_int(structure, "channels", &channels);
        format.setChannelCount(channels);

    } else if (qstrcmp(gst_structure_get_name(structure), "audio/x-raw-float") == 0) {

        format.setCodec("audio/pcm");

        int endianness = 0;
        gst_structure_get_int(structure, "endianness", &endianness);
        if (endianness == 1234)
            format.setByteOrder(QAudioFormat::LittleEndian);
        else if (endianness == 4321)
            format.setByteOrder(QAudioFormat::BigEndian);

        format.setSampleType(QAudioFormat::Float);

        int width = 0;
        gst_structure_get_int(structure, "width", &width);

        format.setSampleSize(width);

        int rate = 0;
        gst_structure_get_int(structure, "rate", &rate);
        format.setSampleRate(rate);

        int channels = 0;
        gst_structure_get_int(structure, "channels", &channels);
        format.setChannelCount(channels);

    } else {
        return QAudioFormat();
    }
#endif
    return format;
}
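
/*
    Usage sketch (illustrative only, assumptions: an appsink named appSink and
    a sample pulled from it): maps the caps of a sample to a QAudioFormat.

    \code
    GstSample *sample = gst_app_sink_pull_sample(appSink);
    GstCaps *caps = gst_sample_get_caps(sample);
    const QAudioFormat fmt = QGstUtils::audioFormatForCaps(caps);
    if (fmt.isValid())
        qDebug() << fmt.sampleRate() << fmt.channelCount() << fmt.sampleSize();
    gst_sample_unref(sample);
    \endcode
*/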

#if GST_CHECK_VERSION(1,0,0)
/*
    Returns the audio format for a sample.
    If the sample doesn't have a valid audio format, an empty QAudioFormat is returned.
*/
QAudioFormat QGstUtils::audioFormatForSample(GstSample *sample)
{
    GstCaps* caps = gst_sample_get_caps(sample);
    if (!caps)
        return QAudioFormat();

    return QGstUtils::audioFormatForCaps(caps);
}
#else
/*!
    Returns the audio format for a buffer.
    If the buffer doesn't have a valid audio format, an empty QAudioFormat is returned.
*/
QAudioFormat QGstUtils::audioFormatForBuffer(GstBuffer *buffer)
{
    GstCaps* caps = gst_buffer_get_caps(buffer);
    if (!caps)
        return QAudioFormat();

    QAudioFormat format = QGstUtils::audioFormatForCaps(caps);
    gst_caps_unref(caps);
    return format;
}
#endif

/*!
    Builds GstCaps for an audio format.
    Returns 0 if the audio format is not valid.
    The caller must unref the returned GstCaps.
*/

GstCaps *QGstUtils::capsForAudioFormat(const QAudioFormat &format)
{
    if (!format.isValid())
        return 0;

#if GST_CHECK_VERSION(1,0,0)
    const QAudioFormat::SampleType sampleType = format.sampleType();
    const QAudioFormat::Endian byteOrder = format.byteOrder();
    const int sampleSize = format.sampleSize();

    for (int i = 0; i < lengthOf(qt_audioLookup); ++i) {
        if (qt_audioLookup[i].sampleType != sampleType
                || qt_audioLookup[i].byteOrder != byteOrder
                || qt_audioLookup[i].sampleSize != sampleSize) {
            continue;
        }

        return gst_caps_new_simple(
                    "audio/x-raw",
                    "format"  , G_TYPE_STRING, gst_audio_format_to_string(qt_audioLookup[i].format),
                    "rate"    , G_TYPE_INT   , format.sampleRate(),
                    "channels", G_TYPE_INT   , format.channelCount(),
                    nullptr);
    }
    return 0;
#else
    GstStructure *structure = 0;

    if (format.isValid()) {
        if (format.sampleType() == QAudioFormat::SignedInt || format.sampleType() == QAudioFormat::UnSignedInt) {
            structure = gst_structure_new("audio/x-raw-int", nullptr);
        } else if (format.sampleType() == QAudioFormat::Float) {
            structure = gst_structure_new("audio/x-raw-float", nullptr);
        }
    }

    GstCaps *caps = 0;

    if (structure) {
        gst_structure_set(structure, "rate", G_TYPE_INT, format.sampleRate(), nullptr);
        gst_structure_set(structure, "channels", G_TYPE_INT, format.channelCount(), nullptr);
        gst_structure_set(structure, "width", G_TYPE_INT, format.sampleSize(), nullptr);
        gst_structure_set(structure, "depth", G_TYPE_INT, format.sampleSize(), nullptr);

        if (format.byteOrder() == QAudioFormat::LittleEndian)
            gst_structure_set(structure, "endianness", G_TYPE_INT, 1234, nullptr);
        else if (format.byteOrder() == QAudioFormat::BigEndian)
            gst_structure_set(structure, "endianness", G_TYPE_INT, 4321, nullptr);

        if (format.sampleType() == QAudioFormat::SignedInt)
            gst_structure_set(structure, "signed", G_TYPE_BOOLEAN, TRUE, nullptr);
        else if (format.sampleType() == QAudioFormat::UnSignedInt)
            gst_structure_set(structure, "signed", G_TYPE_BOOLEAN, FALSE, nullptr);

        caps = gst_caps_new_empty();
        Q_ASSERT(caps);
        gst_caps_append_structure(caps, structure);
    }

    return caps;
#endif
}
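
/*
    Usage sketch (illustrative only), showing the unref responsibility noted
    above. The 44.1 kHz stereo S16 format and the capsFilter element are
    assumed for the example.

    \code
    QAudioFormat fmt;
    fmt.setSampleRate(44100);
    fmt.setChannelCount(2);
    fmt.setSampleSize(16);
    fmt.setSampleType(QAudioFormat::SignedInt);
    fmt.setByteOrder(QAudioFormat::LittleEndian);
    fmt.setCodec(QStringLiteral("audio/pcm"));

    if (GstCaps *caps = QGstUtils::capsForAudioFormat(fmt)) {
        g_object_set(G_OBJECT(capsFilter), "caps", caps, nullptr); // capsFilter is assumed
        gst_caps_unref(caps);
    }
    \endcode
*/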

void QGstUtils::initializeGst()
{
    static bool initialized = false;
    if (!initialized) {
        initialized = true;
        gst_init(nullptr, nullptr);
    }
}

namespace {
const char* getCodecAlias(const QString &codec)
{
    if (codec.startsWith(QLatin1String("avc1.")))
        return "video/x-h264";

    if (codec.startsWith(QLatin1String("mp4a.")))
        return "audio/mpeg4";

    if (codec.startsWith(QLatin1String("mp4v.20.")))
        return "video/mpeg4";

    if (codec == QLatin1String("samr"))
        return "audio/amr";

    return 0;
}

const char* getMimeTypeAlias(const QString &mimeType)
{
    if (mimeType == QLatin1String("video/mp4"))
        return "video/mpeg4";

    if (mimeType == QLatin1String("audio/mp4"))
        return "audio/mpeg4";

    if (mimeType == QLatin1String("video/ogg")
        || mimeType == QLatin1String("audio/ogg"))
        return "application/ogg";

    return 0;
}
}

QMultimedia::SupportEstimate QGstUtils::hasSupport(const QString &mimeType,
                                                   const QStringList &codecs,
                                                   const QSet<QString> &supportedMimeTypeSet)
{
    if (supportedMimeTypeSet.isEmpty())
        return QMultimedia::NotSupported;

    QString mimeTypeLowcase = mimeType.toLower();
    bool containsMimeType = supportedMimeTypeSet.contains(mimeTypeLowcase);
    if (!containsMimeType) {
        const char* mimeTypeAlias = getMimeTypeAlias(mimeTypeLowcase);
        containsMimeType = supportedMimeTypeSet.contains(QLatin1String(mimeTypeAlias));
        if (!containsMimeType) {
            containsMimeType = supportedMimeTypeSet.contains(QLatin1String("video/") + mimeTypeLowcase)
                               || supportedMimeTypeSet.contains(QLatin1String("video/x-") + mimeTypeLowcase)
                               || supportedMimeTypeSet.contains(QLatin1String("audio/") + mimeTypeLowcase)
                               || supportedMimeTypeSet.contains(QLatin1String("audio/x-") + mimeTypeLowcase);
        }
    }

    int supportedCodecCount = 0;
    for (const QString &codec : codecs) {
        QString codecLowcase = codec.toLower();
        const char* codecAlias = getCodecAlias(codecLowcase);
        if (codecAlias) {
            if (supportedMimeTypeSet.contains(QLatin1String(codecAlias)))
                supportedCodecCount++;
        } else if (supportedMimeTypeSet.contains(QLatin1String("video/") + codecLowcase)
                   || supportedMimeTypeSet.contains(QLatin1String("video/x-") + codecLowcase)
                   || supportedMimeTypeSet.contains(QLatin1String("audio/") + codecLowcase)
                   || supportedMimeTypeSet.contains(QLatin1String("audio/x-") + codecLowcase)) {
            supportedCodecCount++;
        }
    }
    if (supportedCodecCount > 0 && supportedCodecCount == codecs.size())
        return QMultimedia::ProbablySupported;

    if (supportedCodecCount == 0 && !containsMimeType)
        return QMultimedia::NotSupported;

    return QMultimedia::MaybeSupported;
}
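
/*
    Illustrative call (values assumed): with a supported set containing
    "video/x-h264" and "audio/mpeg4", asking about "video/mp4" with codecs
    {"avc1.42E01E", "mp4a.40.2"} resolves both codecs through the alias table
    above, so the estimate is QMultimedia::ProbablySupported.
*/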

namespace {

typedef QHash<GstElementFactory *, QVector<QGstUtils::CameraInfo> > FactoryCameraInfoMap;

Q_GLOBAL_STATIC(FactoryCameraInfoMap, qt_camera_device_info);

}

QVector<QGstUtils::CameraInfo> QGstUtils::enumerateCameras(GstElementFactory *factory)
{
    static QElapsedTimer camerasCacheAgeTimer;
    if (camerasCacheAgeTimer.isValid() && camerasCacheAgeTimer.elapsed() > 500) // ms
        qt_camera_device_info()->clear();

    FactoryCameraInfoMap::const_iterator it = qt_camera_device_info()->constFind(factory);
    if (it != qt_camera_device_info()->constEnd())
        return *it;

    QVector<CameraInfo> &devices = (*qt_camera_device_info())[factory];

    if (factory) {
        bool hasVideoSource = false;

        const GType type = gst_element_factory_get_element_type(factory);
        GObjectClass * const objectClass = type
                ? static_cast<GObjectClass *>(g_type_class_ref(type))
                : 0;
        if (objectClass) {
            if (g_object_class_find_property(objectClass, "camera-device")) {
                const CameraInfo primary = {
                    QStringLiteral("primary"),
                    QGstreamerVideoInputDeviceControl::primaryCamera(),
                    0,
                    QCamera::BackFace,
                    QByteArray()
                };
                const CameraInfo secondary = {
                    QStringLiteral("secondary"),
                    QGstreamerVideoInputDeviceControl::secondaryCamera(),
                    0,
                    QCamera::FrontFace,
                    QByteArray()
                };

                devices.append(primary);
                devices.append(secondary);

                GstElement *camera = g_object_class_find_property(objectClass, "sensor-mount-angle")
                        ? gst_element_factory_create(factory, 0)
                        : 0;
                if (camera) {
                    if (gst_element_set_state(camera, GST_STATE_READY) != GST_STATE_CHANGE_SUCCESS) {
                        // no-op
                    } else for (int i = 0; i < 2; ++i) {
                        gint orientation = 0;
                        g_object_set(G_OBJECT(camera), "camera-device", i, nullptr);
                        g_object_get(G_OBJECT(camera), "sensor-mount-angle", &orientation, nullptr);

                        devices[i].orientation = (720 - orientation) % 360;
                    }
                    gst_element_set_state(camera, GST_STATE_NULL);
                    gst_object_unref(GST_OBJECT(camera));

                }
            } else if (g_object_class_find_property(objectClass, "video-source")) {
                hasVideoSource = true;
            }

            g_type_class_unref(objectClass);
        }

        if (!devices.isEmpty() || !hasVideoSource) {
            camerasCacheAgeTimer.restart();
            return devices;
        }
    }

#if QT_CONFIG(linux_v4l)
    QDir devDir(QStringLiteral("/dev"));
    devDir.setFilter(QDir::System);

    const QFileInfoList entries = devDir.entryInfoList(QStringList()
            << QStringLiteral("video*"));

    for (const QFileInfo &entryInfo : entries) {
        //qDebug() << "Try" << entryInfo.filePath();

        int fd = qt_safe_open(entryInfo.filePath().toLatin1().constData(), O_RDWR);
        if (fd == -1)
            continue;

        bool isCamera = false;

        v4l2_input input;
        memset(&input, 0, sizeof(input));
        for (; ::ioctl(fd, VIDIOC_ENUMINPUT, &input) >= 0; ++input.index) {
            if (input.type == V4L2_INPUT_TYPE_CAMERA || input.type == 0) {
                const int ret = ::ioctl(fd, VIDIOC_S_INPUT, &input.index);
                isCamera = (ret == 0 || errno == ENOTTY || errno == EBUSY);
                break;
            }
        }

        if (isCamera) {
            // find out its driver "name"
            QByteArray driver;
            QString name;
            struct v4l2_capability vcap;
            memset(&vcap, 0, sizeof(struct v4l2_capability));

            if (ioctl(fd, VIDIOC_QUERYCAP, &vcap) != 0) {
                name = entryInfo.fileName();
            } else {
                driver = QByteArray((const char*)vcap.driver);
                name = QString::fromUtf8((const char*)vcap.card);
                if (name.isEmpty())
                    name = entryInfo.fileName();
            }
            //qDebug() << "found camera: " << name;


            CameraInfo device = {
                entryInfo.absoluteFilePath(),
                name,
                0,
                QCamera::UnspecifiedPosition,
                driver
            };
            devices.append(device);
        }
        qt_safe_close(fd);
    }
    camerasCacheAgeTimer.restart();
#endif // linux_v4l

#if GST_CHECK_VERSION(1,4,0) && (defined(Q_OS_WIN) || defined(Q_OS_MACOS))
    if (!devices.isEmpty())
        return devices;

#if defined(Q_OS_WIN)
    const char *propName = "device-path";
    auto deviceDesc = [](GValue *value) {
        gchar *desc = g_value_dup_string(value);
        const QString id = QLatin1String(desc);
        g_free(desc);
        return id;
    };
#elif defined(Q_OS_MACOS)
    const char *propName = "device-index";
    auto deviceDesc = [](GValue *value) {
        return QString::number(g_value_get_int(value));
    };
#endif

    QGstUtils::initializeGst();
    GstDeviceMonitor *monitor = gst_device_monitor_new();
    auto caps = gst_caps_new_empty_simple("video/x-raw");
    gst_device_monitor_add_filter(monitor, "Video/Source", caps);
    gst_caps_unref(caps);

    GList *devs = gst_device_monitor_get_devices(monitor);
    while (devs) {
        GstDevice *dev = reinterpret_cast<GstDevice*>(devs->data);
        GstElement *element = gst_device_create_element(dev, nullptr);
        if (element) {
            gchar *name = gst_device_get_display_name(dev);
            const QString deviceName = QLatin1String(name);
            g_free(name);
            GParamSpec *prop = g_object_class_find_property(G_OBJECT_GET_CLASS(element), propName);
            if (prop) {
                GValue value = G_VALUE_INIT;
                g_value_init(&value, prop->value_type);
                g_object_get_property(G_OBJECT(element), prop->name, &value);
                const QString deviceId = deviceDesc(&value);
                g_value_unset(&value);

                CameraInfo device = {
                    deviceId,
                    deviceName,
                    0,
                    QCamera::UnspecifiedPosition,
                    QByteArray()
                };

                devices.append(device);
            }

            gst_object_unref(element);
        }

        gst_object_unref(dev);
        devs = g_list_delete_link(devs, devs);
    }
    gst_object_unref(monitor);
#endif // GST_CHECK_VERSION(1,4,0) && (defined(Q_OS_WIN) || defined(Q_OS_MACOS))

    return devices;
}

QList<QByteArray> QGstUtils::cameraDevices(GstElementFactory * factory)
{
    QList<QByteArray> devices;

    const auto cameras = enumerateCameras(factory);
    devices.reserve(cameras.size());
    for (const CameraInfo &camera : cameras)
        devices.append(camera.name.toUtf8());

    return devices;
}

QString QGstUtils::cameraDescription(const QString &device, GstElementFactory * factory)
{
    const auto cameras = enumerateCameras(factory);
    for (const CameraInfo &camera : cameras) {
        if (camera.name == device)
            return camera.description;
    }
    return QString();
}

QCamera::Position QGstUtils::cameraPosition(const QString &device, GstElementFactory * factory)
{
    const auto cameras = enumerateCameras(factory);
    for (const CameraInfo &camera : cameras) {
        if (camera.name == device)
            return camera.position;
    }
    return QCamera::UnspecifiedPosition;
}

int QGstUtils::cameraOrientation(const QString &device, GstElementFactory * factory)
{
    const auto cameras = enumerateCameras(factory);
    for (const CameraInfo &camera : cameras) {
        if (camera.name == device)
            return camera.orientation;
    }
    return 0;
}

QByteArray QGstUtils::cameraDriver(const QString &device, GstElementFactory *factory)
{
    const auto cameras = enumerateCameras(factory);
    for (const CameraInfo &camera : cameras) {
        if (camera.name == device)
            return camera.driver;
    }
    return QByteArray();
}

QSet<QString> QGstUtils::supportedMimeTypes(bool (*isValidFactory)(GstElementFactory *factory))
{
    QSet<QString> supportedMimeTypes;

    //enumerate supported mime types
    gst_init(nullptr, nullptr);

#if GST_CHECK_VERSION(1,0,0)
    GstRegistry *registry = gst_registry_get();
    GList *orig_plugins = gst_registry_get_plugin_list(registry);
#else
    GstRegistry *registry = gst_registry_get_default();
    GList *orig_plugins = gst_default_registry_get_plugin_list();
#endif
    for (GList *plugins = orig_plugins; plugins; plugins = g_list_next(plugins)) {
        GstPlugin *plugin = (GstPlugin *) (plugins->data);
#if GST_CHECK_VERSION(1,0,0)
        if (GST_OBJECT_FLAG_IS_SET(GST_OBJECT(plugin), GST_PLUGIN_FLAG_BLACKLISTED))
            continue;
#else
        if (plugin->flags & (1<<1)) //GST_PLUGIN_FLAG_BLACKLISTED
            continue;
#endif

        GList *orig_features = gst_registry_get_feature_list_by_plugin(
                    registry, gst_plugin_get_name(plugin));
        for (GList *features = orig_features; features; features = g_list_next(features)) {
            if (G_UNLIKELY(features->data == nullptr))
                continue;

            GstPluginFeature *feature = GST_PLUGIN_FEATURE(features->data);
            GstElementFactory *factory;

            if (GST_IS_TYPE_FIND_FACTORY(feature)) {
                QString name(QLatin1String(gst_plugin_feature_get_name(feature)));
                if (name.contains(QLatin1Char('/'))) //filter out any string without '/' which is obviously not a mime type
                    supportedMimeTypes.insert(name.toLower());
                continue;
            } else if (!GST_IS_ELEMENT_FACTORY (feature)
                       || !(factory = GST_ELEMENT_FACTORY(gst_plugin_feature_load(feature)))) {
                continue;
            } else if (!isValidFactory(factory)) {
                // Do nothing
            } else for (const GList *pads = gst_element_factory_get_static_pad_templates(factory);
                        pads;
                        pads = g_list_next(pads)) {
                GstStaticPadTemplate *padtemplate = static_cast<GstStaticPadTemplate *>(pads->data);

                if (padtemplate->direction == GST_PAD_SINK && padtemplate->static_caps.string) {
                    GstCaps *caps = gst_static_caps_get(&padtemplate->static_caps);
                    if (gst_caps_is_any(caps) || gst_caps_is_empty(caps)) {
                    } else for (guint i = 0; i < gst_caps_get_size(caps); i++) {
                        GstStructure *structure = gst_caps_get_structure(caps, i);
                        QString nameLowcase = QString::fromLatin1(gst_structure_get_name(structure)).toLower();

                        supportedMimeTypes.insert(nameLowcase);
                        if (nameLowcase.contains(QLatin1String("mpeg"))) {
                            //Because the mpeg version number is only included in the detail
                            //description, it is necessary to manually extract this information
                            //in order to match the mime type of mpeg4.
                            const GValue *value = gst_structure_get_value(structure, "mpegversion");
                            if (value) {
                                gchar *str = gst_value_serialize(value);
                                QString versions = QLatin1String(str);
                                const QStringList elements = versions.split(QRegularExpression(QLatin1String("\\D+")), Qt::SkipEmptyParts);
                                for (const QString &e : elements)
                                    supportedMimeTypes.insert(nameLowcase + e);
                                g_free(str);
                            }
                        }
                    }
                }
            }
            gst_object_unref(factory);
        }
        gst_plugin_feature_list_free(orig_features);
    }
    gst_plugin_list_free(orig_plugins);

#if defined QT_SUPPORTEDMIMETYPES_DEBUG
    QStringList list = supportedMimeTypes.toList();
    list.sort();
    if (qgetenv("QT_DEBUG_PLUGINS").toInt() > 0) {
        for (const QString &type : qAsConst(list))
            qDebug() << type;
    }
#endif
    return supportedMimeTypes;
}

#if GST_CHECK_VERSION(1, 0, 0)
namespace {

struct ColorFormat { QImage::Format imageFormat; GstVideoFormat gstFormat; };
static const ColorFormat qt_colorLookup[] =
{
    { QImage::Format_RGBX8888, GST_VIDEO_FORMAT_RGBx  },
    { QImage::Format_RGBA8888, GST_VIDEO_FORMAT_RGBA  },
    { QImage::Format_RGB888  , GST_VIDEO_FORMAT_RGB   },
    { QImage::Format_RGB16   , GST_VIDEO_FORMAT_RGB16 }
};

}
#endif

#if GST_CHECK_VERSION(1,0,0)
QImage QGstUtils::bufferToImage(GstBuffer *buffer, const GstVideoInfo &videoInfo)
#else
QImage QGstUtils::bufferToImage(GstBuffer *buffer)
#endif
{
    QImage img;

#if GST_CHECK_VERSION(1,0,0)
    GstVideoInfo info = videoInfo;
    GstVideoFrame frame;
    if (!gst_video_frame_map(&frame, &info, buffer, GST_MAP_READ))
        return img;
#else
    GstCaps *caps = gst_buffer_get_caps(buffer);
    if (!caps)
        return img;

    GstStructure *structure = gst_caps_get_structure(caps, 0);
    gint width = 0;
    gint height = 0;

    if (!structure
        || !gst_structure_get_int(structure, "width", &width)
        || !gst_structure_get_int(structure, "height", &height)
        || width <= 0
        || height <= 0) {
        gst_caps_unref(caps);
        return img;
    }
    gst_caps_unref(caps);
#endif

#if GST_CHECK_VERSION(1,0,0)
    if (videoInfo.finfo->format == GST_VIDEO_FORMAT_I420) {
        const int width = videoInfo.width;
        const int height = videoInfo.height;

        const int stride[] = { frame.info.stride[0], frame.info.stride[1], frame.info.stride[2] };
        const uchar *data[] = {
            static_cast<const uchar *>(frame.data[0]),
            static_cast<const uchar *>(frame.data[1]),
            static_cast<const uchar *>(frame.data[2])
        };
#else
    if (qstrcmp(gst_structure_get_name(structure), "video/x-raw-yuv") == 0) {
        const int stride[] = { width, width / 2, width / 2 };
        const uchar *data[] = {
            (const uchar *)buffer->data,
            (const uchar *)buffer->data + width * height,
            (const uchar *)buffer->data + width * height * 5 / 4
        };
#endif
        img = QImage(width/2, height/2, QImage::Format_RGB32);

        for (int y=0; y<height; y+=2) {
            const uchar *yLine = data[0] + (y * stride[0]);
            const uchar *uLine = data[1] + (y * stride[1] / 2);
            const uchar *vLine = data[2] + (y * stride[2] / 2);

            for (int x=0; x<width; x+=2) {
                const qreal Y = 1.164*(yLine[x]-16);
                const int U = uLine[x/2]-128;
                const int V = vLine[x/2]-128;

                int b = qBound(0, int(Y + 2.018*U), 255);
                int g = qBound(0, int(Y - 0.813*V - 0.391*U), 255);
                int r = qBound(0, int(Y + 1.596*V), 255);

                img.setPixel(x/2, y/2, qRgb(r,g,b));
            }
        }
#if GST_CHECK_VERSION(1,0,0)
    } else for (int i = 0; i < lengthOf(qt_colorLookup); ++i) {
        if (qt_colorLookup[i].gstFormat != videoInfo.finfo->format)
            continue;

        const QImage image(
                    static_cast<const uchar *>(frame.data[0]),
                    videoInfo.width,
                    videoInfo.height,
                    frame.info.stride[0],
                    qt_colorLookup[i].imageFormat);
        img = image;
        img.detach();

        break;
    }

    gst_video_frame_unmap(&frame);
#else
    } else if (qstrcmp(gst_structure_get_name(structure), "video/x-raw-rgb") == 0) {
        QImage::Format format = QImage::Format_Invalid;
        int bpp = 0;
        gst_structure_get_int(structure, "bpp", &bpp);

        if (bpp == 24)
            format = QImage::Format_RGB888;
        else if (bpp == 32)
            format = QImage::Format_RGB32;

        if (format != QImage::Format_Invalid) {
            img = QImage((const uchar *)buffer->data,
                         width,
                         height,
                         format);
            img.bits(); //detach
        }
    }
#endif
    return img;
}


namespace {

#if GST_CHECK_VERSION(1,0,0)

struct VideoFormat
{
    QVideoFrame::PixelFormat pixelFormat;
    GstVideoFormat gstFormat;
};

static const VideoFormat qt_videoFormatLookup[] =
{
    { QVideoFrame::Format_YUV420P, GST_VIDEO_FORMAT_I420 },
    { QVideoFrame::Format_YUV422P, GST_VIDEO_FORMAT_Y42B },
    { QVideoFrame::Format_YV12   , GST_VIDEO_FORMAT_YV12 },
    { QVideoFrame::Format_UYVY   , GST_VIDEO_FORMAT_UYVY },
    { QVideoFrame::Format_YUYV   , GST_VIDEO_FORMAT_YUY2 },
    { QVideoFrame::Format_NV12   , GST_VIDEO_FORMAT_NV12 },
    { QVideoFrame::Format_NV21   , GST_VIDEO_FORMAT_NV21 },
    { QVideoFrame::Format_AYUV444, GST_VIDEO_FORMAT_AYUV },
#if Q_BYTE_ORDER == Q_LITTLE_ENDIAN
    { QVideoFrame::Format_RGB32 ,  GST_VIDEO_FORMAT_BGRx },
    { QVideoFrame::Format_BGR32 ,  GST_VIDEO_FORMAT_RGBx },
    { QVideoFrame::Format_ARGB32,  GST_VIDEO_FORMAT_BGRA },
    { QVideoFrame::Format_ABGR32,  GST_VIDEO_FORMAT_RGBA },
    { QVideoFrame::Format_BGRA32,  GST_VIDEO_FORMAT_ARGB },
#else
    { QVideoFrame::Format_RGB32 ,  GST_VIDEO_FORMAT_xRGB },
    { QVideoFrame::Format_BGR32 ,  GST_VIDEO_FORMAT_xBGR },
    { QVideoFrame::Format_ARGB32,  GST_VIDEO_FORMAT_ARGB },
    { QVideoFrame::Format_ABGR32,  GST_VIDEO_FORMAT_ABGR },
    { QVideoFrame::Format_BGRA32,  GST_VIDEO_FORMAT_BGRA },
#endif
    { QVideoFrame::Format_RGB24 ,  GST_VIDEO_FORMAT_RGB },
    { QVideoFrame::Format_BGR24 ,  GST_VIDEO_FORMAT_BGR },
    { QVideoFrame::Format_RGB565,  GST_VIDEO_FORMAT_RGB16 }
};

static int indexOfVideoFormat(QVideoFrame::PixelFormat format)
{
    for (int i = 0; i < lengthOf(qt_videoFormatLookup); ++i)
        if (qt_videoFormatLookup[i].pixelFormat == format)
            return i;

    return -1;
}

static int indexOfVideoFormat(GstVideoFormat format)
{
    for (int i = 0; i < lengthOf(qt_videoFormatLookup); ++i)
        if (qt_videoFormatLookup[i].gstFormat == format)
            return i;

    return -1;
}

#else

struct YuvFormat
{
    QVideoFrame::PixelFormat pixelFormat;
    guint32 fourcc;
    int bitsPerPixel;
};

static const YuvFormat qt_yuvColorLookup[] =
{
    { QVideoFrame::Format_YUV420P, GST_MAKE_FOURCC('I','4','2','0'), 8 },
    { QVideoFrame::Format_YUV422P, GST_MAKE_FOURCC('Y','4','2','B'), 8 },
    { QVideoFrame::Format_YV12,    GST_MAKE_FOURCC('Y','V','1','2'), 8 },
    { QVideoFrame::Format_UYVY,    GST_MAKE_FOURCC('U','Y','V','Y'), 16 },
    { QVideoFrame::Format_YUYV,    GST_MAKE_FOURCC('Y','U','Y','2'), 16 },
    { QVideoFrame::Format_NV12,    GST_MAKE_FOURCC('N','V','1','2'), 8 },
    { QVideoFrame::Format_NV21,    GST_MAKE_FOURCC('N','V','2','1'), 8 },
    { QVideoFrame::Format_AYUV444, GST_MAKE_FOURCC('A','Y','U','V'), 32 }
};

static int indexOfYuvColor(QVideoFrame::PixelFormat format)
{
    const int count = sizeof(qt_yuvColorLookup) / sizeof(YuvFormat);

    for (int i = 0; i < count; ++i)
        if (qt_yuvColorLookup[i].pixelFormat == format)
            return i;

    return -1;
}

static int indexOfYuvColor(guint32 fourcc)
{
    const int count = sizeof(qt_yuvColorLookup) / sizeof(YuvFormat);

    for (int i = 0; i < count; ++i)
        if (qt_yuvColorLookup[i].fourcc == fourcc)
            return i;

    return -1;
}

struct RgbFormat
{
    QVideoFrame::PixelFormat pixelFormat;
    int bitsPerPixel;
    int depth;
    int endianness;
    int red;
    int green;
    int blue;
    int alpha;
};

static const RgbFormat qt_rgbColorLookup[] =
{
    { QVideoFrame::Format_RGB32 , 32, 24, 4321, 0x0000FF00, 0x00FF0000, int(0xFF000000), 0x00000000 },
    { QVideoFrame::Format_RGB32 , 32, 24, 1234, 0x00FF0000, 0x0000FF00, 0x000000FF, 0x00000000 },
    { QVideoFrame::Format_BGR32 , 32, 24, 4321, int(0xFF000000), 0x00FF0000, 0x0000FF00, 0x00000000 },
    { QVideoFrame::Format_BGR32 , 32, 24, 1234, 0x000000FF, 0x0000FF00, 0x00FF0000, 0x00000000 },
    { QVideoFrame::Format_ARGB32, 32, 24, 4321, 0x0000FF00, 0x00FF0000, int(0xFF000000), 0x000000FF },
    { QVideoFrame::Format_ARGB32, 32, 24, 1234, 0x00FF0000, 0x0000FF00, 0x000000FF, int(0xFF000000) },
    { QVideoFrame::Format_RGB24 , 24, 24, 4321, 0x00FF0000, 0x0000FF00, 0x000000FF, 0x00000000 },
    { QVideoFrame::Format_BGR24 , 24, 24, 4321, 0x000000FF, 0x0000FF00, 0x00FF0000, 0x00000000 },
    { QVideoFrame::Format_RGB565, 16, 16, 1234, 0x0000F800, 0x000007E0, 0x0000001F, 0x00000000 }
};

static int indexOfRgbColor(
        int bits, int depth, int endianness, int red, int green, int blue, int alpha)
{
    const int count = sizeof(qt_rgbColorLookup) / sizeof(RgbFormat);

    for (int i = 0; i < count; ++i) {
        if (qt_rgbColorLookup[i].bitsPerPixel == bits
            && qt_rgbColorLookup[i].depth == depth
            && qt_rgbColorLookup[i].endianness == endianness
            && qt_rgbColorLookup[i].red == red
            && qt_rgbColorLookup[i].green == green
            && qt_rgbColorLookup[i].blue == blue
            && qt_rgbColorLookup[i].alpha == alpha) {
            return i;
        }
    }
    return -1;
}
#endif

}

#if GST_CHECK_VERSION(1,0,0)

QVideoSurfaceFormat QGstUtils::formatForCaps(
        GstCaps *caps, GstVideoInfo *info, QAbstractVideoBuffer::HandleType handleType)
{
    GstVideoInfo vidInfo;
    GstVideoInfo *infoPtr = info ? info : &vidInfo;

    if (gst_video_info_from_caps(infoPtr, caps)) {
        int index = indexOfVideoFormat(infoPtr->finfo->format);

        if (index != -1) {
            QVideoSurfaceFormat format(
                        QSize(infoPtr->width, infoPtr->height),
                        qt_videoFormatLookup[index].pixelFormat,
                        handleType);

            if (infoPtr->fps_d > 0)
                format.setFrameRate(qreal(infoPtr->fps_n) / infoPtr->fps_d);

            if (infoPtr->par_d > 0)
                format.setPixelAspectRatio(infoPtr->par_n, infoPtr->par_d);

            return format;
        }
    }
    return QVideoSurfaceFormat();
}

#else

QVideoSurfaceFormat QGstUtils::formatForCaps(
        GstCaps *caps, int *bytesPerLine, QAbstractVideoBuffer::HandleType handleType)
{
    const GstStructure *structure = gst_caps_get_structure(caps, 0);

    int bitsPerPixel = 0;
    QSize size = structureResolution(structure);
    QVideoFrame::PixelFormat pixelFormat = structurePixelFormat(structure, &bitsPerPixel);

    if (pixelFormat != QVideoFrame::Format_Invalid) {
        QVideoSurfaceFormat format(size, pixelFormat, handleType);

        QPair<qreal, qreal> rate = structureFrameRateRange(structure);
        if (rate.second)
            format.setFrameRate(rate.second);

        format.setPixelAspectRatio(structurePixelAspectRatio(structure));

        if (bytesPerLine)
            *bytesPerLine = ((size.width() * bitsPerPixel / 8) + 3) & ~3;

        return format;
    }
    return QVideoSurfaceFormat();
}

#endif

GstCaps *QGstUtils::capsForFormats(const QList<QVideoFrame::PixelFormat> &formats)
{
    GstCaps *caps = gst_caps_new_empty();

#if GST_CHECK_VERSION(1,0,0)
    for (QVideoFrame::PixelFormat format : formats) {
        int index = indexOfVideoFormat(format);

        if (index != -1) {
            gst_caps_append_structure(caps, gst_structure_new(
                        "video/x-raw",
                        "format", G_TYPE_STRING, gst_video_format_to_string(qt_videoFormatLookup[index].gstFormat),
                        nullptr));
        }
    }
#else
    for (QVideoFrame::PixelFormat format : formats) {
        int index = indexOfYuvColor(format);

        if (index != -1) {
            gst_caps_append_structure(caps, gst_structure_new(
                        "video/x-raw-yuv",
                        "format", GST_TYPE_FOURCC, qt_yuvColorLookup[index].fourcc,
                        nullptr));
            continue;
        }

        const int count = sizeof(qt_rgbColorLookup) / sizeof(RgbFormat);

        for (int i = 0; i < count; ++i) {
            if (qt_rgbColorLookup[i].pixelFormat == format) {
                GstStructure *structure = gst_structure_new(
                            "video/x-raw-rgb",
                            "bpp"       , G_TYPE_INT, qt_rgbColorLookup[i].bitsPerPixel,
                            "depth"     , G_TYPE_INT, qt_rgbColorLookup[i].depth,
                            "endianness", G_TYPE_INT, qt_rgbColorLookup[i].endianness,
                            "red_mask"  , G_TYPE_INT, qt_rgbColorLookup[i].red,
                            "green_mask", G_TYPE_INT, qt_rgbColorLookup[i].green,
                            "blue_mask" , G_TYPE_INT, qt_rgbColorLookup[i].blue,
                            nullptr);

                if (qt_rgbColorLookup[i].alpha != 0) {
                    gst_structure_set(
                                structure, "alpha_mask", G_TYPE_INT, qt_rgbColorLookup[i].alpha, nullptr);
                }
                gst_caps_append_structure(caps, structure);
            }
        }
    }
#endif

    gst_caps_set_simple(
                caps,
                "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, INT_MAX, 1,
                "width"    , GST_TYPE_INT_RANGE, 1, INT_MAX,
                "height"   , GST_TYPE_INT_RANGE, 1, INT_MAX,
                nullptr);

    return caps;
}

void QGstUtils::setFrameTimeStamps(QVideoFrame *frame, GstBuffer *buffer)
{
    // GStreamer uses nanoseconds, Qt uses microseconds
    qint64 startTime = GST_BUFFER_TIMESTAMP(buffer);
    if (startTime >= 0) {
        frame->setStartTime(startTime/G_GINT64_CONSTANT (1000));

        qint64 duration = GST_BUFFER_DURATION(buffer);
        if (duration >= 0)
            frame->setEndTime((startTime + duration)/G_GINT64_CONSTANT (1000));
    }
}
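
/*
    Worked example (illustrative, values assumed): a buffer stamped at
    40 000 000 ns with a duration of 20 000 000 ns yields
    frame->startTime() == 40 000 us and frame->endTime() == 60 000 us.
*/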

void QGstUtils::setMetaData(GstElement *element, const QMap<QByteArray, QVariant> &data)
{
    if (!GST_IS_TAG_SETTER(element))
        return;

    gst_tag_setter_reset_tags(GST_TAG_SETTER(element));

    for (auto it = data.cbegin(), end = data.cend(); it != end; ++it) {
        const QString tagName = QString::fromLatin1(it.key());
        const QVariant &tagValue = it.value();

        switch (tagValue.type()) {
            case QVariant::String:
                gst_tag_setter_add_tags(GST_TAG_SETTER(element),
                    GST_TAG_MERGE_REPLACE,
                    tagName.toUtf8().constData(),
                    tagValue.toString().toUtf8().constData(),
                    nullptr);
                break;
            case QVariant::Int:
            case QVariant::LongLong:
                gst_tag_setter_add_tags(GST_TAG_SETTER(element),
                    GST_TAG_MERGE_REPLACE,
                    tagName.toUtf8().constData(),
                    tagValue.toInt(),
                    nullptr);
                break;
            case QVariant::Double:
                gst_tag_setter_add_tags(GST_TAG_SETTER(element),
                    GST_TAG_MERGE_REPLACE,
                    tagName.toUtf8().constData(),
                    tagValue.toDouble(),
                    nullptr);
                break;
#if GST_CHECK_VERSION(0, 10, 31)
            case QVariant::DateTime: {
                QDateTime date = tagValue.toDateTime().toLocalTime();
                gst_tag_setter_add_tags(GST_TAG_SETTER(element),
                    GST_TAG_MERGE_REPLACE,
                    tagName.toUtf8().constData(),
                    gst_date_time_new_local_time(
                        date.date().year(), date.date().month(), date.date().day(),
                        date.time().hour(), date.time().minute(), date.time().second()),
                    nullptr);
                break;
            }
#endif
            default:
                break;
        }
    }
}

void QGstUtils::setMetaData(GstBin *bin, const QMap<QByteArray, QVariant> &data)
{
    GstIterator *elements = gst_bin_iterate_all_by_interface(bin, GST_TYPE_TAG_SETTER);
#if GST_CHECK_VERSION(1,0,0)
    GValue item = G_VALUE_INIT;
    while (gst_iterator_next(elements, &item) == GST_ITERATOR_OK) {
        GstElement * const element = GST_ELEMENT(g_value_get_object(&item));
#else
    GstElement *element = 0;
    while (gst_iterator_next(elements, (void**)&element) == GST_ITERATOR_OK) {
#endif
        setMetaData(element, data);
    }
    gst_iterator_free(elements);
}


GstCaps *QGstUtils::videoFilterCaps()
{
    const char *caps =
#if GST_CHECK_VERSION(1,2,0)
        "video/x-raw(ANY);"
#elif GST_CHECK_VERSION(1,0,0)
        "video/x-raw;"
#else
        "video/x-raw-yuv;"
        "video/x-raw-rgb;"
        "video/x-raw-data;"
        "video/x-android-buffer;"
#endif
        "image/jpeg;"
        "video/x-h264";
    static GstStaticCaps staticCaps = GST_STATIC_CAPS(caps);

    return gst_caps_make_writable(gst_static_caps_get(&staticCaps));
}

QSize QGstUtils::structureResolution(const GstStructure *s)
{
    QSize size;

    int w, h;
    if (s && gst_structure_get_int(s, "width", &w) && gst_structure_get_int(s, "height", &h)) {
        size.rwidth() = w;
        size.rheight() = h;
    }

    return size;
}
1405 | |
1406 | QVideoFrame::PixelFormat QGstUtils::structurePixelFormat(const GstStructure *structure, int *bpp) |
1407 | { |
1408 | QVideoFrame::PixelFormat pixelFormat = QVideoFrame::Format_Invalid; |
1409 | |
1410 | if (!structure) |
1411 | return pixelFormat; |
1412 | |
1413 | #if GST_CHECK_VERSION(1,0,0) |
1414 | Q_UNUSED(bpp); |
1415 | |
1416 | if (gst_structure_has_name(structure, name: "video/x-raw" )) { |
1417 | const gchar *s = gst_structure_get_string(structure, fieldname: "format" ); |
1418 | if (s) { |
1419 | GstVideoFormat format = gst_video_format_from_string(format: s); |
1420 | int index = indexOfVideoFormat(format); |
1421 | |
1422 | if (index != -1) |
1423 | pixelFormat = qt_videoFormatLookup[index].pixelFormat; |
1424 | } |
1425 | } |
1426 | #else |
1427 | if (qstrcmp(gst_structure_get_name(structure), "video/x-raw-yuv" ) == 0) { |
1428 | guint32 fourcc = 0; |
1429 | gst_structure_get_fourcc(structure, "format" , &fourcc); |
1430 | |
1431 | int index = indexOfYuvColor(fourcc); |
1432 | if (index != -1) { |
1433 | pixelFormat = qt_yuvColorLookup[index].pixelFormat; |
1434 | if (bpp) |
1435 | *bpp = qt_yuvColorLookup[index].bitsPerPixel; |
1436 | } |
1437 | } else if (qstrcmp(gst_structure_get_name(structure), "video/x-raw-rgb" ) == 0) { |
1438 | int bitsPerPixel = 0; |
1439 | int depth = 0; |
1440 | int endianness = 0; |
1441 | int red = 0; |
1442 | int green = 0; |
1443 | int blue = 0; |
1444 | int alpha = 0; |
1445 | |
1446 | gst_structure_get_int(structure, "bpp" , &bitsPerPixel); |
1447 | gst_structure_get_int(structure, "depth" , &depth); |
1448 | gst_structure_get_int(structure, "endianness" , &endianness); |
1449 | gst_structure_get_int(structure, "red_mask" , &red); |
1450 | gst_structure_get_int(structure, "green_mask" , &green); |
1451 | gst_structure_get_int(structure, "blue_mask" , &blue); |
1452 | gst_structure_get_int(structure, "alpha_mask" , &alpha); |
1453 | |
1454 | int index = indexOfRgbColor(bitsPerPixel, depth, endianness, red, green, blue, alpha); |
1455 | |
1456 | if (index != -1) { |
1457 | pixelFormat = qt_rgbColorLookup[index].pixelFormat; |
1458 | if (bpp) |
1459 | *bpp = qt_rgbColorLookup[index].bitsPerPixel; |
1460 | } |
1461 | } |
1462 | #endif |
1463 | |
1464 | return pixelFormat; |
1465 | } |
1466 | |
1467 | QSize QGstUtils::structurePixelAspectRatio(const GstStructure *s) |
1468 | { |
1469 | QSize ratio(1, 1); |
1470 | |
1471 | gint aspectNum = 0; |
1472 | gint aspectDenum = 0; |
    if (s && gst_structure_get_fraction(s, "pixel-aspect-ratio", &aspectNum, &aspectDenum)) {
1474 | if (aspectDenum > 0) { |
1475 | ratio.rwidth() = aspectNum; |
1476 | ratio.rheight() = aspectDenum; |
1477 | } |
1478 | } |
1479 | |
1480 | return ratio; |
1481 | } |
1482 | |
1483 | QPair<qreal, qreal> QGstUtils::structureFrameRateRange(const GstStructure *s) |
1484 | { |
1485 | QPair<qreal, qreal> rate; |
1486 | |
1487 | if (!s) |
1488 | return rate; |
1489 | |
1490 | int n, d; |
1491 | if (gst_structure_get_fraction(structure: s, fieldname: "framerate" , value_numerator: &n, value_denominator: &d)) { |
1492 | rate.second = qreal(n) / d; |
1493 | rate.first = rate.second; |
1494 | } else if (gst_structure_get_fraction(structure: s, fieldname: "max-framerate" , value_numerator: &n, value_denominator: &d)) { |
1495 | rate.second = qreal(n) / d; |
1496 | if (gst_structure_get_fraction(structure: s, fieldname: "min-framerate" , value_numerator: &n, value_denominator: &d)) |
1497 | rate.first = qreal(n) / d; |
1498 | else |
1499 | rate.first = qreal(1); |
1500 | } |
1501 | |
1502 | return rate; |
1503 | } |
1504 | |
1505 | typedef QMap<QString, QString> FileExtensionMap; |
1506 | Q_GLOBAL_STATIC(FileExtensionMap, fileExtensionMap) |
1507 | |
1508 | QString QGstUtils::fileExtensionForMimeType(const QString &mimeType) |
1509 | { |
1510 | if (fileExtensionMap->isEmpty()) { |
        // Extensions for container formats that are hard to guess from the MIME type alone
        fileExtensionMap->insert(QStringLiteral("video/x-matroska"), QLatin1String("mkv"));
        fileExtensionMap->insert(QStringLiteral("video/quicktime"), QLatin1String("mov"));
        fileExtensionMap->insert(QStringLiteral("video/x-msvideo"), QLatin1String("avi"));
        fileExtensionMap->insert(QStringLiteral("video/msvideo"), QLatin1String("avi"));
        fileExtensionMap->insert(QStringLiteral("audio/mpeg"), QLatin1String("mp3"));
        fileExtensionMap->insert(QStringLiteral("application/x-shockwave-flash"), QLatin1String("swf"));
        fileExtensionMap->insert(QStringLiteral("application/x-pn-realmedia"), QLatin1String("rm"));
1519 | } |
1520 | |
    // For bare container names like "avi" (rather than "video/x-msvideo"), use the name itself as the extension
    if (!mimeType.contains(QLatin1Char('/')))
1523 | return mimeType; |
1524 | |
    QString format = mimeType.left(mimeType.indexOf(QLatin1Char(',')));
    QString extension = fileExtensionMap->value(format);
1527 | |
1528 | if (!extension.isEmpty() || format.isEmpty()) |
1529 | return extension; |
1530 | |
1531 | QRegularExpression rx(QStringLiteral("[-/]([\\w]+)$" )); |
1532 | QRegularExpressionMatch match = rx.match(subject: format); |
1533 | |
1534 | if (match.hasMatch()) |
        extension = match.captured(1);
1536 | |
1537 | return extension; |
1538 | } |
1539 | |
1540 | #if GST_CHECK_VERSION(0,10,30) |
1541 | QVariant QGstUtils::fromGStreamerOrientation(const QVariant &value) |
1542 | { |
    // Note: GStreamer tokens either describe the counter-clockwise rotation of the
    // image or, equivalently, the clockwise transform to apply to correct the image.
    // The orientation value returned here is the clockwise rotation of the image.
1546 | const QString token = value.toString(); |
1547 | if (token == QStringLiteral("rotate-90" )) |
1548 | return 270; |
1549 | if (token == QStringLiteral("rotate-180" )) |
1550 | return 180; |
1551 | if (token == QStringLiteral("rotate-270" )) |
1552 | return 90; |
1553 | return 0; |
1554 | } |
1555 | |
1556 | QVariant QGstUtils::toGStreamerOrientation(const QVariant &value) |
1557 | { |
1558 | switch (value.toInt()) { |
1559 | case 90: |
1560 | return QStringLiteral("rotate-270" ); |
1561 | case 180: |
1562 | return QStringLiteral("rotate-180" ); |
1563 | case 270: |
1564 | return QStringLiteral("rotate-90" ); |
1565 | default: |
1566 | return QStringLiteral("rotate-0" ); |
1567 | } |
1568 | } |
1569 | #endif |
1570 | |
1571 | bool QGstUtils::useOpenGL() |
1572 | { |
    static bool result = qEnvironmentVariableIntValue("QT_GSTREAMER_USE_OPENGL_PLUGIN");
1574 | return result; |
1575 | } |
1576 | |
1577 | void qt_gst_object_ref_sink(gpointer object) |
1578 | { |
1579 | #if GST_CHECK_VERSION(0,10,24) |
1580 | gst_object_ref_sink(object); |
1581 | #else |
1582 | g_return_if_fail (GST_IS_OBJECT(object)); |
1583 | |
1584 | GST_OBJECT_LOCK(object); |
1585 | if (G_LIKELY(GST_OBJECT_IS_FLOATING(object))) { |
1586 | GST_OBJECT_FLAG_UNSET(object, GST_OBJECT_FLOATING); |
1587 | GST_OBJECT_UNLOCK(object); |
1588 | } else { |
1589 | GST_OBJECT_UNLOCK(object); |
1590 | gst_object_ref(object); |
1591 | } |
1592 | #endif |
1593 | } |
1594 | |
1595 | GstCaps *qt_gst_pad_get_current_caps(GstPad *pad) |
1596 | { |
1597 | #if GST_CHECK_VERSION(1,0,0) |
1598 | return gst_pad_get_current_caps(pad); |
1599 | #else |
1600 | return gst_pad_get_negotiated_caps(pad); |
1601 | #endif |
1602 | } |
1603 | |
1604 | GstCaps *qt_gst_pad_get_caps(GstPad *pad) |
1605 | { |
1606 | #if GST_CHECK_VERSION(1,0,0) |
    return gst_pad_query_caps(pad, nullptr);
1608 | #elif GST_CHECK_VERSION(0, 10, 26) |
1609 | return gst_pad_get_caps_reffed(pad); |
1610 | #else |
1611 | return gst_pad_get_caps(pad); |
1612 | #endif |
1613 | } |
1614 | |
1615 | GstStructure *qt_gst_structure_new_empty(const char *name) |
1616 | { |
1617 | #if GST_CHECK_VERSION(1,0,0) |
1618 | return gst_structure_new_empty(name); |
1619 | #else |
1620 | return gst_structure_new(name, nullptr); |
1621 | #endif |
1622 | } |
1623 | |
1624 | gboolean qt_gst_element_query_position(GstElement *element, GstFormat format, gint64 *cur) |
1625 | { |
1626 | #if GST_CHECK_VERSION(1,0,0) |
1627 | return gst_element_query_position(element, format, cur); |
1628 | #else |
1629 | return gst_element_query_position(element, &format, cur); |
1630 | #endif |
1631 | } |
1632 | |
1633 | gboolean qt_gst_element_query_duration(GstElement *element, GstFormat format, gint64 *cur) |
1634 | { |
1635 | #if GST_CHECK_VERSION(1,0,0) |
    return gst_element_query_duration(element, format, cur);
1637 | #else |
1638 | return gst_element_query_duration(element, &format, cur); |
1639 | #endif |
1640 | } |
1641 | |
1642 | GstCaps *qt_gst_caps_normalize(GstCaps *caps) |
1643 | { |
1644 | #if GST_CHECK_VERSION(1,0,0) |
1645 | // gst_caps_normalize() takes ownership of the argument in 1.0 |
1646 | return gst_caps_normalize(caps); |
1647 | #else |
1648 | // in 0.10, it doesn't. Unref the argument to mimic the 1.0 behavior |
1649 | GstCaps *res = gst_caps_normalize(caps); |
1650 | gst_caps_unref(caps); |
1651 | return res; |
1652 | #endif |
1653 | } |
1654 | |
1655 | const gchar *qt_gst_element_get_factory_name(GstElement *element) |
1656 | { |
    const gchar *name = nullptr;
    const GstElementFactory *factory = nullptr;
1659 | |
1660 | if (element && (factory = gst_element_get_factory(element))) |
1661 | name = gst_plugin_feature_get_name(GST_PLUGIN_FEATURE(factory)); |
1662 | |
1663 | return name; |
1664 | } |
1665 | |
1666 | gboolean qt_gst_caps_can_intersect(const GstCaps * caps1, const GstCaps * caps2) |
1667 | { |
1668 | #if GST_CHECK_VERSION(0, 10, 25) |
1669 | return gst_caps_can_intersect(caps1, caps2); |
1670 | #else |
1671 | GstCaps *intersection = gst_caps_intersect(caps1, caps2); |
1672 | gboolean res = !gst_caps_is_empty(intersection); |
1673 | gst_caps_unref(intersection); |
1674 | return res; |
1675 | #endif |
1676 | } |
1677 | |
1678 | #if !GST_CHECK_VERSION(0, 10, 31) |
1679 | static gboolean qt_gst_videosink_factory_filter(GstPluginFeature *feature, gpointer) |
1680 | { |
1681 | guint rank; |
1682 | const gchar *klass; |
1683 | |
1684 | if (!GST_IS_ELEMENT_FACTORY(feature)) |
1685 | return FALSE; |
1686 | |
1687 | klass = gst_element_factory_get_klass(GST_ELEMENT_FACTORY(feature)); |
1688 | if (!(strstr(klass, "Sink" ) && strstr(klass, "Video" ))) |
1689 | return FALSE; |
1690 | |
1691 | rank = gst_plugin_feature_get_rank(feature); |
1692 | if (rank < GST_RANK_MARGINAL) |
1693 | return FALSE; |
1694 | |
1695 | return TRUE; |
1696 | } |
1697 | |
1698 | static gint qt_gst_compare_ranks(GstPluginFeature *f1, GstPluginFeature *f2) |
1699 | { |
1700 | gint diff; |
1701 | |
1702 | diff = gst_plugin_feature_get_rank(f2) - gst_plugin_feature_get_rank(f1); |
1703 | if (diff != 0) |
1704 | return diff; |
1705 | |
    return strcmp(gst_plugin_feature_get_name(f2), gst_plugin_feature_get_name(f1));
1707 | } |
1708 | #endif |
1709 | |
1710 | GList *qt_gst_video_sinks() |
1711 | { |
1712 | GList *list = nullptr; |
1713 | |
1714 | #if GST_CHECK_VERSION(0, 10, 31) |
1715 | list = gst_element_factory_list_get_elements(GST_ELEMENT_FACTORY_TYPE_SINK | GST_ELEMENT_FACTORY_TYPE_MEDIA_VIDEO, |
                                                 GST_RANK_MARGINAL);
1717 | #else |
1718 | list = gst_registry_feature_filter(gst_registry_get_default(), |
1719 | (GstPluginFeatureFilter)qt_gst_videosink_factory_filter, |
1720 | FALSE, nullptr); |
1721 | list = g_list_sort(list, (GCompareFunc)qt_gst_compare_ranks); |
1722 | #endif |
1723 | |
1724 | return list; |
1725 | } |
1726 | |
1727 | void qt_gst_util_double_to_fraction(gdouble src, gint *dest_n, gint *dest_d) |
1728 | { |
1729 | #if GST_CHECK_VERSION(0, 10, 26) |
1730 | gst_util_double_to_fraction(src, dest_n, dest_d); |
1731 | #else |
1732 | qt_real_to_fraction(src, dest_n, dest_d); |
1733 | #endif |
1734 | } |
1735 | |
1736 | QDebug operator <<(QDebug debug, GstCaps *caps) |
1737 | { |
1738 | if (caps) { |
1739 | gchar *string = gst_caps_to_string(caps); |
1740 | debug = debug << string; |
        g_free(string);
1742 | } |
1743 | return debug; |
1744 | } |
1745 | |
1746 | QT_END_NAMESPACE |
1747 | |