// Copyright (C) 2016 The Qt Company Ltd.
// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only

#include <qcameradevice.h>

#include "qgstreamercamera_p.h"
#include "qgstreamerimagecapture_p.h"
#include <qgstreamervideodevices_p.h>
#include <qgstreamerintegration_p.h>
#include <qmediacapturesession.h>

#if QT_CONFIG(linux_v4l)
#include <linux/videodev2.h>
#include <private/qcore_unix_p.h>
#endif

#include <QtCore/qdebug.h>

QT_BEGIN_NAMESPACE

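// Verifies up front that every GStreamer element the pipeline needs is available
// and returns an error naming the missing plugin otherwise. The camera starts out
// with a videotestsrc placeholder; the real device source is swapped in by setCamera().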
QMaybe<QPlatformCamera *> QGstreamerCamera::create(QCamera *camera)
{
    QGstElement videotestsrc("videotestsrc");
    if (!videotestsrc)
        return errorMessageCannotFindElement("videotestsrc");

    QGstElement capsFilter("capsfilter", "videoCapsFilter");
    if (!capsFilter)
        return errorMessageCannotFindElement("capsfilter");

    QGstElement videoconvert("videoconvert", "videoConvert");
    if (!videoconvert)
        return errorMessageCannotFindElement("videoconvert");

    QGstElement videoscale("videoscale", "videoScale");
    if (!videoscale)
        return errorMessageCannotFindElement("videoscale");

    return new QGstreamerCamera(videotestsrc, capsFilter, videoconvert, videoscale, camera);
}

QGstreamerCamera::QGstreamerCamera(QGstElement videotestsrc, QGstElement capsFilter,
                                   QGstElement videoconvert, QGstElement videoscale,
                                   QCamera *camera)
    : QPlatformCamera(camera),
      gstCamera(std::move(videotestsrc)),
      gstCapsFilter(std::move(capsFilter)),
      gstVideoConvert(std::move(videoconvert)),
      gstVideoScale(std::move(videoscale))
{
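    // Internal bin layout: camera source ! capsfilter ! decoder (identity for raw
    // formats, jpegdec for MJPEG) ! videoconvert ! videoscale, exposed to the
    // capture session through a ghost pad on videoscale's src pad.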
    gstDecode = QGstElement("identity");
    gstCameraBin = QGstBin("camerabin");
    gstCameraBin.add(gstCamera, gstCapsFilter, gstDecode, gstVideoConvert, gstVideoScale);
    gstCamera.link(gstCapsFilter, gstDecode, gstVideoConvert, gstVideoScale);
    gstCameraBin.addGhostPad(gstVideoScale, "src");
}

QGstreamerCamera::~QGstreamerCamera()
{
#if QT_CONFIG(linux_v4l)
    if (v4l2FileDescriptor >= 0)
        qt_safe_close(v4l2FileDescriptor);
    v4l2FileDescriptor = -1;
#endif
    gstCameraBin.setStateSync(GST_STATE_NULL);
}

bool QGstreamerCamera::isActive() const
{
    return m_active;
}

void QGstreamerCamera::setActive(bool active)
{
    if (m_active == active)
        return;
    if (m_cameraDevice.isNull() && active)
        return;

    m_active = active;

    emit activeChanged(active);
}

void QGstreamerCamera::setCamera(const QCameraDevice &camera)
{
    if (m_cameraDevice == camera)
        return;

    m_cameraDevice = camera;

    QGstElement gstNewCamera;
    if (camera.isNull()) {
        gstNewCamera = QGstElement("videotestsrc");
    } else {
        auto *integration = static_cast<QGstreamerIntegration *>(QGstreamerIntegration::instance());
        auto *device = integration->videoDevice(camera.id());
        gstNewCamera = gst_device_create_element(device, "camerasrc");
        if (QGstStructure properties = gst_device_get_properties(device); !properties.isNull()) {
            if (properties.name() == "v4l2deviceprovider")
                m_v4l2Device = QString::fromUtf8(properties["device.path"].toString());
            properties.free();
        }
    }

    QCameraFormat f = findBestCameraFormat(camera);
    auto caps = QGstCaps::fromCameraFormat(f);
    auto gstNewDecode = QGstElement(f.pixelFormat() == QVideoFrameFormat::Format_Jpeg ? "jpegdec" : "identity");

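    // Swap the source and decoder inside the bin: unlink and drop the old
    // elements, update the caps filter, then add and link the replacements.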
    gstCamera.unlink(gstCapsFilter);
    gstCapsFilter.unlink(gstDecode);
    gstDecode.unlink(gstVideoConvert);

    gstCameraBin.remove(gstCamera);
    gstCameraBin.remove(gstDecode);

    gstCamera.setStateSync(GST_STATE_NULL);
    gstDecode.setStateSync(GST_STATE_NULL);

    gstCapsFilter.set("caps", caps);

    gstCameraBin.add(gstNewCamera, gstNewDecode);

    gstNewDecode.link(gstVideoConvert);
    gstCapsFilter.link(gstNewDecode);

    if (!gstNewCamera.link(gstCapsFilter))
        qWarning() << "linking camera failed" << gstNewCamera.name() << caps.toString();

    // Start sending frames once the pipeline is linked
    // FIXME: put camera to READY state before linking to decoder as in the NULL state it does not know its true caps
    gstCapsFilter.syncStateWithParent();
    gstNewDecode.syncStateWithParent();
    gstNewCamera.syncStateWithParent();

    gstCamera = gstNewCamera;
    gstDecode = gstNewDecode;

    updateCameraProperties();
}

bool QGstreamerCamera::setCameraFormat(const QCameraFormat &format)
{
    if (!format.isNull() && !m_cameraDevice.videoFormats().contains(format))
        return false;

    QCameraFormat f = format;
    if (f.isNull())
        f = findBestCameraFormat(m_cameraDevice);

    auto caps = QGstCaps::fromCameraFormat(f);

    auto newGstDecode = QGstElement(f.pixelFormat() == QVideoFrameFormat::Format_Jpeg ? "jpegdec" : "identity");
    gstCameraBin.add(newGstDecode);
    newGstDecode.syncStateWithParent();

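    // Relink inside an idle probe on the camera's src pad so the caps change
    // happens while no buffer is in flight.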
    gstCamera.staticPad("src").doInIdleProbe([&](){
        gstCamera.unlink(gstCapsFilter);
        gstCapsFilter.unlink(gstDecode);
        gstDecode.unlink(gstVideoConvert);

        gstCapsFilter.set("caps", caps);

        newGstDecode.link(gstVideoConvert);
        gstCapsFilter.link(newGstDecode);
        if (!gstCamera.link(gstCapsFilter))
            qWarning() << "linking filtered camera to decoder failed" << gstCamera.name() << caps.toString();
    });

    gstCameraBin.remove(gstDecode);
    gstDecode.setStateSync(GST_STATE_NULL);

    gstDecode = newGstDecode;

    return true;
}

void QGstreamerCamera::updateCameraProperties()
{
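    // V4L2 devices are queried directly through ioctls; other sources fall back
    // to the GstPhotography interface when it is available.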
#if QT_CONFIG(linux_v4l)
    if (isV4L2Camera()) {
        initV4L2Controls();
        return;
    }
#endif
#if QT_CONFIG(gstreamer_photography)
    if (auto *p = photography())
        gst_photography_set_white_balance_mode(p, GST_PHOTOGRAPHY_WB_MODE_AUTO);
    QCamera::Features f = QCamera::Feature::ColorTemperature | QCamera::Feature::ExposureCompensation |
                          QCamera::Feature::IsoSensitivity | QCamera::Feature::ManualExposureTime;
    supportedFeaturesChanged(f);
#endif
}

#if QT_CONFIG(gstreamer_photography)
GstPhotography *QGstreamerCamera::photography() const
{
    if (!gstCamera.isNull() && GST_IS_PHOTOGRAPHY(gstCamera.element()))
        return GST_PHOTOGRAPHY(gstCamera.element());
    return nullptr;
}
#endif

void QGstreamerCamera::setFocusMode(QCamera::FocusMode mode)
{
    if (mode == focusMode())
        return;

#if QT_CONFIG(gstreamer_photography)
    auto p = photography();
    if (p) {
        GstPhotographyFocusMode photographyMode = GST_PHOTOGRAPHY_FOCUS_MODE_CONTINUOUS_NORMAL;

        switch (mode) {
        case QCamera::FocusModeAutoNear:
            photographyMode = GST_PHOTOGRAPHY_FOCUS_MODE_MACRO;
            break;
        case QCamera::FocusModeAutoFar:
            // not quite, but hey :)
            Q_FALLTHROUGH();
        case QCamera::FocusModeHyperfocal:
            photographyMode = GST_PHOTOGRAPHY_FOCUS_MODE_HYPERFOCAL;
            break;
        case QCamera::FocusModeInfinity:
            photographyMode = GST_PHOTOGRAPHY_FOCUS_MODE_INFINITY;
            break;
        case QCamera::FocusModeManual:
            photographyMode = GST_PHOTOGRAPHY_FOCUS_MODE_MANUAL;
            break;
        default: // QCamera::FocusModeAuto:
            break;
        }

        if (gst_photography_set_focus_mode(p, photographyMode))
            focusModeChanged(mode);
    }
#endif
}

bool QGstreamerCamera::isFocusModeSupported(QCamera::FocusMode mode) const
{
#if QT_CONFIG(gstreamer_photography)
    if (photography())
        return true;
#endif
    return mode == QCamera::FocusModeAuto;
}

void QGstreamerCamera::setFlashMode(QCamera::FlashMode mode)
{
    Q_UNUSED(mode);

#if QT_CONFIG(gstreamer_photography)
    if (auto *p = photography()) {
        GstPhotographyFlashMode flashMode;
        gst_photography_get_flash_mode(p, &flashMode);

        switch (mode) {
        case QCamera::FlashAuto:
            flashMode = GST_PHOTOGRAPHY_FLASH_MODE_AUTO;
            break;
        case QCamera::FlashOff:
            flashMode = GST_PHOTOGRAPHY_FLASH_MODE_OFF;
            break;
        case QCamera::FlashOn:
            flashMode = GST_PHOTOGRAPHY_FLASH_MODE_ON;
            break;
        }

        if (gst_photography_set_flash_mode(p, flashMode))
            flashModeChanged(mode);
    }
#endif
}

bool QGstreamerCamera::isFlashModeSupported(QCamera::FlashMode mode) const
{
#if QT_CONFIG(gstreamer_photography)
    if (photography())
        return true;
#endif

    return mode == QCamera::FlashAuto;
}

bool QGstreamerCamera::isFlashReady() const
{
#if QT_CONFIG(gstreamer_photography)
    if (photography())
        return true;
#endif

    return false;
}

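// For V4L2 devices only ExposureAuto/ExposureManual are supported and map onto
// V4L2_CID_EXPOSURE_AUTO; all other modes are handled through GstPhotography
// scene modes.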
void QGstreamerCamera::setExposureMode(QCamera::ExposureMode mode)
{
    Q_UNUSED(mode);
#if QT_CONFIG(linux_v4l)
    if (isV4L2Camera() && v4l2AutoExposureSupported && v4l2ManualExposureSupported) {
        if (mode != QCamera::ExposureAuto && mode != QCamera::ExposureManual)
            return;
        int value = mode == QCamera::ExposureAuto ? V4L2_EXPOSURE_AUTO : V4L2_EXPOSURE_MANUAL;
        setV4L2Parameter(V4L2_CID_EXPOSURE_AUTO, value);
        exposureModeChanged(mode);
        return;
    }
#endif

#if QT_CONFIG(gstreamer_photography)
    auto *p = photography();
    if (!p)
        return;

    GstPhotographySceneMode sceneMode = GST_PHOTOGRAPHY_SCENE_MODE_AUTO;

    switch (mode) {
    case QCamera::ExposureManual:
        sceneMode = GST_PHOTOGRAPHY_SCENE_MODE_MANUAL;
        break;
    case QCamera::ExposurePortrait:
        sceneMode = GST_PHOTOGRAPHY_SCENE_MODE_PORTRAIT;
        break;
    case QCamera::ExposureSports:
        sceneMode = GST_PHOTOGRAPHY_SCENE_MODE_SPORT;
        break;
    case QCamera::ExposureNight:
        sceneMode = GST_PHOTOGRAPHY_SCENE_MODE_NIGHT;
        break;
    case QCamera::ExposureAuto:
        sceneMode = GST_PHOTOGRAPHY_SCENE_MODE_AUTO;
        break;
    case QCamera::ExposureLandscape:
        sceneMode = GST_PHOTOGRAPHY_SCENE_MODE_LANDSCAPE;
        break;
    case QCamera::ExposureSnow:
        sceneMode = GST_PHOTOGRAPHY_SCENE_MODE_SNOW;
        break;
    case QCamera::ExposureBeach:
        sceneMode = GST_PHOTOGRAPHY_SCENE_MODE_BEACH;
        break;
    case QCamera::ExposureAction:
        sceneMode = GST_PHOTOGRAPHY_SCENE_MODE_ACTION;
        break;
    case QCamera::ExposureNightPortrait:
        sceneMode = GST_PHOTOGRAPHY_SCENE_MODE_NIGHT_PORTRAIT;
        break;
    case QCamera::ExposureTheatre:
        sceneMode = GST_PHOTOGRAPHY_SCENE_MODE_THEATRE;
        break;
    case QCamera::ExposureSunset:
        sceneMode = GST_PHOTOGRAPHY_SCENE_MODE_SUNSET;
        break;
    case QCamera::ExposureSteadyPhoto:
        sceneMode = GST_PHOTOGRAPHY_SCENE_MODE_STEADY_PHOTO;
        break;
    case QCamera::ExposureFireworks:
        sceneMode = GST_PHOTOGRAPHY_SCENE_MODE_FIREWORKS;
        break;
    case QCamera::ExposureParty:
        sceneMode = GST_PHOTOGRAPHY_SCENE_MODE_PARTY;
        break;
    case QCamera::ExposureCandlelight:
        sceneMode = GST_PHOTOGRAPHY_SCENE_MODE_CANDLELIGHT;
        break;
    case QCamera::ExposureBarcode:
        sceneMode = GST_PHOTOGRAPHY_SCENE_MODE_BARCODE;
        break;
    default:
        return;
    }

    if (gst_photography_set_scene_mode(p, sceneMode))
        exposureModeChanged(mode);
#endif
}

bool QGstreamerCamera::isExposureModeSupported(QCamera::ExposureMode mode) const
{
    if (mode == QCamera::ExposureAuto)
        return true;
#if QT_CONFIG(linux_v4l)
    if (isV4L2Camera() && v4l2ManualExposureSupported && v4l2AutoExposureSupported)
        return mode == QCamera::ExposureManual;
#endif
#if QT_CONFIG(gstreamer_photography)
    if (photography())
        return true;
#endif

    return false;
}

void QGstreamerCamera::setExposureCompensation(float compensation)
{
    Q_UNUSED(compensation);
#if QT_CONFIG(linux_v4l)
    if (isV4L2Camera() && (v4l2MinExposureAdjustment != 0 || v4l2MaxExposureAdjustment != 0)) {
        int value = qBound(v4l2MinExposureAdjustment, (int)(compensation*1000), v4l2MaxExposureAdjustment);
        setV4L2Parameter(V4L2_CID_AUTO_EXPOSURE_BIAS, value);
        exposureCompensationChanged(value/1000.);
        return;
    }
#endif

#if QT_CONFIG(gstreamer_photography)
    if (auto *p = photography()) {
        if (gst_photography_set_ev_compensation(p, compensation))
            exposureCompensationChanged(compensation);
    }
#endif
}

void QGstreamerCamera::setManualIsoSensitivity(int iso)
{
    Q_UNUSED(iso);
#if QT_CONFIG(linux_v4l)
    if (isV4L2Camera()) {
        if (!(supportedFeatures() & QCamera::Feature::IsoSensitivity))
            return;
        setV4L2Parameter(V4L2_CID_ISO_SENSITIVITY_AUTO, iso <= 0 ? V4L2_ISO_SENSITIVITY_AUTO : V4L2_ISO_SENSITIVITY_MANUAL);
        if (iso > 0) {
            iso = qBound(minIso(), iso, maxIso());
            setV4L2Parameter(V4L2_CID_ISO_SENSITIVITY, iso);
        }
        return;
    }
#endif
#if QT_CONFIG(gstreamer_photography)
    if (auto *p = photography()) {
        if (gst_photography_set_iso_speed(p, iso))
            isoSensitivityChanged(iso);
    }
#endif
}

int QGstreamerCamera::isoSensitivity() const
{
#if QT_CONFIG(linux_v4l)
    if (isV4L2Camera()) {
        if (!(supportedFeatures() & QCamera::Feature::IsoSensitivity))
            return -1;
        return getV4L2Parameter(V4L2_CID_ISO_SENSITIVITY);
    }
#endif
#if QT_CONFIG(gstreamer_photography)
    if (auto *p = photography()) {
        guint speed = 0;
        if (gst_photography_get_iso_speed(p, &speed))
            return speed;
    }
#endif
    return 100;
}

void QGstreamerCamera::setManualExposureTime(float secs)
{
    Q_UNUSED(secs);
#if QT_CONFIG(linux_v4l)
    if (isV4L2Camera() && v4l2ManualExposureSupported && v4l2AutoExposureSupported) {
        int exposure = qBound(v4l2MinExposure, qRound(secs*10000.), v4l2MaxExposure);
        setV4L2Parameter(V4L2_CID_EXPOSURE_ABSOLUTE, exposure);
        exposureTimeChanged(exposure/10000.);
        return;
    }
#endif

#if QT_CONFIG(gstreamer_photography)
    if (auto *p = photography()) {
        if (gst_photography_set_exposure(p, guint(secs*1000000)))
            exposureTimeChanged(secs);
    }
#endif
}

float QGstreamerCamera::exposureTime() const
{
#if QT_CONFIG(linux_v4l)
    if (isV4L2Camera()) {
        return getV4L2Parameter(V4L2_CID_EXPOSURE_ABSOLUTE)/10000.;
    }
#endif
#if QT_CONFIG(gstreamer_photography)
    if (auto *p = photography()) {
        guint32 exposure = 0;
        if (gst_photography_get_exposure(p, &exposure))
            return exposure/1000000.;
    }
#endif
    return -1;
}

bool QGstreamerCamera::isWhiteBalanceModeSupported(QCamera::WhiteBalanceMode mode) const
{
    if (mode == QCamera::WhiteBalanceAuto)
        return true;

#if QT_CONFIG(linux_v4l)
    if (isV4L2Camera()) {
        if (v4l2AutoWhiteBalanceSupported && v4l2ColorTemperatureSupported)
            return true;
    }
#endif
#if QT_CONFIG(gstreamer_photography)
    if (auto *p = photography()) {
        Q_UNUSED(p);
        switch (mode) {
        case QCamera::WhiteBalanceAuto:
        case QCamera::WhiteBalanceSunlight:
        case QCamera::WhiteBalanceCloudy:
        case QCamera::WhiteBalanceShade:
        case QCamera::WhiteBalanceSunset:
        case QCamera::WhiteBalanceTungsten:
        case QCamera::WhiteBalanceFluorescent:
            return true;
        case QCamera::WhiteBalanceManual: {
#if GST_CHECK_VERSION(1, 18, 0)
            GstPhotographyInterface *iface = GST_PHOTOGRAPHY_GET_INTERFACE(p);
            if (iface->set_color_temperature && iface->get_color_temperature)
                return true;
#endif
            break;
        }
        default:
            break;
        }
    }
#endif

    return mode == QCamera::WhiteBalanceAuto;
}

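// V4L2 devices emulate white balance presets by programming a fixed color
// temperature; GstPhotography devices use the corresponding wb_mode directly.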
void QGstreamerCamera::setWhiteBalanceMode(QCamera::WhiteBalanceMode mode)
{
    Q_ASSERT(isWhiteBalanceModeSupported(mode));

#if QT_CONFIG(linux_v4l)
    if (isV4L2Camera()) {
        int temperature = colorTemperatureForWhiteBalance(mode);
        int t = setV4L2ColorTemperature(temperature);
        if (t == 0)
            mode = QCamera::WhiteBalanceAuto;
        whiteBalanceModeChanged(mode);
        return;
    }
#endif

#if QT_CONFIG(gstreamer_photography)
    if (auto *p = photography()) {
        GstPhotographyWhiteBalanceMode gstMode = GST_PHOTOGRAPHY_WB_MODE_AUTO;
        switch (mode) {
        case QCamera::WhiteBalanceSunlight:
            gstMode = GST_PHOTOGRAPHY_WB_MODE_DAYLIGHT;
            break;
        case QCamera::WhiteBalanceCloudy:
            gstMode = GST_PHOTOGRAPHY_WB_MODE_CLOUDY;
            break;
        case QCamera::WhiteBalanceShade:
            gstMode = GST_PHOTOGRAPHY_WB_MODE_SHADE;
            break;
        case QCamera::WhiteBalanceSunset:
            gstMode = GST_PHOTOGRAPHY_WB_MODE_SUNSET;
            break;
        case QCamera::WhiteBalanceTungsten:
            gstMode = GST_PHOTOGRAPHY_WB_MODE_TUNGSTEN;
            break;
        case QCamera::WhiteBalanceFluorescent:
            gstMode = GST_PHOTOGRAPHY_WB_MODE_FLUORESCENT;
            break;
        case QCamera::WhiteBalanceAuto:
        default:
            break;
        }
        if (gst_photography_set_white_balance_mode(p, gstMode)) {
            whiteBalanceModeChanged(mode);
            return;
        }
    }
#endif
}

void QGstreamerCamera::setColorTemperature(int temperature)
{
    if (temperature == 0) {
        setWhiteBalanceMode(QCamera::WhiteBalanceAuto);
        return;
    }

    Q_ASSERT(isWhiteBalanceModeSupported(QCamera::WhiteBalanceManual));

#if QT_CONFIG(linux_v4l)
    if (isV4L2Camera()) {
        int t = setV4L2ColorTemperature(temperature);
        if (t)
            colorTemperatureChanged(t);
        return;
    }
#endif

#if QT_CONFIG(gstreamer_photography) && GST_CHECK_VERSION(1, 18, 0)
    if (auto *p = photography()) {
        GstPhotographyInterface *iface = GST_PHOTOGRAPHY_GET_INTERFACE(p);
        Q_ASSERT(iface->set_color_temperature);
        iface->set_color_temperature(p, temperature);
        return;
    }
#endif
}

#if QT_CONFIG(linux_v4l)
void QGstreamerCamera::initV4L2Controls()
{
    v4l2AutoWhiteBalanceSupported = false;
    v4l2ColorTemperatureSupported = false;
    QCamera::Features features;

    const QString deviceName = v4l2Device();
    Q_ASSERT(!deviceName.isEmpty());

    v4l2FileDescriptor = qt_safe_open(deviceName.toLocal8Bit().constData(), O_RDONLY);
    if (v4l2FileDescriptor == -1) {
        qWarning() << "Unable to open the camera" << deviceName
                   << "for read to query the parameter info:" << qt_error_string(errno);
        return;
    }

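    // Probe each control with VIDIOC_QUERYCTRL; a successful query means the
    // driver exposes it, and the reported range is cached for later use.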
    struct v4l2_queryctrl queryControl;
    ::memset(&queryControl, 0, sizeof(queryControl));
    queryControl.id = V4L2_CID_AUTO_WHITE_BALANCE;

    if (::ioctl(v4l2FileDescriptor, VIDIOC_QUERYCTRL, &queryControl) == 0) {
        v4l2AutoWhiteBalanceSupported = true;
        setV4L2Parameter(V4L2_CID_AUTO_WHITE_BALANCE, true);
    }

    ::memset(&queryControl, 0, sizeof(queryControl));
    queryControl.id = V4L2_CID_WHITE_BALANCE_TEMPERATURE;
    if (::ioctl(v4l2FileDescriptor, VIDIOC_QUERYCTRL, &queryControl) == 0) {
        v4l2MinColorTemp = queryControl.minimum;
        v4l2MaxColorTemp = queryControl.maximum;
        v4l2ColorTemperatureSupported = true;
        features |= QCamera::Feature::ColorTemperature;
    }

    ::memset(&queryControl, 0, sizeof(queryControl));
    queryControl.id = V4L2_CID_EXPOSURE_AUTO;
    if (::ioctl(v4l2FileDescriptor, VIDIOC_QUERYCTRL, &queryControl) == 0) {
        v4l2AutoExposureSupported = true;
    }

    ::memset(&queryControl, 0, sizeof(queryControl));
    queryControl.id = V4L2_CID_EXPOSURE_ABSOLUTE;
    if (::ioctl(v4l2FileDescriptor, VIDIOC_QUERYCTRL, &queryControl) == 0) {
        v4l2ManualExposureSupported = true;
        v4l2MinExposure = queryControl.minimum;
        v4l2MaxExposure = queryControl.maximum;
        features |= QCamera::Feature::ManualExposureTime;
    }

    ::memset(&queryControl, 0, sizeof(queryControl));
    queryControl.id = V4L2_CID_AUTO_EXPOSURE_BIAS;
    if (::ioctl(v4l2FileDescriptor, VIDIOC_QUERYCTRL, &queryControl) == 0) {
        v4l2MinExposureAdjustment = queryControl.minimum;
        v4l2MaxExposureAdjustment = queryControl.maximum;
        features |= QCamera::Feature::ExposureCompensation;
    }

    ::memset(&queryControl, 0, sizeof(queryControl));
    queryControl.id = V4L2_CID_ISO_SENSITIVITY_AUTO;
    if (::ioctl(v4l2FileDescriptor, VIDIOC_QUERYCTRL, &queryControl) == 0) {
        queryControl.id = V4L2_CID_ISO_SENSITIVITY;
        if (::ioctl(v4l2FileDescriptor, VIDIOC_QUERYCTRL, &queryControl) == 0) {
            features |= QCamera::Feature::IsoSensitivity;
            minIsoChanged(queryControl.minimum);
            maxIsoChanged(queryControl.maximum);
        }
    }

    supportedFeaturesChanged(features);
}

int QGstreamerCamera::setV4L2ColorTemperature(int temperature)
{
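    // A temperature of 0 requests auto white balance; otherwise auto white
    // balance is disabled and the value is clamped to the driver's range.
    // Returns the temperature actually applied, or 0 on failure/auto.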
    if (v4l2AutoWhiteBalanceSupported) {
        setV4L2Parameter(V4L2_CID_AUTO_WHITE_BALANCE, temperature == 0 ? true : false);
    } else if (temperature == 0) {
        temperature = 5600;
    }

    if (temperature != 0 && v4l2ColorTemperatureSupported) {
        temperature = qBound(v4l2MinColorTemp, temperature, v4l2MaxColorTemp);
        if (!setV4L2Parameter(V4L2_CID_WHITE_BALANCE_TEMPERATURE, temperature))
            temperature = 0;
    } else {
        temperature = 0;
    }

    return temperature;
}

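// Thin wrappers around VIDIOC_S_CTRL / VIDIOC_G_CTRL on the device opened in
// initV4L2Controls().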
bool QGstreamerCamera::setV4L2Parameter(quint32 id, qint32 value)
{
    struct v4l2_control control{id, value};
    if (::ioctl(v4l2FileDescriptor, VIDIOC_S_CTRL, &control) != 0) {
        qWarning() << "Unable to set the V4L2 Parameter" << Qt::hex << id << "to" << value << qt_error_string(errno);
        return false;
    }
    return true;
}

int QGstreamerCamera::getV4L2Parameter(quint32 id) const
{
    struct v4l2_control control{id, 0};
    if (::ioctl(v4l2FileDescriptor, VIDIOC_G_CTRL, &control) != 0) {
        qWarning() << "Unable to get the V4L2 Parameter" << Qt::hex << id << qt_error_string(errno);
        return 0;
    }
    return control.value;
}

#endif

QT_END_NAMESPACE

#include "moc_qgstreamercamera_p.cpp"