/****************************************************************************
**
** Copyright (C) 2016 The Qt Company Ltd.
** Contact: https://www.qt.io/licensing/
**
** This file is part of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:LGPL$
** Commercial License Usage
** Licensees holding valid commercial Qt licenses may use this file in
** accordance with the commercial license agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and The Qt Company. For licensing terms
** and conditions see https://www.qt.io/terms-conditions. For further
** information use the contact form at https://www.qt.io/contact-us.
**
** GNU Lesser General Public License Usage
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 3 as published by the Free Software
** Foundation and appearing in the file LICENSE.LGPL3 included in the
** packaging of this file. Please review the following information to
** ensure the GNU Lesser General Public License version 3 requirements
** will be met: https://www.gnu.org/licenses/lgpl-3.0.html.
**
** GNU General Public License Usage
** Alternatively, this file may be used under the terms of the GNU
** General Public License version 2.0 or (at your option) the GNU General
** Public license version 3 or any later version approved by the KDE Free
** Qt Foundation. The licenses are as published by the Free Software
** Foundation and appearing in the file LICENSE.GPL2 and LICENSE.GPL3
** included in the packaging of this file. Please review the following
** information to ensure the GNU General Public License requirements will
** be met: https://www.gnu.org/licenses/gpl-2.0.html and
** https://www.gnu.org/licenses/gpl-3.0.html.
**
** $QT_END_LICENSE$
**
****************************************************************************/

#include "camerabinfocus.h"
#include "camerabinsession.h"

#include <gst/interfaces/photography.h>

#include <QDebug>
#include <QtCore/qcoreevent.h>
#include <QtCore/qmetaobject.h>

#include <private/qgstutils_p.h>

#if !GST_CHECK_VERSION(1,0,0)
typedef GstFocusMode GstPhotographyFocusMode;
#endif

//#define CAMERABIN_DEBUG 1

QT_BEGIN_NAMESPACE

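// CameraBinFocus implements QCameraFocusControl for the GStreamer camerabin
// backend. Focus modes are applied through the GstPhotography interface of the
// camera source, while focus point modes are realized by sending
// "regions-of-interest" events upstream and, with GStreamer 1.x, by probing
// viewfinder buffers for face-detection metadata.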
CameraBinFocus::CameraBinFocus(CameraBinSession *session)
    :QCameraFocusControl(session),
#if GST_CHECK_VERSION(1,0,0)
     QGstreamerBufferProbe(ProbeBuffers),
#endif
     m_session(session),
     m_cameraStatus(QCamera::UnloadedStatus),
     m_focusMode(QCameraFocus::AutoFocus),
     m_focusPointMode(QCameraFocus::FocusPointAuto),
     m_focusStatus(QCamera::Unlocked),
     m_focusZoneStatus(QCameraFocusZone::Selected),
     m_focusPoint(0.5, 0.5),
     m_focusRect(0, 0, 0.3, 0.3)
{
    m_focusRect.moveCenter(m_focusPoint);

    gst_photography_set_focus_mode(m_session->photography(), GST_PHOTOGRAPHY_FOCUS_MODE_AUTO);

    connect(m_session, SIGNAL(statusChanged(QCamera::Status)),
            this, SLOT(_q_handleCameraStatusChange(QCamera::Status)));
}

CameraBinFocus::~CameraBinFocus()
{
}

QCameraFocus::FocusModes CameraBinFocus::focusMode() const
{
    return m_focusMode;
}

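// Translates the requested QCameraFocus mode into the matching GstPhotography
// focus mode and applies it to the camera source; m_focusMode is only updated
// if gst_photography_set_focus_mode() reports success.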
void CameraBinFocus::setFocusMode(QCameraFocus::FocusModes mode)
{
    GstPhotographyFocusMode photographyMode;

    switch (mode) {
    case QCameraFocus::AutoFocus:
        photographyMode = GST_PHOTOGRAPHY_FOCUS_MODE_AUTO;
        break;
    case QCameraFocus::HyperfocalFocus:
        photographyMode = GST_PHOTOGRAPHY_FOCUS_MODE_HYPERFOCAL;
        break;
    case QCameraFocus::InfinityFocus:
        photographyMode = GST_PHOTOGRAPHY_FOCUS_MODE_INFINITY;
        break;
    case QCameraFocus::ContinuousFocus:
        photographyMode = GST_PHOTOGRAPHY_FOCUS_MODE_CONTINUOUS_NORMAL;
        break;
    case QCameraFocus::MacroFocus:
        photographyMode = GST_PHOTOGRAPHY_FOCUS_MODE_MACRO;
        break;
    default:
        if (mode & QCameraFocus::AutoFocus) {
            photographyMode = GST_PHOTOGRAPHY_FOCUS_MODE_AUTO;
            break;
        } else {
            return;
        }
    }

    if (gst_photography_set_focus_mode(m_session->photography(), photographyMode))
        m_focusMode = mode;
}

bool CameraBinFocus::isFocusModeSupported(QCameraFocus::FocusModes mode) const
{
    switch (mode) {
    case QCameraFocus::AutoFocus:
    case QCameraFocus::HyperfocalFocus:
    case QCameraFocus::InfinityFocus:
    case QCameraFocus::ContinuousFocus:
    case QCameraFocus::MacroFocus:
        return true;
    default:
        return mode & QCameraFocus::AutoFocus;
    }
}

QCameraFocus::FocusPointMode CameraBinFocus::focusPointMode() const
{
    return m_focusPointMode;
}

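// Switches the focus point mode. Leaving FocusPointFaceDetection disables the
// camera source's "detect-faces" property and removes the viewfinder buffer
// probe; entering it does the reverse. Any custom focus point selected in the
// previous (non-auto) mode is reset.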
void CameraBinFocus::setFocusPointMode(QCameraFocus::FocusPointMode mode)
{
    GstElement *source = m_session->cameraSource();

    if (m_focusPointMode == mode || !source)
        return;

#if GST_CHECK_VERSION(1,0,0)
    if (m_focusPointMode == QCameraFocus::FocusPointFaceDetection) {
        g_object_set (G_OBJECT(source), "detect-faces", FALSE, NULL);

        if (GstPad *pad = gst_element_get_static_pad(source, "vfsrc")) {
            removeProbeFromPad(pad);
            gst_object_unref(GST_OBJECT(pad));
        }

        m_faceResetTimer.stop();
        m_faceFocusRects.clear();

        QMutexLocker locker(&m_mutex);
        m_faces.clear();
    }
#endif

    if (m_focusPointMode != QCameraFocus::FocusPointAuto)
        resetFocusPoint();

    switch (mode) {
    case QCameraFocus::FocusPointAuto:
    case QCameraFocus::FocusPointCustom:
        break;
#if GST_CHECK_VERSION(1,0,0)
    case QCameraFocus::FocusPointFaceDetection:
        if (g_object_class_find_property(G_OBJECT_GET_CLASS(source), "detect-faces")) {
            if (GstPad *pad = gst_element_get_static_pad(source, "vfsrc")) {
                addProbeToPad(pad);
                g_object_set (G_OBJECT(source), "detect-faces", TRUE, NULL);
                break;
            }
        }
        return;
#endif
    default:
        return;
    }

    m_focusPointMode = mode;
    emit focusPointModeChanged(m_focusPointMode);
    emit focusZonesChanged();
}

bool CameraBinFocus::isFocusPointModeSupported(QCameraFocus::FocusPointMode mode) const
{
    switch (mode) {
    case QCameraFocus::FocusPointAuto:
    case QCameraFocus::FocusPointCustom:
        return true;
#if GST_CHECK_VERSION(1,0,0)
    case QCameraFocus::FocusPointFaceDetection:
        if (GstElement *source = m_session->cameraSource())
            return g_object_class_find_property(G_OBJECT_GET_CLASS(source), "detect-faces");
        return false;
#endif
    default:
        return false;
    }
}

QPointF CameraBinFocus::customFocusPoint() const
{
    return m_focusPoint;
}

void CameraBinFocus::setCustomFocusPoint(const QPointF &point)
{
    if (m_focusPoint != point) {
        m_focusPoint = point;

        // Bound the focus point so the focus rect remains entirely within the unit square.
        m_focusPoint.setX(qBound(m_focusRect.width() / 2, m_focusPoint.x(), 1 - m_focusRect.width() / 2));
        m_focusPoint.setY(qBound(m_focusRect.height() / 2, m_focusPoint.y(), 1 - m_focusRect.height() / 2));

        if (m_focusPointMode == QCameraFocus::FocusPointCustom) {
            const QRectF focusRect = m_focusRect;
            m_focusRect.moveCenter(m_focusPoint);

            updateRegionOfInterest(m_focusRect);

            if (focusRect != m_focusRect) {
                emit focusZonesChanged();
            }
        }

        emit customFocusPointChanged(m_focusPoint);
    }
}

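// Returns the current focus zones: the single normalized focus rectangle in
// auto/custom mode, or one zone per detected face, scaled from viewfinder
// pixel coordinates back to the 0..1 range, in face-detection mode.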
QCameraFocusZoneList CameraBinFocus::focusZones() const
{
    QCameraFocusZoneList zones;

    if (m_focusPointMode != QCameraFocus::FocusPointFaceDetection) {
        zones.append(QCameraFocusZone(m_focusRect, m_focusZoneStatus));
#if GST_CHECK_VERSION(1,0,0)
    } else for (const QRect &face : qAsConst(m_faceFocusRects)) {
        const QRectF normalizedRect(
                    face.x() / qreal(m_viewfinderResolution.width()),
                    face.y() / qreal(m_viewfinderResolution.height()),
                    face.width() / qreal(m_viewfinderResolution.width()),
                    face.height() / qreal(m_viewfinderResolution.height()));
        zones.append(QCameraFocusZone(normalizedRect, m_focusZoneStatus));
#endif
    }
    return zones;
}

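// Handles GST_PHOTOGRAPHY_AUTOFOCUS_DONE bus messages, mapping the reported
// GstPhotography focus status onto QCamera::LockStatus and forwarding it to
// _q_setFocusStatus() on the main thread via a queued invocation.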
void CameraBinFocus::handleFocusMessage(GstMessage *gm)
{
    // This is a synchronous bus message handler, so it is called from a
    // non-main (streaming) thread.
    const GstStructure *structure = gst_message_get_structure(gm);
    if (gst_structure_has_name(structure, GST_PHOTOGRAPHY_AUTOFOCUS_DONE)) {
        gint status = GST_PHOTOGRAPHY_FOCUS_STATUS_NONE;
        gst_structure_get_int (structure, "status", &status);
        QCamera::LockStatus focusStatus = m_focusStatus;
        QCamera::LockChangeReason reason = QCamera::UserRequest;

        switch (status) {
        case GST_PHOTOGRAPHY_FOCUS_STATUS_FAIL:
            focusStatus = QCamera::Unlocked;
            reason = QCamera::LockFailed;
            break;
        case GST_PHOTOGRAPHY_FOCUS_STATUS_SUCCESS:
            focusStatus = QCamera::Locked;
            break;
        case GST_PHOTOGRAPHY_FOCUS_STATUS_NONE:
            break;
        case GST_PHOTOGRAPHY_FOCUS_STATUS_RUNNING:
            focusStatus = QCamera::Searching;
            break;
        default:
            break;
        }

        static int signalIndex = metaObject()->indexOfSlot(
                    "_q_setFocusStatus(QCamera::LockStatus,QCamera::LockChangeReason)");
        metaObject()->method(signalIndex).invoke(this,
                                                 Qt::QueuedConnection,
                                                 Q_ARG(QCamera::LockStatus,focusStatus),
                                                 Q_ARG(QCamera::LockChangeReason,reason));
    }
}

void CameraBinFocus::_q_setFocusStatus(QCamera::LockStatus status, QCamera::LockChangeReason reason)
{
#ifdef CAMERABIN_DEBUG
    qDebug() << Q_FUNC_INFO << "Current:"
             << m_focusStatus
             << "New:"
             << status << reason;
#endif

    if (m_focusStatus != status) {
        m_focusStatus = status;

        QCameraFocusZone::FocusZoneStatus zonesStatus =
                m_focusStatus == QCamera::Locked ?
                    QCameraFocusZone::Focused : QCameraFocusZone::Selected;

        if (m_focusZoneStatus != zonesStatus) {
            m_focusZoneStatus = zonesStatus;
            emit focusZonesChanged();
        }

#if GST_CHECK_VERSION(1,0,0)
        if (m_focusPointMode == QCameraFocus::FocusPointFaceDetection
                && m_focusStatus == QCamera::Unlocked) {
            _q_updateFaces();
        }
#endif

        emit _q_focusStatusChanged(m_focusStatus, reason);
    }
}

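// Tracks camera status changes. When the camera becomes active, the negotiated
// viewfinder caps are read from the "vfsrc" pad to learn the viewfinder
// resolution and any custom region of interest is re-sent; otherwise the focus
// lock is released and the focus point is reset.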
void CameraBinFocus::_q_handleCameraStatusChange(QCamera::Status status)
{
    m_cameraStatus = status;
    if (status == QCamera::ActiveStatus) {
        if (GstPad *pad = gst_element_get_static_pad(m_session->cameraSource(), "vfsrc")) {
            if (GstCaps *caps = qt_gst_pad_get_current_caps(pad)) {
                if (GstStructure *structure = gst_caps_get_structure(caps, 0)) {
                    int width = 0;
                    int height = 0;
                    gst_structure_get_int(structure, "width", &width);
                    gst_structure_get_int(structure, "height", &height);
                    setViewfinderResolution(QSize(width, height));
                }
                gst_caps_unref(caps);
            }
            gst_object_unref(GST_OBJECT(pad));
        }
        if (m_focusPointMode == QCameraFocus::FocusPointCustom) {
            updateRegionOfInterest(m_focusRect);
        }
    } else {
        _q_setFocusStatus(QCamera::Unlocked, QCamera::LockLost);

        resetFocusPoint();
    }
}

void CameraBinFocus::_q_startFocusing()
{
    _q_setFocusStatus(QCamera::Searching, QCamera::UserRequest);
    gst_photography_set_autofocus(m_session->photography(), TRUE);
}

void CameraBinFocus::_q_stopFocusing()
{
    gst_photography_set_autofocus(m_session->photography(), FALSE);
    _q_setFocusStatus(QCamera::Unlocked, QCamera::UserRequest);
}

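// Stores the viewfinder resolution and rescales the width of the normalized
// focus rectangle by the inverse of the viewfinder aspect ratio, so that the
// focus zone stays roughly square in sensor pixels.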
void CameraBinFocus::setViewfinderResolution(const QSize &resolution)
{
    if (resolution != m_viewfinderResolution) {
        m_viewfinderResolution = resolution;
        if (!resolution.isEmpty()) {
            const QPointF center = m_focusRect.center();
            m_focusRect.setWidth(m_focusRect.height() * resolution.height() / resolution.width());
            m_focusRect.moveCenter(center);
        }
    }
}

void CameraBinFocus::resetFocusPoint()
{
    const QRectF focusRect = m_focusRect;
    m_focusPoint = QPointF(0.5, 0.5);
    m_focusRect.moveCenter(m_focusPoint);

    updateRegionOfInterest(QVector<QRect>());

    if (focusRect != m_focusRect) {
        emit customFocusPointChanged(m_focusPoint);
        emit focusZonesChanged();
    }
}

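// Packs a single region rectangle (in viewfinder pixel coordinates) into a
// "region" GstStructure and appends it to the GST_TYPE_LIST value that will
// become the "regions" field of the regions-of-interest event.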
static void appendRegion(GValue *regions, int priority, const QRect &rectangle)
{
    GstStructure *region = gst_structure_new(
                "region",
                "region-x", G_TYPE_UINT, rectangle.x(),
                "region-y", G_TYPE_UINT, rectangle.y(),
                "region-w", G_TYPE_UINT, rectangle.width(),
                "region-h", G_TYPE_UINT, rectangle.height(),
                "region-priority", G_TYPE_UINT, priority,
                NULL);

    GValue regionValue = G_VALUE_INIT;
    g_value_init(&regionValue, GST_TYPE_STRUCTURE);
    gst_value_set_structure(&regionValue, region);
    gst_structure_free(region);

    gst_value_list_append_value(regions, &regionValue);
    g_value_unset(&regionValue);
}

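// Convenience overload: converts a normalized (0..1) focus rectangle into
// viewfinder pixel coordinates and forwards it as a single-region list.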
void CameraBinFocus::updateRegionOfInterest(const QRectF &rectangle)
{
    updateRegionOfInterest(QVector<QRect>() << QRect(
            rectangle.x() * m_viewfinderResolution.width(),
            rectangle.y() * m_viewfinderResolution.height(),
            rectangle.width() * m_viewfinderResolution.width(),
            rectangle.height() * m_viewfinderResolution.height()));
}

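// Builds a "regions-of-interest" structure for the given rectangles (an empty
// list is sent as a single zero-sized, zero-priority region) and pushes it
// upstream to the camera source as a custom event. Small face rectangles are
// padded to at least 30% of the smaller viewfinder dimension.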
void CameraBinFocus::updateRegionOfInterest(const QVector<QRect> &rectangles)
{
    if (m_cameraStatus != QCamera::ActiveStatus)
        return;

    GstElement * const cameraSource = m_session->cameraSource();
    if (!cameraSource)
        return;

    GValue regions = G_VALUE_INIT;
    g_value_init(&regions, GST_TYPE_LIST);

    if (rectangles.isEmpty()) {
        appendRegion(&regions, 0, QRect(0, 0, 0, 0));
    } else {
        // Add padding around small face rectangles so the auto focus has a reasonable amount
        // of image to work with.
        const int minimumDimension = qMin(
                    m_viewfinderResolution.width(), m_viewfinderResolution.height()) * 0.3;
        const QRect viewfinderRectangle(QPoint(0, 0), m_viewfinderResolution);

        for (const QRect &rectangle : rectangles) {
            QRect paddedRectangle(
                        0,
                        0,
                        qMax(rectangle.width(), minimumDimension),
                        qMax(rectangle.height(), minimumDimension));
            paddedRectangle.moveCenter(rectangle.center());

            appendRegion(&regions, 1, viewfinderRectangle.intersected(paddedRectangle));
        }
    }

    GstStructure *regionsOfInterest = gst_structure_new(
                "regions-of-interest",
                "frame-width", G_TYPE_UINT, m_viewfinderResolution.width(),
                "frame-height", G_TYPE_UINT, m_viewfinderResolution.height(),
                NULL);
    gst_structure_set_value(regionsOfInterest, "regions", &regions);
    g_value_unset(&regions);

    GstEvent *event = gst_event_new_custom(GST_EVENT_CUSTOM_UPSTREAM, regionsOfInterest);
    gst_element_send_event(cameraSource, event);
}

#if GST_CHECK_VERSION(1,0,0)

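// Runs on the control's thread after new face data arrives (or the focus lock
// is released). Copies the latest face rectangles out of the mutex-protected
// buffer; a non-empty result becomes the active set of focus zones, while an
// empty result arms a short timer so stale face zones are only cleared once
// the faces have been absent for a while.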
void CameraBinFocus::_q_updateFaces()
{
    if (m_focusPointMode != QCameraFocus::FocusPointFaceDetection
            || m_focusStatus != QCamera::Unlocked) {
        return;
    }

    QVector<QRect> faces;

    {
        QMutexLocker locker(&m_mutex);
        faces = m_faces;
    }

    if (!faces.isEmpty()) {
        m_faceResetTimer.stop();
        m_faceFocusRects = faces;
        updateRegionOfInterest(m_faceFocusRects);
        emit focusZonesChanged();
    } else {
        m_faceResetTimer.start(500, this);
    }
}

void CameraBinFocus::timerEvent(QTimerEvent *event)
{
    if (event->timerId() == m_faceResetTimer.timerId()) {
        m_faceResetTimer.stop();

        if (m_focusStatus == QCamera::Unlocked) {
            m_faceFocusRects.clear();
            updateRegionOfInterest(m_faceFocusRects);
            emit focusZonesChanged();
        }
    } else {
        QCameraFocusControl::timerEvent(event);
    }
}

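// Buffer probe installed on the camera source's "vfsrc" pad; called on the
// streaming thread. With GStreamer >= 1.1.3 it collects the face rectangles
// attached to the buffer as GstVideoRegionOfInterestMeta and, if they changed,
// queues _q_updateFaces() on the control's thread.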
bool CameraBinFocus::probeBuffer(GstBuffer *buffer)
{
    QVector<QRect> faces;

#if GST_CHECK_VERSION(1,1,3)
    gpointer state = NULL;
    const GstMetaInfo *info = GST_VIDEO_REGION_OF_INTEREST_META_INFO;

    while (GstMeta *meta = gst_buffer_iterate_meta(buffer, &state)) {
        if (meta->info->api != info->api)
            continue;

        GstVideoRegionOfInterestMeta *region = reinterpret_cast<GstVideoRegionOfInterestMeta *>(meta);

        faces.append(QRect(region->x, region->y, region->w, region->h));
    }
#else
    Q_UNUSED(buffer);
#endif

    QMutexLocker locker(&m_mutex);

    if (m_faces != faces) {
        m_faces = faces;

        static const int signalIndex = metaObject()->indexOfSlot("_q_updateFaces()");
        metaObject()->method(signalIndex).invoke(this, Qt::QueuedConnection);
    }

    return true;
}

#endif

QT_END_NAMESPACE