| 1 | /**************************************************************************** |
| 2 | ** |
| 3 | ** Copyright (C) 2016 The Qt Company Ltd. |
| 4 | ** Contact: https://www.qt.io/licensing/ |
| 5 | ** |
| 6 | ** This file is part of the Qt Toolkit. |
| 7 | ** |
| 8 | ** $QT_BEGIN_LICENSE:LGPL$ |
| 9 | ** Commercial License Usage |
| 10 | ** Licensees holding valid commercial Qt licenses may use this file in |
| 11 | ** accordance with the commercial license agreement provided with the |
| 12 | ** Software or, alternatively, in accordance with the terms contained in |
| 13 | ** a written agreement between you and The Qt Company. For licensing terms |
| 14 | ** and conditions see https://www.qt.io/terms-conditions. For further |
| 15 | ** information use the contact form at https://www.qt.io/contact-us. |
| 16 | ** |
| 17 | ** GNU Lesser General Public License Usage |
| 18 | ** Alternatively, this file may be used under the terms of the GNU Lesser |
| 19 | ** General Public License version 3 as published by the Free Software |
| 20 | ** Foundation and appearing in the file LICENSE.LGPL3 included in the |
| 21 | ** packaging of this file. Please review the following information to |
| 22 | ** ensure the GNU Lesser General Public License version 3 requirements |
| 23 | ** will be met: https://www.gnu.org/licenses/lgpl-3.0.html. |
| 24 | ** |
| 25 | ** GNU General Public License Usage |
| 26 | ** Alternatively, this file may be used under the terms of the GNU |
| 27 | ** General Public License version 2.0 or (at your option) the GNU General |
| 28 | ** Public license version 3 or any later version approved by the KDE Free |
| 29 | ** Qt Foundation. The licenses are as published by the Free Software |
| 30 | ** Foundation and appearing in the file LICENSE.GPL2 and LICENSE.GPL3 |
| 31 | ** included in the packaging of this file. Please review the following |
| 32 | ** information to ensure the GNU General Public License requirements will |
| 33 | ** be met: https://www.gnu.org/licenses/gpl-2.0.html and |
| 34 | ** https://www.gnu.org/licenses/gpl-3.0.html. |
| 35 | ** |
| 36 | ** $QT_END_LICENSE$ |
| 37 | ** |
| 38 | ****************************************************************************/ |
| 39 | |
| 40 | #include <QtMultimedia/private/qtmultimediaglobal_p.h> |
| 41 | #include "camerabinsession.h" |
| 42 | #include "camerabincontrol.h" |
| 43 | #include "camerabinrecorder.h" |
| 44 | #include "camerabincontainer.h" |
| 45 | #include "camerabinaudioencoder.h" |
| 46 | #include "camerabinvideoencoder.h" |
| 47 | #include "camerabinimageencoder.h" |
| 48 | |
| 49 | #if QT_CONFIG(gstreamer_photography) |
| 50 | #include "camerabinexposure.h" |
| 51 | #include "camerabinflash.h" |
| 52 | #include "camerabinfocus.h" |
| 53 | #include "camerabinlocks.h" |
| 54 | #endif |
| 55 | |
| 56 | #include "camerabinzoom.h" |
| 57 | #include "camerabinimageprocessing.h" |
| 58 | #include "camerabinviewfindersettings.h" |
| 59 | |
| 60 | #include "camerabincapturedestination.h" |
| 61 | #include "camerabincapturebufferformat.h" |
| 62 | #include <private/qgstreamerbushelper_p.h> |
| 63 | #include <private/qgstreamervideorendererinterface_p.h> |
| 64 | #include <private/qgstutils_p.h> |
| 65 | #include <qmediarecorder.h> |
| 66 | #include <qvideosurfaceformat.h> |
| 67 | |
| 68 | #if QT_CONFIG(gstreamer_photography) |
| 69 | #include <gst/interfaces/photography.h> |
| 70 | #endif |
| 71 | |
| 72 | #include <gst/gsttagsetter.h> |
| 73 | #include <gst/gstversion.h> |
| 74 | |
| 75 | #include <QtCore/qdebug.h> |
| 76 | #include <QCoreApplication> |
| 77 | #include <QtCore/qmetaobject.h> |
| 78 | #include <QtGui/qdesktopservices.h> |
| 79 | |
| 80 | #include <QtGui/qimage.h> |
| 81 | #include <QtCore/qdatetime.h> |
| 82 | |
| 83 | #include <algorithm> |
| 84 | |
| 85 | //#define CAMERABIN_DEBUG 1 |
| 86 | //#define CAMERABIN_DEBUG_DUMP_BIN 1 |
| 87 | #define ENUM_NAME(c,e,v) (c::staticMetaObject.enumerator(c::staticMetaObject.indexOfEnumerator(e)).valueToKey((v))) |
| 88 | |
| 89 | #define FILENAME_PROPERTY "location" |
| 90 | #define MODE_PROPERTY "mode" |
| 91 | #define MUTE_PROPERTY "mute" |
| 92 | #define IMAGE_PP_PROPERTY "image-post-processing" |
| 93 | #define IMAGE_ENCODER_PROPERTY "image-encoder" |
| 94 | #define VIDEO_PP_PROPERTY "video-post-processing" |
| 95 | #define VIEWFINDER_SINK_PROPERTY "viewfinder-sink" |
| 96 | #define CAMERA_SOURCE_PROPERTY "camera-source" |
| 97 | #define AUDIO_SOURCE_PROPERTY "audio-source" |
| 98 | #define SUPPORTED_IMAGE_CAPTURE_CAPS_PROPERTY "image-capture-supported-caps" |
| 99 | #define SUPPORTED_VIDEO_CAPTURE_CAPS_PROPERTY "video-capture-supported-caps" |
| 100 | #define SUPPORTED_VIEWFINDER_CAPS_PROPERTY "viewfinder-supported-caps" |
| 101 | #define AUDIO_CAPTURE_CAPS_PROPERTY "audio-capture-caps" |
| 102 | #define IMAGE_CAPTURE_CAPS_PROPERTY "image-capture-caps" |
| 103 | #define VIDEO_CAPTURE_CAPS_PROPERTY "video-capture-caps" |
| 104 | #define VIEWFINDER_CAPS_PROPERTY "viewfinder-caps" |
| 105 | #define PREVIEW_CAPS_PROPERTY "preview-caps" |
| 106 | #define POST_PREVIEWS_PROPERTY "post-previews" |
| 107 | |
| 108 | |
| 109 | #define CAPTURE_START "start-capture" |
| 110 | #define CAPTURE_STOP "stop-capture" |
| 111 | |
| 112 | #define FILESINK_BIN_NAME "videobin-filesink" |
| 113 | |
| 114 | #define CAMERABIN_IMAGE_MODE 1 |
| 115 | #define CAMERABIN_VIDEO_MODE 2 |
| 116 | |
| 117 | #define PREVIEW_CAPS_4_3 \ |
| 118 | "video/x-raw-rgb, width = (int) 640, height = (int) 480" |
| 119 | |
| 120 | QT_BEGIN_NAMESPACE |
| 121 | |
| 122 | CameraBinSession::CameraBinSession(GstElementFactory *sourceFactory, QObject *parent) |
| 123 | :QObject(parent), |
| 124 | m_recordingActive(false), |
| 125 | m_status(QCamera::UnloadedStatus), |
| 126 | m_pendingState(QCamera::UnloadedState), |
| 127 | m_muted(false), |
| 128 | m_busy(false), |
| 129 | m_captureMode(QCamera::CaptureStillImage), |
| 130 | m_audioInputFactory(0), |
| 131 | m_videoInputFactory(0), |
| 132 | m_viewfinder(0), |
| 133 | m_viewfinderInterface(0), |
| 134 | #if QT_CONFIG(gstreamer_photography) |
| 135 | m_cameraExposureControl(0), |
| 136 | m_cameraFlashControl(0), |
| 137 | m_cameraFocusControl(0), |
| 138 | m_cameraLocksControl(0), |
| 139 | #endif |
| 140 | m_cameraSrc(0), |
| 141 | m_videoSrc(0), |
| 142 | m_viewfinderElement(0), |
| 143 | m_sourceFactory(sourceFactory), |
| 144 | m_viewfinderHasChanged(true), |
| 145 | m_inputDeviceHasChanged(true), |
| 146 | m_usingWrapperCameraBinSrc(false), |
| 147 | m_viewfinderProbe(this), |
| 148 | m_audioSrc(0), |
| 149 | m_audioConvert(0), |
| 150 | m_capsFilter(0), |
| 151 | m_fileSink(0), |
| 152 | m_audioEncoder(0), |
| 153 | m_videoEncoder(0), |
| 154 | m_muxer(0) |
| 155 | { |
| 156 | if (m_sourceFactory) |
| 157 | gst_object_ref(GST_OBJECT(m_sourceFactory)); |
| 158 | m_camerabin = gst_element_factory_make(QT_GSTREAMER_CAMERABIN_ELEMENT_NAME, name: "camerabin" ); |
| 159 | |
| 160 | g_signal_connect(G_OBJECT(m_camerabin), "notify::idle" , G_CALLBACK(updateBusyStatus), this); |
| 161 | g_signal_connect(G_OBJECT(m_camerabin), "element-added" , G_CALLBACK(elementAdded), this); |
| 162 | g_signal_connect(G_OBJECT(m_camerabin), "element-removed" , G_CALLBACK(elementRemoved), this); |
| 163 | qt_gst_object_ref_sink(object: m_camerabin); |
| 164 | |
| 165 | m_bus = gst_element_get_bus(element: m_camerabin); |
| 166 | |
| 167 | m_busHelper = new QGstreamerBusHelper(m_bus, this); |
| 168 | m_busHelper->installMessageFilter(filter: this); |
| 169 | |
| 170 | m_cameraControl = new CameraBinControl(this); |
| 171 | m_audioEncodeControl = new CameraBinAudioEncoder(this); |
| 172 | m_videoEncodeControl = new CameraBinVideoEncoder(this); |
| 173 | m_imageEncodeControl = new CameraBinImageEncoder(this); |
| 174 | m_recorderControl = new CameraBinRecorder(this); |
| 175 | m_mediaContainerControl = new CameraBinContainer(this); |
| 176 | m_cameraZoomControl = new CameraBinZoom(this); |
| 177 | m_imageProcessingControl = new CameraBinImageProcessing(this); |
| 178 | m_captureDestinationControl = new CameraBinCaptureDestination(this); |
| 179 | m_captureBufferFormatControl = new CameraBinCaptureBufferFormat(this); |
| 180 | |
| 181 | QByteArray envFlags = qgetenv(varName: "QT_GSTREAMER_CAMERABIN_FLAGS" ); |
| 182 | if (!envFlags.isEmpty()) |
| 183 | g_object_set(G_OBJECT(m_camerabin), first_property_name: "flags" , envFlags.toInt(), NULL); |
| 184 | |
| 185 | //post image preview in RGB format |
| 186 | g_object_set(G_OBJECT(m_camerabin), POST_PREVIEWS_PROPERTY, TRUE, NULL); |
| 187 | |
| 188 | #if GST_CHECK_VERSION(1,0,0) |
| 189 | GstCaps *previewCaps = gst_caps_new_simple( |
| 190 | media_type: "video/x-raw" , |
| 191 | fieldname: "format" , G_TYPE_STRING, "RGBx" , |
| 192 | NULL); |
| 193 | #else |
| 194 | GstCaps *previewCaps = gst_caps_from_string("video/x-raw-rgb" ); |
| 195 | #endif |
| 196 | |
| 197 | g_object_set(G_OBJECT(m_camerabin), PREVIEW_CAPS_PROPERTY, previewCaps, NULL); |
| 198 | gst_caps_unref(caps: previewCaps); |
| 199 | } |
| 200 | |
| 201 | CameraBinSession::~CameraBinSession() |
| 202 | { |
| 203 | if (m_camerabin) { |
| 204 | if (m_viewfinderInterface) |
| 205 | m_viewfinderInterface->stopRenderer(); |
| 206 | |
| 207 | gst_element_set_state(element: m_camerabin, state: GST_STATE_NULL); |
| 208 | gst_element_get_state(element: m_camerabin, NULL, NULL, GST_CLOCK_TIME_NONE); |
| 209 | gst_object_unref(GST_OBJECT(m_bus)); |
| 210 | gst_object_unref(GST_OBJECT(m_camerabin)); |
| 211 | } |
| 212 | if (m_viewfinderElement) |
| 213 | gst_object_unref(GST_OBJECT(m_viewfinderElement)); |
| 214 | |
| 215 | if (m_sourceFactory) |
| 216 | gst_object_unref(GST_OBJECT(m_sourceFactory)); |
| 217 | |
| 218 | if (m_cameraSrc) |
| 219 | gst_object_unref(GST_OBJECT(m_cameraSrc)); |
| 220 | |
| 221 | if (m_videoSrc) |
| 222 | gst_object_unref(GST_OBJECT(m_videoSrc)); |
| 223 | } |
| 224 | |
| 225 | #if QT_CONFIG(gstreamer_photography) |
/*
    Returns the GstPhotography interface implemented either by the camerabin
    element itself or by the camera source element; returns 0 when neither
    implements it.
*/
GstPhotography *CameraBinSession::photography()
{
    if (GST_IS_PHOTOGRAPHY(m_camerabin)) {
        return GST_PHOTOGRAPHY(m_camerabin);
    }

    // The source element is created lazily; build it now so photography
    // support can be probed before the pipeline has ever been started.
    GstElement * const source = buildCameraSource();

    if (source && GST_IS_PHOTOGRAPHY(source))
        return GST_PHOTOGRAPHY(source);

    return 0;
}
| 239 | |
// Lazily creates and returns the exposure control; remains 0 while no
// GstPhotography interface is available.
CameraBinExposure *CameraBinSession::cameraExposureControl()
{
    if (!m_cameraExposureControl && photography())
        m_cameraExposureControl = new CameraBinExposure(this);
    return m_cameraExposureControl;
}
| 246 | |
// Lazily creates and returns the flash control; remains 0 while no
// GstPhotography interface is available.
CameraBinFlash *CameraBinSession::cameraFlashControl()
{
    if (!m_cameraFlashControl && photography())
        m_cameraFlashControl = new CameraBinFlash(this);
    return m_cameraFlashControl;
}
| 253 | |
// Lazily creates and returns the focus control; remains 0 while no
// GstPhotography interface is available.
CameraBinFocus *CameraBinSession::cameraFocusControl()
{
    if (!m_cameraFocusControl && photography())
        m_cameraFocusControl = new CameraBinFocus(this);
    return m_cameraFocusControl;
}
| 260 | |
// Lazily creates and returns the locks (focus/exposure/white-balance lock)
// control; remains 0 while no GstPhotography interface is available.
CameraBinLocks *CameraBinSession::cameraLocksControl()
{
    if (!m_cameraLocksControl && photography())
        m_cameraLocksControl = new CameraBinLocks(this);
    return m_cameraLocksControl;
}
| 267 | #endif |
| 268 | |
| 269 | bool CameraBinSession::setupCameraBin() |
| 270 | { |
| 271 | if (!buildCameraSource()) |
| 272 | return false; |
| 273 | |
| 274 | if (m_viewfinderHasChanged) { |
| 275 | if (m_viewfinderElement) { |
| 276 | GstPad *pad = gst_element_get_static_pad(element: m_viewfinderElement, name: "sink" ); |
| 277 | m_viewfinderProbe.removeProbeFromPad(pad); |
| 278 | gst_object_unref(GST_OBJECT(pad)); |
| 279 | gst_object_unref(GST_OBJECT(m_viewfinderElement)); |
| 280 | } |
| 281 | |
| 282 | m_viewfinderElement = m_viewfinderInterface ? m_viewfinderInterface->videoSink() : 0; |
| 283 | #if CAMERABIN_DEBUG |
| 284 | qDebug() << Q_FUNC_INFO << "Viewfinder changed, reconfigure." ; |
| 285 | #endif |
| 286 | m_viewfinderHasChanged = false; |
| 287 | if (!m_viewfinderElement) { |
| 288 | if (m_pendingState == QCamera::ActiveState) |
| 289 | qWarning() << "Starting camera without viewfinder available" ; |
| 290 | m_viewfinderElement = gst_element_factory_make(factoryname: "fakesink" , NULL); |
| 291 | } |
| 292 | |
| 293 | GstPad *pad = gst_element_get_static_pad(element: m_viewfinderElement, name: "sink" ); |
| 294 | m_viewfinderProbe.addProbeToPad(pad); |
| 295 | gst_object_unref(GST_OBJECT(pad)); |
| 296 | |
| 297 | g_object_set(G_OBJECT(m_viewfinderElement), first_property_name: "sync" , FALSE, NULL); |
| 298 | qt_gst_object_ref_sink(GST_OBJECT(m_viewfinderElement)); |
| 299 | gst_element_set_state(element: m_camerabin, state: GST_STATE_NULL); |
| 300 | g_object_set(G_OBJECT(m_camerabin), VIEWFINDER_SINK_PROPERTY, m_viewfinderElement, NULL); |
| 301 | } |
| 302 | |
| 303 | return true; |
| 304 | } |
| 305 | |
| 306 | static GstCaps *resolutionToCaps(const QSize &resolution, |
| 307 | qreal frameRate = 0.0, |
| 308 | QVideoFrame::PixelFormat pixelFormat = QVideoFrame::Format_Invalid) |
| 309 | { |
| 310 | GstCaps *caps = 0; |
| 311 | if (pixelFormat == QVideoFrame::Format_Invalid) |
| 312 | caps = QGstUtils::videoFilterCaps(); |
| 313 | else |
| 314 | caps = QGstUtils::capsForFormats(formats: QList<QVideoFrame::PixelFormat>() << pixelFormat); |
| 315 | |
| 316 | if (!resolution.isEmpty()) { |
| 317 | gst_caps_set_simple( |
| 318 | caps, |
| 319 | field: "width" , G_TYPE_INT, resolution.width(), |
| 320 | "height" , G_TYPE_INT, resolution.height(), |
| 321 | NULL); |
| 322 | } |
| 323 | |
| 324 | if (frameRate > 0.0) { |
| 325 | gint numerator; |
| 326 | gint denominator; |
| 327 | qt_gst_util_double_to_fraction(src: frameRate, dest_n: &numerator, dest_d: &denominator); |
| 328 | |
| 329 | gst_caps_set_simple( |
| 330 | caps, |
| 331 | field: "framerate" , GST_TYPE_FRACTION, numerator, denominator, |
| 332 | NULL); |
| 333 | } |
| 334 | |
| 335 | return caps; |
| 336 | } |
| 337 | |
| 338 | void CameraBinSession::setupCaptureResolution() |
| 339 | { |
| 340 | QSize viewfinderResolution = m_viewfinderSettings.resolution(); |
| 341 | qreal viewfinderFrameRate = m_viewfinderSettings.maximumFrameRate(); |
| 342 | QVideoFrame::PixelFormat viewfinderPixelFormat = m_viewfinderSettings.pixelFormat(); |
| 343 | const QSize imageResolution = m_imageEncodeControl->imageSettings().resolution(); |
| 344 | const QSize videoResolution = m_videoEncodeControl->actualVideoSettings().resolution(); |
| 345 | |
| 346 | // WrapperCameraBinSrc cannot have different caps on its imgsrc, vidsrc and vfsrc pads. |
| 347 | // If capture resolution is specified, use it also for the viewfinder to avoid caps negotiation |
| 348 | // to fail. |
| 349 | if (m_usingWrapperCameraBinSrc) { |
| 350 | if (viewfinderResolution.isEmpty()) { |
| 351 | if (m_captureMode == QCamera::CaptureStillImage && !imageResolution.isEmpty()) |
| 352 | viewfinderResolution = imageResolution; |
| 353 | else if (m_captureMode == QCamera::CaptureVideo && !videoResolution.isEmpty()) |
| 354 | viewfinderResolution = videoResolution; |
| 355 | } |
| 356 | |
| 357 | // Make sure we don't use incompatible frame rate and pixel format with the new resolution |
| 358 | if (viewfinderResolution != m_viewfinderSettings.resolution() && |
| 359 | (!qFuzzyIsNull(d: viewfinderFrameRate) || viewfinderPixelFormat != QVideoFrame::Format_Invalid)) { |
| 360 | |
| 361 | enum { |
| 362 | Nothing = 0x0, |
| 363 | OnlyFrameRate = 0x1, |
| 364 | OnlyPixelFormat = 0x2, |
| 365 | Both = 0x4 |
| 366 | }; |
| 367 | quint8 found = Nothing; |
| 368 | auto viewfinderSettings = supportedViewfinderSettings(); |
| 369 | for (int i = 0; i < viewfinderSettings.count() && !(found & Both); ++i) { |
| 370 | const QCameraViewfinderSettings &s = viewfinderSettings.at(i); |
| 371 | if (s.resolution() == viewfinderResolution) { |
| 372 | if ((qFuzzyIsNull(d: viewfinderFrameRate) || s.maximumFrameRate() == viewfinderFrameRate) |
| 373 | && (viewfinderPixelFormat == QVideoFrame::Format_Invalid || s.pixelFormat() == viewfinderPixelFormat)) |
| 374 | found |= Both; |
| 375 | else if (s.maximumFrameRate() == viewfinderFrameRate) |
| 376 | found |= OnlyFrameRate; |
| 377 | else if (s.pixelFormat() == viewfinderPixelFormat) |
| 378 | found |= OnlyPixelFormat; |
| 379 | } |
| 380 | } |
| 381 | |
| 382 | if (found & Both) { |
| 383 | // no-op |
| 384 | } else if (found & OnlyPixelFormat) { |
| 385 | viewfinderFrameRate = qreal(0); |
| 386 | } else if (found & OnlyFrameRate) { |
| 387 | viewfinderPixelFormat = QVideoFrame::Format_Invalid; |
| 388 | } else { |
| 389 | viewfinderPixelFormat = QVideoFrame::Format_Invalid; |
| 390 | viewfinderFrameRate = qreal(0); |
| 391 | } |
| 392 | } |
| 393 | } |
| 394 | |
| 395 | GstCaps *caps = resolutionToCaps(resolution: imageResolution); |
| 396 | g_object_set(object: m_camerabin, IMAGE_CAPTURE_CAPS_PROPERTY, caps, NULL); |
| 397 | gst_caps_unref(caps); |
| 398 | |
| 399 | qreal framerate = m_videoEncodeControl->videoSettings().frameRate(); |
| 400 | caps = resolutionToCaps(resolution: videoResolution, frameRate: framerate); |
| 401 | g_object_set(object: m_camerabin, VIDEO_CAPTURE_CAPS_PROPERTY, caps, NULL); |
| 402 | gst_caps_unref(caps); |
| 403 | |
| 404 | caps = resolutionToCaps(resolution: viewfinderResolution, frameRate: viewfinderFrameRate, pixelFormat: viewfinderPixelFormat); |
| 405 | g_object_set(object: m_camerabin, VIEWFINDER_CAPS_PROPERTY, caps, NULL); |
| 406 | gst_caps_unref(caps); |
| 407 | |
| 408 | // Special case when using mfw_v4lsrc |
| 409 | if (m_videoSrc && qstrcmp(str1: qt_gst_element_get_factory_name(element: m_videoSrc), str2: "mfw_v4lsrc" ) == 0) { |
| 410 | int capMode = 0; |
| 411 | if (viewfinderResolution == QSize(320, 240)) |
| 412 | capMode = 1; |
| 413 | else if (viewfinderResolution == QSize(720, 480)) |
| 414 | capMode = 2; |
| 415 | else if (viewfinderResolution == QSize(720, 576)) |
| 416 | capMode = 3; |
| 417 | else if (viewfinderResolution == QSize(1280, 720)) |
| 418 | capMode = 4; |
| 419 | else if (viewfinderResolution == QSize(1920, 1080)) |
| 420 | capMode = 5; |
| 421 | g_object_set(G_OBJECT(m_videoSrc), first_property_name: "capture-mode" , capMode, NULL); |
| 422 | |
| 423 | if (!qFuzzyIsNull(d: viewfinderFrameRate)) { |
| 424 | int n, d; |
| 425 | qt_gst_util_double_to_fraction(src: viewfinderFrameRate, dest_n: &n, dest_d: &d); |
| 426 | g_object_set(G_OBJECT(m_videoSrc), first_property_name: "fps-n" , n, NULL); |
| 427 | g_object_set(G_OBJECT(m_videoSrc), first_property_name: "fps-d" , d, NULL); |
| 428 | } |
| 429 | } |
| 430 | |
| 431 | if (m_videoEncoder) |
| 432 | m_videoEncodeControl->applySettings(encoder: m_videoEncoder); |
| 433 | } |
| 434 | |
| 435 | void CameraBinSession::setAudioCaptureCaps() |
| 436 | { |
| 437 | QAudioEncoderSettings settings = m_audioEncodeControl->audioSettings(); |
| 438 | const int sampleRate = settings.sampleRate(); |
| 439 | const int channelCount = settings.channelCount(); |
| 440 | |
| 441 | if (sampleRate <= 0 && channelCount <=0) |
| 442 | return; |
| 443 | |
| 444 | #if GST_CHECK_VERSION(1,0,0) |
| 445 | GstStructure *structure = gst_structure_new_empty(QT_GSTREAMER_RAW_AUDIO_MIME); |
| 446 | #else |
| 447 | GstStructure *structure = gst_structure_new( |
| 448 | QT_GSTREAMER_RAW_AUDIO_MIME, |
| 449 | "endianness" , G_TYPE_INT, 1234, |
| 450 | "signed" , G_TYPE_BOOLEAN, TRUE, |
| 451 | "width" , G_TYPE_INT, 16, |
| 452 | "depth" , G_TYPE_INT, 16, |
| 453 | NULL); |
| 454 | #endif |
| 455 | if (sampleRate > 0) |
| 456 | gst_structure_set(structure, fieldname: "rate" , G_TYPE_INT, sampleRate, NULL); |
| 457 | if (channelCount > 0) |
| 458 | gst_structure_set(structure, fieldname: "channels" , G_TYPE_INT, channelCount, NULL); |
| 459 | |
| 460 | GstCaps *caps = gst_caps_new_full(struct1: structure, NULL); |
| 461 | g_object_set(G_OBJECT(m_camerabin), AUDIO_CAPTURE_CAPS_PROPERTY, caps, NULL); |
| 462 | gst_caps_unref(caps); |
| 463 | |
| 464 | if (m_audioEncoder) |
| 465 | m_audioEncodeControl->applySettings(element: m_audioEncoder); |
| 466 | } |
| 467 | |
| 468 | GstElement *CameraBinSession::buildCameraSource() |
| 469 | { |
| 470 | #if CAMERABIN_DEBUG |
| 471 | qDebug() << Q_FUNC_INFO; |
| 472 | #endif |
| 473 | if (m_inputDevice.isEmpty()) |
| 474 | return nullptr; |
| 475 | |
| 476 | if (!m_inputDeviceHasChanged) |
| 477 | return m_cameraSrc; |
| 478 | |
| 479 | m_inputDeviceHasChanged = false; |
| 480 | m_usingWrapperCameraBinSrc = false; |
| 481 | |
| 482 | if (m_videoSrc) { |
| 483 | gst_object_unref(GST_OBJECT(m_videoSrc)); |
| 484 | m_videoSrc = 0; |
| 485 | } |
| 486 | if (m_cameraSrc) { |
| 487 | gst_object_unref(GST_OBJECT(m_cameraSrc)); |
| 488 | m_cameraSrc = 0; |
| 489 | } |
| 490 | |
| 491 | GstElement *camSrc = 0; |
| 492 | g_object_get(G_OBJECT(m_camerabin), CAMERA_SOURCE_PROPERTY, &camSrc, NULL); |
| 493 | |
| 494 | if (!m_cameraSrc && m_sourceFactory) |
| 495 | m_cameraSrc = gst_element_factory_create(factory: m_sourceFactory, name: "camera_source" ); |
| 496 | |
| 497 | // If gstreamer has set a default source use it. |
| 498 | if (!m_cameraSrc) |
| 499 | m_cameraSrc = camSrc; |
| 500 | |
| 501 | if (m_cameraSrc) { |
| 502 | #if CAMERABIN_DEBUG |
| 503 | qDebug() << "set camera device" << m_inputDevice; |
| 504 | #endif |
| 505 | m_usingWrapperCameraBinSrc = qstrcmp(str1: qt_gst_element_get_factory_name(element: m_cameraSrc), str2: "wrappercamerabinsrc" ) == 0; |
| 506 | |
| 507 | if (g_object_class_find_property(G_OBJECT_GET_CLASS(m_cameraSrc), property_name: "video-source" )) { |
| 508 | if (!m_videoSrc) { |
| 509 | /* QT_GSTREAMER_CAMERABIN_VIDEOSRC can be used to set the video source element. |
| 510 | |
| 511 | --- Usage |
| 512 | |
| 513 | QT_GSTREAMER_CAMERABIN_VIDEOSRC=[drivername=elementname[,drivername2=elementname2 ...],][elementname] |
| 514 | |
| 515 | --- Examples |
| 516 | |
| 517 | Always use 'somevideosrc': |
| 518 | QT_GSTREAMER_CAMERABIN_VIDEOSRC="somevideosrc" |
| 519 | |
| 520 | Use 'somevideosrc' when the device driver is 'somedriver', otherwise use default: |
| 521 | QT_GSTREAMER_CAMERABIN_VIDEOSRC="somedriver=somevideosrc" |
| 522 | |
| 523 | Use 'somevideosrc' when the device driver is 'somedriver', otherwise use 'somevideosrc2' |
| 524 | QT_GSTREAMER_CAMERABIN_VIDEOSRC="somedriver=somevideosrc,somevideosrc2" |
| 525 | */ |
| 526 | const QByteArray envVideoSource = qgetenv(varName: "QT_GSTREAMER_CAMERABIN_VIDEOSRC" ); |
| 527 | |
| 528 | if (!envVideoSource.isEmpty()) { |
| 529 | const QList<QByteArray> sources = envVideoSource.split(sep: ','); |
| 530 | for (const QByteArray &source : sources) { |
| 531 | QList<QByteArray> keyValue = source.split(sep: '='); |
| 532 | QByteArray name = keyValue.at(i: 0); |
| 533 | if (keyValue.count() > 1 && keyValue.at(i: 0) == QGstUtils::cameraDriver(device: m_inputDevice, factory: m_sourceFactory)) |
| 534 | name = keyValue.at(i: 1); |
| 535 | |
| 536 | GError *error = NULL; |
| 537 | GstElement *element = gst_parse_launch(pipeline_description: name, error: &error); |
| 538 | |
| 539 | if (error) { |
| 540 | g_printerr(format: "ERROR: %s: %s\n" , name.constData(), GST_STR_NULL(error->message)); |
| 541 | g_clear_error(err: &error); |
| 542 | } |
| 543 | if (element) { |
| 544 | m_videoSrc = element; |
| 545 | break; |
| 546 | } |
| 547 | } |
| 548 | } else if (m_videoInputFactory) { |
| 549 | m_videoSrc = m_videoInputFactory->buildElement(); |
| 550 | } |
| 551 | |
| 552 | if (!m_videoSrc) |
| 553 | m_videoSrc = gst_element_factory_make(factoryname: "v4l2src" , name: "camera_source" ); |
| 554 | |
| 555 | if (!m_videoSrc) |
| 556 | m_videoSrc = gst_element_factory_make(factoryname: "ksvideosrc" , name: "camera_source" ); |
| 557 | |
| 558 | if (!m_videoSrc) |
| 559 | m_videoSrc = gst_element_factory_make(factoryname: "avfvideosrc" , name: "camera_source" ); |
| 560 | |
| 561 | if (m_videoSrc) |
| 562 | g_object_set(G_OBJECT(m_cameraSrc), first_property_name: "video-source" , m_videoSrc, NULL); |
| 563 | } |
| 564 | |
| 565 | if (m_videoSrc) { |
| 566 | if (g_object_class_find_property(G_OBJECT_GET_CLASS(m_videoSrc), property_name: "device" )) |
| 567 | g_object_set(G_OBJECT(m_videoSrc), first_property_name: "device" , m_inputDevice.toUtf8().constData(), NULL); |
| 568 | |
| 569 | if (g_object_class_find_property(G_OBJECT_GET_CLASS(m_videoSrc), property_name: "device-path" )) |
| 570 | g_object_set(G_OBJECT(m_videoSrc), first_property_name: "device-path" , m_inputDevice.toUtf8().constData(), NULL); |
| 571 | |
| 572 | if (g_object_class_find_property(G_OBJECT_GET_CLASS(m_videoSrc), property_name: "device-index" )) |
| 573 | g_object_set(G_OBJECT(m_videoSrc), first_property_name: "device-index" , m_inputDevice.toInt(), NULL); |
| 574 | } |
| 575 | } else if (g_object_class_find_property(G_OBJECT_GET_CLASS(m_cameraSrc), property_name: "camera-device" )) { |
| 576 | if (m_inputDevice == QLatin1String("secondary" )) { |
| 577 | g_object_set(G_OBJECT(m_cameraSrc), first_property_name: "camera-device" , 1, NULL); |
| 578 | } else { |
| 579 | g_object_set(G_OBJECT(m_cameraSrc), first_property_name: "camera-device" , 0, NULL); |
| 580 | } |
| 581 | } |
| 582 | } |
| 583 | |
| 584 | if (m_cameraSrc != camSrc) { |
| 585 | g_object_set(G_OBJECT(m_camerabin), CAMERA_SOURCE_PROPERTY, m_cameraSrc, NULL); |
| 586 | // Unref only if camSrc is not m_cameraSrc to prevent double unrefing. |
| 587 | if (camSrc) |
| 588 | gst_object_unref(GST_OBJECT(camSrc)); |
| 589 | } |
| 590 | |
| 591 | return m_cameraSrc; |
| 592 | } |
| 593 | |
| 594 | void CameraBinSession::captureImage(int requestId, const QString &fileName) |
| 595 | { |
| 596 | const QString actualFileName = m_mediaStorageLocation.generateFileName(requestedName: fileName, |
| 597 | type: QMediaStorageLocation::Pictures, |
| 598 | prefix: QLatin1String("IMG_" ), |
| 599 | extension: QLatin1String("jpg" )); |
| 600 | |
| 601 | m_requestId = requestId; |
| 602 | |
| 603 | #if CAMERABIN_DEBUG |
| 604 | qDebug() << Q_FUNC_INFO << m_requestId << fileName << "actual file name:" << actualFileName; |
| 605 | #endif |
| 606 | |
| 607 | g_object_set(G_OBJECT(m_camerabin), FILENAME_PROPERTY, actualFileName.toLocal8Bit().constData(), NULL); |
| 608 | |
| 609 | g_signal_emit_by_name(G_OBJECT(m_camerabin), CAPTURE_START, NULL); |
| 610 | |
| 611 | m_imageFileName = actualFileName; |
| 612 | } |
| 613 | |
| 614 | void CameraBinSession::setCaptureMode(QCamera::CaptureModes mode) |
| 615 | { |
| 616 | m_captureMode = mode; |
| 617 | |
| 618 | switch (m_captureMode) { |
| 619 | case QCamera::CaptureStillImage: |
| 620 | g_object_set(object: m_camerabin, MODE_PROPERTY, CAMERABIN_IMAGE_MODE, NULL); |
| 621 | break; |
| 622 | case QCamera::CaptureVideo: |
| 623 | g_object_set(object: m_camerabin, MODE_PROPERTY, CAMERABIN_VIDEO_MODE, NULL); |
| 624 | break; |
| 625 | } |
| 626 | |
| 627 | m_recorderControl->updateStatus(); |
| 628 | } |
| 629 | |
QUrl CameraBinSession::outputLocation() const
{
    //return the location service wrote data to, not one set by user, it can be empty.
    return m_actualSink;
}
| 635 | |
| 636 | bool CameraBinSession::setOutputLocation(const QUrl& sink) |
| 637 | { |
| 638 | if (!sink.isRelative() && !sink.isLocalFile()) { |
| 639 | qWarning(msg: "Output location must be a local file" ); |
| 640 | return false; |
| 641 | } |
| 642 | |
| 643 | m_sink = m_actualSink = sink; |
| 644 | return true; |
| 645 | } |
| 646 | |
// Selects the camera input device; the source element is rebuilt lazily on
// the next buildCameraSource() call.
void CameraBinSession::setDevice(const QString &device)
{
    if (m_inputDevice != device) {
        m_inputDevice = device;
        m_inputDeviceHasChanged = true;
    }
}
| 654 | |
// Stores the factory used to build the audio source element.
void CameraBinSession::setAudioInput(QGstreamerElementFactory *audioInput)
{
    m_audioInputFactory = audioInput;
}
| 659 | |
// Stores the factory used to build the video source element and marks the
// input device as changed so the source gets rebuilt.
void CameraBinSession::setVideoInput(QGstreamerElementFactory *videoInput)
{
    m_videoInputFactory = videoInput;
    m_inputDeviceHasChanged = true;
}
| 665 | |
bool CameraBinSession::isReady() const
{
    //it's possible to use QCamera without any viewfinder attached
    return !m_viewfinderInterface || m_viewfinderInterface->isReady();
}
| 671 | |
| 672 | void CameraBinSession::setViewfinder(QObject *viewfinder) |
| 673 | { |
| 674 | if (m_viewfinderInterface) |
| 675 | m_viewfinderInterface->stopRenderer(); |
| 676 | |
| 677 | m_viewfinderInterface = qobject_cast<QGstreamerVideoRendererInterface*>(object: viewfinder); |
| 678 | if (!m_viewfinderInterface) |
| 679 | viewfinder = 0; |
| 680 | |
| 681 | if (m_viewfinder != viewfinder) { |
| 682 | bool oldReady = isReady(); |
| 683 | |
| 684 | if (m_viewfinder) { |
| 685 | disconnect(sender: m_viewfinder, SIGNAL(sinkChanged()), |
| 686 | receiver: this, SLOT(handleViewfinderChange())); |
| 687 | disconnect(sender: m_viewfinder, SIGNAL(readyChanged(bool)), |
| 688 | receiver: this, SIGNAL(readyChanged(bool))); |
| 689 | |
| 690 | m_busHelper->removeMessageFilter(filter: m_viewfinder); |
| 691 | } |
| 692 | |
| 693 | m_viewfinder = viewfinder; |
| 694 | m_viewfinderHasChanged = true; |
| 695 | |
| 696 | if (m_viewfinder) { |
| 697 | connect(sender: m_viewfinder, SIGNAL(sinkChanged()), |
| 698 | receiver: this, SLOT(handleViewfinderChange())); |
| 699 | connect(sender: m_viewfinder, SIGNAL(readyChanged(bool)), |
| 700 | receiver: this, SIGNAL(readyChanged(bool))); |
| 701 | |
| 702 | m_busHelper->installMessageFilter(filter: m_viewfinder); |
| 703 | } |
| 704 | |
| 705 | emit viewfinderChanged(); |
| 706 | if (oldReady != isReady()) |
| 707 | emit readyChanged(isReady()); |
| 708 | } |
| 709 | } |
| 710 | |
| 711 | static QList<QCameraViewfinderSettings> capsToViewfinderSettings(GstCaps *supportedCaps) |
| 712 | { |
| 713 | QList<QCameraViewfinderSettings> settings; |
| 714 | |
| 715 | if (!supportedCaps) |
| 716 | return settings; |
| 717 | |
| 718 | supportedCaps = qt_gst_caps_normalize(caps: supportedCaps); |
| 719 | |
| 720 | // Convert caps to QCameraViewfinderSettings |
| 721 | for (uint i = 0; i < gst_caps_get_size(caps: supportedCaps); ++i) { |
| 722 | const GstStructure *structure = gst_caps_get_structure(caps: supportedCaps, index: i); |
| 723 | |
| 724 | QCameraViewfinderSettings s; |
| 725 | s.setResolution(QGstUtils::structureResolution(s: structure)); |
| 726 | s.setPixelFormat(QGstUtils::structurePixelFormat(s: structure)); |
| 727 | s.setPixelAspectRatio(QGstUtils::structurePixelAspectRatio(s: structure)); |
| 728 | |
| 729 | QPair<qreal, qreal> frameRateRange = QGstUtils::structureFrameRateRange(s: structure); |
| 730 | s.setMinimumFrameRate(frameRateRange.first); |
| 731 | s.setMaximumFrameRate(frameRateRange.second); |
| 732 | |
| 733 | if (!s.resolution().isEmpty() |
| 734 | && s.pixelFormat() != QVideoFrame::Format_Invalid |
| 735 | && !settings.contains(t: s)) { |
| 736 | settings.append(t: s); |
| 737 | } |
| 738 | } |
| 739 | |
| 740 | gst_caps_unref(caps: supportedCaps); |
| 741 | return settings; |
| 742 | } |
| 743 | |
| 744 | QList<QCameraViewfinderSettings> CameraBinSession::supportedViewfinderSettings() const |
| 745 | { |
| 746 | if (m_status >= QCamera::LoadedStatus && m_supportedViewfinderSettings.isEmpty()) { |
| 747 | m_supportedViewfinderSettings = |
| 748 | capsToViewfinderSettings(supportedCaps: supportedCaps(mode: QCamera::CaptureViewfinder)); |
| 749 | } |
| 750 | |
| 751 | return m_supportedViewfinderSettings; |
| 752 | } |
| 753 | |
// Returns the negotiated viewfinder settings while the camera is active,
// otherwise the settings requested by the user.
QCameraViewfinderSettings CameraBinSession::viewfinderSettings() const
{
    return m_status == QCamera::ActiveStatus ? m_actualViewfinderSettings : m_viewfinderSettings;
}
| 758 | |
| 759 | void CameraBinSession::ViewfinderProbe::probeCaps(GstCaps *caps) |
| 760 | { |
| 761 | QGstreamerVideoProbeControl::probeCaps(caps); |
| 762 | |
| 763 | // Update actual viewfinder settings on viewfinder caps change |
| 764 | const GstStructure *s = gst_caps_get_structure(caps, index: 0); |
| 765 | const QPair<qreal, qreal> frameRate = QGstUtils::structureFrameRateRange(s); |
| 766 | session->m_actualViewfinderSettings.setResolution(QGstUtils::structureResolution(s)); |
| 767 | session->m_actualViewfinderSettings.setMinimumFrameRate(frameRate.first); |
| 768 | session->m_actualViewfinderSettings.setMaximumFrameRate(frameRate.second); |
| 769 | session->m_actualViewfinderSettings.setPixelFormat(QGstUtils::structurePixelFormat(s)); |
| 770 | session->m_actualViewfinderSettings.setPixelAspectRatio(QGstUtils::structurePixelAspectRatio(s)); |
| 771 | } |
| 772 | |
| 773 | void CameraBinSession::handleViewfinderChange() |
| 774 | { |
| 775 | //the viewfinder will be reloaded |
| 776 | //shortly when the pipeline is started |
| 777 | m_viewfinderHasChanged = true; |
| 778 | emit viewfinderChanged(); |
| 779 | } |
| 780 | |
| 781 | void CameraBinSession::setStatus(QCamera::Status status) |
| 782 | { |
| 783 | if (m_status == status) |
| 784 | return; |
| 785 | |
| 786 | m_status = status; |
| 787 | emit statusChanged(status: m_status); |
| 788 | |
| 789 | setStateHelper(m_pendingState); |
| 790 | } |
| 791 | |
| 792 | QCamera::Status CameraBinSession::status() const |
| 793 | { |
| 794 | return m_status; |
| 795 | } |
| 796 | |
| 797 | QCamera::State CameraBinSession::pendingState() const |
| 798 | { |
| 799 | return m_pendingState; |
| 800 | } |
| 801 | |
| 802 | void CameraBinSession::setState(QCamera::State newState) |
| 803 | { |
| 804 | if (newState == m_pendingState) |
| 805 | return; |
| 806 | |
| 807 | m_pendingState = newState; |
| 808 | emit pendingStateChanged(state: m_pendingState); |
| 809 | |
| 810 | #if CAMERABIN_DEBUG |
| 811 | qDebug() << Q_FUNC_INFO << newState; |
| 812 | #endif |
| 813 | |
| 814 | setStateHelper(newState); |
| 815 | } |
| 816 | |
| 817 | void CameraBinSession::setStateHelper(QCamera::State state) |
| 818 | { |
| 819 | switch (state) { |
| 820 | case QCamera::UnloadedState: |
| 821 | unload(); |
| 822 | break; |
| 823 | case QCamera::LoadedState: |
| 824 | if (m_status == QCamera::ActiveStatus) |
| 825 | stop(); |
| 826 | else if (m_status == QCamera::UnloadedStatus) |
| 827 | load(); |
| 828 | break; |
| 829 | case QCamera::ActiveState: |
| 830 | // If the viewfinder changed while in the loaded state, we need to reload the pipeline |
| 831 | if (m_status == QCamera::LoadedStatus && !m_viewfinderHasChanged) |
| 832 | start(); |
| 833 | else if (m_status == QCamera::UnloadedStatus || m_viewfinderHasChanged) |
| 834 | load(); |
| 835 | } |
| 836 | } |
| 837 | |
| 838 | void CameraBinSession::setError(int err, const QString &errorString) |
| 839 | { |
| 840 | // Emit only first error |
| 841 | if (m_pendingState == QCamera::UnloadedState) |
| 842 | return; |
| 843 | |
| 844 | setState(QCamera::UnloadedState); |
| 845 | emit error(error: err, errorString); |
| 846 | setStatus(QCamera::UnloadedStatus); |
| 847 | } |
| 848 | |
// Builds (or rebuilds) the camerabin pipeline and moves it from NULL to
// READY.  Also invoked when the viewfinder sink changed, in which case the
// running pipeline is dropped to NULL first so the sink can be relinked.
void CameraBinSession::load()
{
    if (m_status != QCamera::UnloadedStatus && !m_viewfinderHasChanged)
        return;

    setStatus(QCamera::LoadingStatus);

    // Drop to NULL so a changed viewfinder can be reconfigured safely.
    gst_element_set_state(element: m_camerabin, state: GST_STATE_NULL);

    if (!setupCameraBin()) {
        setError(err: QCamera::CameraError, QStringLiteral("No camera source available" ));
        return;
    }

    m_recorderControl->applySettings();

#if QT_CONFIG(gstreamer_encodingprofiles)
    // Hand the recorder's encoding profile to camerabin; camerabin keeps its
    // own reference, so ours is released immediately afterwards.
    GstEncodingContainerProfile *profile = m_recorderControl->videoProfile();
    if (profile) {
        g_object_set (G_OBJECT(m_camerabin),
                      first_property_name: "video-profile" ,
                      profile,
                      NULL);
        gst_encoding_profile_unref(profile);
    }
#endif

    gst_element_set_state(element: m_camerabin, state: GST_STATE_READY);
}
| 878 | |
// Tears the pipeline down to the NULL state: stops any active recording and
// the viewfinder renderer first, then clears state that is only valid for
// the current device/pipeline configuration.
void CameraBinSession::unload()
{
    if (m_status == QCamera::UnloadedStatus || m_status == QCamera::UnloadingStatus)
        return;

    setStatus(QCamera::UnloadingStatus);

    if (m_recordingActive)
        stopVideoRecording();

    if (m_viewfinderInterface)
        m_viewfinderInterface->stopRenderer();

    gst_element_set_state(element: m_camerabin, state: GST_STATE_NULL);

    // A pipeline in the NULL state can no longer be busy.
    if (m_busy)
        emit busyChanged(m_busy = false);

    // Cached caps may differ the next time the camera is loaded.
    m_supportedViewfinderSettings.clear();

    setStatus(QCamera::UnloadedStatus);
}
| 901 | |
// Starts the viewfinder by moving the pipeline to PLAYING.  Capture caps are
// applied first, while the pipeline is still in the READY state.
void CameraBinSession::start()
{
    if (m_status != QCamera::LoadedStatus)
        return;

    setStatus(QCamera::StartingStatus);

    setAudioCaptureCaps();

    setupCaptureResolution();

    gst_element_set_state(element: m_camerabin, state: GST_STATE_PLAYING);
}
| 915 | |
// Stops the viewfinder: ends any active recording, halts the renderer and
// drops the pipeline back to the READY state.
void CameraBinSession::stop()
{
    if (m_status != QCamera::ActiveStatus)
        return;

    setStatus(QCamera::StoppingStatus);

    if (m_recordingActive)
        stopVideoRecording();

    if (m_viewfinderInterface)
        m_viewfinderInterface->stopRenderer();

    gst_element_set_state(element: m_camerabin, state: GST_STATE_READY);
}
| 931 | |
| 932 | bool CameraBinSession::isBusy() const |
| 933 | { |
| 934 | return m_busy; |
| 935 | } |
| 936 | |
| 937 | void CameraBinSession::updateBusyStatus(GObject *o, GParamSpec *p, gpointer d) |
| 938 | { |
| 939 | Q_UNUSED(p); |
| 940 | CameraBinSession *session = reinterpret_cast<CameraBinSession *>(d); |
| 941 | |
| 942 | gboolean idle = false; |
| 943 | g_object_get(object: o, first_property_name: "idle" , &idle, NULL); |
| 944 | bool busy = !idle; |
| 945 | |
| 946 | if (session->m_busy != busy) { |
| 947 | session->m_busy = busy; |
| 948 | QMetaObject::invokeMethod(obj: session, member: "busyChanged" , |
| 949 | type: Qt::QueuedConnection, |
| 950 | Q_ARG(bool, busy)); |
| 951 | } |
| 952 | } |
| 953 | |
| 954 | qint64 CameraBinSession::duration() const |
| 955 | { |
| 956 | if (m_camerabin) { |
| 957 | GstElement *fileSink = gst_bin_get_by_name(GST_BIN(m_camerabin), FILESINK_BIN_NAME); |
| 958 | if (fileSink) { |
| 959 | GstFormat format = GST_FORMAT_TIME; |
| 960 | gint64 duration = 0; |
| 961 | bool ret = qt_gst_element_query_position(element: fileSink, format, cur: &duration); |
| 962 | gst_object_unref(GST_OBJECT(fileSink)); |
| 963 | if (ret) |
| 964 | return duration / 1000000; |
| 965 | } |
| 966 | } |
| 967 | |
| 968 | return 0; |
| 969 | } |
| 970 | |
| 971 | bool CameraBinSession::isMuted() const |
| 972 | { |
| 973 | return m_muted; |
| 974 | } |
| 975 | |
| 976 | void CameraBinSession::setMuted(bool muted) |
| 977 | { |
| 978 | if (m_muted != muted) { |
| 979 | m_muted = muted; |
| 980 | |
| 981 | if (m_camerabin) |
| 982 | g_object_set(G_OBJECT(m_camerabin), MUTE_PROPERTY, m_muted, NULL); |
| 983 | emit mutedChanged(m_muted); |
| 984 | } |
| 985 | } |
| 986 | |
| 987 | void CameraBinSession::setCaptureDevice(const QString &deviceName) |
| 988 | { |
| 989 | m_captureDevice = deviceName; |
| 990 | } |
| 991 | |
| 992 | void CameraBinSession::setMetaData(const QMap<QByteArray, QVariant> &data) |
| 993 | { |
| 994 | m_metaData = data; |
| 995 | |
| 996 | if (m_camerabin) |
| 997 | QGstUtils::setMetaData(element: m_camerabin, data); |
| 998 | } |
| 999 | |
// Handles synchronous bus messages.  Extracts the "preview-image" element
// message camerabin posts after a still-image capture, converts the preview
// buffer to a QImage and forwards it through the imageExposed /
// imageCaptured signals.  Returns true only when the preview message was
// consumed; all other messages fall through (return false).
bool CameraBinSession::processSyncMessage(const QGstreamerMessage &message)
{
    GstMessage* gm = message.rawMessage();

    if (gm && GST_MESSAGE_TYPE(gm) == GST_MESSAGE_ELEMENT) {
        const GstStructure *st = gst_message_get_structure(message: gm);
        const GValue *sampleValue = 0;
        // GStreamer 1.x delivers the preview as a GstSample in the "sample"
        // field; 0.10 posts a raw GstBuffer in the "buffer" field.
        if (m_captureMode == QCamera::CaptureStillImage
                && gst_structure_has_name(structure: st, name: "preview-image" )
#if GST_CHECK_VERSION(1,0,0)
                && gst_structure_has_field_typed(structure: st, fieldname: "sample" , GST_TYPE_SAMPLE)
                && (sampleValue = gst_structure_get_value(structure: st, fieldname: "sample" ))) {
            GstSample * const sample = gst_value_get_sample(sampleValue);
            GstCaps * const previewCaps = gst_sample_get_caps(sample);
            GstBuffer * const buffer = gst_sample_get_buffer(sample);
#else
                && gst_structure_has_field_typed(st, "buffer" , GST_TYPE_BUFFER)
                && (sampleValue = gst_structure_get_value(st, "buffer" ))) {
            GstBuffer * const buffer = gst_value_get_buffer(sampleValue);
#endif

            QImage image;
#if GST_CHECK_VERSION(1,0,0)
            GstVideoInfo previewInfo;
            if (gst_video_info_from_caps(info: &previewInfo, caps: previewCaps))
                image = QGstUtils::bufferToImage(buffer, info: previewInfo);
#else
            image = QGstUtils::bufferToImage(buffer);
            gst_buffer_unref(buffer);
#endif
            if (!image.isNull()) {
                // Deliver the signals queued: this code may run on a
                // GStreamer streaming thread, not the session's thread.
                static QMetaMethod exposedSignal = QMetaMethod::fromSignal(signal: &CameraBinSession::imageExposed);
                exposedSignal.invoke(object: this,
                                     connectionType: Qt::QueuedConnection,
                                     Q_ARG(int,m_requestId));

                static QMetaMethod capturedSignal = QMetaMethod::fromSignal(signal: &CameraBinSession::imageCaptured);
                capturedSignal.invoke(object: this,
                                      connectionType: Qt::QueuedConnection,
                                      Q_ARG(int,m_requestId),
                                      Q_ARG(QImage,image));
            }
            return true;
        }
#if QT_CONFIG(gstreamer_photography)
        // Autofocus completion is also reported as an element message.
        if (gst_structure_has_name(structure: st, GST_PHOTOGRAPHY_AUTOFOCUS_DONE))
            m_cameraFocusControl->handleFocusMessage(gm);
#endif
    }

    return false;
}
| 1052 | |
// Handles asynchronous bus messages: reports errors and warnings, and maps
// camerabin's GStreamer state transitions onto QCamera statuses.  Always
// returns false so other bus watchers still see the message.
bool CameraBinSession::processBusMessage(const QGstreamerMessage &message)
{
    GstMessage* gm = message.rawMessage();

    if (gm) {
        if (GST_MESSAGE_TYPE(gm) == GST_MESSAGE_ERROR) {
            GError *err;
            gchar *debug;
            // gst_message_parse_error() transfers ownership of err and debug
            // to the caller; both are freed at the end of this branch.
            gst_message_parse_error (message: gm, gerror: &err, debug: &debug);

            QString message;

            if (err && err->message) {
                message = QString::fromUtf8(str: err->message);
                qWarning() << "CameraBin error:" << message;
#if CAMERABIN_DEBUG
                qWarning() << QString::fromUtf8(debug);
#endif
            }

            // Only report error messages from camerabin or video source
            if (GST_MESSAGE_SRC(gm) == GST_OBJECT_CAST(m_camerabin)
                || GST_MESSAGE_SRC(gm) == GST_OBJECT_CAST(m_videoSrc)) {
                if (message.isEmpty())
                    message = tr(s: "Camera error" );

                setError(err: int(QMediaRecorder::ResourceError), errorString: message);
            }

#ifdef CAMERABIN_DEBUG_DUMP_BIN
            _gst_debug_bin_to_dot_file_with_ts(GST_BIN(m_camerabin),
                                  GstDebugGraphDetails(GST_DEBUG_GRAPH_SHOW_ALL /* GST_DEBUG_GRAPH_SHOW_MEDIA_TYPE | GST_DEBUG_GRAPH_SHOW_NON_DEFAULT_PARAMS | GST_DEBUG_GRAPH_SHOW_STATES*/),
                                  "camerabin_error" );
#endif


            if (err)
                g_error_free (error: err);

            if (debug)
                g_free (mem: debug);
        }

        if (GST_MESSAGE_TYPE(gm) == GST_MESSAGE_WARNING) {
            GError *err;
            gchar *debug;
            gst_message_parse_warning (message: gm, gerror: &err, debug: &debug);

            if (err && err->message)
                qWarning() << "CameraBin warning:" << QString::fromUtf8(str: err->message);

            if (err)
                g_error_free (error: err);
            if (debug)
                g_free (mem: debug);
        }

        // Track state changes of the top-level camerabin element only;
        // child elements change state independently.
        if (GST_MESSAGE_SRC(gm) == GST_OBJECT_CAST(m_camerabin)) {
            switch (GST_MESSAGE_TYPE(gm)) {
            case GST_MESSAGE_DURATION:
                break;

            case GST_MESSAGE_STATE_CHANGED:
                {

                    GstState oldState;
                    GstState newState;
                    GstState pending;

                    gst_message_parse_state_changed(message: gm, oldstate: &oldState, newstate: &newState, pending: &pending);


#if CAMERABIN_DEBUG
                    QStringList states;
                    states << "GST_STATE_VOID_PENDING" << "GST_STATE_NULL" << "GST_STATE_READY" << "GST_STATE_PAUSED" << "GST_STATE_PLAYING" ;


                    qDebug() << QString("state changed: old: %1 new: %2 pending: %3" ) \
                            .arg(states[oldState]) \
                            .arg(states[newState]) \
                            .arg(states[pending]);
#endif

#ifdef CAMERABIN_DEBUG_DUMP_BIN
                    _gst_debug_bin_to_dot_file_with_ts(GST_BIN(m_camerabin),
                                  GstDebugGraphDetails(GST_DEBUG_GRAPH_SHOW_ALL /*GST_DEBUG_GRAPH_SHOW_MEDIA_TYPE | GST_DEBUG_GRAPH_SHOW_NON_DEFAULT_PARAMS | GST_DEBUG_GRAPH_SHOW_STATES*/),
                                  "camerabin" );
#endif

                    // Map the new pipeline state onto a QCamera status.
                    switch (newState) {
                    case GST_STATE_VOID_PENDING:
                    case GST_STATE_NULL:
                        setStatus(QCamera::UnloadedStatus);
                        break;
                    case GST_STATE_READY:
                        // Coming up from NULL means the device may have been
                        // reopened: drop stale cached viewfinder settings.
                        if (oldState == GST_STATE_NULL)
                            m_supportedViewfinderSettings.clear();

                        setMetaData(m_metaData);
                        setStatus(QCamera::LoadedStatus);
                        break;
                    case GST_STATE_PLAYING:
                        setStatus(QCamera::ActiveStatus);
                        break;
                    case GST_STATE_PAUSED:
                    default:
                        break;
                    }
                }
                break;
            default:
                break;
            }
        }
    }

    return false;
}
| 1171 | |
| 1172 | QGstreamerVideoProbeControl *CameraBinSession::videoProbe() |
| 1173 | { |
| 1174 | return &m_viewfinderProbe; |
| 1175 | } |
| 1176 | |
| 1177 | QString CameraBinSession::currentContainerFormat() const |
| 1178 | { |
| 1179 | if (!m_muxer) |
| 1180 | return QString(); |
| 1181 | |
| 1182 | QString format; |
| 1183 | |
| 1184 | if (GstPad *srcPad = gst_element_get_static_pad(element: m_muxer, name: "src" )) { |
| 1185 | if (GstCaps *caps = qt_gst_pad_get_caps(pad: srcPad)) { |
| 1186 | gchar *capsString = gst_caps_to_string(caps); |
| 1187 | format = QString::fromLatin1(str: capsString); |
| 1188 | if (capsString) |
| 1189 | g_free(mem: capsString); |
| 1190 | gst_caps_unref(caps); |
| 1191 | } |
| 1192 | gst_object_unref(GST_OBJECT(srcPad)); |
| 1193 | } |
| 1194 | |
| 1195 | return format; |
| 1196 | } |
| 1197 | |
// Starts a video recording: resolves the output file name (generating one in
// the movies storage location when needed), hands it to camerabin and fires
// the capture-start action signal.
void CameraBinSession::recordVideo()
{
    QString format = currentContainerFormat();
    if (format.isEmpty())
        format = m_mediaContainerControl->actualContainerFormat();

    const QString fileName = m_sink.isLocalFile() ? m_sink.toLocalFile() : m_sink.toString();
    const QFileInfo fileInfo(fileName);
    // Use the extension implied by the container format unless the requested
    // name already carries one.
    const QString extension = fileInfo.suffix().isEmpty()
                            ? QGstUtils::fileExtensionForMimeType(mimeType: format)
                            : fileInfo.suffix();

    const QString actualFileName = m_mediaStorageLocation.generateFileName(requestedName: fileName,
                                                                           type: QMediaStorageLocation::Movies,
                                                                           prefix: QLatin1String("clip_" ),
                                                                           extension);

    m_recordingActive = true;
    m_actualSink = QUrl::fromLocalFile(localfile: actualFileName);

    g_object_set(G_OBJECT(m_camerabin), FILENAME_PROPERTY, QFile::encodeName(fileName: actualFileName).constData(), NULL);

    g_signal_emit_by_name(G_OBJECT(m_camerabin), CAPTURE_START, NULL);
}
| 1222 | |
| 1223 | void CameraBinSession::stopVideoRecording() |
| 1224 | { |
| 1225 | m_recordingActive = false; |
| 1226 | g_signal_emit_by_name(G_OBJECT(m_camerabin), CAPTURE_STOP, NULL); |
| 1227 | } |
| 1228 | |
| 1229 | //internal, only used by CameraBinSession::supportedFrameRates. |
| 1230 | //recursively fills the list of framerates res from value data. |
| 1231 | static void readValue(const GValue *value, QList< QPair<int,int> > *res, bool *continuous) |
| 1232 | { |
| 1233 | if (GST_VALUE_HOLDS_FRACTION(value)) { |
| 1234 | int num = gst_value_get_fraction_numerator(value); |
| 1235 | int denum = gst_value_get_fraction_denominator(value); |
| 1236 | |
| 1237 | *res << QPair<int,int>(num, denum); |
| 1238 | } else if (GST_VALUE_HOLDS_FRACTION_RANGE(value)) { |
| 1239 | const GValue *rateValueMin = gst_value_get_fraction_range_min(value); |
| 1240 | const GValue *rateValueMax = gst_value_get_fraction_range_max(value); |
| 1241 | |
| 1242 | if (continuous) |
| 1243 | *continuous = true; |
| 1244 | |
| 1245 | readValue(value: rateValueMin, res, continuous); |
| 1246 | readValue(value: rateValueMax, res, continuous); |
| 1247 | } else if (GST_VALUE_HOLDS_LIST(value)) { |
| 1248 | for (uint i=0; i<gst_value_list_get_size(value); i++) { |
| 1249 | readValue(value: gst_value_list_get_value(value, index: i), res, continuous); |
| 1250 | } |
| 1251 | } |
| 1252 | } |
| 1253 | |
| 1254 | static bool rateLessThan(const QPair<int,int> &r1, const QPair<int,int> &r2) |
| 1255 | { |
| 1256 | return r1.first*r2.second < r2.first*r1.second; |
| 1257 | } |
| 1258 | |
| 1259 | GstCaps *CameraBinSession::supportedCaps(QCamera::CaptureModes mode) const |
| 1260 | { |
| 1261 | GstCaps *supportedCaps = 0; |
| 1262 | |
| 1263 | // When using wrappercamerabinsrc, get the supported caps directly from the video source element. |
| 1264 | // This makes sure we only get the caps actually supported by the video source element. |
| 1265 | if (m_videoSrc) { |
| 1266 | GstPad *pad = gst_element_get_static_pad(element: m_videoSrc, name: "src" ); |
| 1267 | if (pad) { |
| 1268 | supportedCaps = qt_gst_pad_get_caps(pad); |
| 1269 | gst_object_unref(GST_OBJECT(pad)); |
| 1270 | } |
| 1271 | } |
| 1272 | |
| 1273 | // Otherwise, let the camerabin handle this. |
| 1274 | if (!supportedCaps) { |
| 1275 | const gchar *prop; |
| 1276 | switch (mode) { |
| 1277 | case QCamera::CaptureStillImage: |
| 1278 | prop = SUPPORTED_IMAGE_CAPTURE_CAPS_PROPERTY; |
| 1279 | break; |
| 1280 | case QCamera::CaptureVideo: |
| 1281 | prop = SUPPORTED_VIDEO_CAPTURE_CAPS_PROPERTY; |
| 1282 | break; |
| 1283 | case QCamera::CaptureViewfinder: |
| 1284 | default: |
| 1285 | prop = SUPPORTED_VIEWFINDER_CAPS_PROPERTY; |
| 1286 | break; |
| 1287 | } |
| 1288 | |
| 1289 | g_object_get(G_OBJECT(m_camerabin), first_property_name: prop, &supportedCaps, NULL); |
| 1290 | } |
| 1291 | |
| 1292 | return supportedCaps; |
| 1293 | } |
| 1294 | |
// Returns the sorted list of (numerator, denominator) frame rates supported
// for video capture, optionally restricted to the given frameSize.  When the
// source advertises a fraction range instead of discrete rates, *continuous
// is set (only the range endpoints appear in the returned list).
QList< QPair<int,int> > CameraBinSession::supportedFrameRates(const QSize &frameSize, bool *continuous) const
{
    QList< QPair<int,int> > res;

    GstCaps *supportedCaps = this->supportedCaps(mode: QCamera::CaptureVideo);

    if (!supportedCaps)
        return res;

    GstCaps *caps = 0;

    if (frameSize.isEmpty()) {
        caps = gst_caps_copy(supportedCaps);
    } else {
        // Restrict to caps structures matching the requested resolution.
        GstCaps *filter = QGstUtils::videoFilterCaps();
        gst_caps_set_simple(
                    caps: filter,
                    field: "width" , G_TYPE_INT, frameSize.width(),
                    "height" , G_TYPE_INT, frameSize.height(),
                    NULL);

        caps = gst_caps_intersect(caps1: supportedCaps, caps2: filter);
        gst_caps_unref(caps: filter);
    }
    gst_caps_unref(caps: supportedCaps);

    //simplify to the list of rates only:
    caps = gst_caps_make_writable(caps);
    for (uint i=0; i<gst_caps_get_size(caps); i++) {
        GstStructure *structure = gst_caps_get_structure(caps, index: i);
        // Normalize the media type (and drop caps features on 1.2+) so that
        // structures differing only in format merge during simplification.
        gst_structure_set_name(structure, name: "video/x-raw" );
#if GST_CHECK_VERSION(1,2,0)
        gst_caps_set_features(caps, index: i, NULL);
#endif
        const GValue *oldRate = gst_structure_get_value(structure, fieldname: "framerate" );
        if (!oldRate)
            continue;

        // Keep only the framerate field in each structure.
        GValue rate;
        memset(s: &rate, c: 0, n: sizeof(rate));
        g_value_init(value: &rate, G_VALUE_TYPE(oldRate));
        g_value_copy(src_value: oldRate, dest_value: &rate);
        gst_structure_remove_all_fields(structure);
        gst_structure_set_value(structure, fieldname: "framerate" , value: &rate);
        g_value_unset(value: &rate);
    }
#if GST_CHECK_VERSION(1,0,0)
    caps = gst_caps_simplify(caps);
#else
    gst_caps_do_simplify(caps);
#endif

    // Collect every remaining rate (single values, lists and range ends).
    for (uint i=0; i<gst_caps_get_size(caps); i++) {
        GstStructure *structure = gst_caps_get_structure(caps, index: i);
        const GValue *rateValue = gst_structure_get_value(structure, fieldname: "framerate" );
        if (!rateValue)
            continue;

        readValue(value: rateValue, res: &res, continuous);
    }

    std::sort(first: res.begin(), last: res.end(), comp: rateLessThan);

#if CAMERABIN_DEBUG
    qDebug() << "Supported rates:" << caps;
    qDebug() << res;
#endif

    gst_caps_unref(caps);

    return res;
}
| 1367 | |
| 1368 | //internal, only used by CameraBinSession::supportedResolutions |
| 1369 | //recursively find the supported resolutions range. |
| 1370 | static QPair<int,int> valueRange(const GValue *value, bool *continuous) |
| 1371 | { |
| 1372 | int minValue = 0; |
| 1373 | int maxValue = 0; |
| 1374 | |
| 1375 | if (g_value_type_compatible(G_VALUE_TYPE(value), G_TYPE_INT)) { |
| 1376 | minValue = maxValue = g_value_get_int(value); |
| 1377 | } else if (GST_VALUE_HOLDS_INT_RANGE(value)) { |
| 1378 | minValue = gst_value_get_int_range_min(value); |
| 1379 | maxValue = gst_value_get_int_range_max(value); |
| 1380 | *continuous = true; |
| 1381 | } else if (GST_VALUE_HOLDS_LIST(value)) { |
| 1382 | for (uint i=0; i<gst_value_list_get_size(value); i++) { |
| 1383 | QPair<int,int> res = valueRange(value: gst_value_list_get_value(value, index: i), continuous); |
| 1384 | |
| 1385 | if (res.first > 0 && minValue > 0) |
| 1386 | minValue = qMin(a: minValue, b: res.first); |
| 1387 | else //select non 0 valid value |
| 1388 | minValue = qMax(a: minValue, b: res.first); |
| 1389 | |
| 1390 | maxValue = qMax(a: maxValue, b: res.second); |
| 1391 | } |
| 1392 | } |
| 1393 | |
| 1394 | return QPair<int,int>(minValue, maxValue); |
| 1395 | } |
| 1396 | |
| 1397 | static bool resolutionLessThan(const QSize &r1, const QSize &r2) |
| 1398 | { |
| 1399 | return qlonglong(r1.width()) * r1.height() < qlonglong(r2.width()) * r2.height(); |
| 1400 | } |
| 1401 | |
| 1402 | |
// Returns the sorted list of resolutions supported for the given capture
// mode, optionally restricted to the given frame rate.  When the source
// advertises width/height ranges, *continuous is set and the list is
// populated with common sizes falling inside the advertised range.
QList<QSize> CameraBinSession::supportedResolutions(QPair<int,int> rate,
                                                    bool *continuous,
                                                    QCamera::CaptureModes mode) const
{
    QList<QSize> res;

    if (continuous)
        *continuous = false;

    GstCaps *supportedCaps = this->supportedCaps(mode);

#if CAMERABIN_DEBUG
    qDebug() << "Source caps:" << supportedCaps;
#endif

    if (!supportedCaps)
        return res;

    GstCaps *caps = 0;
    bool isContinuous = false;

    if (rate.first <= 0 || rate.second <= 0) {
        caps = gst_caps_copy(supportedCaps);
    } else {
        // Restrict to caps structures matching the requested frame rate.
        GstCaps *filter = QGstUtils::videoFilterCaps();
        gst_caps_set_simple(
                    caps: filter,
                    field: "framerate" , GST_TYPE_FRACTION , rate.first, rate.second,
                    NULL);
        caps = gst_caps_intersect(caps1: supportedCaps, caps2: filter);
        gst_caps_unref(caps: filter);
    }
    gst_caps_unref(caps: supportedCaps);

    //simplify to the list of resolutions only:
    caps = gst_caps_make_writable(caps);
    for (uint i=0; i<gst_caps_get_size(caps); i++) {
        GstStructure *structure = gst_caps_get_structure(caps, index: i);
        // Normalize the media type (and drop caps features on 1.2+) so that
        // structures differing only in format merge during simplification.
        gst_structure_set_name(structure, name: "video/x-raw" );
#if GST_CHECK_VERSION(1,2,0)
        gst_caps_set_features(caps, index: i, NULL);
#endif
        const GValue *oldW = gst_structure_get_value(structure, fieldname: "width" );
        const GValue *oldH = gst_structure_get_value(structure, fieldname: "height" );
        if (!oldW || !oldH)
            continue;

        // Keep only the width and height fields in each structure.
        GValue w;
        memset(s: &w, c: 0, n: sizeof(GValue));
        GValue h;
        memset(s: &h, c: 0, n: sizeof(GValue));
        g_value_init(value: &w, G_VALUE_TYPE(oldW));
        g_value_init(value: &h, G_VALUE_TYPE(oldH));
        g_value_copy(src_value: oldW, dest_value: &w);
        g_value_copy(src_value: oldH, dest_value: &h);
        gst_structure_remove_all_fields(structure);
        gst_structure_set_value(structure, fieldname: "width" , value: &w);
        gst_structure_set_value(structure, fieldname: "height" , value: &h);
        g_value_unset(value: &w);
        g_value_unset(value: &h);
    }

#if GST_CHECK_VERSION(1,0,0)
    caps = gst_caps_simplify(caps);
#else
    gst_caps_do_simplify(caps);
#endif


    // Collect the minimum and maximum size of each remaining structure.
    for (uint i=0; i<gst_caps_get_size(caps); i++) {
        GstStructure *structure = gst_caps_get_structure(caps, index: i);
        const GValue *wValue = gst_structure_get_value(structure, fieldname: "width" );
        const GValue *hValue = gst_structure_get_value(structure, fieldname: "height" );
        if (!wValue || !hValue)
            continue;

        QPair<int,int> wRange = valueRange(value: wValue, continuous: &isContinuous);
        QPair<int,int> hRange = valueRange(value: hValue, continuous: &isContinuous);

        QSize minSize(wRange.first, hRange.first);
        QSize maxSize(wRange.second, hRange.second);

        if (!minSize.isEmpty())
            res << minSize;

        if (minSize != maxSize && !maxSize.isEmpty())
            res << maxSize;
    }


    std::sort(first: res.begin(), last: res.end(), comp: resolutionLessThan);

    //if the range is continuous, populate it with the common rates
    if (isContinuous && res.size() >= 2) {
        //fill the range with common values
        static const QList<QSize> commonSizes =
                QList<QSize>() << QSize(128, 96)
                               << QSize(160,120)
                               << QSize(176, 144)
                               << QSize(320, 240)
                               << QSize(352, 288)
                               << QSize(640, 480)
                               << QSize(848, 480)
                               << QSize(854, 480)
                               << QSize(1024, 768)
                               << QSize(1280, 720) // HD 720
                               << QSize(1280, 1024)
                               << QSize(1600, 1200)
                               << QSize(1920, 1080) // HD
                               << QSize(1920, 1200)
                               << QSize(2048, 1536)
                               << QSize(2560, 1600)
                               << QSize(2580, 1936);
        QSize minSize = res.first();
        QSize maxSize = res.last();
        res.clear();

        // Keep only the common sizes inside [minSize, maxSize].
        for (const QSize &candidate : commonSizes) {
            int w = candidate.width();
            int h = candidate.height();

            if (w > maxSize.width() && h > maxSize.height())
                break;

            if (w >= minSize.width() && h >= minSize.height() &&
                w <= maxSize.width() && h <= maxSize.height())
                res << candidate;
        }

        // Always include the exact range endpoints.
        if (res.isEmpty() || res.first() != minSize)
            res.prepend(t: minSize);

        if (res.last() != maxSize)
            res.append(t: maxSize);
    }

#if CAMERABIN_DEBUG
    qDebug() << "Supported resolutions:" << gst_caps_to_string(caps);
    qDebug() << res;
#endif

    gst_caps_unref(caps);

    if (continuous)
        *continuous = isContinuous;

    return res;
}
| 1551 | |
// "element-added" signal handler: recursively hooks nested bins and
// remembers the audio encoder, video encoder and muxer elements camerabin
// creates so the encode controls can configure them.
void CameraBinSession::elementAdded(GstBin *, GstElement *element, CameraBinSession *session)
{
    GstElementFactory *factory = gst_element_get_factory(element);

    if (GST_IS_BIN(element)) {
        // Watch nested bins too so encoders created deeper down are found.
        g_signal_connect(G_OBJECT(element), "element-added" , G_CALLBACK(elementAdded), session);
        g_signal_connect(G_OBJECT(element), "element-removed" , G_CALLBACK(elementRemoved), session);
    } else if (!factory) {
        // no-op
        // On 0.10.31+ classify by factory type; older versions fall back to
        // matching the factory's klass string.
#if GST_CHECK_VERSION(0,10,31)
    } else if (gst_element_factory_list_is_type(factory, GST_ELEMENT_FACTORY_TYPE_AUDIO_ENCODER)) {
#else
    } else if (strstr(gst_element_factory_get_klass(factory), "Encoder/Audio" ) != NULL) {
#endif
        session->m_audioEncoder = element;

        session->m_audioEncodeControl->applySettings(element);
#if GST_CHECK_VERSION(0,10,31)
    } else if (gst_element_factory_list_is_type(factory, GST_ELEMENT_FACTORY_TYPE_VIDEO_ENCODER)) {
#else
    } else if (strstr(gst_element_factory_get_klass(factory), "Encoder/Video" ) != NULL) {
#endif
        session->m_videoEncoder = element;

        session->m_videoEncodeControl->applySettings(encoder: element);
#if GST_CHECK_VERSION(0,10,31)
    } else if (gst_element_factory_list_is_type(factory, GST_ELEMENT_FACTORY_TYPE_MUXER)) {
#else
    } else if (strstr(gst_element_factory_get_klass(factory), "Muxer" ) != NULL) {
#endif
        session->m_muxer = element;
    }
}
| 1585 | |
| 1586 | void CameraBinSession::elementRemoved(GstBin *, GstElement *element, CameraBinSession *session) |
| 1587 | { |
| 1588 | if (element == session->m_audioEncoder) |
| 1589 | session->m_audioEncoder = 0; |
| 1590 | else if (element == session->m_videoEncoder) |
| 1591 | session->m_videoEncoder = 0; |
| 1592 | else if (element == session->m_muxer) |
| 1593 | session->m_muxer = 0; |
| 1594 | } |
| 1595 | |
| 1596 | QT_END_NAMESPACE |
| 1597 | |