/****************************************************************************
**
** Copyright (C) 2016 The Qt Company Ltd.
** Contact: https://www.qt.io/licensing/
**
** This file is part of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:GPL-EXCEPT$
** Commercial License Usage
** Licensees holding valid commercial Qt licenses may use this file in
** accordance with the commercial license agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and The Qt Company. For licensing terms
** and conditions see https://www.qt.io/terms-conditions. For further
** information use the contact form at https://www.qt.io/contact-us.
**
** GNU General Public License Usage
** Alternatively, this file may be used under the terms of the GNU
** General Public License version 3 as published by the Free Software
** Foundation with exceptions as appearing in the file LICENSE.GPL3-EXCEPT
** included in the packaging of this file. Please review the following
** information to ensure the GNU General Public License requirements will
** be met: https://www.gnu.org/licenses/gpl-3.0.html.
**
** $QT_END_LICENSE$
**
****************************************************************************/

//TESTED_COMPONENT=plugins/declarative/multimedia

#include <QtTest/QtTest>

#include <QtQml/qqmlengine.h>
#include <QtQml/qqmlcomponent.h>
#include <QQuickView>

#include "private/qdeclarativevideooutput_p.h"

#include <qabstractvideosurface.h>
#include <qvideorenderercontrol.h>
#include <qvideosurfaceformat.h>

#include <qmediaobject.h>

class SurfaceHolder : public QObject
{
    Q_OBJECT
    Q_PROPERTY(QAbstractVideoSurface *videoSurface READ videoSurface WRITE setVideoSurface)
public:
    SurfaceHolder(QObject *parent)
        : QObject(parent)
        , m_surface(0)
    {
    }

    QAbstractVideoSurface *videoSurface() const
    {
        return m_surface;
    }
    void setVideoSurface(QAbstractVideoSurface *surface)
    {
        if (m_surface != surface && m_surface && m_surface->isActive()) {
            m_surface->stop();
        }
        m_surface = surface;
    }

    void presentDummyFrame(const QSize &size);

private:
    QAbstractVideoSurface *m_surface;

};

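// Note: presentDummyFrame() assumes the first pixel format reported by the
// surface is a 32-bit (4 bytes-per-pixel) format, since it sizes the frame
// buffer as width * height * 4 bytes with a bytes-per-line of width * 4.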
// Starts the surface and presents a single frame
void SurfaceHolder::presentDummyFrame(const QSize &size)
{
    if (m_surface && m_surface->supportedPixelFormats().count() > 0) {
        QVideoFrame::PixelFormat pixelFormat = m_surface->supportedPixelFormats().value(0);
        QVideoSurfaceFormat format(size, pixelFormat);
        QVideoFrame frame(size.width() * size.height() * 4, size, size.width() * 4, pixelFormat);

        if (!m_surface->isActive())
            m_surface->start(format);
        m_surface->present(frame);

        // Have to spin an event loop or two for the surfaceFormatChanged() signal
        qApp->processEvents();
    }
}

class tst_QDeclarativeVideoOutput : public QObject
{
    Q_OBJECT
public:
    tst_QDeclarativeVideoOutput();

    ~tst_QDeclarativeVideoOutput()
    {
        delete m_mappingOutput;
        delete m_mappingSurface;
        delete m_mappingComponent;
    }

public slots:
    void initTestCase();

private slots:
    void fillMode();
    void flushMode();
    void orientation();
    void surfaceSource();
    void paintSurface();
    void sourceRect();

    void contentRect();
    void contentRect_data();

    void mappingPoint();
    void mappingPoint_data();
    void mappingRect();
    void mappingRect_data();

    // XXX May be worth adding tests that the surface activeChanged signals are sent appropriately
    // to holder?

private:
    QQmlEngine m_engine;

    // Variables used for the mapping test
    QQmlComponent *m_mappingComponent;
    QObject *m_mappingOutput;
    SurfaceHolder *m_mappingSurface;

    void updateOutputGeometry(QObject *output);

    QRectF invokeR2R(QObject *object, const char *signature, const QRectF &rect);
    QPointF invokeP2P(QObject *object, const char *signature, const QPointF &point);
};

void tst_QDeclarativeVideoOutput::initTestCase()
{
    // We initialize the mapping vars here
    m_mappingComponent = new QQmlComponent(&m_engine);
    m_mappingComponent->loadUrl(QUrl("qrc:/main.qml"));
    m_mappingSurface = new SurfaceHolder(this);

    m_mappingOutput = m_mappingComponent->create();
    QVERIFY(m_mappingOutput != 0);

    m_mappingOutput->setProperty("source", QVariant::fromValue(static_cast<QObject*>(m_mappingSurface)));

    m_mappingSurface->presentDummyFrame(QSize(200,100)); // this should start m_surface
    updateOutputGeometry(m_mappingOutput);
}

Q_DECLARE_METATYPE(QDeclarativeVideoOutput::FillMode)
Q_DECLARE_METATYPE(QDeclarativeVideoOutput::FlushMode)

tst_QDeclarativeVideoOutput::tst_QDeclarativeVideoOutput()
    : m_mappingComponent(0)
    , m_mappingOutput(0)
    , m_mappingSurface(0)
{
    qRegisterMetaType<QDeclarativeVideoOutput::FillMode>();
}

void tst_QDeclarativeVideoOutput::fillMode()
{
    QQmlComponent component(&m_engine);
    component.loadUrl(QUrl("qrc:/main.qml"));

    QObject *videoOutput = component.create();
    QVERIFY(videoOutput != 0);

    QSignalSpy propSpy(videoOutput, SIGNAL(fillModeChanged(QDeclarativeVideoOutput::FillMode)));

    // Default is PreserveAspectFit
    QCOMPARE(videoOutput->property("fillMode").value<QDeclarativeVideoOutput::FillMode>(), QDeclarativeVideoOutput::PreserveAspectFit);
    QCOMPARE(propSpy.count(), 0);

    videoOutput->setProperty("fillMode", QVariant(int(QDeclarativeVideoOutput::PreserveAspectCrop)));
    QCOMPARE(videoOutput->property("fillMode").value<QDeclarativeVideoOutput::FillMode>(), QDeclarativeVideoOutput::PreserveAspectCrop);
    QCOMPARE(propSpy.count(), 1);

    videoOutput->setProperty("fillMode", QVariant(int(QDeclarativeVideoOutput::Stretch)));
    QCOMPARE(videoOutput->property("fillMode").value<QDeclarativeVideoOutput::FillMode>(), QDeclarativeVideoOutput::Stretch);
    QCOMPARE(propSpy.count(), 2);

    // Setting the same value again should not emit another change signal
    videoOutput->setProperty("fillMode", QVariant(int(QDeclarativeVideoOutput::Stretch)));
    QCOMPARE(videoOutput->property("fillMode").value<QDeclarativeVideoOutput::FillMode>(), QDeclarativeVideoOutput::Stretch);
    QCOMPARE(propSpy.count(), 2);

    delete videoOutput;
}

void tst_QDeclarativeVideoOutput::flushMode()
{
    QQmlComponent component(&m_engine);
    component.loadUrl(QUrl("qrc:/main.qml"));

    QObject *videoOutput = component.create();
    QVERIFY(videoOutput != 0);

    QSignalSpy propSpy(videoOutput, SIGNAL(flushModeChanged()));

    // Default is EmptyFrame
    QCOMPARE(videoOutput->property("flushMode").value<QDeclarativeVideoOutput::FlushMode>(), QDeclarativeVideoOutput::EmptyFrame);
    QCOMPARE(propSpy.count(), 0);

    videoOutput->setProperty("flushMode", QVariant(int(QDeclarativeVideoOutput::FirstFrame)));
    QCOMPARE(videoOutput->property("flushMode").value<QDeclarativeVideoOutput::FlushMode>(), QDeclarativeVideoOutput::FirstFrame);
    QCOMPARE(propSpy.count(), 1);

    delete videoOutput;
}

void tst_QDeclarativeVideoOutput::orientation()
{
    QQmlComponent component(&m_engine);
    component.loadUrl(QUrl("qrc:/main.qml"));

    QObject *videoOutput = component.create();
    QVERIFY(videoOutput != 0);

    QSignalSpy propSpy(videoOutput, SIGNAL(orientationChanged()));

    // Default orientation is 0
    QCOMPARE(videoOutput->property("orientation").toInt(), 0);
    QCOMPARE(propSpy.count(), 0);

    videoOutput->setProperty("orientation", QVariant(90));
    QCOMPARE(videoOutput->property("orientation").toInt(), 90);
    QCOMPARE(propSpy.count(), 1);

    videoOutput->setProperty("orientation", QVariant(180));
    QCOMPARE(videoOutput->property("orientation").toInt(), 180);
    QCOMPARE(propSpy.count(), 2);

    videoOutput->setProperty("orientation", QVariant(270));
    QCOMPARE(videoOutput->property("orientation").toInt(), 270);
    QCOMPARE(propSpy.count(), 3);

    videoOutput->setProperty("orientation", QVariant(360));
    QCOMPARE(videoOutput->property("orientation").toInt(), 360);
    QCOMPARE(propSpy.count(), 4);

    // More than 360 should be fine
    videoOutput->setProperty("orientation", QVariant(540));
    QCOMPARE(videoOutput->property("orientation").toInt(), 540);
    QCOMPARE(propSpy.count(), 5);

    // Negative should be fine
    videoOutput->setProperty("orientation", QVariant(-180));
    QCOMPARE(videoOutput->property("orientation").toInt(), -180);
    QCOMPARE(propSpy.count(), 6);

    // Setting the same value should not re-emit the signal
    videoOutput->setProperty("orientation", QVariant(-180));
    QCOMPARE(videoOutput->property("orientation").toInt(), -180);
    QCOMPARE(propSpy.count(), 6);

    // Non-multiples of 90 should be ignored
    videoOutput->setProperty("orientation", QVariant(-1));
    QCOMPARE(videoOutput->property("orientation").toInt(), -180);
    QCOMPARE(propSpy.count(), 6);

    delete videoOutput;
}

void tst_QDeclarativeVideoOutput::surfaceSource()
{
    QQmlComponent component(&m_engine);
    component.loadUrl(QUrl("qrc:/main.qml"));

    QObject *videoOutput = component.create();
    QVERIFY(videoOutput != 0);

    SurfaceHolder holder(this);

    QCOMPARE(holder.videoSurface(), static_cast<QAbstractVideoSurface*>(0));

    videoOutput->setProperty("source", QVariant::fromValue(static_cast<QObject*>(&holder)));

    QVERIFY(holder.videoSurface() != 0);

    // Now we can exercise the surface
    const QList<QVideoFrame::PixelFormat> formats = holder.videoSurface()->supportedPixelFormats();
    QVERIFY(formats.count() > 0);

    // See if we can start and stop the surface with each supported pixel format
    for (QVideoFrame::PixelFormat format : formats) {
        QVideoSurfaceFormat surfaceFormat(QSize(200,100), format);
        QVERIFY(holder.videoSurface()->isFormatSupported(surfaceFormat)); // This does kind of depend on node factories

        QVERIFY(holder.videoSurface()->start(surfaceFormat));
        QVERIFY(holder.videoSurface()->surfaceFormat() == surfaceFormat);
        QVERIFY(holder.videoSurface()->isActive());

        holder.videoSurface()->stop();

        QVERIFY(!holder.videoSurface()->isActive());
    }

    delete videoOutput;

    // This should clear the surface
    QCOMPARE(holder.videoSurface(), static_cast<QAbstractVideoSurface*>(0));

    // Also, creating two outputs, setting the same source on both in turn, and destroying
    // the first output should not zero holder.videoSurface()
    videoOutput = component.create();
    videoOutput->setProperty("source", QVariant::fromValue(static_cast<QObject*>(&holder)));

    QAbstractVideoSurface *surface = holder.videoSurface();
    QVERIFY(holder.videoSurface());

    QObject *videoOutput2 = component.create();
    QVERIFY(videoOutput2);
    videoOutput2->setProperty("source", QVariant::fromValue(static_cast<QObject*>(&holder)));
    QVERIFY(holder.videoSurface());
    QVERIFY(holder.videoSurface() != surface); // Surface should have changed
    surface = holder.videoSurface();

    // Now delete the first one
    delete videoOutput;
    QVERIFY(holder.videoSurface());
    QVERIFY(holder.videoSurface() == surface); // Should not have changed surface

    // Now create a second surface holder and assign it as the source
    // The old surface holder should be zeroed
    SurfaceHolder holder2(this);
    videoOutput2->setProperty("source", QVariant::fromValue(static_cast<QObject*>(&holder2)));

    QCOMPARE(holder.videoSurface(), static_cast<QAbstractVideoSurface*>(0));
    QVERIFY(holder2.videoSurface() != 0);

    // Finally a combination - set the same source on two outputs, then assign a new source
    // to the first output - this should not reset the first source
    videoOutput = component.create();
    videoOutput->setProperty("source", QVariant::fromValue(static_cast<QObject*>(&holder2)));

    // Both vo and vo2 were pointed to holder2 - setting vo2 should not clear holder2
    QVERIFY(holder2.videoSurface() != 0);
    QVERIFY(holder.videoSurface() == 0);
    videoOutput2->setProperty("source", QVariant::fromValue(static_cast<QObject*>(&holder)));
    QVERIFY(holder2.videoSurface() != 0);
    QVERIFY(holder.videoSurface() != 0);

    // They should also be independent
    QVERIFY(holder.videoSurface() != holder2.videoSurface());

    delete videoOutput;
    delete videoOutput2;
}

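// Four pixels of raw Format_RGB32 data, one byte per channel in B, G, R, A order.
// paintSurface() below presents these as a 2x2 QImage and then checks the grabbed
// window pixel by pixel, rebuilding the expected QColor from the R, G, B and A bytes.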
static const uchar rgb32ImageData[] =
{// B G R A
    0x00, 0x01, 0x02, 0xff, 0x03, 0x04, 0x05, 0xff,
    0x06, 0x07, 0x08, 0xff, 0x09, 0x0a, 0x0b, 0xff
};

void tst_QDeclarativeVideoOutput::paintSurface()
{
    QQuickView window;
    window.setSource(QUrl("qrc:/main.qml"));
    window.show();
    QVERIFY(QTest::qWaitForWindowExposed(&window));

    auto videoOutput = qobject_cast<QDeclarativeVideoOutput *>(window.rootObject());
    QVERIFY(videoOutput);

    auto surface = videoOutput->property("videoSurface").value<QAbstractVideoSurface *>();
    QVERIFY(surface);
    QVERIFY(!surface->isActive());
    videoOutput->setSize(QSize(2, 2));
    QVideoSurfaceFormat format(QSize(2, 2), QVideoFrame::Format_RGB32);
    QVERIFY(surface->isFormatSupported(format));
    QVERIFY(surface->start(format));
    QVERIFY(surface->isActive());

    QImage img(rgb32ImageData, 2, 2, 8, QImage::Format_RGB32);
    QVERIFY(surface->present(img));

    if (QGuiApplication::platformName() == QLatin1String("offscreen")
        || QGuiApplication::platformName() == QLatin1String("minimal"))
        return;

    QImage capture = window.grabWindow();
    QCOMPARE(capture.pixelColor(0, 0), QColor(rgb32ImageData[2], rgb32ImageData[1], rgb32ImageData[0], rgb32ImageData[3]));
    QCOMPARE(capture.pixelColor(1, 0), QColor(rgb32ImageData[6], rgb32ImageData[5], rgb32ImageData[4], rgb32ImageData[7]));
    QCOMPARE(capture.pixelColor(0, 1), QColor(rgb32ImageData[10], rgb32ImageData[9], rgb32ImageData[8], rgb32ImageData[11]));
    QCOMPARE(capture.pixelColor(1, 1), QColor(rgb32ImageData[14], rgb32ImageData[13], rgb32ImageData[12], rgb32ImageData[15]));
}

void tst_QDeclarativeVideoOutput::sourceRect()
{
    QQmlComponent component(&m_engine);
    component.loadUrl(QUrl("qrc:/main.qml"));

    QObject *videoOutput = component.create();
    QVERIFY(videoOutput != 0);

    SurfaceHolder holder(this);

    QSignalSpy propSpy(videoOutput, SIGNAL(sourceRectChanged()));

    videoOutput->setProperty("source", QVariant::fromValue(static_cast<QObject*>(&holder)));

    QRectF invalid(0,0,-1,-1);

    QCOMPARE(videoOutput->property("sourceRect").toRectF(), invalid);

    holder.presentDummyFrame(QSize(200,100));

    QCOMPARE(videoOutput->property("sourceRect").toRectF(), QRectF(0, 0, 200, 100));
    QCOMPARE(propSpy.count(), 1);

    // Another frame shouldn't cause a source rect change
    holder.presentDummyFrame(QSize(200,100));
    QCOMPARE(propSpy.count(), 1);
    QCOMPARE(videoOutput->property("sourceRect").toRectF(), QRectF(0, 0, 200, 100));

    // Changing orientation and stretch modes should not affect this
    videoOutput->setProperty("orientation", QVariant(90));
    updateOutputGeometry(videoOutput);
    QCOMPARE(videoOutput->property("sourceRect").toRectF(), QRectF(0, 0, 200, 100));

    videoOutput->setProperty("orientation", QVariant(180));
    updateOutputGeometry(videoOutput);
    QCOMPARE(videoOutput->property("sourceRect").toRectF(), QRectF(0, 0, 200, 100));

    videoOutput->setProperty("orientation", QVariant(270));
    updateOutputGeometry(videoOutput);
    QCOMPARE(videoOutput->property("sourceRect").toRectF(), QRectF(0, 0, 200, 100));

    videoOutput->setProperty("orientation", QVariant(-90));
    updateOutputGeometry(videoOutput);
    QCOMPARE(videoOutput->property("sourceRect").toRectF(), QRectF(0, 0, 200, 100));

    videoOutput->setProperty("fillMode", QVariant(int(QDeclarativeVideoOutput::PreserveAspectCrop)));
    updateOutputGeometry(videoOutput);
    QCOMPARE(videoOutput->property("sourceRect").toRectF(), QRectF(0, 0, 200, 100));

    videoOutput->setProperty("fillMode", QVariant(int(QDeclarativeVideoOutput::Stretch)));
    updateOutputGeometry(videoOutput);
    QCOMPARE(videoOutput->property("sourceRect").toRectF(), QRectF(0, 0, 200, 100));

    videoOutput->setProperty("fillMode", QVariant(int(QDeclarativeVideoOutput::Stretch)));
    updateOutputGeometry(videoOutput);
    QCOMPARE(videoOutput->property("sourceRect").toRectF(), QRectF(0, 0, 200, 100));

    delete videoOutput;
}

void tst_QDeclarativeVideoOutput::mappingPoint()
{
    QFETCH(QPointF, point);
    QFETCH(int, orientation);
    QFETCH(QDeclarativeVideoOutput::FillMode, fillMode);
    QFETCH(QPointF, expected);

    QVERIFY(m_mappingOutput);
    m_mappingOutput->setProperty("orientation", QVariant(orientation));
    m_mappingOutput->setProperty("fillMode", QVariant::fromValue(fillMode));

    updateOutputGeometry(m_mappingOutput);

    QPointF output = invokeP2P(m_mappingOutput, "mapPointToItem", point);
    QPointF reverse = invokeP2P(m_mappingOutput, "mapPointToSource", output);

    QCOMPARE(output, expected);
    QCOMPARE(reverse, point);

    // Now the normalized versions
    // Source rectangle is 200x100
    QPointF normal(point.x() / 200, point.y() / 100);

    output = invokeP2P(m_mappingOutput, "mapNormalizedPointToItem", normal);
    reverse = invokeP2P(m_mappingOutput, "mapPointToSourceNormalized", output);

    QCOMPARE(output, expected);
    QCOMPARE(reverse, normal);
}

void tst_QDeclarativeVideoOutput::mappingPoint_data()
{
    QTest::addColumn<QPointF>("point");
    QTest::addColumn<int>("orientation");
    QTest::addColumn<QDeclarativeVideoOutput::FillMode>("fillMode");
    QTest::addColumn<QPointF>("expected");

    QDeclarativeVideoOutput::FillMode stretch = QDeclarativeVideoOutput::Stretch;
    QDeclarativeVideoOutput::FillMode fit = QDeclarativeVideoOutput::PreserveAspectFit;
    QDeclarativeVideoOutput::FillMode crop = QDeclarativeVideoOutput::PreserveAspectCrop;

    // First make sure the component has processed the frame
    QCOMPARE(m_mappingOutput->property("sourceRect").toRectF(), QRectF(0,0,200,100));

    // 200x100 -> 150,100 stretch, 150x75 fit @ 12.5f, 200x100 @-25,0 crop
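    // That is, with the 200x100 source mapped into the item's 150x100 render rect
    // (the item size implied by the expected values below):
    //   stretch scales x by 0.75 and y by 1.0 -> 150x100 content rect at (0,0)
    //   fit scales both axes by 0.75          -> 150x75 content rect offset to y = 12.5
    //   crop scales both axes by 1.0          -> 200x100 content rect offset to x = -25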

    // Corners, then the center, then a point in the middle somewhere
    QTest::newRow("s0-0") << QPointF(0,0) << 0 << stretch << QPointF(0,0);
    QTest::newRow("s1-0") << QPointF(200,0) << 0 << stretch << QPointF(150,0);
    QTest::newRow("s2-0") << QPointF(0,100) << 0 << stretch << QPointF(0,100);
    QTest::newRow("s3-0") << QPointF(200,100) << 0 << stretch << QPointF(150,100);
    QTest::newRow("s4-0") << QPointF(100,50) << 0 << stretch << QPointF(75,50);
    QTest::newRow("s5-0") << QPointF(40,80) << 0 << stretch << QPointF(30,80);

    QTest::newRow("f0-0") << QPointF(0,0) << 0 << fit << QPointF(0,12.5f);
    QTest::newRow("f1-0") << QPointF(200,0) << 0 << fit << QPointF(150,12.5f);
    QTest::newRow("f2-0") << QPointF(0,100) << 0 << fit << QPointF(0,87.5f);
    QTest::newRow("f3-0") << QPointF(200,100) << 0 << fit << QPointF(150,87.5f);
    QTest::newRow("f4-0") << QPointF(100,50) << 0 << stretch << QPointF(75,50);
    QTest::newRow("f5-0") << QPointF(40,80) << 0 << stretch << QPointF(30,80);

    QTest::newRow("c0-0") << QPointF(0,0) << 0 << crop << QPointF(-25,0);
    QTest::newRow("c1-0") << QPointF(200,0) << 0 << crop << QPointF(175,0);
    QTest::newRow("c2-0") << QPointF(0,100) << 0 << crop << QPointF(-25,100);
    QTest::newRow("c3-0") << QPointF(200,100) << 0 << crop << QPointF(175,100);
    QTest::newRow("c4-0") << QPointF(100,50) << 0 << stretch << QPointF(75,50);
    QTest::newRow("c5-0") << QPointF(40,80) << 0 << stretch << QPointF(30,80);

    // 90 degrees (anti clockwise)
    QTest::newRow("s0-90") << QPointF(0,0) << 90 << stretch << QPointF(0,100);
    QTest::newRow("s1-90") << QPointF(200,0) << 90 << stretch << QPointF(0,0);
    QTest::newRow("s2-90") << QPointF(0,100) << 90 << stretch << QPointF(150,100);
    QTest::newRow("s3-90") << QPointF(200,100) << 90 << stretch << QPointF(150,0);
    QTest::newRow("s4-90") << QPointF(100,50) << 90 << stretch << QPointF(75,50);
    QTest::newRow("s5-90") << QPointF(40,80) << 90 << stretch << QPointF(120,80);

    QTest::newRow("f0-90") << QPointF(0,0) << 90 << fit << QPointF(50,100);
    QTest::newRow("f1-90") << QPointF(200,0) << 90 << fit << QPointF(50,0);
    QTest::newRow("f2-90") << QPointF(0,100) << 90 << fit << QPointF(100,100);
    QTest::newRow("f3-90") << QPointF(200,100) << 90 << fit << QPointF(100,0);
    QTest::newRow("f4-90") << QPointF(100,50) << 90 << fit << QPointF(75,50);
    QTest::newRow("f5-90") << QPointF(40,80) << 90 << fit << QPointF(90,80);

    QTest::newRow("c0-90") << QPointF(0,0) << 90 << crop << QPointF(0,200);
    QTest::newRow("c1-90") << QPointF(200,0) << 90 << crop << QPointF(0,-100);
    QTest::newRow("c2-90") << QPointF(0,100) << 90 << crop << QPointF(150,200);
    QTest::newRow("c3-90") << QPointF(200,100) << 90 << crop << QPointF(150,-100);
    QTest::newRow("c4-90") << QPointF(100,50) << 90 << crop << QPointF(75,50);
    QTest::newRow("c5-90") << QPointF(40,80) << 90 << crop << QPointF(120,140);

    // 180
    QTest::newRow("s0-180") << QPointF(0,0) << 180 << stretch << QPointF(150,100);
    QTest::newRow("s1-180") << QPointF(200,0) << 180 << stretch << QPointF(0,100);
    QTest::newRow("s2-180") << QPointF(0,100) << 180 << stretch << QPointF(150,0);
    QTest::newRow("s3-180") << QPointF(200,100) << 180 << stretch << QPointF(0,0);
    QTest::newRow("s4-180") << QPointF(100,50) << 180 << stretch << QPointF(75,50);
    QTest::newRow("s5-180") << QPointF(40,80) << 180 << stretch << QPointF(120,20);

    QTest::newRow("f0-180") << QPointF(0,0) << 180 << fit << QPointF(150,87.5f);
    QTest::newRow("f1-180") << QPointF(200,0) << 180 << fit << QPointF(0,87.5f);
    QTest::newRow("f2-180") << QPointF(0,100) << 180 << fit << QPointF(150,12.5f);
    QTest::newRow("f3-180") << QPointF(200,100) << 180 << fit << QPointF(0,12.5f);
    QTest::newRow("f4-180") << QPointF(100,50) << 180 << fit << QPointF(75,50);
    QTest::newRow("f5-180") << QPointF(40,80) << 180 << fit << QPointF(120,27.5f);

    QTest::newRow("c0-180") << QPointF(0,0) << 180 << crop << QPointF(175,100);
    QTest::newRow("c1-180") << QPointF(200,0) << 180 << crop << QPointF(-25,100);
    QTest::newRow("c2-180") << QPointF(0,100) << 180 << crop << QPointF(175,0);
    QTest::newRow("c3-180") << QPointF(200,100) << 180 << crop << QPointF(-25,0);
    QTest::newRow("c4-180") << QPointF(100,50) << 180 << crop << QPointF(75,50);
    QTest::newRow("c5-180") << QPointF(40,80) << 180 << crop << QPointF(135,20);

    // 270
    QTest::newRow("s0-270") << QPointF(0,0) << 270 << stretch << QPointF(150,0);
    QTest::newRow("s1-270") << QPointF(200,0) << 270 << stretch << QPointF(150,100);
    QTest::newRow("s2-270") << QPointF(0,100) << 270 << stretch << QPointF(0,0);
    QTest::newRow("s3-270") << QPointF(200,100) << 270 << stretch << QPointF(0,100);
    QTest::newRow("s4-270") << QPointF(100,50) << 270 << stretch << QPointF(75,50);
    QTest::newRow("s5-270") << QPointF(40,80) << 270 << stretch << QPointF(30,20);

    QTest::newRow("f0-270") << QPointF(0,0) << 270 << fit << QPointF(100,0);
    QTest::newRow("f1-270") << QPointF(200,0) << 270 << fit << QPointF(100,100);
    QTest::newRow("f2-270") << QPointF(0,100) << 270 << fit << QPointF(50,0);
    QTest::newRow("f3-270") << QPointF(200,100) << 270 << fit << QPointF(50,100);
    QTest::newRow("f4-270") << QPointF(100,50) << 270 << fit << QPointF(75,50);
    QTest::newRow("f5-270") << QPointF(40,80) << 270 << fit << QPointF(60,20);

    QTest::newRow("c0-270") << QPointF(0,0) << 270 << crop << QPointF(150,-100);
    QTest::newRow("c1-270") << QPointF(200,0) << 270 << crop << QPointF(150,200);
    QTest::newRow("c2-270") << QPointF(0,100) << 270 << crop << QPointF(0,-100);
    QTest::newRow("c3-270") << QPointF(200,100) << 270 << crop << QPointF(0,200);
    QTest::newRow("c4-270") << QPointF(100,50) << 270 << crop << QPointF(75,50);
    QTest::newRow("c5-270") << QPointF(40,80) << 270 << crop << QPointF(30,-40);
}

/* Test all rectangle mapping */
void tst_QDeclarativeVideoOutput::mappingRect()
{
    QFETCH(QRectF, rect);
    QFETCH(int, orientation);
    QFETCH(QDeclarativeVideoOutput::FillMode, fillMode);
    QFETCH(QRectF, expected);

    QVERIFY(m_mappingOutput);
    m_mappingOutput->setProperty("orientation", QVariant(orientation));
    m_mappingOutput->setProperty("fillMode", QVariant::fromValue(fillMode));

    updateOutputGeometry(m_mappingOutput);

    QRectF output = invokeR2R(m_mappingOutput, "mapRectToItem", rect);
    QRectF reverse = invokeR2R(m_mappingOutput, "mapRectToSource", output);

    QCOMPARE(output, expected);
    QCOMPARE(reverse, rect);

    // Now the normalized versions
    // Source rectangle is 200x100
    QRectF normal(rect.x() / 200, rect.y() / 100, rect.width() / 200, rect.height() / 100);

    output = invokeR2R(m_mappingOutput, "mapNormalizedRectToItem", normal);
    reverse = invokeR2R(m_mappingOutput, "mapRectToSourceNormalized", output);

    QCOMPARE(output, expected);
    QCOMPARE(reverse, normal);
}

void tst_QDeclarativeVideoOutput::mappingRect_data()
{
    QTest::addColumn<QRectF>("rect");
    QTest::addColumn<int>("orientation");
    QTest::addColumn<QDeclarativeVideoOutput::FillMode>("fillMode");
    QTest::addColumn<QRectF>("expected");

    // First make sure the component has processed the frame
    QCOMPARE(m_mappingOutput->property("sourceRect").toRectF(), QRectF(0,0,200,100));

    QDeclarativeVideoOutput::FillMode stretch = QDeclarativeVideoOutput::Stretch;
    QDeclarativeVideoOutput::FillMode fit = QDeclarativeVideoOutput::PreserveAspectFit;
    QDeclarativeVideoOutput::FillMode crop = QDeclarativeVideoOutput::PreserveAspectCrop;

    // Full rectangle mapping
    // Stretch
    QTest::newRow("s0") << QRectF(0,0, 200, 100) << 0 << stretch << QRectF(0,0,150,100);
    QTest::newRow("s90") << QRectF(0,0, 200, 100) << 90 << stretch << QRectF(0,0,150,100);
    QTest::newRow("s180") << QRectF(0,0, 200, 100) << 180 << stretch << QRectF(0,0,150,100);
    QTest::newRow("s270") << QRectF(0,0, 200, 100) << 270 << stretch << QRectF(0,0,150,100);

    // Fit
    QTest::newRow("f0") << QRectF(0,0, 200, 100) << 0 << fit << QRectF(0,12.5f,150,75);
    QTest::newRow("f90") << QRectF(0,0, 200, 100) << 90 << fit << QRectF(50,0,50,100);
    QTest::newRow("f180") << QRectF(0,0, 200, 100) << 180 << fit << QRectF(0,12.5f,150,75);
    QTest::newRow("f270") << QRectF(0,0, 200, 100) << 270 << fit << QRectF(50,0,50,100);

    // Crop
    QTest::newRow("c0") << QRectF(0,0, 200, 100) << 0 << crop << QRectF(-25,0,200,100);
    QTest::newRow("c90") << QRectF(0,0, 200, 100) << 90 << crop << QRectF(0,-100,150,300);
    QTest::newRow("c180") << QRectF(0,0, 200, 100) << 180 << crop << QRectF(-25,0,200,100);
    QTest::newRow("c270") << QRectF(0,0, 200, 100) << 270 << crop << QRectF(0,-100,150,300);

    // Partial rectangle mapping
    // Stretch
    // 50-130 in x (0.25 - 0.65), 25-50 (0.25 - 0.5) in y (out of 200, 100) -> 150x100
    QTest::newRow("p-s0") << QRectF(50, 25, 80, 25) << 0 << stretch << QRectF(37.5f,25,60,25);
    QTest::newRow("p-s90") << QRectF(50, 25, 80, 25) << 90 << stretch << QRectF(37.5f,35,37.5f,40);
    QTest::newRow("p-s180") << QRectF(50, 25, 80, 25) << 180 << stretch << QRectF(52.5f,50,60,25);
    QTest::newRow("p-s270") << QRectF(50, 25, 80, 25) << 270 << stretch << QRectF(75,25,37.5f,40);

    // Fit
    QTest::newRow("p-f0") << QRectF(50, 25, 80, 25) << 0 << fit << QRectF(37.5f,31.25f,60,18.75f);
    QTest::newRow("p-f90") << QRectF(50, 25, 80, 25) << 90 << fit << QRectF(62.5f,35,12.5f,40);
    QTest::newRow("p-f180") << QRectF(50, 25, 80, 25) << 180 << fit << QRectF(52.5f,50,60,18.75f);
    QTest::newRow("p-f270") << QRectF(50, 25, 80, 25) << 270 << fit << QRectF(75,25,12.5f,40);

    // Crop
    QTest::newRow("p-c0") << QRectF(50, 25, 80, 25) << 0 << crop << QRectF(25,25,80,25);
    QTest::newRow("p-c90") << QRectF(50, 25, 80, 25) << 90 << crop << QRectF(37.5f,5,37.5f,120);
    QTest::newRow("p-c180") << QRectF(50, 25, 80, 25) << 180 << crop << QRectF(45,50,80,25);
    QTest::newRow("p-c270") << QRectF(50, 25, 80, 25) << 270 << crop << QRectF(75,-25,37.5f,120);
}

void tst_QDeclarativeVideoOutput::updateOutputGeometry(QObject *output)
{
    // Since the object isn't visible, update() doesn't do anything,
    // so we manually force the geometry update
    QMetaObject::invokeMethod(output, "_q_updateGeometry");
}

void tst_QDeclarativeVideoOutput::contentRect()
{
    QFETCH(int, orientation);
    QFETCH(QDeclarativeVideoOutput::FillMode, fillMode);
    QFETCH(QRectF, expected);

    QVERIFY(m_mappingOutput);
    m_mappingOutput->setProperty("orientation", QVariant(orientation));
    m_mappingOutput->setProperty("fillMode", QVariant::fromValue(fillMode));

    updateOutputGeometry(m_mappingOutput);

    QRectF output = m_mappingOutput->property("contentRect").toRectF();
    QCOMPARE(output, expected);
}

void tst_QDeclarativeVideoOutput::contentRect_data()
{
    QTest::addColumn<int>("orientation");
    QTest::addColumn<QDeclarativeVideoOutput::FillMode>("fillMode");
    QTest::addColumn<QRectF>("expected");

    // First make sure the component has processed the frame
    QCOMPARE(m_mappingOutput->property("sourceRect").toRectF(), QRectF(0,0,200,100));

    QDeclarativeVideoOutput::FillMode stretch = QDeclarativeVideoOutput::Stretch;
    QDeclarativeVideoOutput::FillMode fit = QDeclarativeVideoOutput::PreserveAspectFit;
    QDeclarativeVideoOutput::FillMode crop = QDeclarativeVideoOutput::PreserveAspectCrop;

    // Stretch just keeps the full render rect regardless of orientation
    QTest::newRow("s0") << 0 << stretch << QRectF(0,0,150,100);
    QTest::newRow("s90") << 90 << stretch << QRectF(0,0,150,100);
    QTest::newRow("s180") << 180 << stretch << QRectF(0,0,150,100);
    QTest::newRow("s270") << 270 << stretch << QRectF(0,0,150,100);

    // Fit depends on orientation
    // Source is 200x100, fitting in 150x100 -> 150x75
    // or 100x200 -> 50x100
    QTest::newRow("f0") << 0 << fit << QRectF(0,12.5f,150,75);
    QTest::newRow("f90") << 90 << fit << QRectF(50,0,50,100);
    QTest::newRow("f180") << 180 << fit << QRectF(0,12.5f,150,75);
    QTest::newRow("f270") << 270 << fit << QRectF(50,0,50,100);

    // Crop also depends on orientation, may go outside render rect
    // 200x100 -> -25,0 200x100
    // 100x200 -> 0,-100 150x300
    QTest::newRow("c0") << 0 << crop << QRectF(-25,0,200,100);
    QTest::newRow("c90") << 90 << crop << QRectF(0,-100,150,300);
    QTest::newRow("c180") << 180 << crop << QRectF(-25,0,200,100);
    QTest::newRow("c270") << 270 << crop << QRectF(0,-100,150,300);
}


QRectF tst_QDeclarativeVideoOutput::invokeR2R(QObject *object, const char *signature, const QRectF &rect)
{
    QRectF r;
    QMetaObject::invokeMethod(object, signature, Q_RETURN_ARG(QRectF, r), Q_ARG(QRectF, rect));
    return r;
}

QPointF tst_QDeclarativeVideoOutput::invokeP2P(QObject *object, const char *signature, const QPointF &point)
{
    QPointF p;
    QMetaObject::invokeMethod(object, signature, Q_RETURN_ARG(QPointF, p), Q_ARG(QPointF, point));
    return p;
}


QTEST_MAIN(tst_QDeclarativeVideoOutput)

#include "tst_qdeclarativevideooutput.moc"