// Copyright (C) 2021 The Qt Company Ltd.
// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only

#include "qvideowindow_p.h"
#include <QPlatformSurfaceEvent>
#include <qfile.h>
#include <qpainter.h>
#include <private/qguiapplication_p.h>
#include <private/qmemoryvideobuffer_p.h>
#include <private/qhwvideobuffer_p.h>
#include <private/qmultimediautils_p.h>
#include <private/qvideoframe_p.h>
#include <qpa/qplatformintegration.h>

QT_BEGIN_NAMESPACE

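// Choose the QSurface type that matches the preferred rendering backend on this
// platform: Metal on Apple platforms, Direct3D on Windows, otherwise an OpenGL
// or plain raster surface depending on the platform integration's capabilities.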
static QSurface::SurfaceType platformSurfaceType()
{
#if defined(Q_OS_DARWIN)
    return QSurface::MetalSurface;
#elif defined (Q_OS_WIN)
    return QSurface::Direct3DSurface;
#endif

    auto *integration = QGuiApplicationPrivate::platformIntegration();

    if (!integration->hasCapability(QPlatformIntegration::OpenGL))
        return QSurface::RasterSurface;

    if (QCoreApplication::testAttribute(Qt::AA_ForceRasterWidgets))
        return QSurface::RasterSurface;

    if (integration->hasCapability(QPlatformIntegration::RasterGLSurface))
        return QSurface::RasterGLSurface;

    return QSurface::OpenGLSurface;
}

QVideoWindowPrivate::QVideoWindowPrivate(QVideoWindow *q)
    : q(q),
      m_sink(new QVideoSink)
{
    Q_ASSERT(q);

    if (QGuiApplicationPrivate::platformIntegration()->hasCapability(QPlatformIntegration::RhiBasedRendering)) {
        auto surfaceType = ::platformSurfaceType();
        q->setSurfaceType(surfaceType);
        switch (surfaceType) {
        case QSurface::RasterSurface:
        case QSurface::OpenVGSurface:
            // can't use those surfaces, need to render in SW
            m_graphicsApi = QRhi::Null;
            break;
        case QSurface::OpenGLSurface:
        case QSurface::RasterGLSurface:
            m_graphicsApi = QRhi::OpenGLES2;
            break;
        case QSurface::VulkanSurface:
            m_graphicsApi = QRhi::Vulkan;
            break;
        case QSurface::MetalSurface:
            m_graphicsApi = QRhi::Metal;
            break;
        case QSurface::Direct3DSurface:
            m_graphicsApi = QRhi::D3D11;
            break;
        }
    }

    QObject::connect(m_sink.get(), &QVideoSink::videoFrameChanged, q, &QVideoWindow::setVideoFrame);
}

QVideoWindowPrivate::~QVideoWindowPrivate()
{
    QObject::disconnect(m_sink.get(), &QVideoSink::videoFrameChanged,
                        q, &QVideoWindow::setVideoFrame);
}

static const float g_vw_quad[] = {
    // 4 clockwise rotations of the texture coordinates (the second float pair of each vertex)
    // Rotation 0
    -1.f, -1.f,   0.f, 0.f,
    -1.f, 1.f,    0.f, 1.f,
    1.f, -1.f,    1.f, 0.f,
    1.f, 1.f,     1.f, 1.f,
    // Rotation 90
    -1.f, -1.f,   0.f, 1.f,
    -1.f, 1.f,    1.f, 1.f,
    1.f, -1.f,    0.f, 0.f,
    1.f, 1.f,     1.f, 0.f,

    // Rotation 180
    -1.f, -1.f,   1.f, 1.f,
    -1.f, 1.f,    1.f, 0.f,
    1.f, -1.f,    0.f, 1.f,
    1.f, 1.f,     0.f, 0.f,
    // Rotation 270
    -1.f, -1.f,   1.f, 0.f,
    -1.f, 1.f,    0.f, 0.f,
    1.f, -1.f,    1.f, 1.f,
    1.f, 1.f,     0.f, 1.f
};

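// Load a pre-compiled shader package from the given file; the callers pass
// shader file names obtained from QVideoTextureHelper.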
static QShader vwGetShader(const QString &name)
{
    QFile f(name);
    if (f.open(QIODevice::ReadOnly))
        return QShader::fromSerialized(f.readAll());

    return QShader();
}

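// Create the QRhi instance for the selected graphics API together with the
// long-lived resources: swapchain, render pass descriptor, vertex/uniform
// buffers, texture sampler and shader resource binding objects.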
void QVideoWindowPrivate::initRhi()
{
    if (m_graphicsApi == QRhi::Null)
        return;

    QRhi::Flags rhiFlags = {};//QRhi::EnableDebugMarkers | QRhi::EnableProfiling;

#if QT_CONFIG(opengl)
    if (m_graphicsApi == QRhi::OpenGLES2) {
        m_fallbackSurface.reset(QRhiGles2InitParams::newFallbackSurface(q->format()));
        QRhiGles2InitParams params;
        params.fallbackSurface = m_fallbackSurface.get();
        params.window = q;
        params.format = q->format();
        m_rhi.reset(QRhi::create(QRhi::OpenGLES2, &params, rhiFlags));
    }
#endif

#if QT_CONFIG(vulkan)
    if (m_graphicsApi == QRhi::Vulkan) {
        QRhiVulkanInitParams params;
        params.inst = q->vulkanInstance();
        params.window = q;
        m_rhi.reset(QRhi::create(QRhi::Vulkan, &params, rhiFlags));
    }
#endif

#ifdef Q_OS_WIN
    if (m_graphicsApi == QRhi::D3D11) {
        QRhiD3D11InitParams params;
        params.enableDebugLayer = true;
        m_rhi.reset(QRhi::create(QRhi::D3D11, &params, rhiFlags));
    }
#endif

#if defined(Q_OS_MACOS) || defined(Q_OS_IOS)
    if (m_graphicsApi == QRhi::Metal) {
        QRhiMetalInitParams params;
        m_rhi.reset(QRhi::create(QRhi::Metal, &params, rhiFlags));
    }
#endif
    if (!m_rhi)
        return;

    m_swapChain.reset(m_rhi->newSwapChain());
    m_swapChain->setWindow(q);
    m_renderPass.reset(m_swapChain->newCompatibleRenderPassDescriptor());
    m_swapChain->setRenderPassDescriptor(m_renderPass.get());

    m_vertexBuf.reset(m_rhi->newBuffer(QRhiBuffer::Immutable, QRhiBuffer::VertexBuffer, sizeof(g_vw_quad)));
    m_vertexBuf->create();
    m_vertexBufReady = false;

    m_uniformBuf.reset(m_rhi->newBuffer(QRhiBuffer::Dynamic, QRhiBuffer::UniformBuffer, sizeof(QVideoTextureHelper::UniformData)));
    m_uniformBuf->create();

    m_textureSampler.reset(m_rhi->newSampler(QRhiSampler::Linear, QRhiSampler::Linear, QRhiSampler::None,
                                             QRhiSampler::ClampToEdge, QRhiSampler::ClampToEdge));
    m_textureSampler->create();

    m_shaderResourceBindings.reset(m_rhi->newShaderResourceBindings());
    m_subtitleResourceBindings.reset(m_rhi->newShaderResourceBindings());

    m_subtitleUniformBuf.reset(m_rhi->newBuffer(QRhiBuffer::Dynamic, QRhiBuffer::UniformBuffer, sizeof(QVideoTextureHelper::UniformData)));
    m_subtitleUniformBuf->create();

    Q_ASSERT(NVideoFrameSlots >= m_rhi->resourceLimit(QRhi::FramesInFlight));
}

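// Configure a graphics pipeline for the given frame format: triangle-strip
// topology, the matching vertex/fragment shaders, and a vertex layout of
// interleaved 2D positions and texture coordinates.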
void QVideoWindowPrivate::setupGraphicsPipeline(QRhiGraphicsPipeline *pipeline, QRhiShaderResourceBindings *bindings, const QVideoFrameFormat &fmt)
{
    pipeline->setTopology(QRhiGraphicsPipeline::TriangleStrip);
    QShader vs = vwGetShader(QVideoTextureHelper::vertexShaderFileName(fmt));
    Q_ASSERT(vs.isValid());
    QShader fs = vwGetShader(QVideoTextureHelper::fragmentShaderFileName(fmt, m_swapChain->format()));
    Q_ASSERT(fs.isValid());
    pipeline->setShaderStages({
        { QRhiShaderStage::Vertex, vs },
        { QRhiShaderStage::Fragment, fs }
    });
    QRhiVertexInputLayout inputLayout;
    inputLayout.setBindings({
        { 4 * sizeof(float) }
    });
    inputLayout.setAttributes({
        { 0, 0, QRhiVertexInputAttribute::Float2, 0 },
        { 0, 1, QRhiVertexInputAttribute::Float2, 2 * sizeof(float) }
    });
    pipeline->setVertexInputLayout(inputLayout);
    pipeline->setShaderResourceBindings(bindings);
    pipeline->setRenderPassDescriptor(m_renderPass.get());
    pipeline->create();
}

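// Upload the current frame's planes as textures and rebuild the shader resource
// bindings; the graphics pipeline is (re)created whenever the frame format changes.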
void QVideoWindowPrivate::updateTextures(QRhiResourceUpdateBatch *rub)
{
    m_texturesDirty = false;

    // We render a 1x1 black pixel when we don't have a video
    if (!m_currentFrame.isValid())
        m_currentFrame = QVideoFramePrivate::createFrame(
                std::make_unique<QMemoryVideoBuffer>(QByteArray{ 4, 0 }, 4),
                QVideoFrameFormat(QSize(1, 1), QVideoFrameFormat::Format_RGBA8888));

    m_frameTextures = QVideoTextureHelper::createTextures(m_currentFrame, m_rhi.get(), rub, std::move(m_frameTextures));
    if (!m_frameTextures)
        return;

    QRhiShaderResourceBinding bindings[4];
    auto *b = bindings;
    *(b++) = QRhiShaderResourceBinding::uniformBuffer(0, QRhiShaderResourceBinding::VertexStage | QRhiShaderResourceBinding::FragmentStage,
                                                      m_uniformBuf.get());

    auto fmt = m_currentFrame.surfaceFormat();
    auto textureDesc = QVideoTextureHelper::textureDescription(fmt.pixelFormat());

    for (int i = 0; i < textureDesc->nplanes; ++i)
        (*b++) = QRhiShaderResourceBinding::sampledTexture(i + 1, QRhiShaderResourceBinding::FragmentStage,
                                                           m_frameTextures->texture(i), m_textureSampler.get());
    m_shaderResourceBindings->setBindings(bindings, b);
    m_shaderResourceBindings->create();

    if (fmt != format) {
        format = fmt;
        if (!m_graphicsPipeline)
            m_graphicsPipeline.reset(m_rhi->newGraphicsPipeline());

        setupGraphicsPipeline(m_graphicsPipeline.get(), m_shaderResourceBindings.get(), format);
    }
}

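// Render the current subtitle text into an RGBA texture and set up its bindings;
// the blended subtitle pipeline is created once on first use.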
void QVideoWindowPrivate::updateSubtitle(QRhiResourceUpdateBatch *rub, const QSize &frameSize)
{
    m_subtitleDirty = false;
    m_hasSubtitle = !m_currentFrame.subtitleText().isEmpty();
    if (!m_hasSubtitle)
        return;

    m_subtitleLayout.update(frameSize, m_currentFrame.subtitleText());
    QSize size = m_subtitleLayout.bounds.size().toSize();

    QImage img = m_subtitleLayout.toImage();

    m_subtitleTexture.reset(m_rhi->newTexture(QRhiTexture::RGBA8, size));
    m_subtitleTexture->create();
    rub->uploadTexture(m_subtitleTexture.get(), img);

    QRhiShaderResourceBinding bindings[2];

    bindings[0] = QRhiShaderResourceBinding::uniformBuffer(0, QRhiShaderResourceBinding::VertexStage | QRhiShaderResourceBinding::FragmentStage,
                                                           m_subtitleUniformBuf.get());

    bindings[1] = QRhiShaderResourceBinding::sampledTexture(1, QRhiShaderResourceBinding::FragmentStage,
                                                            m_subtitleTexture.get(), m_textureSampler.get());
    m_subtitleResourceBindings->setBindings(bindings, bindings + 2);
    m_subtitleResourceBindings->create();

    if (!m_subtitlePipeline) {
        m_subtitlePipeline.reset(m_rhi->newGraphicsPipeline());

        QRhiGraphicsPipeline::TargetBlend blend;
        blend.enable = true;
        m_subtitlePipeline->setTargetBlends({ blend });
        setupGraphicsPipeline(m_subtitlePipeline.get(), m_subtitleResourceBindings.get(), QVideoFrameFormat(QSize(1, 1), QVideoFrameFormat::Format_RGBA8888));
    }
}

void QVideoWindowPrivate::init()
{
    if (initialized)
        return;
    initialized = true;

    initRhi();

    if (!m_rhi)
        backingStore = new QBackingStore(q);
    else
        m_sink->setRhi(m_rhi.get());
}

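// The swapchain is (re)created when the surface size changes and released before
// the native surface is destroyed (see the PlatformSurface handling in event()).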
void QVideoWindowPrivate::resizeSwapChain()
{
    m_hasSwapChain = m_swapChain->createOrResize();
}

void QVideoWindowPrivate::releaseSwapChain()
{
    if (m_hasSwapChain) {
        m_hasSwapChain = false;
        m_swapChain->destroy();
    }
}

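// Render the current frame: without an RHI the frame is painted in software
// through the QBackingStore; otherwise a render pass draws the video quad and,
// if present, the subtitle quad into the swapchain.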
void QVideoWindowPrivate::render()
{
    if (!initialized)
        init();

    if (!q->isExposed() || !isExposed)
        return;

    QRect rect(0, 0, q->width(), q->height());

    if (backingStore) {
        if (backingStore->size() != q->size())
            backingStore->resize(q->size());

        backingStore->beginPaint(rect);

        QPaintDevice *device = backingStore->paintDevice();
        if (!device)
            return;
        QPainter painter(device);

        m_currentFrame.paint(&painter, rect, { Qt::black, aspectRatioMode });
        painter.end();

        backingStore->endPaint();
        backingStore->flush(rect);
        return;
    }

    const VideoTransformation frameTransformation =
            qNormalizedFrameTransformation(m_currentFrame.surfaceFormat());
    const QSize frameSize = qRotatedFramePresentationSize(m_currentFrame);
    const QSize scaled = frameSize.scaled(rect.size(), aspectRatioMode);
    QRect videoRect = QRect(QPoint(0, 0), scaled);
    videoRect.moveCenter(rect.center());
    QRect subtitleRect = videoRect.intersected(rect);

    if (!m_hasSwapChain || (m_swapChain->currentPixelSize() != m_swapChain->surfacePixelSize()))
        resizeSwapChain();

    const auto requiredSwapChainFormat =
            qGetRequiredSwapChainFormat(m_currentFrame.surfaceFormat());
    if (qShouldUpdateSwapChainFormat(m_swapChain.get(), requiredSwapChainFormat)) {
        releaseSwapChain();
        m_swapChain->setFormat(requiredSwapChainFormat);
        resizeSwapChain();
    }

    if (!m_hasSwapChain)
        return;

    QRhi::FrameOpResult r = m_rhi->beginFrame(m_swapChain.get());

    // keep the video frames alive until we know that they are not needed anymore
    m_videoFrameSlots[m_rhi->currentFrameSlot()] = m_currentFrame;

    if (r == QRhi::FrameOpSwapChainOutOfDate) {
        resizeSwapChain();
        if (!m_hasSwapChain)
            return;
        r = m_rhi->beginFrame(m_swapChain.get());
    }
    if (r != QRhi::FrameOpSuccess) {
        qWarning("beginFrame failed with %d, retry", r);
        q->requestUpdate();
        return;
    }

    QRhiResourceUpdateBatch *rub = m_rhi->nextResourceUpdateBatch();

    if (!m_vertexBufReady) {
        m_vertexBufReady = true;
        rub->uploadStaticBuffer(m_vertexBuf.get(), g_vw_quad);
    }

    if (m_texturesDirty)
        updateTextures(rub);

    if (m_subtitleDirty || m_subtitleLayout.videoSize != subtitleRect.size())
        updateSubtitle(rub, subtitleRect.size());

    const float mirrorFrame = frameTransformation.mirrorredHorizontallyAfterRotation ? -1.f : 1.f;
    const float xscale = mirrorFrame * float(videoRect.width()) / float(rect.width());
    const float yscale = -1.f * float(videoRect.height()) / float(rect.height());

    QMatrix4x4 transform;
    transform.scale(xscale, yscale);

    float maxNits = 100;
    if (m_swapChain->format() == QRhiSwapChain::HDRExtendedSrgbLinear) {
        auto info = m_swapChain->hdrInfo();
        if (info.limitsType == QRhiSwapChainHdrInfo::ColorComponentValue)
            maxNits = 100 * info.limits.colorComponentValue.maxColorComponentValue;
        else
            maxNits = info.limits.luminanceInNits.maxLuminance;
    }

    QByteArray uniformData;
    QVideoTextureHelper::updateUniformData(&uniformData, m_currentFrame.surfaceFormat(), m_currentFrame, transform, 1.f, maxNits);
    rub->updateDynamicBuffer(m_uniformBuf.get(), 0, uniformData.size(), uniformData.constData());

    if (m_hasSubtitle) {
        QMatrix4x4 st;
        st.translate(0, -2.f * (float(m_subtitleLayout.bounds.center().y()) + float(subtitleRect.top())) / float(rect.height()) + 1.f);
        st.scale(float(m_subtitleLayout.bounds.width()) / float(rect.width()),
                 -1.f * float(m_subtitleLayout.bounds.height()) / float(rect.height()));

        QByteArray uniformData;
        QVideoFrameFormat fmt(m_subtitleLayout.bounds.size().toSize(), QVideoFrameFormat::Format_ARGB8888);
        QVideoTextureHelper::updateUniformData(&uniformData, fmt, QVideoFrame(), st, 1.f);
        rub->updateDynamicBuffer(m_subtitleUniformBuf.get(), 0, uniformData.size(), uniformData.constData());
    }

    QRhiCommandBuffer *cb = m_swapChain->currentFrameCommandBuffer();
    cb->beginPass(m_swapChain->currentFrameRenderTarget(), Qt::black, { 1.0f, 0 }, rub);
    cb->setGraphicsPipeline(m_graphicsPipeline.get());
    auto size = m_swapChain->currentPixelSize();
    cb->setViewport({ 0, 0, float(size.width()), float(size.height()) });
    cb->setShaderResources(m_shaderResourceBindings.get());

    const quint32 vertexOffset = quint32(sizeof(float)) * 16 * frameTransformation.rotationIndex();
    const QRhiCommandBuffer::VertexInput vbufBinding(m_vertexBuf.get(), vertexOffset);
    cb->setVertexInput(0, 1, &vbufBinding);
    cb->draw(4);

    if (m_hasSubtitle) {
        cb->setGraphicsPipeline(m_subtitlePipeline.get());
        cb->setShaderResources(m_subtitleResourceBindings.get());
        const QRhiCommandBuffer::VertexInput vbufBinding(m_vertexBuf.get(), 0);
        cb->setVertexInput(0, 1, &vbufBinding);
        cb->draw(4);
    }

    cb->endPass();

    m_rhi->endFrame(m_swapChain.get());
}

/*!
    \class QVideoWindow
    \internal
*/
QVideoWindow::QVideoWindow(QScreen *screen)
    : QWindow(screen)
    , d(new QVideoWindowPrivate(this))
{
}

QVideoWindow::QVideoWindow(QWindow *parent)
    : QWindow(parent)
    , d(new QVideoWindowPrivate(this))
{
}

QVideoWindow::~QVideoWindow() = default;

QVideoSink *QVideoWindow::videoSink() const
{
    return d->m_sink.get();
}

Qt::AspectRatioMode QVideoWindow::aspectRatioMode() const
{
    return d->aspectRatioMode;
}

void QVideoWindow::setAspectRatioMode(Qt::AspectRatioMode mode)
{
    if (d->aspectRatioMode == mode)
        return;
    d->aspectRatioMode = mode;
    emit aspectRatioModeChanged(mode);
}

bool QVideoWindow::event(QEvent *e)
{
    switch (e->type()) {
    case QEvent::UpdateRequest:
        d->render();
        return true;

    case QEvent::PlatformSurface:
        // this is the proper time to tear down the swapchain (while the native window and surface are still around)
        if (static_cast<QPlatformSurfaceEvent *>(e)->surfaceEventType() == QPlatformSurfaceEvent::SurfaceAboutToBeDestroyed) {
            d->releaseSwapChain();
            d->isExposed = false;
        }
        break;
    case QEvent::Expose:
        d->isExposed = isExposed();
        if (d->isExposed)
            d->render();
        return true;

    default:
        break;
    }

    return QWindow::event(e);
}

void QVideoWindow::resizeEvent(QResizeEvent *resizeEvent)
{
    if (!d->backingStore)
        return;
    if (!d->initialized)
        d->init();
    d->backingStore->resize(resizeEvent->size());
}

void QVideoWindow::setVideoFrame(const QVideoFrame &frame)
{
    if (d->m_currentFrame.subtitleText() != frame.subtitleText())
        d->m_subtitleDirty = true;
    d->m_currentFrame = frame;
    d->m_texturesDirty = true;
    if (d->isExposed)
        requestUpdate();
}

QT_END_NAMESPACE

#include "moc_qvideowindow_p.cpp"