1 | // Copyright (C) 2021 The Qt Company Ltd. |
2 | // SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only |
3 | |
4 | #include "qvideowindow_p.h" |
5 | #include <QPlatformSurfaceEvent> |
6 | #include <qfile.h> |
7 | #include <qpainter.h> |
8 | #include <private/qguiapplication_p.h> |
9 | #include <private/qmemoryvideobuffer_p.h> |
10 | #include <qpa/qplatformintegration.h> |
11 | |
12 | QT_BEGIN_NAMESPACE |
13 | |
14 | static QSurface::SurfaceType platformSurfaceType() |
15 | { |
16 | #if defined(Q_OS_DARWIN) |
17 | return QSurface::MetalSurface; |
18 | #elif defined (Q_OS_WIN) |
19 | return QSurface::Direct3DSurface; |
20 | #endif |
21 | |
22 | auto *integration = QGuiApplicationPrivate::platformIntegration(); |
23 | |
24 | if (!integration->hasCapability(cap: QPlatformIntegration::OpenGL)) |
25 | return QSurface::RasterSurface; |
26 | |
27 | if (QCoreApplication::testAttribute(attribute: Qt::AA_ForceRasterWidgets)) |
28 | return QSurface::RasterSurface; |
29 | |
30 | if (integration->hasCapability(cap: QPlatformIntegration::RasterGLSurface)) |
31 | return QSurface::RasterGLSurface; |
32 | |
33 | return QSurface::OpenGLSurface; |
34 | } |
35 | |
36 | QVideoWindowPrivate::QVideoWindowPrivate(QVideoWindow *q) |
37 | : q(q), |
38 | m_sink(new QVideoSink) |
39 | { |
40 | Q_ASSERT(q); |
41 | |
42 | if (QGuiApplicationPrivate::platformIntegration()->hasCapability(cap: QPlatformIntegration::RhiBasedRendering)) { |
43 | auto surfaceType = ::platformSurfaceType(); |
44 | q->setSurfaceType(surfaceType); |
45 | switch (surfaceType) { |
46 | case QSurface::RasterSurface: |
47 | case QSurface::OpenVGSurface: |
48 | // can't use those surfaces, need to render in SW |
49 | m_graphicsApi = QRhi::Null; |
50 | break; |
51 | case QSurface::OpenGLSurface: |
52 | case QSurface::RasterGLSurface: |
53 | m_graphicsApi = QRhi::OpenGLES2; |
54 | break; |
55 | case QSurface::VulkanSurface: |
56 | m_graphicsApi = QRhi::Vulkan; |
57 | break; |
58 | case QSurface::MetalSurface: |
59 | m_graphicsApi = QRhi::Metal; |
60 | break; |
61 | case QSurface::Direct3DSurface: |
62 | m_graphicsApi = QRhi::D3D11; |
63 | break; |
64 | } |
65 | } |
66 | |
67 | QObject::connect(sender: m_sink.get(), signal: &QVideoSink::videoFrameChanged, context: q, slot: &QVideoWindow::setVideoFrame); |
68 | } |
69 | |
70 | QVideoWindowPrivate::~QVideoWindowPrivate() |
71 | { |
72 | QObject::disconnect(sender: m_sink.get(), signal: &QVideoSink::videoFrameChanged, |
73 | receiver: q, slot: &QVideoWindow::setVideoFrame); |
74 | } |
75 | |
// Triangle-strip quad covering the whole viewport. Each row is one
// vertex: x, y in clip space followed by u, v texture coordinates.
// Four blocks of 16 floats follow, one per clockwise rotation of the
// texture coordinates; render() selects a block by offsetting into this
// array with 16 * sizeof(float) * frameRotationIndex.
static const float g_vw_quad[] = {
    // 4 clockwise rotation of texture vertexes (the second pair)
    // Rotation 0
    -1.f, -1.f, 0.f, 0.f,
    -1.f, 1.f, 0.f, 1.f,
    1.f, -1.f, 1.f, 0.f,
    1.f, 1.f, 1.f, 1.f,
    // Rotation 90
    -1.f, -1.f, 0.f, 1.f,
    -1.f, 1.f, 1.f, 1.f,
    1.f, -1.f, 0.f, 0.f,
    1.f, 1.f, 1.f, 0.f,

    // Rotation 180
    -1.f, -1.f, 1.f, 1.f,
    -1.f, 1.f, 1.f, 0.f,
    1.f, -1.f, 0.f, 1.f,
    1.f, 1.f, 0.f, 0.f,
    // Rotation 270
    -1.f, -1.f, 1.f, 0.f,
    -1.f, 1.f, 0.f, 0.f,
    1.f, -1.f, 1.f, 1.f,
    1.f, 1.f, 0.f, 1.f
};
100 | |
101 | static QShader vwGetShader(const QString &name) |
102 | { |
103 | QFile f(name); |
104 | if (f.open(flags: QIODevice::ReadOnly)) |
105 | return QShader::fromSerialized(data: f.readAll()); |
106 | |
107 | return QShader(); |
108 | } |
109 | |
110 | void QVideoWindowPrivate::initRhi() |
111 | { |
112 | if (m_graphicsApi == QRhi::Null) |
113 | return; |
114 | |
115 | QRhi::Flags rhiFlags = {};//QRhi::EnableDebugMarkers | QRhi::EnableProfiling; |
116 | |
117 | #if QT_CONFIG(opengl) |
118 | if (m_graphicsApi == QRhi::OpenGLES2) { |
119 | m_fallbackSurface.reset(p: QRhiGles2InitParams::newFallbackSurface(format: q->format())); |
120 | QRhiGles2InitParams params; |
121 | params.fallbackSurface = m_fallbackSurface.get(); |
122 | params.window = q; |
123 | params.format = q->format(); |
124 | m_rhi.reset(p: QRhi::create(impl: QRhi::OpenGLES2, params: ¶ms, flags: rhiFlags)); |
125 | } |
126 | #endif |
127 | |
128 | #if QT_CONFIG(vulkan) |
129 | if (m_graphicsApi == QRhi::Vulkan) { |
130 | QRhiVulkanInitParams params; |
131 | params.inst = q->vulkanInstance(); |
132 | params.window = q; |
133 | m_rhi.reset(p: QRhi::create(impl: QRhi::Vulkan, params: ¶ms, flags: rhiFlags)); |
134 | } |
135 | #endif |
136 | |
137 | #ifdef Q_OS_WIN |
138 | if (m_graphicsApi == QRhi::D3D11) { |
139 | QRhiD3D11InitParams params; |
140 | params.enableDebugLayer = true; |
141 | m_rhi.reset(QRhi::create(QRhi::D3D11, ¶ms, rhiFlags)); |
142 | } |
143 | #endif |
144 | |
145 | #if defined(Q_OS_MACOS) || defined(Q_OS_IOS) |
146 | if (m_graphicsApi == QRhi::Metal) { |
147 | QRhiMetalInitParams params; |
148 | m_rhi.reset(QRhi::create(QRhi::Metal, ¶ms, rhiFlags)); |
149 | } |
150 | #endif |
151 | if (!m_rhi) |
152 | return; |
153 | |
154 | m_swapChain.reset(p: m_rhi->newSwapChain()); |
155 | m_swapChain->setWindow(q); |
156 | if (m_swapChain->isFormatSupported(f: QRhiSwapChain::HDRExtendedSrgbLinear)) |
157 | m_swapChain->setFormat(QRhiSwapChain::HDRExtendedSrgbLinear); |
158 | m_renderPass.reset(p: m_swapChain->newCompatibleRenderPassDescriptor()); |
159 | m_swapChain->setRenderPassDescriptor(m_renderPass.get()); |
160 | |
161 | m_vertexBuf.reset(p: m_rhi->newBuffer(type: QRhiBuffer::Immutable, usage: QRhiBuffer::VertexBuffer, size: sizeof(g_vw_quad))); |
162 | m_vertexBuf->create(); |
163 | m_vertexBufReady = false; |
164 | |
165 | m_uniformBuf.reset(p: m_rhi->newBuffer(type: QRhiBuffer::Dynamic, usage: QRhiBuffer::UniformBuffer, size: sizeof(QVideoTextureHelper::UniformData))); |
166 | m_uniformBuf->create(); |
167 | |
168 | m_textureSampler.reset(p: m_rhi->newSampler(magFilter: QRhiSampler::Linear, minFilter: QRhiSampler::Linear, mipmapMode: QRhiSampler::None, |
169 | addressU: QRhiSampler::ClampToEdge, addressV: QRhiSampler::ClampToEdge)); |
170 | m_textureSampler->create(); |
171 | |
172 | m_shaderResourceBindings.reset(p: m_rhi->newShaderResourceBindings()); |
173 | m_subtitleResourceBindings.reset(p: m_rhi->newShaderResourceBindings()); |
174 | |
175 | m_subtitleUniformBuf.reset(p: m_rhi->newBuffer(type: QRhiBuffer::Dynamic, usage: QRhiBuffer::UniformBuffer, size: sizeof(QVideoTextureHelper::UniformData))); |
176 | m_subtitleUniformBuf->create(); |
177 | |
178 | Q_ASSERT(NVideoFrameSlots >= m_rhi->resourceLimit(QRhi::FramesInFlight)); |
179 | } |
180 | |
181 | void QVideoWindowPrivate::setupGraphicsPipeline(QRhiGraphicsPipeline *pipeline, QRhiShaderResourceBindings *bindings, const QVideoFrameFormat &fmt) |
182 | { |
183 | |
184 | pipeline->setTopology(QRhiGraphicsPipeline::TriangleStrip); |
185 | QShader vs = vwGetShader(name: QVideoTextureHelper::vertexShaderFileName(format: fmt)); |
186 | Q_ASSERT(vs.isValid()); |
187 | QShader fs = vwGetShader(name: QVideoTextureHelper::fragmentShaderFileName(format: fmt, surfaceFormat: m_swapChain->format())); |
188 | Q_ASSERT(fs.isValid()); |
189 | pipeline->setShaderStages({ |
190 | { QRhiShaderStage::Vertex, vs }, |
191 | { QRhiShaderStage::Fragment, fs } |
192 | }); |
193 | QRhiVertexInputLayout inputLayout; |
194 | inputLayout.setBindings({ |
195 | { 4 * sizeof(float) } |
196 | }); |
197 | inputLayout.setAttributes({ |
198 | { 0, 0, QRhiVertexInputAttribute::Float2, 0 }, |
199 | { 0, 1, QRhiVertexInputAttribute::Float2, 2 * sizeof(float) } |
200 | }); |
201 | pipeline->setVertexInputLayout(inputLayout); |
202 | pipeline->setShaderResourceBindings(bindings); |
203 | pipeline->setRenderPassDescriptor(m_renderPass.get()); |
204 | pipeline->create(); |
205 | } |
206 | |
207 | void QVideoWindowPrivate::updateTextures(QRhiResourceUpdateBatch *rub) |
208 | { |
209 | m_texturesDirty = false; |
210 | |
211 | // We render a 1x1 black pixel when we don't have a video |
212 | if (!m_currentFrame.isValid()) |
213 | m_currentFrame = QVideoFrame(new QMemoryVideoBuffer(QByteArray{4, 0}, 4), |
214 | QVideoFrameFormat(QSize(1,1), QVideoFrameFormat::Format_RGBA8888)); |
215 | |
216 | m_frameTextures = QVideoTextureHelper::createTextures(frame&: m_currentFrame, rhi: m_rhi.get(), rub, oldTextures: std::move(m_frameTextures)); |
217 | if (!m_frameTextures) |
218 | return; |
219 | |
220 | QRhiShaderResourceBinding bindings[4]; |
221 | auto *b = bindings; |
222 | *(b++) = QRhiShaderResourceBinding::uniformBuffer(binding: 0, stage: QRhiShaderResourceBinding::VertexStage | QRhiShaderResourceBinding::FragmentStage, |
223 | buf: m_uniformBuf.get()); |
224 | |
225 | auto fmt = m_currentFrame.surfaceFormat(); |
226 | auto textureDesc = QVideoTextureHelper::textureDescription(format: fmt.pixelFormat()); |
227 | |
228 | for (int i = 0; i < textureDesc->nplanes; ++i) |
229 | (*b++) = QRhiShaderResourceBinding::sampledTexture(binding: i + 1, stage: QRhiShaderResourceBinding::FragmentStage, |
230 | tex: m_frameTextures->texture(plane: i), sampler: m_textureSampler.get()); |
231 | m_shaderResourceBindings->setBindings(first: bindings, last: b); |
232 | m_shaderResourceBindings->create(); |
233 | |
234 | if (fmt != format) { |
235 | format = fmt; |
236 | if (!m_graphicsPipeline) |
237 | m_graphicsPipeline.reset(p: m_rhi->newGraphicsPipeline()); |
238 | |
239 | setupGraphicsPipeline(pipeline: m_graphicsPipeline.get(), bindings: m_shaderResourceBindings.get(), fmt: format); |
240 | } |
241 | } |
242 | |
243 | void QVideoWindowPrivate::updateSubtitle(QRhiResourceUpdateBatch *rub, const QSize &frameSize) |
244 | { |
245 | m_subtitleDirty = false; |
246 | m_hasSubtitle = !m_currentFrame.subtitleText().isEmpty(); |
247 | if (!m_hasSubtitle) |
248 | return; |
249 | |
250 | m_subtitleLayout.update(frameSize, text: m_currentFrame.subtitleText()); |
251 | QSize size = m_subtitleLayout.bounds.size().toSize(); |
252 | |
253 | QImage img = m_subtitleLayout.toImage(); |
254 | |
255 | m_subtitleTexture.reset(p: m_rhi->newTexture(format: QRhiTexture::RGBA8, pixelSize: size)); |
256 | m_subtitleTexture->create(); |
257 | rub->uploadTexture(tex: m_subtitleTexture.get(), image: img); |
258 | |
259 | QRhiShaderResourceBinding bindings[2]; |
260 | |
261 | bindings[0] = QRhiShaderResourceBinding::uniformBuffer(binding: 0, stage: QRhiShaderResourceBinding::VertexStage | QRhiShaderResourceBinding::FragmentStage, |
262 | buf: m_subtitleUniformBuf.get()); |
263 | |
264 | bindings[1] = QRhiShaderResourceBinding::sampledTexture(binding: 1, stage: QRhiShaderResourceBinding::FragmentStage, |
265 | tex: m_subtitleTexture.get(), sampler: m_textureSampler.get()); |
266 | m_subtitleResourceBindings->setBindings(first: bindings, last: bindings + 2); |
267 | m_subtitleResourceBindings->create(); |
268 | |
269 | if (!m_subtitlePipeline) { |
270 | m_subtitlePipeline.reset(p: m_rhi->newGraphicsPipeline()); |
271 | |
272 | QRhiGraphicsPipeline::TargetBlend blend; |
273 | blend.enable = true; |
274 | m_subtitlePipeline->setTargetBlends({ blend }); |
275 | setupGraphicsPipeline(pipeline: m_subtitlePipeline.get(), bindings: m_subtitleResourceBindings.get(), fmt: QVideoFrameFormat(QSize(1, 1), QVideoFrameFormat::Format_RGBA8888)); |
276 | } |
277 | } |
278 | |
279 | void QVideoWindowPrivate::init() |
280 | { |
281 | if (initialized) |
282 | return; |
283 | initialized = true; |
284 | |
285 | initRhi(); |
286 | |
287 | if (!m_rhi) |
288 | backingStore = new QBackingStore(q); |
289 | else |
290 | m_sink->setRhi(m_rhi.get()); |
291 | } |
292 | |
293 | void QVideoWindowPrivate::resizeSwapChain() |
294 | { |
295 | m_hasSwapChain = m_swapChain->createOrResize(); |
296 | } |
297 | |
298 | void QVideoWindowPrivate::releaseSwapChain() |
299 | { |
300 | if (m_hasSwapChain) { |
301 | m_hasSwapChain = false; |
302 | m_swapChain->destroy(); |
303 | } |
304 | } |
305 | |
306 | void QVideoWindowPrivate::render() |
307 | { |
308 | if (!initialized) |
309 | init(); |
310 | |
311 | if (!q->isExposed() || !isExposed) |
312 | return; |
313 | |
314 | QRect rect(0, 0, q->width(), q->height()); |
315 | |
316 | if (backingStore) { |
317 | if (backingStore->size() != q->size()) |
318 | backingStore->resize(size: q->size()); |
319 | |
320 | backingStore->beginPaint(rect); |
321 | |
322 | QPaintDevice *device = backingStore->paintDevice(); |
323 | if (!device) |
324 | return; |
325 | QPainter painter(device); |
326 | |
327 | m_currentFrame.paint(painter: &painter, rect, options: { .backgroundColor: Qt::black, .aspectRatioMode: aspectRatioMode }); |
328 | painter.end(); |
329 | |
330 | backingStore->endPaint(); |
331 | backingStore->flush(region: rect); |
332 | return; |
333 | } |
334 | |
335 | int frameRotationIndex = (m_currentFrame.rotationAngle() / 90) % 4; |
336 | QSize frameSize = m_currentFrame.size(); |
337 | if (frameRotationIndex % 2) |
338 | frameSize.transpose(); |
339 | QSize scaled = frameSize.scaled(s: rect.size(), mode: aspectRatioMode); |
340 | QRect videoRect = QRect(QPoint(0, 0), scaled); |
341 | videoRect.moveCenter(p: rect.center()); |
342 | QRect subtitleRect = videoRect.intersected(other: rect); |
343 | |
344 | if (m_swapChain->currentPixelSize() != m_swapChain->surfacePixelSize()) |
345 | resizeSwapChain(); |
346 | |
347 | if (!m_hasSwapChain) |
348 | return; |
349 | |
350 | QRhi::FrameOpResult r = m_rhi->beginFrame(swapChain: m_swapChain.get()); |
351 | |
352 | // keep the video frames alive until we know that they are not needed anymore |
353 | m_videoFrameSlots[m_rhi->currentFrameSlot()] = m_currentFrame; |
354 | |
355 | if (r == QRhi::FrameOpSwapChainOutOfDate) { |
356 | resizeSwapChain(); |
357 | if (!m_hasSwapChain) |
358 | return; |
359 | r = m_rhi->beginFrame(swapChain: m_swapChain.get()); |
360 | } |
361 | if (r != QRhi::FrameOpSuccess) { |
362 | qWarning(msg: "beginFrame failed with %d, retry" , r); |
363 | q->requestUpdate(); |
364 | return; |
365 | } |
366 | |
367 | QRhiResourceUpdateBatch *rub = m_rhi->nextResourceUpdateBatch(); |
368 | |
369 | if (!m_vertexBufReady) { |
370 | m_vertexBufReady = true; |
371 | rub->uploadStaticBuffer(buf: m_vertexBuf.get(), data: g_vw_quad); |
372 | } |
373 | |
374 | if (m_texturesDirty) |
375 | updateTextures(rub); |
376 | |
377 | if (m_subtitleDirty || m_subtitleLayout.videoSize != subtitleRect.size()) |
378 | updateSubtitle(rub, frameSize: subtitleRect.size()); |
379 | |
380 | float mirrorFrame = m_currentFrame.mirrored() ? -1.f : 1.f; |
381 | float xscale = mirrorFrame * float(videoRect.width())/float(rect.width()); |
382 | float yscale = -1.f * float(videoRect.height())/float(rect.height()); |
383 | |
384 | QMatrix4x4 transform; |
385 | transform.scale(x: xscale, y: yscale); |
386 | |
387 | float maxNits = 100; |
388 | if (m_swapChain->format() == QRhiSwapChain::HDRExtendedSrgbLinear) { |
389 | auto info = m_swapChain->hdrInfo(); |
390 | if (info.limitsType == QRhiSwapChainHdrInfo::ColorComponentValue) |
391 | maxNits = 100 * info.limits.colorComponentValue.maxColorComponentValue; |
392 | else |
393 | maxNits = info.limits.luminanceInNits.maxLuminance; |
394 | } |
395 | |
396 | QByteArray uniformData; |
397 | QVideoTextureHelper::updateUniformData(dst: &uniformData, format: m_currentFrame.surfaceFormat(), frame: m_currentFrame, transform, opacity: 1.f, maxNits); |
398 | rub->updateDynamicBuffer(buf: m_uniformBuf.get(), offset: 0, size: uniformData.size(), data: uniformData.constData()); |
399 | |
400 | if (m_hasSubtitle) { |
401 | QMatrix4x4 st; |
402 | st.translate(x: 0, y: -2.f * (float(m_subtitleLayout.bounds.center().y()) + float(subtitleRect.top()))/ float(rect.height()) + 1.f); |
403 | st.scale(x: float(m_subtitleLayout.bounds.width())/float(rect.width()), |
404 | y: -1.f * float(m_subtitleLayout.bounds.height())/float(rect.height())); |
405 | |
406 | QByteArray uniformData; |
407 | QVideoFrameFormat fmt(m_subtitleLayout.bounds.size().toSize(), QVideoFrameFormat::Format_ARGB8888); |
408 | QVideoTextureHelper::updateUniformData(dst: &uniformData, format: fmt, frame: QVideoFrame(), transform: st, opacity: 1.f); |
409 | rub->updateDynamicBuffer(buf: m_subtitleUniformBuf.get(), offset: 0, size: uniformData.size(), data: uniformData.constData()); |
410 | } |
411 | |
412 | QRhiCommandBuffer *cb = m_swapChain->currentFrameCommandBuffer(); |
413 | cb->beginPass(rt: m_swapChain->currentFrameRenderTarget(), colorClearValue: Qt::black, depthStencilClearValue: { 1.0f, 0 }, resourceUpdates: rub); |
414 | cb->setGraphicsPipeline(m_graphicsPipeline.get()); |
415 | auto size = m_swapChain->currentPixelSize(); |
416 | cb->setViewport({ 0, 0, float(size.width()), float(size.height()) }); |
417 | cb->setShaderResources(srb: m_shaderResourceBindings.get()); |
418 | |
419 | quint32 vertexOffset = quint32(sizeof(float)) * 16 * frameRotationIndex; |
420 | const QRhiCommandBuffer::VertexInput vbufBinding(m_vertexBuf.get(), vertexOffset); |
421 | cb->setVertexInput(startBinding: 0, bindingCount: 1, bindings: &vbufBinding); |
422 | cb->draw(vertexCount: 4); |
423 | |
424 | if (m_hasSubtitle) { |
425 | cb->setGraphicsPipeline(m_subtitlePipeline.get()); |
426 | cb->setShaderResources(srb: m_subtitleResourceBindings.get()); |
427 | const QRhiCommandBuffer::VertexInput vbufBinding(m_vertexBuf.get(), 0); |
428 | cb->setVertexInput(startBinding: 0, bindingCount: 1, bindings: &vbufBinding); |
429 | cb->draw(vertexCount: 4); |
430 | } |
431 | |
432 | cb->endPass(); |
433 | |
434 | m_rhi->endFrame(swapChain: m_swapChain.get()); |
435 | } |
436 | |
437 | /*! |
438 | \class QVideoWindow |
439 | \internal |
440 | */ |
// Constructs a video window on the given \a screen.
QVideoWindow::QVideoWindow(QScreen *screen)
    : QWindow(screen)
    , d(new QVideoWindowPrivate(this))
{
}
446 | |
// Constructs a video window as a child of \a parent.
QVideoWindow::QVideoWindow(QWindow *parent)
    : QWindow(parent)
    , d(new QVideoWindowPrivate(this))
{
}
452 | |
453 | QVideoWindow::~QVideoWindow() = default; |
454 | |
// Returns the sink that feeds video frames into this window; ownership
// stays with the window.
QVideoSink *QVideoWindow::videoSink() const
{
    return d->m_sink.get();
}
459 | |
// Returns how the video is scaled to fit the window.
Qt::AspectRatioMode QVideoWindow::aspectRatioMode() const
{
    return d->aspectRatioMode;
}
464 | |
465 | void QVideoWindow::setAspectRatioMode(Qt::AspectRatioMode mode) |
466 | { |
467 | if (d->aspectRatioMode == mode) |
468 | return; |
469 | d->aspectRatioMode = mode; |
470 | emit aspectRatioModeChanged(mode); |
471 | } |
472 | |
473 | bool QVideoWindow::event(QEvent *e) |
474 | { |
475 | switch (e->type()) { |
476 | case QEvent::UpdateRequest: |
477 | d->render(); |
478 | return true; |
479 | |
480 | case QEvent::PlatformSurface: |
481 | // this is the proper time to tear down the swapchain (while the native window and surface are still around) |
482 | if (static_cast<QPlatformSurfaceEvent *>(e)->surfaceEventType() == QPlatformSurfaceEvent::SurfaceAboutToBeDestroyed) { |
483 | d->releaseSwapChain(); |
484 | d->isExposed = false; |
485 | } |
486 | break; |
487 | case QEvent::Expose: |
488 | d->isExposed = isExposed(); |
489 | if (d->isExposed) |
490 | requestUpdate(); |
491 | return true; |
492 | |
493 | default: |
494 | break; |
495 | } |
496 | |
497 | return QWindow::event(e); |
498 | } |
499 | |
500 | void QVideoWindow::resizeEvent(QResizeEvent *resizeEvent) |
501 | { |
502 | if (!d->backingStore) |
503 | return; |
504 | if (!d->initialized) |
505 | d->init(); |
506 | d->backingStore->resize(size: resizeEvent->size()); |
507 | } |
508 | |
509 | void QVideoWindow::setVideoFrame(const QVideoFrame &frame) |
510 | { |
511 | if (d->m_currentFrame.subtitleText() != frame.subtitleText()) |
512 | d->m_subtitleDirty = true; |
513 | d->m_currentFrame = frame; |
514 | d->m_texturesDirty = true; |
515 | if (d->isExposed) |
516 | requestUpdate(); |
517 | } |
518 | |
519 | QT_END_NAMESPACE |
520 | |
521 | #include "moc_qvideowindow_p.cpp" |
522 | |