1// Copyright (C) 2020 The Qt Company Ltd.
2// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR GPL-3.0-only
3
4#include "qquick3deffect_p.h"
5
6#include <ssg/qssgrendercontextcore.h>
7#include <QtQuick3DRuntimeRender/private/qssgrendereffect_p.h>
8#include <QtQuick3DRuntimeRender/private/qssgshadermaterialadapter_p.h>
9#include <QtQuick3DUtils/private/qssgutils_p.h>
10#include <QtQuick/qquickwindow.h>
11#include <QtQuick3D/private/qquick3dobject_p.h>
12#include <QtQuick3D/private/qquick3dscenemanager_p.h>
13#include <QtCore/qfile.h>
14#include <QtCore/qurl.h>
15
16
17QT_BEGIN_NAMESPACE
18
19/*!
20 \qmltype Effect
21 \inherits Object3D
22 \inqmlmodule QtQuick3D
23 \nativetype QQuick3DEffect
24 \brief Base component for creating a post-processing effect.
25
26 The Effect type allows the user to implement their own post-processing
27 effects for QtQuick3D.
28
29 \section1 Post-processing effects
30
31 A post-processing effect is conceptually very similar to Qt Quick's \l
32 ShaderEffect item. When an effect is present, the scene is rendered into a
33 separate texture first. The effect is then applied by drawing a textured
34 quad to the main render target, depending on the
35 \l{View3D::renderMode}{render mode} of the View3D. The effect can provide a
36 vertex shader, a fragment shader, or both. Effects are always applied on the
37 entire scene, per View3D.
38
39 Effects are associated with the \l SceneEnvironment in the
40 \l{SceneEnvironment::effects} property. The property is a list: effects can
41 be chained together; they are applied in the order they are in the list,
42 using the previous step's output as the input to the next one, with the last
43 effect's output defining the contents of the View3D.
44
45 \note \l SceneEnvironment and \l ExtendedSceneEnvironment provide a set of
46 built-in effects, such as depth of field, glow/bloom, lens flare, color
47 grading, and vignette. Always consider first if these are sufficient for
48 the application's needs, and prefer using the built-in facilities instead
49 of implementing a custom post-processing effect.
50
Effects are similar to \l{CustomMaterial}{custom materials} in many
ways. However, a custom material is associated with a model and is
responsible for the shading of that given mesh, whereas an effect's vertex
shader always gets a quad (that is, two triangles) as its input, and its
fragment shader samples a texture with the scene's content.
56
57 Unlike custom materials, effects support multiple passes. For many effects
this is not necessary, and when there is a need to apply multiple effects,
59 identical results can often be achieved by chaining together multiple
60 effects in \l{SceneEnvironment::effects}{the SceneEnvironment}. This is
61 demonstrated by the \l{Qt Quick 3D - Custom Effect Example}{Custom Effect
example} as well. However, passes can request additional color buffers
(textures), and specify which of these additional buffers they
64 output to. This allows implementing more complex image processing techniques
65 since subsequent passes can then use one or more of these additional
66 buffers, plus the original scene's content, as their input. If necessary,
67 these additional buffers can have an extended lifetime, meaning their
68 content is preserved between frames, which allows implementing effects that
rely on accumulating content from multiple frames, such as motion blur.
70
71 When compared to Qt Quick's 2D ShaderEffect, the 3D post-processing effects
72 have the advantage of being able to work with depth buffer data, as well as
73 the ability to implement multiple passes with intermediate buffers. In
74 addition, the texture-related capabilities are extended: Qt Quick 3D allows
75 more fine-grained control over filtering modes, and allows effects to work
76 with texture formats other than RGBA8, for example, floating point formats.
77
78 \note Post-processing effects are currently available when the View3D
79 has its \l{View3D::renderMode}{renderMode} set to \c Offscreen,
80 \c Underlay or \c Overlay. Effects will not be rendered for \c Inline mode.
81
82 \note When using post-processing effects, the application-provided shaders
83 should expect linear color data without tonemapping applied. The
84 tonemapping that is performed during the main render pass (or during skybox
85 rendering, if there is a skybox) when
86 \l{SceneEnvironment::tonemapMode}{tonemapMode} is set to a value other than
87 \c SceneEnvironment.TonemapModeNone, is automatically disabled when there
88 is at least one post-processing effect specified in the SceneEnvironment.
89 The last effect in the chain (more precisely, the last pass of the last
90 effect in the chain) will automatically get its fragment shader amended to
91 perform the same tonemapping the main render pass would.
92
93 \note Effects that perform their own tonemapping should be used in a
94 SceneEnvironment that has the built-in tonemapping disabled by setting
95 \l{SceneEnvironment::tonemapMode}{tonemapMode} to \c
96 SceneEnvironment.TonemapModeNone.
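
For example, an effect that implements its own tonemapping could be used in
a setup like the following, where \c myTonemappingEffect is a hypothetical
Effect defined elsewhere:

\qml
environment: SceneEnvironment {
    tonemapMode: SceneEnvironment.TonemapModeNone
    effects: [ myTonemappingEffect ]
}
\endqml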
97
98 \note By default the texture used as the effects' input is created with a
99 floating point texture format, such as 16-bit floating point RGBA. The
100 output texture's format is the same since by default it follows the input
101 format. This can be overridden using \l Buffer and an empty name. The
102 default RGBA16F is useful because it allows working with non-tonemapped
103 linear data without having the color values outside the 0-1 range clamped.
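
For example, to force an 8-bit RGBA output instead of the default floating
point format, a \l Buffer with an empty name can be set as the last pass's
output. A minimal sketch:

\qml
Buffer {
    id: outputFormatOverride
    name: "" // an empty name refers to the effect's default output buffer
    format: Buffer.RGBA8
}
// ...
passes: Pass {
    shaders: Shader { stage: Shader.Fragment; shader: "effect.frag" }
    output: outputFormatOverride
}
\endqml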
104
105 \section1 Exposing data to the shaders
106
107 Like with CustomMaterial or ShaderEffect, the dynamic properties of an
108 Effect object can be changed and animated using the usual QML and Qt Quick
109 facilities, and the values are exposed to the shaders automatically. The
110 following list shows how properties are mapped:
111
112 \list
113 \li bool, int, real -> bool, int, float
114 \li QColor, \l{QtQml::Qt::rgba()}{color} -> vec4, and the color gets
115 converted to linear, assuming sRGB space for the color value specified in
116 QML. The built-in Qt colors, such as \c{"green"} are in sRGB color space as
117 well, and the same conversion is performed for all color properties of
118 DefaultMaterial and PrincipledMaterial, so this behavior of Effect
119 matches those.
120 \li QRect, QRectF, \l{QtQml::Qt::rect()}{rect} -> vec4
121 \li QPoint, QPointF, \l{QtQml::Qt::point()}{point}, QSize, QSizeF, \l{QtQml::Qt::size()}{size} -> vec2
\li QVector2D, \l{QtQml::Qt::vector2d()}{vector2d} -> vec2
123 \li QVector3D, \l{QtQml::Qt::vector3d()}{vector3d} -> vec3
124 \li QVector4D, \l{QtQml::Qt::vector4d()}{vector4d} -> vec4
125 \li QMatrix4x4, \l{QtQml::Qt::matrix4x4()}{matrix4x4} -> mat4
126 \li QQuaternion, \l{QtQml::Qt::quaternion()}{quaternion} -> vec4, scalar value is \c w
127
128 \li TextureInput -> sampler2D or samplerCube, depending on whether \l
129 Texture or \l CubeMapTexture is used in the texture property of the
130 TextureInput. Setting the \l{TextureInput::enabled}{enabled} property to
131 false leads to exposing a dummy texture to the shader, meaning the shaders
132 are still functional but will sample a texture with opaque black image
133 content. Pay attention to the fact that properties for samplers must always
134 reference a \l TextureInput object, not a \l Texture directly. When it
135 comes to the \l Texture properties, the source, tiling, and filtering
136 related ones are the only ones that are taken into account implicitly with
effects, as the rest (such as UV transformations) is up to the custom
138 shaders to implement as they see fit.
139
140 \endlist
141
142 \note When a uniform referenced in the shader code does not have a
143 corresponding property, it will cause a shader compilation error when
processing the effect at run time. There are some exceptions, such as
sampler uniforms, which get a dummy texture bound when no
146 corresponding QML property is present, but as a general rule, all uniforms
147 and samplers must have a corresponding property declared in the
148 Effect object.
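
For instance, the following hypothetical Effect declares a few properties;
the corresponding uniforms can then be referenced by the same names in the
shader snippets:

\qml
Effect {
    property real strength: 0.5                      // float strength
    property color tintColor: "green"                // vec4 tintColor, converted to linear
    property vector2d offset: Qt.vector2d(0.1, 0.2)  // vec2 offset
    property TextureInput noiseMap: TextureInput {
        texture: Texture { source: "noise.png" }     // sampler2D noiseMap
    }
    // ...
}
\endqml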
149
150 \section1 Getting started with user-defined effects
151
152 A custom post-processing effect involves at minimum an Effect object and a
fragment shader snippet. Some effects will also need a customized vertex
shader.
155
156 As a simple example, let's create an effect that combines the scene's
157 content with an image, while further altering the red channel's value in an
158 animated manner:
159
160 \table 70%
161 \row
162 \li \qml
Effect {
    id: simpleEffect
    property TextureInput tex: TextureInput {
        texture: Texture { source: "image.png" }
    }
    property real redLevel
    NumberAnimation on redLevel { from: 0; to: 1; duration: 5000; loops: -1 }
    passes: Pass {
        shaders: Shader {
            stage: Shader.Fragment
            shader: "effect.frag"
        }
    }
}
177 \endqml
178 \li \badcode
void MAIN()
{
    vec4 c = texture(tex, TEXTURE_UV);
    c.r *= redLevel;
    FRAGCOLOR = c * texture(INPUT, INPUT_UV);
}
185 \endcode
186 \endtable
187
188 Here the texture with the image \c{image.png} is exposed to the shader under
189 the name \c tex. The value of redLevel is available in the shader in a \c
190 float uniform with the same name.
191
192 The fragment shader must contain a function called \c MAIN. The final
193 fragment color is determined by \c FRAGCOLOR. The main input texture, with
194 the contents of the View3D's scene, is accessible under a \c sampler2D with
195 the name \c INPUT. The UV coordinates from the quad are in \c
196 INPUT_UV. These UV values are always suitable for sampling \c INPUT,
197 regardless of the underlying graphics API at run time (and so regardless of
198 the Y axis direction in images since the necessary adjustments are applied
199 automatically by Qt Quick 3D). Sampling the texture with our external image
200 is done using \c TEXTURE_UV. \c INPUT_UV is not suitable in cross-platform
201 applications since V needs to be flipped to cater for the coordinate system
differences mentioned before, using logic that differs for textures based
on images and textures used as render targets. Fortunately, this is all
taken care of by the engine, so the shader needs no further logic for this.
205
206 Once simpleEffect is available, it can be associated with the effects list
of the View3D's SceneEnvironment:
208
209 \qml
environment: SceneEnvironment {
    effects: [ simpleEffect ]
}
213 \endqml
214
215 The results would look something like the following, with the original scene
216 on the left and with the effect applied on the right:
217
218 \table 70%
219 \row
220 \li \image effect_intro_1.png
221 \li \image effect_intro_2.png
222 \endtable
223
\note The \c shader property value in Shader is a URL, as is customary in
QML and Qt Quick, referencing the file containing the shader snippet, and
works very similarly to ShaderEffect or
\l{Image::source}{Image.source}. Only the \c file and \c qrc schemes are
supported. It is also possible to omit the \c file scheme, which allows
specifying a relative path in a convenient way. Such a path is resolved
relative to the component's (the \c{.qml} file's) location.
231
232 \note Shader code is always provided using Vulkan-style GLSL, regardless of
233 the graphics API used by Qt at run time.
234
235 \note The vertex and fragment shader code provided by the effect are not
236 full, complete GLSL shaders on their own. Rather, they provide a \c MAIN
237 function, and optionally a set of \c VARYING declarations, which are then
238 amended with further shader code by the engine.
239
240 \note The above example is not compatible with the optional multiview rendering mode that is used in some VR/AR applications.
241 To make it function both with and without multiview mode, change MAIN() like this:
242 \badcode
void MAIN()
{
    vec4 c = texture(tex, TEXTURE_UV);
    c.r *= redLevel;
#if QSHADER_VIEW_COUNT >= 2
    FRAGCOLOR = c * texture(INPUT, vec3(INPUT_UV, VIEW_INDEX));
#else
    FRAGCOLOR = c * texture(INPUT, INPUT_UV);
#endif
}
253 \endcode
254
255 \section1 Effects with vertex shaders
256
257 A vertex shader, when present, must provide a function called \c MAIN. In
258 the vast majority of cases the custom vertex shader will not want to provide
its own calculation of the homogeneous vertex position, but it is possible
260 using \c POSITION, \c VERTEX, and \c MODELVIEWPROJECTION_MATRIX. When
261 \c POSITION is not present in the custom shader code, a statement equivalent to
262 \c{POSITION = MODELVIEWPROJECTION_MATRIX * vec4(VERTEX, 1.0);} will be
263 injected automatically by Qt Quick 3D.
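
For example, a vertex shader \c MAIN that sets the position explicitly,
equivalent to what the engine would otherwise inject, looks like this:

\badcode
void MAIN()
{
    POSITION = MODELVIEWPROJECTION_MATRIX * vec4(VERTEX, 1.0);
}
\endcode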
264
265 To pass data between the vertex and fragment shaders, use the VARYING
266 keyword. Internally this will then be transformed into the appropriate
267 vertex output or fragment input declaration. The fragment shader can use the
same declaration, which then allows reading the interpolated value for the
269 current fragment.
270
Let's look at an example that is in effect very similar to the built-in
272 DistortionSpiral effect:
273
274 \table 70%
275 \row
276 \li \badcode
VARYING vec2 center_vec;
void MAIN()
{
    center_vec = INPUT_UV - vec2(0.5, 0.5);
    center_vec.y *= INPUT_SIZE.y / INPUT_SIZE.x;
}
283 \endcode
284 \li \badcode
VARYING vec2 center_vec;
void MAIN()
{
    float radius = 0.25;
    float dist_to_center = length(center_vec) / radius;
    vec2 texcoord = INPUT_UV;
    if (dist_to_center <= 1.0) {
        float rotation_amount = (1.0 - dist_to_center) * (1.0 - dist_to_center);
        float r = radians(360.0) * rotation_amount / 4.0;
        mat2 rotation = mat2(cos(r), sin(r), -sin(r), cos(r));
        texcoord = vec2(0.5, 0.5) + rotation * (INPUT_UV - vec2(0.5, 0.5));
    }
    FRAGCOLOR = texture(INPUT, texcoord);
}
299 \endcode
300 \endtable
301
302 The Effect object's \c passes list should now specify both the vertex and
303 fragment snippets:
304
305 \qml
passes: Pass {
    shaders: [
        Shader {
            stage: Shader.Vertex
            shader: "effect.vert"
        },
        Shader {
            stage: Shader.Fragment
            shader: "effect.frag"
        }
    ]
}
318 \endqml
319
320 The end result looks like the following:
321
322 \table 70%
323 \row
324 \li \image effect_intro_1.png
325 \li \image effect_intro_3.png
326 \endtable
327
328 \section1 Special keywords in effect shaders
329
330 \list
331
332 \li \c VARYING - Declares a vertex output or fragment input, depending on the type of the current shader.
333 \li \c MAIN - This function must always be present in an effect shader.
334 \li \c FRAGCOLOR - \c vec4 - The final fragment color; the output of the fragment shader. (fragment shader only)
\li \c POSITION - \c vec4 - The homogeneous position calculated in the vertex shader. (vertex shader only)
336 \li \c MODELVIEWPROJECTION_MATRIX - \c mat4 - The transformation matrix for the screen quad.
337 \li \c VERTEX - \c vec3 - The vertices of the quad; the input to the vertex shader. (vertex shader only)
338
339 \li \c INPUT - \c sampler2D or \c sampler2DArray - The sampler for the input
340 texture with the scene rendered into it, unless a pass redirects its input
341 via a BufferInput object, in which case \c INPUT refers to the additional
342 color buffer's texture referenced by the BufferInput. With \l{Multiview
343 Rendering}{multiview rendering} enabled, which can be relevant for VR/AR
344 applications, this is a sampler2DArray, while the input texture becomes a 2D
345 texture array.
346
347 \li \c INPUT_UV - \c vec2 - UV coordinates for sampling \c INPUT.
348
349 \li \c TEXTURE_UV - \c vec2 - UV coordinates suitable for sampling a Texture
350 with contents loaded from an image file.
351
352 \li \c INPUT_SIZE - \c vec2 - The size of the \c INPUT texture, in pixels.
353
354 \li \c OUTPUT_SIZE - \c vec2 - The size of the output buffer, in
355 pixels. Often the same as \c INPUT_SIZE, unless the pass outputs to an extra
356 Buffer with a size multiplier on it.
357
358 \li \c FRAME - \c float - A frame counter, incremented after each frame in the View3D.
359
\li \c DEPTH_TEXTURE - \c sampler2D - A depth texture with the depth buffer
contents for the opaque objects in the scene. Like with CustomMaterial, the
362 presence of this keyword in the shader triggers generating the depth texture
363 automatically.
364
365 \li \c VIEW_INDEX - \c uint - With \l{Multiview Rendering}{multiview
366 rendering} enabled, this is the current view index, available in both vertex
367 and fragment shaders. Always 0 when multiview rendering is not used.
368
369 \endlist
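
As a simple illustration of some of these keywords, the following fragment
shader sketch (a hypothetical example, not one of the built-in effects)
pixelates the scene with a cell size that is animated via the frame counter:

\badcode
void MAIN()
{
    // Cell size in pixels, oscillating over time
    float cell = 6.0 + 4.0 * sin(FRAME * 0.05);
    // Snap the UV coordinates to the cell grid, then sample the scene
    vec2 uv = floor(INPUT_UV * INPUT_SIZE / cell) * cell / INPUT_SIZE;
    FRAGCOLOR = texture(INPUT, uv);
}
\endcode

Note that, like the earlier examples, this samples \c INPUT as a \c
sampler2D and is therefore not multiview-compatible as written.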
370
371 \section1 Building multi-pass effects
372
373 A multi-pass effect often uses more than one set of shaders, and takes the
374 \l{Pass::output}{output} and \l{Pass::commands}{commands} properties into
375 use. Each entry in the passes list translates to a render pass drawing a
376 quad into the pass's output texture, while sampling the effect's input texture
377 and optionally other textures as well.
378
379 The typical outline of a multi-pass Effect can look like the following:
380
381 \qml
passes: [
    Pass {
        shaders: [
            Shader {
                stage: Shader.Vertex
                shader: "pass1.vert"
            },
            Shader {
                stage: Shader.Fragment
                shader: "pass1.frag"
            }
        ]
        // This pass outputs to the intermediate texture described
        // by the Buffer object.
        output: intermediateColorBuffer
    },
    Pass {
        shaders: [
            Shader {
                stage: Shader.Vertex
                shader: "pass2.vert"
            },
            Shader {
                stage: Shader.Fragment
                shader: "pass2.frag"
            }
        ]
        // The output of the last pass needs no redirection, it is
        // the final result of the effect.
        commands: [
            // This pass reads from the intermediate texture, meaning
            // INPUT in the shader will refer to the texture associated
            // with the Buffer.
            BufferInput {
                buffer: intermediateColorBuffer
            }
        ]
    }
]
421 \endqml
422
423 What is \c intermediateColorBuffer?
424
425 \qml
Buffer {
    id: intermediateColorBuffer
    name: "tempBuffer"
    // format: Buffer.RGBA8
    // textureFilterOperation: Buffer.Linear
    // textureCoordOperation: Buffer.ClampToEdge
}
433 \endqml
434
435 The commented properties are not necessary if the desired values match the
436 defaults.
437
438 Internally the presence of this Buffer object and referencing it from the \c
439 output property of a Pass leads to creating a texture with a size matching
440 the View3D, and so the size of the implicit input and output textures. When
441 this is not desired, the \l{Buffer::sizeMultiplier}{sizeMultiplier} property
442 can be used to get an intermediate texture with a different size. This can
443 lead to the \c INPUT_SIZE and \c OUTPUT_SIZE uniforms in the shader having
444 different values.
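
For example, a blur or downsampling pass could render into a half-resolution
intermediate texture. A minimal sketch, with an illustrative buffer name:

\qml
Buffer {
    id: halfResBuffer
    name: "halfRes"
    sizeMultiplier: 0.5 // half the width and height of the View3D
}
\endqml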
445
446 By default the Effect cannot count on textures preserving their contents
447 between frames. When a new intermediate texture is created, it is cleared to
448 \c{vec4(0.0)}. Afterwards, the same texture can be reused for another
449 purpose. Therefore, effect passes should always write to the entire texture,
450 without making assumptions about their content at the start of the pass.
451 There is an exception to this: Buffer objects with
452 \l{Buffer::bufferFlags}{bufferFlags} set to Buffer.SceneLifetime. This
453 indicates that the texture is permanently associated with a pass of the
effect and it will not be reused for other purposes. The contents of such
color buffers are preserved between frames. This is typically used in a
ping-pong fashion in effects like motion blur: the first pass takes the
persistent buffer as its input, in addition to the effect's main input
texture, outputting to another intermediate buffer, while the second pass
459 outputs to the persistent buffer. This way in the first frame the first pass
460 samples an empty (transparent) texture, whereas in subsequent frames it
461 samples the output of the second pass from the previous frame. A third pass
462 can then blend the effect's input and the second pass' output together.
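
A sketch of such a ping-pong setup is shown below. The buffer names and
shader file names are illustrative only; the key points are the \c
Buffer.SceneLifetime flag on the persistent buffer and the way the passes
alternate between the two buffers:

\qml
Buffer {
    id: accumulationBuffer
    name: "accumulation"
    bufferFlags: Buffer.SceneLifetime // contents preserved between frames
}
Buffer {
    id: tempBuffer
    name: "temp"
}
// ...
passes: [
    Pass {
        // Samples the previous frame's accumulation and INPUT, writes to "temp".
        shaders: Shader { stage: Shader.Fragment; shader: "accumulate.frag" }
        commands: [ BufferInput { buffer: accumulationBuffer; sampler: "accumulation" } ]
        output: tempBuffer
    },
    Pass {
        // Copies "temp" into the persistent buffer for the next frame.
        shaders: Shader { stage: Shader.Fragment; shader: "copy.frag" }
        commands: [ BufferInput { buffer: tempBuffer } ]
        output: accumulationBuffer
    },
    Pass {
        // Final pass: blends the effect's input with the accumulated result.
        shaders: Shader { stage: Shader.Fragment; shader: "final.frag" }
        commands: [ BufferInput { buffer: accumulationBuffer; sampler: "accumulation" } ]
    }
]
\endqml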
463
464 The BufferInput command type is used to expose custom texture buffers to the
465 render pass.
466
For instance, to access \c someBuffer in the render pass shaders under
the name \c mySampler, the following can be added to its command list:
469 \qml
470 BufferInput { buffer: someBuffer; sampler: "mySampler" }
471 \endqml
472
If the \c sampler name is not specified, \c INPUT will be used by default.
474
475 Buffers can be useful to share intermediate results between render passes.
476
477 To expose preloaded textures to the effect, TextureInput should be used instead.
478 These can be defined as properties of the Effect itself, and will automatically
479 be accessible to the shaders by their property names.
480 \qml
property TextureInput tex: TextureInput {
    texture: Texture { source: "image.png" }
}
484 \endqml
485
486 Here \c tex is a valid sampler in all shaders of all the passes of the
487 effect.
488
489 When it comes to uniform values from properties, all passes in the Effect
490 read the same values in their shaders. If necessary it is possible to
491 override the value of a uniform just for a given pass. This is achieved by
492 adding the \l SetUniformValue command to the list of commands for the pass.
493
494 \note The \l{SetUniformValue::target}{target} of the pass-specific uniform
495 value setter can only refer to a name that is the name of a property of the
496 effect. It can override the value for a property's corresponding uniform,
497 but it cannot introduce new uniforms.
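
For example, assuming the Effect declares a \c redLevel property as in the
earlier example, a single pass could override the corresponding uniform's
value like this (a sketch):

\qml
Pass {
    shaders: Shader { stage: Shader.Fragment; shader: "pass2.frag" }
    commands: [
        SetUniformValue { target: "redLevel"; value: 0.0 }
    ]
}
\endqml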
498
499 \section1 Performance considerations
500
501 Be aware of the increased resource usage and potentially reduced performance
502 when using post-processing effects. Just like with Qt Quick layers and
503 ShaderEffect, rendering the scene into a texture and then using that to
504 texture a quad is not a cheap operation, especially on low-end hardware with
505 limited fragment processing power. The amount of additional graphics memory
506 needed, as well as the increase in GPU load both depend on the size of the
507 View3D (which, on embedded devices without a windowing system, may often be
as big as the screen resolution). Multi-pass effects, as well as applying
multiple effects, increase the resource and performance requirements further.
510
511 Therefore, it is highly advisable to ensure early on in the development
512 lifecycle that the targeted device and graphics stack is able to cope with
513 the effects included in the design of the 3D scene at the final product's
514 screen resolution.
515
516 While unavoidable with techniques that need it, \c DEPTH_TEXTURE implies an
additional rendering pass to generate the contents of that texture, which
can also be costly on less capable hardware. Therefore, use \c
519 DEPTH_TEXTURE in the effect's shaders only when essential.
520
521 The complexity of the operations in the shaders is also important. Just like
522 with CustomMaterial, a sub-optimal fragment shader can easily lead to
523 reduced rendering performance.
524
525 Be cautious with \l{Buffer::sizeMultiplier}{sizeMultiplier in Buffer} when
526 values larger than 1 are involved. For example, a multiplier of 4 means
527 creating and then rendering to a texture that is 4 times the size of the
528 View3D. Just like with shadow maps and multi- or supersampling, the
529 increased resource and performance costs can quickly outweigh the benefits
530 from better quality on systems with limited GPU power.
531
532 \section1 VR/AR considerations
533
534 When developing applications for virtual or augmented reality by using Qt
535 Quick 3D XR, postprocessing effects are functional and available to use.
However, designers and developers should take special care to understand
what kind of effects make sense in a virtual reality environment.
Some effects, including some of the built-in ones in
ExtendedSceneEnvironment or the deprecated Effects module, do not lead to a
good visual experience in a VR environment, and may even affect the user
physically (causing, for example, motion sickness or dizziness).
542
543 When the more efficient \l{Multiview Rendering}{multiview rendering mode} is
544 enabled in a VR/AR application, there is no separate render pass for the
545 left and right eye contents. Instead, it all happens in one pass, using a 2D
546 texture array with two layers instead of two independent 2D textures. This
also means that many intermediate buffers, that is, color or depth textures,
will need to become texture arrays in this mode. This then has implications
for custom materials and postprocessing effects. Textures such as the input
texture (\c INPUT), the depth texture (\c DEPTH_TEXTURE), the screen texture
(\c SCREEN_TEXTURE), and some others become 2D texture arrays, exposed in
the shader as a \c sampler2DArray instead of a \c sampler2D. This has
implications for GLSL functions such as texture(), textureLod(), and
textureSize(): the UV coordinate is then a vec3 instead of a vec2, and
textureSize() returns a vec3 instead of a vec2. Effects intended to function
regardless of the rendering mode can be written with an appropriate ifdef:
557 \badcode
#if QSHADER_VIEW_COUNT >= 2
    vec4 c = texture(INPUT, vec3(INPUT_UV, VIEW_INDEX));
#else
    vec4 c = texture(INPUT, INPUT_UV);
#endif
563 \endcode
564
565 \sa Shader, Pass, Buffer, BufferInput, {Qt Quick 3D - Custom Effect Example}
566*/
567
568/*!
569 \qmlproperty list Effect::passes
570 Contains a list of render \l {Pass}{passes} implemented by the effect.
571*/
572
573QQuick3DEffect::QQuick3DEffect(QQuick3DObject *parent)
574 : QQuick3DObject(*(new QQuick3DObjectPrivate(QQuick3DObjectPrivate::Type::Effect)), parent)
575{
576}
577
578QQmlListProperty<QQuick3DShaderUtilsRenderPass> QQuick3DEffect::passes()
579{
580 return QQmlListProperty<QQuick3DShaderUtilsRenderPass>(this,
581 nullptr,
582 QQuick3DEffect::qmlAppendPass,
583 QQuick3DEffect::qmlPassCount,
584 QQuick3DEffect::qmlPassAt,
585 QQuick3DEffect::qmlPassClear);
586}
587
588// Default vertex and fragment shader code that is used when no corresponding
589// Shader is present in the Effect. These go through the usual processing so
590// should use the user-facing builtins.
591
592static const char *default_effect_vertex_shader =
593 "void MAIN()\n"
594 "{\n"
595 "}\n";
596
597static const char *default_effect_fragment_shader =
598 "void MAIN()\n"
599 "{\n"
600 "#if QSHADER_VIEW_COUNT >= 2\n"
601 " FRAGCOLOR = texture(INPUT, vec3(INPUT_UV, VIEW_INDEX));\n"
602 "#else\n"
603 " FRAGCOLOR = texture(INPUT, INPUT_UV);\n"
604 "#endif\n"
605 "}\n";
606
607static inline void insertVertexMainArgs(QByteArray &snippet)
608{
609 static const char *argKey = "/*%QT_ARGS_MAIN%*/";
610 const int argKeyLen = int(strlen(s: argKey));
611 const int argKeyPos = snippet.indexOf(bv: argKey);
612 if (argKeyPos >= 0)
613 snippet = snippet.left(n: argKeyPos) + QByteArrayLiteral("inout vec3 VERTEX") + snippet.mid(index: argKeyPos + argKeyLen);
614}
615
616QSSGRenderGraphObject *QQuick3DEffect::updateSpatialNode(QSSGRenderGraphObject *node)
617{
618 using namespace QSSGShaderUtils;
619
620 const auto &renderContext = QQuick3DObjectPrivate::get(item: this)->sceneManager->wattached->rci();
621 if (!renderContext) {
622 qWarning(msg: "QQuick3DEffect: No render context interface?");
623 return nullptr;
624 }
625
626 QSSGRenderEffect *effectNode = static_cast<QSSGRenderEffect *>(node);
627 bool newBackendNode = false;
628 if (!effectNode) {
629 effectNode = new QSSGRenderEffect;
630 newBackendNode = true;
631 }
632
633 bool shadersMayChange = false;
634 if (m_dirtyAttributes & Dirty::EffectChainDirty)
635 shadersMayChange = true;
636
637 const bool fullUpdate = newBackendNode || effectNode->incompleteBuildTimeObject || (m_dirtyAttributes & Dirty::TextureDirty);
638
639 if (fullUpdate || shadersMayChange) {
640 markAllDirty();
641
642 // Need to clear the old list with properties and textures first.
643 effectNode->properties.clear();
644 effectNode->textureProperties.clear();
645
646 QMetaMethod propertyDirtyMethod;
647 const int idx = metaObject()->indexOfSlot(slot: "onPropertyDirty()");
648 if (idx != -1)
649 propertyDirtyMethod = metaObject()->method(index: idx);
650
651 // Properties -> uniforms
652 QSSGShaderCustomMaterialAdapter::StringPairList uniforms;
653 QSSGShaderCustomMaterialAdapter::StringPairList multiViewDependentSamplers;
654 const int propCount = metaObject()->propertyCount();
655 int propOffset = metaObject()->propertyOffset();
656
657 // Effect can have multilayered inheritance structure, so find the actual propOffset
658 const QMetaObject *superClass = metaObject()->superClass();
659 while (superClass && qstrcmp(str1: superClass->className(), str2: "QQuick3DEffect") != 0) {
660 propOffset = superClass->propertyOffset();
661 superClass = superClass->superClass();
662 }
663
664 using TextureInputProperty = QPair<QQuick3DShaderUtilsTextureInput *, const char *>;
665
666 QVector<TextureInputProperty> textureProperties; // We'll deal with these later
667 for (int i = propOffset; i != propCount; ++i) {
668 const QMetaProperty property = metaObject()->property(index: i);
669 if (Q_UNLIKELY(!property.isValid()))
670 continue;
671
672 const auto name = property.name();
673 QMetaType propType = property.metaType();
674 QVariant propValue = property.read(obj: this);
675 if (propType == QMetaType(QMetaType::QVariant))
676 propType = propValue.metaType();
677
678 if (propType.id() >= QMetaType::User) {
679 if (propType.id() == qMetaTypeId<QQuick3DShaderUtilsTextureInput *>()) {
680 if (QQuick3DShaderUtilsTextureInput *texture = property.read(obj: this).value<QQuick3DShaderUtilsTextureInput *>())
681 textureProperties.push_back(t: {texture, name});
682 }
683 } else if (propType == QMetaType(QMetaType::QObjectStar)) {
684 if (QQuick3DShaderUtilsTextureInput *texture = qobject_cast<QQuick3DShaderUtilsTextureInput *>(object: propValue.value<QObject *>()))
685 textureProperties.push_back(t: {texture, name});
686 } else {
687 const auto type = uniformType(type: propType);
688 if (type != QSSGRenderShaderValue::Unknown) {
689 uniforms.append(t: { uniformTypeName(type: propType), name });
690 effectNode->properties.push_back(t: { name, uniformTypeName(type: propType),
691 propValue, uniformType(type: propType), i});
692 // Track the property changes
693 if (fullUpdate) {
694 if (property.hasNotifySignal() && propertyDirtyMethod.isValid())
695 connect(sender: this, signal: property.notifySignal(), receiver: this, method: propertyDirtyMethod);
696 } // else already connected
697 } else {
698 // ### figure out how _not_ to warn when there are no dynamic
699 // properties defined (because warnings like Blah blah objectName etc. are not helpful)
700 //qWarning("No known uniform conversion found for effect property %s. Skipping", property.name());
701 }
702 }
703 }
704
705 const auto processTextureProperty = [&](QQuick3DShaderUtilsTextureInput &texture, const QByteArray &name) {
706 QSSGRenderEffect::TextureProperty texProp;
707 QQuick3DTexture *tex = texture.texture(); // may be null if the TextureInput has no 'texture' set
708 if (fullUpdate) {
709 connect(sender: &texture, signal: &QQuick3DShaderUtilsTextureInput::enabledChanged, context: this, slot: &QQuick3DEffect::onTextureDirty);
710 connect(sender: &texture, signal: &QQuick3DShaderUtilsTextureInput::textureChanged, context: this, slot: &QQuick3DEffect::onTextureDirty);
711 } // else already connected
712 texProp.name = name;
713 if (texture.enabled && tex)
714 texProp.texImage = tex->getRenderImage();
715
716 texProp.shaderDataType = QSSGRenderShaderValue::Texture;
717
718 if (tex) {
719 texProp.minFilterType = tex->minFilter() == QQuick3DTexture::Nearest ? QSSGRenderTextureFilterOp::Nearest
720 : QSSGRenderTextureFilterOp::Linear;
721 texProp.magFilterType = tex->magFilter() == QQuick3DTexture::Nearest ? QSSGRenderTextureFilterOp::Nearest
722 : QSSGRenderTextureFilterOp::Linear;
723 texProp.mipFilterType = tex->generateMipmaps() ? (tex->mipFilter() == QQuick3DTexture::Nearest ? QSSGRenderTextureFilterOp::Nearest
724 : QSSGRenderTextureFilterOp::Linear)
725 : QSSGRenderTextureFilterOp::None;
726 texProp.horizontalClampType = tex->horizontalTiling() == QQuick3DTexture::Repeat ? QSSGRenderTextureCoordOp::Repeat
727 : (tex->horizontalTiling() == QQuick3DTexture::ClampToEdge ? QSSGRenderTextureCoordOp::ClampToEdge
728 : QSSGRenderTextureCoordOp::MirroredRepeat);
729 texProp.verticalClampType = tex->verticalTiling() == QQuick3DTexture::Repeat ? QSSGRenderTextureCoordOp::Repeat
730 : (tex->verticalTiling() == QQuick3DTexture::ClampToEdge ? QSSGRenderTextureCoordOp::ClampToEdge
731 : QSSGRenderTextureCoordOp::MirroredRepeat);
732 texProp.zClampType = tex->depthTiling() == QQuick3DTexture::Repeat ? QSSGRenderTextureCoordOp::Repeat
733 : (tex->depthTiling() == QQuick3DTexture::ClampToEdge) ? QSSGRenderTextureCoordOp::ClampToEdge
734 : QSSGRenderTextureCoordOp::MirroredRepeat;
735 }
736
737 // Knowing upfront that a sampler2D needs to be a sampler2DArray in
738 // the multiview-compatible version of the shader is not trivial.
739 // Consider: we know the list of TextureInputs, without any
740 // knowledge about the usage of those textures. Intermediate buffers
741 // (textures) also have a default constructed (no source, no source
742 // item, no texture data) Texture set. What indicates that these are
743 // used as intermediate buffers, is the 'output' property of a Pass,
744 // referencing a Buffer object (which objects we otherwise do not
745 // track), the 'name' of which matches TextureInput property name.
746 // The list of passes may vary dynamically, and some Passes may not
747 // be listed at any point in time if the effect has an
748 // ubershader-ish design. Thus one can have TextureInputs that are
749 // not associated with a Buffer (when scanning through the Passes),
750 // and so we cannot just check the 'output'-referenced Buffers to
751 // decide if a TextureInput's Texture needs to be treated specially
752 // in the generated shader code. (and the type must be correct even
753 // for, from our perspective, "unused" samplers since they are still
754 // in the shader code, and will get a dummy texture bound)
755 //
756 // Therefore, in the absence of more sophisticated options, we just
757 // look at the TextureInput's texture, and if it is something along
758 // the lines of
759 // property TextureInput intermediateColorBuffer1: TextureInput { texture: Texture { } }
760 // then it is added to the special list, indicating the the type is
761 // sampler2D or sampler2DArray, depending on the rendering mode the
762 // shader is targeting.
763
764 if (tex && !tex->hasSourceData()) {
765 multiViewDependentSamplers.append(t: { QByteArrayLiteral("sampler2D"), name }); // the type may get adjusted later
766 } else {
767 if (tex && QQuick3DObjectPrivate::get(item: tex)->type == QQuick3DObjectPrivate::Type::ImageCube)
768 uniforms.append(t: { QByteArrayLiteral("samplerCube"), name });
769 else if (tex && tex->textureData() && tex->textureData()->depth() > 0)
770 uniforms.append(t: { QByteArrayLiteral("sampler3D"), name });
771 else
772 uniforms.append(t: { QByteArrayLiteral("sampler2D"), name });
773 }
774
775 effectNode->textureProperties.push_back(t: texProp);
776 };
777
778 // Textures
779 for (const auto &property : std::as_const(t&: textureProperties))
780 processTextureProperty(*property.first, property.second);
781
782 if (effectNode->incompleteBuildTimeObject) { // This object came from the shadergen tool
783 const auto names = dynamicPropertyNames();
784 for (const auto &name : names) {
785 QVariant propValue = property(name: name.constData());
786 QMetaType propType = propValue.metaType();
787 if (propType == QMetaType(QMetaType::QVariant))
788 propType = propValue.metaType();
789
790 if (propType.id() >= QMetaType::User) {
791 if (propType.id() == qMetaTypeId<QQuick3DShaderUtilsTextureInput *>()) {
792 if (QQuick3DShaderUtilsTextureInput *texture = propValue.value<QQuick3DShaderUtilsTextureInput *>())
793 textureProperties.push_back(t: {texture, name});
794 }
795 } else if (propType.id() == QMetaType::QObjectStar) {
796 if (QQuick3DShaderUtilsTextureInput *texture = qobject_cast<QQuick3DShaderUtilsTextureInput *>(object: propValue.value<QObject *>()))
797 textureProperties.push_back(t: {texture, name});
798 } else {
799 const auto type = uniformType(type: propType);
800 if (type != QSSGRenderShaderValue::Unknown) {
801 uniforms.append(t: { uniformTypeName(type: propType), name });
802 effectNode->properties.push_back(t: { name, uniformTypeName(type: propType),
803 propValue, uniformType(type: propType), -1 /* aka. dynamic property */});
804 // We don't need to track property changes
805 } else {
806 // ### figure out how _not_ to warn when there are no dynamic
807 // properties defined (because warnings like Blah blah objectName etc. are not helpful)
808 qWarning(msg: "No known uniform conversion found for effect property %s. Skipping", name.constData());
809 }
810 }
811 }
812
813 for (const auto &property : std::as_const(t&: textureProperties))
814 processTextureProperty(*property.first, property.second);
815 }
816
817 // built-ins
818 uniforms.append(t: { "mat4", "qt_modelViewProjection" });
819 uniforms.append(t: { "vec2", "qt_inputSize" });
820 uniforms.append(t: { "vec2", "qt_outputSize" });
821 uniforms.append(t: { "float", "qt_frame_num" });
822 uniforms.append(t: { "float", "qt_fps" });
823 uniforms.append(t: { "vec2", "qt_cameraProperties" });
824 uniforms.append(t: { "float", "qt_normalAdjustViewportFactor" });
825 uniforms.append(t: { "float", "qt_nearClipValue" });
826
827 // qt_inputTexture is not listed in uniforms, will be added by prepareCustomShader()
828 // since the name and type varies between non-multiview and multiview mode
829
830 QSSGShaderCustomMaterialAdapter::StringPairList builtinVertexInputs;
831 builtinVertexInputs.append(t: { "vec3", "attr_pos" });
832 builtinVertexInputs.append(t: { "vec2", "attr_uv" });
833
834 QSSGShaderCustomMaterialAdapter::StringPairList builtinVertexOutputs;
835 builtinVertexOutputs.append(t: { "vec2", "qt_inputUV" });
836 builtinVertexOutputs.append(t: { "vec2", "qt_textureUV" });
837 builtinVertexOutputs.append(t: { "flat uint", "qt_viewIndex" });
838
839 // fragOutput is added automatically by the program generator
840
841 if (!m_passes.isEmpty()) {
842 const QQmlContext *context = qmlContext(this);
843 effectNode->resetCommands();
844 for (QQuick3DShaderUtilsRenderPass *pass : std::as_const(t&: m_passes)) {
845 // Have a key composed more or less of the vertex and fragment filenames.
846 // The shaderLibraryManager uses stage+shaderPathKey as the key.
847 // Thus shaderPathKey is then sufficient to look up both the vertex and fragment shaders later on.
848 // Note that this key is not suitable as a unique key for the graphics resources because the same
849 // set of shader files can be used in multiple different passes, or in multiple active effects.
850 // But that's the effect system's problem.
851 QByteArray shaderPathKey("effect pipeline--");
852 QSSGRenderEffect::ShaderPrepPassData passData;
853 for (QQuick3DShaderUtilsShader::Stage stage : { QQuick3DShaderUtilsShader::Stage::Vertex, QQuick3DShaderUtilsShader::Stage::Fragment }) {
854 QQuick3DShaderUtilsShader *shader = nullptr;
855 for (QQuick3DShaderUtilsShader *s : pass->m_shaders) {
856 if (s->stage == stage) {
857 shader = s;
858 break;
859 }
860 }
861
862 // just how many enums does one need for the exact same thing...
863 QSSGShaderCache::ShaderType type = QSSGShaderCache::ShaderType::Vertex;
864 if (stage == QQuick3DShaderUtilsShader::Stage::Fragment)
865 type = QSSGShaderCache::ShaderType::Fragment;
866
867 // Will just use the custom material infrastructure. Some
868 // substitutions are common between custom materials and effects.
869 //
870 // Substitutions relevant to us here:
871 // MAIN -> qt_customMain
872 // FRAGCOLOR -> fragOutput
873 // POSITION -> gl_Position
874 // MODELVIEWPROJECTION_MATRIX -> qt_modelViewProjection
875 // DEPTH_TEXTURE -> qt_depthTexture
876 // ... other things shared with custom material
877 //
878 // INPUT -> qt_inputTexture
879 // INPUT_UV -> qt_inputUV
880 // ... other effect specifics
881 //
882 // Built-in uniforms, inputs and outputs will be baked into
883 // metadata comment blocks in the resulting source code.
884 // Same goes for inputs/outputs declared with VARYING.
885
886 QByteArray code;
887 if (shader) {
888 code = QSSGShaderUtils::resolveShader(fileUrl: shader->shader, context, shaderPathKey); // appends to shaderPathKey
889 } else {
890 if (!shaderPathKey.isEmpty())
891 shaderPathKey.append(c: '>');
892 shaderPathKey += "DEFAULT";
893 if (type == QSSGShaderCache::ShaderType::Vertex)
894 code = default_effect_vertex_shader;
895 else
896 code = default_effect_fragment_shader;
897 }
898
899 QSSGShaderCustomMaterialAdapter::ShaderCodeAndMetaData result[2];
900 if (type == QSSGShaderCache::ShaderType::Vertex) {
901 QByteArray buf;
902 result[QSSGRenderCustomMaterial::RegularShaderPathKeyIndex] =
903 QSSGShaderCustomMaterialAdapter::prepareCustomShader(dst&: buf, shaderCode: code, type,
904 baseUniforms: uniforms, baseInputs: builtinVertexInputs, baseOutputs: builtinVertexOutputs,
905 multiViewCompatible: false, multiViewDependentSamplers);
906 result[QSSGRenderCustomMaterial::RegularShaderPathKeyIndex].first += buf;
907 buf.clear();
908 result[QSSGRenderCustomMaterial::MultiViewShaderPathKeyIndex] =
909 QSSGShaderCustomMaterialAdapter::prepareCustomShader(dst&: buf, shaderCode: code, type,
910 baseUniforms: uniforms, baseInputs: builtinVertexInputs, baseOutputs: builtinVertexOutputs,
911 multiViewCompatible: true, multiViewDependentSamplers);
912 result[QSSGRenderCustomMaterial::MultiViewShaderPathKeyIndex].first += buf;
913 } else {
914 QByteArray buf;
915 result[QSSGRenderCustomMaterial::RegularShaderPathKeyIndex] =
916 QSSGShaderCustomMaterialAdapter::prepareCustomShader(dst&: buf, shaderCode: code, type,
917 baseUniforms: uniforms, baseInputs: builtinVertexOutputs, baseOutputs: {},
918 multiViewCompatible: false, multiViewDependentSamplers);
919 result[QSSGRenderCustomMaterial::RegularShaderPathKeyIndex].first += buf;
920 buf.clear();
921 result[QSSGRenderCustomMaterial::MultiViewShaderPathKeyIndex] =
922 QSSGShaderCustomMaterialAdapter::prepareCustomShader(dst&: buf, shaderCode: code, type,
923 baseUniforms: uniforms, baseInputs: builtinVertexOutputs, baseOutputs: {},
924 multiViewCompatible: true, multiViewDependentSamplers);
925 result[QSSGRenderCustomMaterial::MultiViewShaderPathKeyIndex].first += buf;
926 }
927
928 if (result[QSSGRenderCustomMaterial::RegularShaderPathKeyIndex].second.flags.testFlag(flag: QSSGCustomShaderMetaData::UsesDepthTexture))
929 effectNode->requiresDepthTexture = true;
930
931 for (int i : { QSSGRenderCustomMaterial::RegularShaderPathKeyIndex, QSSGRenderCustomMaterial::MultiViewShaderPathKeyIndex }) {
932 if (type == QSSGShaderCache::ShaderType::Vertex) {
933 // qt_customMain() has an argument list which gets injected here
934 insertVertexMainArgs(snippet&: result[i].first);
935 passData.vertexShaderCode[i] = result[i].first;
936 passData.vertexMetaData[i] = result[i].second;
937 } else {
938 passData.fragmentShaderCode[i] = result[i].first;
939 passData.fragmentMetaData[i] = result[i].second;
940 }
941 }
942 }
943
944 effectNode->commands.push_back(t: { .command: nullptr, .own: true }); // will be changed to QSSGBindShader in finalizeShaders
945 passData.bindShaderCmdIndex = effectNode->commands.size() - 1;
946
947 // finalizing the shader code happens in a separate step later on by the backend node
948 passData.shaderPathKeyPrefix = shaderPathKey;
949 effectNode->shaderPrepData.passes.append(t: passData);
950 effectNode->shaderPrepData.valid = true; // trigger reprocessing the shader code later on
951
952 effectNode->commands.push_back(t: { .command: new QSSGApplyInstanceValue, .own: true });
953
954 // Buffers
955 QQuick3DShaderUtilsBuffer *outputBuffer = pass->outputBuffer;
956 if (outputBuffer) {
957 const QByteArray &outBufferName = outputBuffer->name;
958 if (outBufferName.isEmpty()) {
959 // default output buffer (with settings)
960 auto outputFormat = QQuick3DShaderUtilsBuffer::mapTextureFormat(fmt: outputBuffer->format());
961 effectNode->commands.push_back(t: { .command: new QSSGBindTarget(outputFormat), .own: true });
962 effectNode->outputFormat = outputFormat;
963 } else {
964 // Allocate buffer command
965 effectNode->commands.push_back(t: { .command: outputBuffer->getCommand(), .own: false });
966 // bind buffer
967 effectNode->commands.push_back(t: { .command: new QSSGBindBuffer(outBufferName), .own: true });
968 }
969 } else {
970 // Use the default output buffer, same format as the source buffer
971 effectNode->commands.push_back(t: { .command: new QSSGBindTarget(QSSGRenderTextureFormat::Unknown), .own: true });
972 effectNode->outputFormat = QSSGRenderTextureFormat::Unknown;
973 }
974
975 // Other commands (BufferInput, Blending ... )
976 const auto &extraCommands = pass->m_commands;
977 for (const auto &command : extraCommands) {
978 const int bufferCount = command->bufferCount();
979 for (int i = 0; i != bufferCount; ++i)
980 effectNode->commands.push_back(t: { .command: command->bufferAt(idx: i)->getCommand(), .own: false });
981 effectNode->commands.push_back(t: { .command: command->getCommand(), .own: false });
982 }
983
984 effectNode->commands.push_back(t: { .command: new QSSGRender, .own: true });
985 }
986 }
987 }
988
989 if (m_dirtyAttributes & Dirty::PropertyDirty) {
990 for (const auto &prop : std::as_const(t&: effectNode->properties)) {
991 auto p = metaObject()->property(index: prop.pid);
992 if (Q_LIKELY(p.isValid()))
993 prop.value = p.read(obj: this);
994 }
995 }
996
997 m_dirtyAttributes = 0;
998
999 DebugViewHelpers::ensureDebugObjectName(node: effectNode, src: this);
1000
1001 return effectNode;
1002}
1003
1004void QQuick3DEffect::onPropertyDirty()
1005{
1006 markDirty(type: Dirty::PropertyDirty);
1007}
1008
1009void QQuick3DEffect::onTextureDirty()
1010{
1011 markDirty(type: Dirty::TextureDirty);
1012}
1013
1014void QQuick3DEffect::onPassDirty()
1015{
1016 markDirty(type: Dirty::EffectChainDirty);
1017}
1018
1019void QQuick3DEffect::effectChainDirty()
1020{
1021 markDirty(type: Dirty::EffectChainDirty);
1022}
1023
1024void QQuick3DEffect::markDirty(QQuick3DEffect::Dirty type)
1025{
1026 if (!(m_dirtyAttributes & quint32(type))) {
1027 m_dirtyAttributes |= quint32(type);
1028 update();
1029 }
1030}
1031
1032void QQuick3DEffect::updateSceneManager(QQuick3DSceneManager *sceneManager)
1033{
1034 if (sceneManager) {
1035 for (const auto &it : std::as_const(t&: m_dynamicTextureMaps)) {
1036 if (auto tex = it->texture())
1037 QQuick3DObjectPrivate::refSceneManager(obj: tex, mgr&: *sceneManager);
1038 }
1039 } else {
1040 for (const auto &it : std::as_const(t&: m_dynamicTextureMaps)) {
1041 if (auto tex = it->texture())
1042 QQuick3DObjectPrivate::derefSceneManager(obj: tex);
1043 }
1044 }
1045}
1046
1047void QQuick3DEffect::itemChange(QQuick3DObject::ItemChange change, const QQuick3DObject::ItemChangeData &value)
1048{
1049 if (change == QQuick3DObject::ItemSceneChange)
1050 updateSceneManager(sceneManager: value.sceneManager);
1051}
1052
1053void QQuick3DEffect::qmlAppendPass(QQmlListProperty<QQuick3DShaderUtilsRenderPass> *list, QQuick3DShaderUtilsRenderPass *pass)
1054{
1055 if (!pass)
1056 return;
1057
1058 QQuick3DEffect *that = qobject_cast<QQuick3DEffect *>(object: list->object);
1059 that->m_passes.push_back(t: pass);
1060
1061 connect(sender: pass, signal: &QQuick3DShaderUtilsRenderPass::changed, context: that, slot: &QQuick3DEffect::onPassDirty);
1062 that->effectChainDirty();
1063}
1064
1065QQuick3DShaderUtilsRenderPass *QQuick3DEffect::qmlPassAt(QQmlListProperty<QQuick3DShaderUtilsRenderPass> *list, qsizetype index)
1066{
1067 QQuick3DEffect *that = qobject_cast<QQuick3DEffect *>(object: list->object);
1068 return that->m_passes.at(i: index);
1069}
1070
1071qsizetype QQuick3DEffect::qmlPassCount(QQmlListProperty<QQuick3DShaderUtilsRenderPass> *list)
1072{
1073 QQuick3DEffect *that = qobject_cast<QQuick3DEffect *>(object: list->object);
1074 return that->m_passes.size();
1075}
1076
1077void QQuick3DEffect::qmlPassClear(QQmlListProperty<QQuick3DShaderUtilsRenderPass> *list)
1078{
1079 QQuick3DEffect *that = qobject_cast<QQuick3DEffect *>(object: list->object);
1080
1081 for (QQuick3DShaderUtilsRenderPass *pass : that->m_passes)
1082 pass->disconnect(receiver: that);
1083
1084 that->m_passes.clear();
1085 that->effectChainDirty();
1086}
1087
1088void QQuick3DEffect::setDynamicTextureMap(QQuick3DShaderUtilsTextureInput *textureMap)
1089{
1090 // There can only be one texture input per property, as the texture input is a combination
1091 // of the texture used and the uniform name!
1092 auto it = m_dynamicTextureMaps.constFind(value: textureMap);
1093
1094 if (it == m_dynamicTextureMaps.constEnd()) {
1095 // Track the object, if it's destroyed we need to remove it from our table.
1096 connect(sender: textureMap, signal: &QQuick3DShaderUtilsTextureInput::destroyed, context: this, slot: [this, textureMap]() {
1097 auto it = m_dynamicTextureMaps.constFind(value: textureMap);
1098 if (it != m_dynamicTextureMaps.constEnd())
1099 m_dynamicTextureMaps.erase(i: it);
1100 });
1101 m_dynamicTextureMaps.insert(value: textureMap);
1102
1103 update();
1104 }
1105}
1106
1107QT_END_NAMESPACE
1108
