1 | // Copyright 2009-2021 Intel Corporation |
2 | // SPDX-License-Identifier: Apache-2.0 |
3 | |
4 | #pragma once |
5 | |
6 | #include "rtcore.h" |
7 | |
8 | namespace embree { |
9 | namespace instance_id_stack { |
10 | |
11 | static_assert(RTC_MAX_INSTANCE_LEVEL_COUNT > 0, |
12 | "RTC_MAX_INSTANCE_LEVEL_COUNT must be greater than 0." ); |
13 | |
14 | /******************************************************************************* |
15 | * Instance ID stack manipulation. |
16 | * This is used from the instance intersector. |
17 | ******************************************************************************/ |
18 | |
19 | /* |
20 | * Push an instance to the stack. |
21 | */ |
22 | RTC_FORCEINLINE bool push(RTCIntersectContext* context, |
23 | unsigned instanceId) |
24 | { |
25 | #if RTC_MAX_INSTANCE_LEVEL_COUNT > 1 |
26 | const bool spaceAvailable = context->instStackSize < RTC_MAX_INSTANCE_LEVEL_COUNT; |
  /* We assert here because instances are silently dropped when the stack is full.
     That would be quite hard to track down in production. */
29 | assert(spaceAvailable); |
30 | if (likely(spaceAvailable)) |
31 | context->instID[context->instStackSize++] = instanceId; |
32 | return spaceAvailable; |
33 | #else |
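  /* With only one supported instance level, the single slot is free iff it
     still holds the invalid id. */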
34 | const bool spaceAvailable = (context->instID[0] == RTC_INVALID_GEOMETRY_ID); |
35 | assert(spaceAvailable); |
36 | if (likely(spaceAvailable)) |
37 | context->instID[0] = instanceId; |
38 | return spaceAvailable; |
39 | #endif |
40 | } |
41 | |
42 | |
43 | /* |
44 | * Pop the last instance pushed to the stack. |
45 | * Do not call on an empty stack. |
46 | */ |
47 | RTC_FORCEINLINE void pop(RTCIntersectContext* context) |
48 | { |
49 | assert(context); |
50 | #if RTC_MAX_INSTANCE_LEVEL_COUNT > 1 |
51 | assert(context->instStackSize > 0); |
52 | context->instID[--context->instStackSize] = RTC_INVALID_GEOMETRY_ID; |
53 | #else |
54 | assert(context->instID[0] != RTC_INVALID_GEOMETRY_ID); |
55 | context->instID[0] = RTC_INVALID_GEOMETRY_ID; |
56 | #endif |
57 | } |
58 | |
59 | /* |
60 | * Optimized instance id stack copy. |
61 | * The copy() functions will either copy full |
62 | * stacks or copy only until the last valid element has been copied, depending |
63 | * on RTC_MAX_INSTANCE_LEVEL_COUNT. |
64 | */ |
65 | RTC_FORCEINLINE void copy_UU(const unsigned* src, unsigned* tgt) |
66 | { |
67 | #if (RTC_MAX_INSTANCE_LEVEL_COUNT == 1) |
68 | tgt[0] = src[0]; |
69 | |
70 | #else |
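  /* The RTC_MAX_INSTANCE_LEVEL_COUNT > 4 test below (and in the other copy
     functions) is a compile-time constant: for shallow stacks the loop is
     simply unrolled, while for deeper stacks it is presumably cheaper to stop
     at the first invalid id, since everything behind it is unused. */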
71 | for (unsigned l = 0; l < RTC_MAX_INSTANCE_LEVEL_COUNT; ++l) { |
72 | tgt[l] = src[l]; |
73 | if (RTC_MAX_INSTANCE_LEVEL_COUNT > 4) |
74 | if (src[l] == RTC_INVALID_GEOMETRY_ID) |
75 | break; |
76 | } |
77 | #endif |
78 | } |
79 | |
80 | template <int K> |
81 | RTC_FORCEINLINE void copy_UV(const unsigned* src, vuint<K>* tgt) |
82 | { |
83 | #if (RTC_MAX_INSTANCE_LEVEL_COUNT == 1) |
84 | tgt[0] = src[0]; |
85 | |
86 | #else |
87 | for (unsigned l = 0; l < RTC_MAX_INSTANCE_LEVEL_COUNT; ++l) { |
88 | tgt[l] = src[l]; |
89 | if (RTC_MAX_INSTANCE_LEVEL_COUNT > 4) |
90 | if (src[l] == RTC_INVALID_GEOMETRY_ID) |
91 | break; |
92 | } |
93 | #endif |
94 | } |
95 | |
96 | template <int K> |
97 | RTC_FORCEINLINE void copy_UV(const unsigned* src, vuint<K>* tgt, size_t j) |
98 | { |
99 | #if (RTC_MAX_INSTANCE_LEVEL_COUNT == 1) |
100 | tgt[0][j] = src[0]; |
101 | |
102 | #else |
103 | for (unsigned l = 0; l < RTC_MAX_INSTANCE_LEVEL_COUNT; ++l) { |
104 | tgt[l][j] = src[l]; |
105 | if (RTC_MAX_INSTANCE_LEVEL_COUNT > 4) |
106 | if (src[l] == RTC_INVALID_GEOMETRY_ID) |
107 | break; |
108 | } |
109 | #endif |
110 | } |
111 | |
112 | template <int K> |
113 | RTC_FORCEINLINE void copy_UV(const unsigned* src, vuint<K>* tgt, const vbool<K>& mask) |
114 | { |
115 | #if (RTC_MAX_INSTANCE_LEVEL_COUNT == 1) |
116 | vuint<K>::store(mask, tgt, src[0]); |
117 | |
118 | #else |
119 | for (unsigned l = 0; l < RTC_MAX_INSTANCE_LEVEL_COUNT; ++l) { |
120 | vuint<K>::store(mask, tgt + l, src[l]); |
121 | if (RTC_MAX_INSTANCE_LEVEL_COUNT > 4) |
122 | if (src[l] == RTC_INVALID_GEOMETRY_ID) |
123 | break; |
124 | } |
125 | #endif |
126 | } |
127 | |
128 | template <int K> |
129 | RTC_FORCEINLINE void copy_VU(const vuint<K>* src, unsigned* tgt, size_t i) |
130 | { |
131 | #if (RTC_MAX_INSTANCE_LEVEL_COUNT == 1) |
132 | tgt[0] = src[0][i]; |
133 | |
134 | #else |
135 | for (unsigned l = 0; l < RTC_MAX_INSTANCE_LEVEL_COUNT; ++l) { |
136 | tgt[l] = src[l][i]; |
137 | if (RTC_MAX_INSTANCE_LEVEL_COUNT > 4) |
138 | if (src[l][i] == RTC_INVALID_GEOMETRY_ID) |
139 | break; |
140 | } |
141 | #endif |
142 | } |
143 | |
144 | template <int K> |
145 | RTC_FORCEINLINE void copy_VV(const vuint<K>* src, vuint<K>* tgt, size_t i, size_t j) |
146 | { |
147 | #if (RTC_MAX_INSTANCE_LEVEL_COUNT == 1) |
148 | tgt[0][j] = src[0][i]; |
149 | |
150 | #else |
151 | for (unsigned l = 0; l < RTC_MAX_INSTANCE_LEVEL_COUNT; ++l) { |
152 | tgt[l][j] = src[l][i]; |
153 | if (RTC_MAX_INSTANCE_LEVEL_COUNT > 4) |
154 | if (src[l][i] == RTC_INVALID_GEOMETRY_ID) |
155 | break; |
156 | } |
157 | #endif |
158 | } |
159 | |
160 | template <int K> |
161 | RTC_FORCEINLINE void copy_VV(const vuint<K>* src, vuint<K>* tgt, const vbool<K>& mask) |
162 | { |
163 | #if (RTC_MAX_INSTANCE_LEVEL_COUNT == 1) |
164 | vuint<K>::store(mask, tgt, src[0]); |
165 | |
166 | #else |
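  /* Track per-lane completion: inactive lanes start out done, and a lane
     becomes done once an invalid id has been copied for it; the loop may
     terminate early only when all lanes are done. */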
167 | vbool<K> done = !mask; |
168 | for (unsigned l = 0; l < RTC_MAX_INSTANCE_LEVEL_COUNT; ++l) { |
169 | vuint<K>::store(mask, tgt + l, src[l]); |
170 | if (RTC_MAX_INSTANCE_LEVEL_COUNT > 4) { |
171 | done |= src[l] == RTC_INVALID_GEOMETRY_ID; |
172 | if (all(done)) break; |
173 | } |
174 | } |
175 | #endif |
176 | } |
177 | |
178 | } // namespace instance_id_stack |
179 | } // namespace embree |
180 | |