1 | //===--- Program.cpp - Bytecode for the constexpr VM ------------*- C++ -*-===// |
2 | // |
3 | // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. |
4 | // See https://llvm.org/LICENSE.txt for license information. |
5 | // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception |
6 | // |
7 | //===----------------------------------------------------------------------===// |
8 | |
9 | #include "Program.h" |
10 | #include "ByteCodeStmtGen.h" |
11 | #include "Context.h" |
12 | #include "Function.h" |
13 | #include "Integral.h" |
14 | #include "Opcode.h" |
15 | #include "PrimType.h" |
16 | #include "clang/AST/Decl.h" |
17 | #include "clang/AST/DeclCXX.h" |
18 | |
19 | using namespace clang; |
20 | using namespace clang::interp; |
21 | |
22 | unsigned Program::getOrCreateNativePointer(const void *Ptr) { |
23 | auto It = NativePointerIndices.find(Val: Ptr); |
24 | if (It != NativePointerIndices.end()) |
25 | return It->second; |
26 | |
27 | unsigned Idx = NativePointers.size(); |
28 | NativePointers.push_back(x: Ptr); |
29 | NativePointerIndices[Ptr] = Idx; |
30 | return Idx; |
31 | } |
32 | |
/// Returns the native pointer stored at index \p Idx.
/// \p Idx must have been produced by getOrCreateNativePointer(); no bounds
/// check is performed here.
const void *Program::getNativePointer(unsigned Idx) {
  return NativePointers[Idx];
}
36 | |
37 | unsigned Program::createGlobalString(const StringLiteral *S) { |
38 | const size_t CharWidth = S->getCharByteWidth(); |
39 | const size_t BitWidth = CharWidth * Ctx.getCharBit(); |
40 | |
41 | PrimType CharType; |
42 | switch (CharWidth) { |
43 | case 1: |
44 | CharType = PT_Sint8; |
45 | break; |
46 | case 2: |
47 | CharType = PT_Uint16; |
48 | break; |
49 | case 4: |
50 | CharType = PT_Uint32; |
51 | break; |
52 | default: |
53 | llvm_unreachable("unsupported character width" ); |
54 | } |
55 | |
56 | // Create a descriptor for the string. |
57 | Descriptor *Desc = allocateDescriptor(Args&: S, Args&: CharType, Args: Descriptor::InlineDescMD, |
58 | Args: S->getLength() + 1, |
59 | /*isConst=*/Args: true, |
60 | /*isTemporary=*/Args: false, |
61 | /*isMutable=*/Args: false); |
62 | |
63 | // Allocate storage for the string. |
64 | // The byte length does not include the null terminator. |
65 | unsigned I = Globals.size(); |
66 | unsigned Sz = Desc->getAllocSize(); |
67 | auto *G = new (Allocator, Sz) Global(Desc, /*isStatic=*/true, |
68 | /*isExtern=*/false); |
69 | G->block()->invokeCtor(); |
70 | |
71 | new (G->block()->rawData()) InlineDescriptor(Desc); |
72 | Globals.push_back(x: G); |
73 | |
74 | // Construct the string in storage. |
75 | const Pointer Ptr(G->block()); |
76 | for (unsigned I = 0, N = S->getLength(); I <= N; ++I) { |
77 | Pointer Field = Ptr.atIndex(Idx: I).narrow(); |
78 | const uint32_t CodePoint = I == N ? 0 : S->getCodeUnit(i: I); |
79 | switch (CharType) { |
80 | case PT_Sint8: { |
81 | using T = PrimConv<PT_Sint8>::T; |
82 | Field.deref<T>() = T::from(Value: CodePoint, NumBits: BitWidth); |
83 | Field.initialize(); |
84 | break; |
85 | } |
86 | case PT_Uint16: { |
87 | using T = PrimConv<PT_Uint16>::T; |
88 | Field.deref<T>() = T::from(Value: CodePoint, NumBits: BitWidth); |
89 | Field.initialize(); |
90 | break; |
91 | } |
92 | case PT_Uint32: { |
93 | using T = PrimConv<PT_Uint32>::T; |
94 | Field.deref<T>() = T::from(Value: CodePoint, NumBits: BitWidth); |
95 | Field.initialize(); |
96 | break; |
97 | } |
98 | default: |
99 | llvm_unreachable("unsupported character type" ); |
100 | } |
101 | } |
102 | return I; |
103 | } |
104 | |
/// Returns a Pointer to the storage of the global at index \p Idx.
/// \p Idx must be an index previously returned by one of the
/// createGlobal*/getOrCreate* functions.
Pointer Program::getPtrGlobal(unsigned Idx) const {
  assert(Idx < Globals.size());
  return Pointer(Globals[Idx]->block());
}
109 | |
/// Looks up the global index for \p VD, consulting its redeclaration chain.
std::optional<unsigned> Program::getGlobal(const ValueDecl *VD) {
  // Fast path: this exact declaration was already mapped.
  if (auto It = GlobalIndices.find(Val: VD); It != GlobalIndices.end())
    return It->second;

  // Find any previous declarations which were already evaluated.
  std::optional<unsigned> Index;
  for (const Decl *P = VD->getPreviousDecl(); P; P = P->getPreviousDecl()) {
    if (auto It = GlobalIndices.find(Val: P); It != GlobalIndices.end()) {
      Index = It->second;
      break;
    }
  }

  // Map the decl to the existing index.
  if (Index)
    GlobalIndices[VD] = *Index;

  // NOTE(review): nullopt is returned even when an index from a previous
  // declaration was found above — only the VD->index mapping is recorded,
  // so a caller such as getOrCreateGlobal will still create a fresh global
  // for this declaration. This looks deliberate (redeclarations may need
  // re-evaluation with their own initializer), but confirm against callers
  // before relying on it.
  return std::nullopt;
}
129 | |
130 | std::optional<unsigned> Program::getOrCreateGlobal(const ValueDecl *VD, |
131 | const Expr *Init) { |
132 | if (auto Idx = getGlobal(VD)) |
133 | return Idx; |
134 | |
135 | if (auto Idx = createGlobal(VD, Init)) { |
136 | GlobalIndices[VD] = *Idx; |
137 | return Idx; |
138 | } |
139 | return std::nullopt; |
140 | } |
141 | |
/// Returns the index of a "dummy" global for \p VD, creating one on first
/// request. Dummy globals stand in for variables whose value cannot be
/// read during evaluation; they carry storage but no meaningful contents.
std::optional<unsigned> Program::getOrCreateDummy(const ValueDecl *VD) {
  // Dedup blocks since they are immutable and pointers cannot be compared.
  if (auto It = DummyVariables.find(Val: VD); It != DummyVariables.end())
    return It->second;

  // Create dummy descriptor.
  // We create descriptors of 'array of unknown size' if the type is an array
  // type _and_ the size isn't known (it's not a ConstantArrayType). If the size
  // is known however, we create a regular dummy pointer.
  Descriptor *Desc;
  if (const auto *AT = VD->getType()->getAsArrayTypeUnsafe();
      AT && !isa<ConstantArrayType>(AT))
    Desc = allocateDescriptor(Args&: VD, Args: Descriptor::UnknownSize{});
  else
    Desc = allocateDescriptor(Args&: VD);

  // Allocate a block for storage. Dummies are always static, non-extern.
  unsigned I = Globals.size();

  auto *G = new (Allocator, Desc->getAllocSize())
      Global(getCurrentDecl(), Desc, /*IsStatic=*/true, /*IsExtern=*/false);
  G->block()->invokeCtor();

  // Register the block and cache the index so later requests reuse it.
  Globals.push_back(x: G);
  DummyVariables[VD] = I;
  return I;
}
169 | |
170 | std::optional<unsigned> Program::createGlobal(const ValueDecl *VD, |
171 | const Expr *Init) { |
172 | bool IsStatic, IsExtern; |
173 | if (const auto *Var = dyn_cast<VarDecl>(Val: VD)) { |
174 | IsStatic = Context::shouldBeGloballyIndexed(VD); |
175 | IsExtern = Var->hasExternalStorage(); |
176 | } else if (isa<UnnamedGlobalConstantDecl, MSGuidDecl>(Val: VD)) { |
177 | IsStatic = true; |
178 | IsExtern = false; |
179 | } else { |
180 | IsStatic = false; |
181 | IsExtern = true; |
182 | } |
183 | if (auto Idx = createGlobal(VD, VD->getType(), IsStatic, IsExtern, Init)) { |
184 | for (const Decl *P = VD; P; P = P->getPreviousDecl()) |
185 | GlobalIndices[P] = *Idx; |
186 | return *Idx; |
187 | } |
188 | return std::nullopt; |
189 | } |
190 | |
/// Creates a global backed by the expression \p E (e.g. a lifetime-extended
/// temporary). Expression-keyed globals are always static and non-extern;
/// the DeclTy overload additionally marks them as temporary storage.
std::optional<unsigned> Program::createGlobal(const Expr *E) {
  return createGlobal(D: E, Ty: E->getType(), /*isStatic=*/IsStatic: true, /*isExtern=*/IsExtern: false);
}
194 | |
195 | std::optional<unsigned> Program::createGlobal(const DeclTy &D, QualType Ty, |
196 | bool IsStatic, bool IsExtern, |
197 | const Expr *Init) { |
198 | // Create a descriptor for the global. |
199 | Descriptor *Desc; |
200 | const bool IsConst = Ty.isConstQualified(); |
201 | const bool IsTemporary = D.dyn_cast<const Expr *>(); |
202 | if (std::optional<PrimType> T = Ctx.classify(T: Ty)) |
203 | Desc = |
204 | createDescriptor(D, Type: *T, MDSize: Descriptor::InlineDescMD, IsConst, IsTemporary); |
205 | else |
206 | Desc = createDescriptor(D, Ty: Ty.getTypePtr(), MDSize: Descriptor::InlineDescMD, |
207 | IsConst, IsTemporary); |
208 | |
209 | if (!Desc) |
210 | return std::nullopt; |
211 | |
212 | // Allocate a block for storage. |
213 | unsigned I = Globals.size(); |
214 | |
215 | auto *G = new (Allocator, Desc->getAllocSize()) |
216 | Global(getCurrentDecl(), Desc, IsStatic, IsExtern); |
217 | G->block()->invokeCtor(); |
218 | |
219 | // Initialize InlineDescriptor fields. |
220 | new (G->block()->rawData()) InlineDescriptor(Desc); |
221 | Globals.push_back(x: G); |
222 | |
223 | return I; |
224 | } |
225 | |
226 | Function *Program::getFunction(const FunctionDecl *F) { |
227 | F = F->getCanonicalDecl(); |
228 | assert(F); |
229 | auto It = Funcs.find(Val: F); |
230 | return It == Funcs.end() ? nullptr : It->second.get(); |
231 | } |
232 | |
/// Returns the interpreter's Record for \p RD, computing its storage layout
/// (non-virtual bases, fields, virtual bases) on first request. Returns
/// nullptr if the record has no complete definition or if any base or field
/// type cannot be described.
Record *Program::getOrCreateRecord(const RecordDecl *RD) {
  // Use the actual definition as a key.
  RD = RD->getDefinition();
  if (!RD)
    return nullptr;

  if (!RD->isCompleteDefinition())
    return nullptr;

  // Deduplicate records.
  if (auto It = Records.find(Val: RD); It != Records.end())
    return It->second;

  // We insert nullptr now and replace that later, so recursive calls
  // to this function with the same RecordDecl don't run into
  // infinite recursion.
  Records.insert(KV: {RD, nullptr});

  // Number of bytes required by fields and base classes.
  unsigned BaseSize = 0;
  // Number of bytes required by virtual base.
  unsigned VirtSize = 0;

  // Helper to get a base descriptor. Returns nullptr when the base record
  // itself could not be created, which aborts the whole layout below.
  auto GetBaseDesc = [this](const RecordDecl *BD,
                            const Record *BR) -> const Descriptor * {
    if (!BR)
      return nullptr;
    return allocateDescriptor(Args&: BD, Args&: BR, Args: std::nullopt, /*isConst=*/Args: false,
                              /*isTemporary=*/Args: false,
                              /*isMutable=*/Args: false);
  };

  // Reserve space for base classes. Each (v)base is preceded by an
  // InlineDescriptor, so the recorded offset points past that metadata;
  // virtual bases are laid out in their own region after the others.
  Record::BaseList Bases;
  Record::VirtualBaseList VirtBases;
  if (const auto *CD = dyn_cast<CXXRecordDecl>(Val: RD)) {

    for (const CXXBaseSpecifier &Spec : CD->bases()) {
      if (Spec.isVirtual())
        continue;

      // In error cases, the base might not be a RecordType.
      if (const auto *RT = Spec.getType()->getAs<RecordType>()) {
        const RecordDecl *BD = RT->getDecl();
        const Record *BR = getOrCreateRecord(RD: BD);

        if (const Descriptor *Desc = GetBaseDesc(BD, BR)) {
          BaseSize += align(Size: sizeof(InlineDescriptor));
          Bases.push_back(Elt: {.Decl: BD, .Offset: BaseSize, .Desc: Desc, .R: BR});
          BaseSize += align(Size: BR->getSize());
          continue;
        }
      }
      // Base was not a record or could not be described: give up.
      return nullptr;
    }

    for (const CXXBaseSpecifier &Spec : CD->vbases()) {

      if (const auto *RT = Spec.getType()->getAs<RecordType>()) {
        const RecordDecl *BD = RT->getDecl();
        const Record *BR = getOrCreateRecord(RD: BD);

        if (const Descriptor *Desc = GetBaseDesc(BD, BR)) {
          VirtSize += align(Size: sizeof(InlineDescriptor));
          VirtBases.push_back(Elt: {.Decl: BD, .Offset: VirtSize, .Desc: Desc, .R: BR});
          VirtSize += align(Size: BR->getSize());
          continue;
        }
      }
      // Virtual base was not a record or could not be described: give up.
      return nullptr;
    }
  }

  // Reserve space for fields.
  Record::FieldList Fields;
  for (const FieldDecl *FD : RD->fields()) {
    // Note that we DO create fields and descriptors
    // for unnamed bitfields here, even though we later ignore
    // them everywhere. That's because otherwise the FieldDecl's
    // getFieldIndex() would not match our field indices.

    // Reserve space for the field's descriptor and the offset.
    BaseSize += align(Size: sizeof(InlineDescriptor));

    // Classify the field and add its metadata: primitive fields get a
    // primitive descriptor, everything else a composite descriptor.
    QualType FT = FD->getType();
    const bool IsConst = FT.isConstQualified();
    const bool IsMutable = FD->isMutable();
    const Descriptor *Desc;
    if (std::optional<PrimType> T = Ctx.classify(T: FT)) {
      Desc = createDescriptor(FD, *T, std::nullopt, IsConst,
                              /*isTemporary=*/false, IsMutable);
    } else {
      Desc = createDescriptor(FD, FT.getTypePtr(), std::nullopt, IsConst,
                              /*isTemporary=*/false, IsMutable);
    }
    if (!Desc)
      return nullptr;
    Fields.push_back(Elt: {.Decl: FD, .Offset: BaseSize, .Desc: Desc});
    BaseSize += align(Size: Desc->getAllocSize());
  }

  // Replace the nullptr placeholder inserted above with the real Record.
  Record *R = new (Allocator) Record(RD, std::move(Bases), std::move(Fields),
                                     std::move(VirtBases), VirtSize, BaseSize);
  Records[RD] = R;
  return R;
}
341 | |
/// Creates a Descriptor for a composite (non-primitive) type \p Ty, keyed
/// by \p D. Dispatches on the type: records, arrays (fixed, overflow-checked,
/// or of unknown bound), atomics (unwrapped), complex and vector types
/// (stored as small arrays of their element type). Returns nullptr for any
/// type this interpreter cannot represent.
Descriptor *Program::createDescriptor(const DeclTy &D, const Type *Ty,
                                      Descriptor::MetadataSize MDSize,
                                      bool IsConst, bool IsTemporary,
                                      bool IsMutable, const Expr *Init) {
  // Classes and structures.
  if (const auto *RT = Ty->getAs<RecordType>()) {
    if (const auto *Record = getOrCreateRecord(RD: RT->getDecl()))
      return allocateDescriptor(Args: D, Args&: Record, Args&: MDSize, Args&: IsConst, Args&: IsTemporary,
                                Args&: IsMutable);
    // NOTE: falls through to return nullptr below if the record could not
    // be created (incomplete definition, undescribable member, ...).
  }

  // Arrays.
  if (const auto ArrayType = Ty->getAsArrayTypeUnsafe()) {
    QualType ElemTy = ArrayType->getElementType();
    // Array of well-known bounds.
    if (auto CAT = dyn_cast<ConstantArrayType>(ArrayType)) {
      size_t NumElems = CAT->getZExtSize();
      if (std::optional<PrimType> T = Ctx.classify(T: ElemTy)) {
        // Arrays of primitives. Reject sizes whose total byte count would
        // overflow the unsigned allocation-size arithmetic.
        unsigned ElemSize = primSize(Type: *T);
        if (std::numeric_limits<unsigned>::max() / ElemSize <= NumElems) {
          return {};
        }
        return allocateDescriptor(Args: D, Args&: *T, Args&: MDSize, Args&: NumElems, Args&: IsConst, Args&: IsTemporary,
                                  Args&: IsMutable);
      } else {
        // Arrays of composites. In this case, the array is a list of pointers,
        // followed by the actual elements.
        const Descriptor *ElemDesc = createDescriptor(
            D, Ty: ElemTy.getTypePtr(), MDSize, IsConst, IsTemporary);
        if (!ElemDesc)
          return nullptr;
        // Each element also carries an InlineDescriptor; include it in the
        // per-element size before the overflow check.
        unsigned ElemSize =
            ElemDesc->getAllocSize() + sizeof(InlineDescriptor);
        if (std::numeric_limits<unsigned>::max() / ElemSize <= NumElems)
          return {};
        return allocateDescriptor(Args: D, Args&: ElemDesc, Args&: MDSize, Args&: NumElems, Args&: IsConst,
                                  Args&: IsTemporary, Args&: IsMutable);
      }
    }

    // Array of unknown bounds - cannot be accessed and pointer arithmetic
    // is forbidden on pointers to such objects.
    if (isa<IncompleteArrayType>(ArrayType)) {
      if (std::optional<PrimType> T = Ctx.classify(T: ElemTy)) {
        return allocateDescriptor(Args: D, Args&: *T, Args&: MDSize, Args&: IsTemporary,
                                  Args: Descriptor::UnknownSize{});
      } else {
        const Descriptor *Desc = createDescriptor(D, Ty: ElemTy.getTypePtr(),
                                                  MDSize, IsConst, IsTemporary);
        if (!Desc)
          return nullptr;
        return allocateDescriptor(Args: D, Args&: Desc, Args&: MDSize, Args&: IsTemporary,
                                  Args: Descriptor::UnknownSize{});
      }
    }
    // Other array kinds (e.g. variable-length) fall through to nullptr.
  }

  // Atomic types. Descriptors are built for the underlying value type;
  // the atomic wrapper carries no extra storage here.
  if (const auto *AT = Ty->getAs<AtomicType>()) {
    const Type *InnerTy = AT->getValueType().getTypePtr();
    return createDescriptor(D, Ty: InnerTy, MDSize, IsConst, IsTemporary,
                            IsMutable);
  }

  // Complex types - represented as arrays of elements.
  // NOTE(review): classify() is dereferenced unconditionally here — assumes
  // a complex element type always classifies to a primitive; confirm.
  if (const auto *CT = Ty->getAs<ComplexType>()) {
    PrimType ElemTy = *Ctx.classify(T: CT->getElementType());
    return allocateDescriptor(Args: D, Args&: ElemTy, Args&: MDSize, Args: 2, Args&: IsConst, Args&: IsTemporary,
                              Args&: IsMutable);
  }

  // Same with vector types (same unchecked classify() assumption).
  if (const auto *VT = Ty->getAs<VectorType>()) {
    PrimType ElemTy = *Ctx.classify(T: VT->getElementType());
    return allocateDescriptor(Args: D, Args&: ElemTy, Args&: MDSize, Args: VT->getNumElements(), Args&: IsConst,
                              Args&: IsTemporary, Args&: IsMutable);
  }

  // Unsupported type.
  return nullptr;
}
423 | |