1//===--- Pointer.cpp - Types for the constexpr VM ---------------*- C++ -*-===//
2//
3// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4// See https://llvm.org/LICENSE.txt for license information.
5// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6//
7//===----------------------------------------------------------------------===//
8
9#include "Pointer.h"
10#include "Boolean.h"
11#include "Context.h"
12#include "Floating.h"
13#include "Function.h"
14#include "Integral.h"
15#include "InterpBlock.h"
16#include "MemberPointer.h"
17#include "PrimType.h"
18#include "Record.h"
19#include "clang/AST/ExprCXX.h"
20#include "clang/AST/RecordLayout.h"
21
22using namespace clang;
23using namespace clang::interp;
24
/// Creates a pointer to the start of \p Pointee's data, i.e. just past the
/// block's metadata region (base and offset both equal the metadata size).
Pointer::Pointer(Block *Pointee)
    : Pointer(Pointee, Pointee->getDescriptor()->getMetadataSize(),
              Pointee->getDescriptor()->getMetadataSize()) {}
28
/// Creates a pointer into \p Pointee using the same value for both the base
/// and the offset.
Pointer::Pointer(Block *Pointee, uint64_t BaseAndOffset)
    : Pointer(Pointee, BaseAndOffset, BaseAndOffset) {}
31
Pointer::Pointer(const Pointer &P)
    : Offset(P.Offset), StorageKind(P.StorageKind),
      PointeeStorage(P.PointeeStorage) {

  // Block pointers are tracked intrusively by their pointee block, so the
  // new copy has to register itself with the block as well.
  if (isBlockPointer() && PointeeStorage.BS.Pointee)
    PointeeStorage.BS.Pointee->addPointer(this);
}
39
Pointer::Pointer(Block *Pointee, unsigned Base, uint64_t Offset)
    : Offset(Offset), StorageKind(Storage::Block) {
  // The base is either the special root marker or must be pointer-aligned,
  // since an inline descriptor precedes every non-root base.
  assert((Base == RootPtrMark || Base % alignof(void *) == 0) && "wrong base");

  PointeeStorage.BS = {Pointee, Base};

  // Register with the pointee so the block can invalidate this pointer when
  // the block dies.
  if (Pointee)
    Pointee->addPointer(this);
}
49
Pointer::Pointer(Pointer &&P)
    : Offset(P.Offset), StorageKind(P.StorageKind),
      PointeeStorage(P.PointeeStorage) {

  // Transfer the registration in the block's intrusive pointer list from the
  // moved-from pointer to this one.
  if (StorageKind == Storage::Block && PointeeStorage.BS.Pointee)
    PointeeStorage.BS.Pointee->replacePointer(&P, this);
}
57
Pointer::~Pointer() {
  // Only block pointers are registered with their pointee.
  if (!isBlockPointer())
    return;

  if (Block *Pointee = PointeeStorage.BS.Pointee) {
    // Deregister first, then give the block a chance to free itself:
    // cleanup() may destroy a dead block once its last pointer is gone.
    Pointee->removePointer(this);
    PointeeStorage.BS.Pointee = nullptr;
    Pointee->cleanup();
  }
}
68
69void Pointer::operator=(const Pointer &P) {
70 // If the current storage type is Block, we need to remove
71 // this pointer from the block.
72 if (isBlockPointer()) {
73 if (P.isBlockPointer() && this->block() == P.block()) {
74 Offset = P.Offset;
75 PointeeStorage.BS.Base = P.PointeeStorage.BS.Base;
76 return;
77 }
78
79 if (Block *Pointee = PointeeStorage.BS.Pointee) {
80 Pointee->removePointer(P: this);
81 PointeeStorage.BS.Pointee = nullptr;
82 Pointee->cleanup();
83 }
84 }
85
86 StorageKind = P.StorageKind;
87 Offset = P.Offset;
88
89 if (P.isBlockPointer()) {
90 PointeeStorage.BS = P.PointeeStorage.BS;
91 PointeeStorage.BS.Pointee = P.PointeeStorage.BS.Pointee;
92
93 if (PointeeStorage.BS.Pointee)
94 PointeeStorage.BS.Pointee->addPointer(P: this);
95 } else if (P.isIntegralPointer()) {
96 PointeeStorage.Int = P.PointeeStorage.Int;
97 } else if (P.isFunctionPointer()) {
98 PointeeStorage.Fn = P.PointeeStorage.Fn;
99 } else if (P.isTypeidPointer()) {
100 PointeeStorage.Typeid = P.PointeeStorage.Typeid;
101 } else {
102 assert(false && "Unhandled storage kind");
103 }
104}
105
106void Pointer::operator=(Pointer &&P) {
107 // If the current storage type is Block, we need to remove
108 // this pointer from the block.
109 if (isBlockPointer()) {
110 if (P.isBlockPointer() && this->block() == P.block()) {
111 Offset = P.Offset;
112 PointeeStorage.BS.Base = P.PointeeStorage.BS.Base;
113 return;
114 }
115
116 if (Block *Pointee = PointeeStorage.BS.Pointee) {
117 Pointee->removePointer(P: this);
118 PointeeStorage.BS.Pointee = nullptr;
119 Pointee->cleanup();
120 }
121 }
122
123 StorageKind = P.StorageKind;
124 Offset = P.Offset;
125
126 if (P.isBlockPointer()) {
127 PointeeStorage.BS = P.PointeeStorage.BS;
128 PointeeStorage.BS.Pointee = P.PointeeStorage.BS.Pointee;
129
130 if (PointeeStorage.BS.Pointee)
131 PointeeStorage.BS.Pointee->addPointer(P: this);
132 } else if (P.isIntegralPointer()) {
133 PointeeStorage.Int = P.PointeeStorage.Int;
134 } else if (P.isFunctionPointer()) {
135 PointeeStorage.Fn = P.PointeeStorage.Fn;
136 } else if (P.isTypeidPointer()) {
137 PointeeStorage.Typeid = P.PointeeStorage.Typeid;
138 } else {
139 assert(false && "Unhandled storage kind");
140 }
141}
142
/// Converts this interpreter pointer into a clang APValue lvalue, rebuilding
/// the lvalue base and the LValuePath (the a.b[2].c designator) from the
/// block's descriptor chain.
APValue Pointer::toAPValue(const ASTContext &ASTCtx) const {
  llvm::SmallVector<APValue::LValuePathEntry, 5> Path;

  // Null pointers become a null lvalue with an empty base.
  if (isZero())
    return APValue(static_cast<const Expr *>(nullptr), CharUnits::Zero(), Path,
                   /*IsOnePastEnd=*/false, /*IsNullPtr=*/true);
  // Integral pointers carry only a raw byte value; no designator exists.
  if (isIntegralPointer())
    return APValue(static_cast<const Expr *>(nullptr),
                   CharUnits::fromQuantity(asIntPointer().Value + this->Offset),
                   Path,
                   /*IsOnePastEnd=*/false, /*IsNullPtr=*/false);
  if (isFunctionPointer()) {
    const FunctionPointer &FP = asFunctionPointer();
    // Prefer the declaration as the base; functions without one fall back to
    // their defining expression.
    if (const FunctionDecl *FD = FP.getFunction()->getDecl())
      return APValue(FD, CharUnits::fromQuantity(Offset), {},
                     /*OnePastTheEnd=*/false, /*IsNull=*/false);
    return APValue(FP.getFunction()->getExpr(), CharUnits::fromQuantity(Offset),
                   {},
                   /*OnePastTheEnd=*/false, /*IsNull=*/false);
  }

  if (isTypeidPointer()) {
    TypeInfoLValue TypeInfo(PointeeStorage.Typeid.TypePtr);
    return APValue(
        APValue::LValueBase::getTypeInfo(
            TypeInfo, QualType(PointeeStorage.Typeid.TypeInfoType, 0)),
        CharUnits::Zero(), {},
        /*OnePastTheEnd=*/false, /*IsNull=*/false);
  }

  // Build the lvalue base from the block.
  const Descriptor *Desc = getDeclDesc();
  APValue::LValueBase Base;
  if (const auto *VD = Desc->asValueDecl())
    Base = VD;
  else if (const auto *E = Desc->asExpr()) {
    // Dynamic allocations get a DynamicAllocLValue base instead of the
    // new-expression itself.
    if (block()->isDynamic()) {
      QualType AllocatedType = getDeclPtr().getFieldDesc()->getDataType(ASTCtx);
      // FIXME: Suboptimal counting of dynamic allocations. Move this to Context
      // or InterpState?
      static int ReportedDynamicAllocs = 0;
      DynamicAllocLValue DA(ReportedDynamicAllocs++);
      Base = APValue::LValueBase::getDynamicAlloc(DA, AllocatedType);
    } else {
      Base = E;
    }
  } else
    llvm_unreachable("Invalid allocation type");

  if (isUnknownSizeArray())
    return APValue(Base, CharUnits::Zero(), Path,
                   /*IsOnePastEnd=*/isOnePastEnd(), /*IsNullPtr=*/false);

  // Byte offset of the designator, accumulated while walking up.
  // NOTE: intentionally shadows the member 'Offset'.
  CharUnits Offset = CharUnits::Zero();

  auto getFieldOffset = [&](const FieldDecl *FD) -> CharUnits {
    // This shouldn't happen, but if it does, don't crash inside
    // getASTRecordLayout.
    if (FD->getParent()->isInvalidDecl())
      return CharUnits::Zero();
    const ASTRecordLayout &Layout = ASTCtx.getASTRecordLayout(FD->getParent());
    unsigned FieldIndex = FD->getFieldIndex();
    return ASTCtx.toCharUnitsFromBits(Layout.getFieldOffset(FieldIndex));
  };

  // References are not lvalue paths; they use NoLValuePath instead.
  bool UsePath = true;
  if (const ValueDecl *VD = getDeclDesc()->asValueDecl();
      VD && VD->getType()->isReferenceType())
    UsePath = false;

  // Build the path into the object, walking from this pointer up to the
  // declaration root and recording one path entry per level.
  bool OnePastEnd = isOnePastEnd();
  Pointer Ptr = *this;
  while (Ptr.isField() || Ptr.isArrayElement()) {

    if (Ptr.isArrayRoot()) {
      // An array root may still be an array element itself.
      if (Ptr.isArrayElement()) {
        Ptr = Ptr.expand();
        const Descriptor *Desc = Ptr.getFieldDesc();
        unsigned Index = Ptr.getIndex();
        QualType ElemType = Desc->getElemQualType();
        Offset += (Index * ASTCtx.getTypeSizeInChars(ElemType));
        if (Ptr.getArray().getType()->isArrayType())
          Path.push_back(APValue::LValuePathEntry::ArrayIndex(Index));
        Ptr = Ptr.getArray();
      } else {
        // Array root that is itself a record field or base.
        const Descriptor *Desc = Ptr.getFieldDesc();
        const auto *Dcl = Desc->asDecl();
        Path.push_back(APValue::LValuePathEntry({Dcl, /*IsVirtual=*/false}));

        if (const auto *FD = dyn_cast_if_present<FieldDecl>(Dcl))
          Offset += getFieldOffset(FD);

        Ptr = Ptr.getBase();
      }
    } else if (Ptr.isArrayElement()) {
      Ptr = Ptr.expand();
      const Descriptor *Desc = Ptr.getFieldDesc();
      unsigned Index;
      // A one-past-end element is encoded as index == number of elements;
      // the flag is consumed here so it isn't reported twice.
      if (Ptr.isOnePastEnd()) {
        Index = Ptr.getArray().getNumElems();
        OnePastEnd = false;
      } else
        Index = Ptr.getIndex();

      QualType ElemType = Desc->getElemQualType();
      if (const auto *RD = ElemType->getAsRecordDecl();
          RD && !RD->getDefinition()) {
        // Ignore this for the offset.
      } else {
        Offset += (Index * ASTCtx.getTypeSizeInChars(ElemType));
      }
      if (Ptr.getArray().getType()->isArrayType())
        Path.push_back(APValue::LValuePathEntry::ArrayIndex(Index));
      Ptr = Ptr.getArray();
    } else {
      const Descriptor *Desc = Ptr.getFieldDesc();
      bool IsVirtual = false;

      // Create a path entry for the field.
      if (const auto *BaseOrMember = Desc->asDecl()) {
        if (const auto *FD = dyn_cast<FieldDecl>(BaseOrMember)) {
          Ptr = Ptr.getBase();
          Offset += getFieldOffset(FD);
        } else if (const auto *RD = dyn_cast<CXXRecordDecl>(BaseOrMember)) {
          // Base class subobject: the byte offset comes from the AST record
          // layout of the derived class, virtual and non-virtual differ.
          IsVirtual = Ptr.isVirtualBaseClass();
          Ptr = Ptr.getBase();
          const Record *BaseRecord = Ptr.getRecord();

          const ASTRecordLayout &Layout = ASTCtx.getASTRecordLayout(
              cast<CXXRecordDecl>(BaseRecord->getDecl()));
          if (IsVirtual)
            Offset += Layout.getVBaseClassOffset(RD);
          else
            Offset += Layout.getBaseClassOffset(RD);

        } else {
          Ptr = Ptr.getBase();
        }
        Path.push_back(APValue::LValuePathEntry({BaseOrMember, IsVirtual}));
        continue;
      }
      llvm_unreachable("Invalid field type");
    }
  }

  // We assemble the LValuePath starting from the innermost pointer to the
  // outermost one. SO in a.b.c, the first element in Path will refer to
  // the field 'c', while later code expects it to refer to 'a'.
  // Just invert the order of the elements.
  std::reverse(Path.begin(), Path.end());

  if (UsePath)
    return APValue(Base, Offset, Path, OnePastEnd);

  return APValue(Base, Offset, APValue::NoLValuePath());
}
301
302void Pointer::print(llvm::raw_ostream &OS) const {
303 switch (StorageKind) {
304 case Storage::Block: {
305 const Block *B = PointeeStorage.BS.Pointee;
306 OS << "(Block) " << B << " {";
307
308 if (isRoot())
309 OS << "rootptr(" << PointeeStorage.BS.Base << "), ";
310 else
311 OS << PointeeStorage.BS.Base << ", ";
312
313 if (isElementPastEnd())
314 OS << "pastend, ";
315 else
316 OS << Offset << ", ";
317
318 if (B)
319 OS << B->getSize();
320 else
321 OS << "nullptr";
322 OS << "}";
323 } break;
324 case Storage::Int:
325 OS << "(Int) {";
326 OS << PointeeStorage.Int.Value << " + " << Offset << ", "
327 << PointeeStorage.Int.Desc;
328 OS << "}";
329 break;
330 case Storage::Fn:
331 OS << "(Fn) { " << asFunctionPointer().getFunction() << " + " << Offset
332 << " }";
333 break;
334 case Storage::Typeid:
335 OS << "(Typeid) { " << (const void *)asTypeidPointer().TypePtr << ", "
336 << (const void *)asTypeidPointer().TypeInfoType << " + " << Offset
337 << "}";
338 }
339}
340
341size_t Pointer::computeOffsetForComparison() const {
342 if (isIntegralPointer())
343 return asIntPointer().Value + Offset;
344 if (isTypeidPointer())
345 return reinterpret_cast<uintptr_t>(asTypeidPointer().TypePtr) + Offset;
346
347 if (!isBlockPointer())
348 return Offset;
349
350 size_t Result = 0;
351 Pointer P = *this;
352 while (true) {
353
354 if (P.isVirtualBaseClass()) {
355 Result += getInlineDesc()->Offset;
356 P = P.getBase();
357 continue;
358 }
359
360 if (P.isBaseClass()) {
361 if (P.getRecord()->getNumVirtualBases() > 0)
362 Result += P.getInlineDesc()->Offset;
363 P = P.getBase();
364 continue;
365 }
366 if (P.isArrayElement()) {
367 P = P.expand();
368 Result += (P.getIndex() * P.elemSize());
369 P = P.getArray();
370 continue;
371 }
372
373 if (P.isRoot()) {
374 if (P.isOnePastEnd())
375 ++Result;
376 break;
377 }
378
379 if (const Record *R = P.getBase().getRecord(); R && R->isUnion()) {
380 // Direct child of a union - all have offset 0.
381 P = P.getBase();
382 continue;
383 }
384
385 // Fields, etc.
386 Result += P.getInlineDesc()->Offset;
387 if (P.isOnePastEnd())
388 ++Result;
389
390 P = P.getBase();
391 if (P.isRoot())
392 break;
393 }
394
395 return Result;
396}
397
398std::string Pointer::toDiagnosticString(const ASTContext &Ctx) const {
399 if (isZero())
400 return "nullptr";
401
402 if (isIntegralPointer())
403 return (Twine("&(") + Twine(asIntPointer().Value + Offset) + ")").str();
404
405 if (isFunctionPointer())
406 return asFunctionPointer().toDiagnosticString(Ctx);
407
408 return toAPValue(ASTCtx: Ctx).getAsString(Ctx, Ty: getType());
409}
410
/// Returns whether the value this pointer points to has been initialized.
bool Pointer::isInitialized() const {
  // Non-block pointers (int/fn/typeid) have no init state to track.
  if (!isBlockPointer())
    return true;

  // Globals keep their init state in a GlobalInlineDescriptor at the start
  // of the block.
  if (isRoot() && PointeeStorage.BS.Base == sizeof(GlobalInlineDescriptor)) {
    const GlobalInlineDescriptor &GD =
        *reinterpret_cast<const GlobalInlineDescriptor *>(block()->rawData());
    return GD.InitState == GlobalInitState::Initialized;
  }

  assert(PointeeStorage.BS.Pointee &&
         "Cannot check if null pointer was initialized");
  const Descriptor *Desc = getFieldDesc();
  assert(Desc);
  if (Desc->isPrimitiveArray()) {
    // Primitive global arrays are always fully initialized.
    if (isStatic() && PointeeStorage.BS.Base == 0)
      return true;

    // Per-element init state lives in an InitMap; no map means nothing has
    // been initialized yet.
    InitMapPtr &IM = getInitMap();

    if (!IM)
      return false;

    // IM->first is the "all elements initialized" shortcut flag.
    if (IM->first)
      return true;

    return IM->second->isElementInitialized(getIndex());
  }

  if (asBlockPointer().Base == 0)
    return true;

  // Field has its bit in an inline descriptor.
  return getInlineDesc()->IsInitialized;
}
446
/// Marks the value this pointer points to as initialized.
void Pointer::initialize() const {
  // Non-block pointers track no init state.
  if (!isBlockPointer())
    return;

  assert(PointeeStorage.BS.Pointee && "Cannot initialize null pointer");

  // Globals record their init state in the GlobalInlineDescriptor.
  if (isRoot() && PointeeStorage.BS.Base == sizeof(GlobalInlineDescriptor)) {
    GlobalInlineDescriptor &GD = *reinterpret_cast<GlobalInlineDescriptor *>(
        asBlockPointer().Pointee->rawData());
    GD.InitState = GlobalInitState::Initialized;
    return;
  }

  const Descriptor *Desc = getFieldDesc();
  assert(Desc);
  if (Desc->isPrimitiveArray()) {
    // Primitive global arrays don't have an initmap.
    if (isStatic() && PointeeStorage.BS.Base == 0)
      return;

    // Nothing to do for these.
    if (Desc->getNumElems() == 0)
      return;

    // Lazily create the per-element init map on first use.
    InitMapPtr &IM = getInitMap();
    if (!IM)
      IM = std::make_pair(false, std::make_shared<InitMap>(Desc->getNumElems()));

    assert(IM);

    // All initialized.
    if (IM->first)
      return;

    // Once every element is initialized, collapse the map into the
    // "all initialized" flag and drop the bitmap.
    if (IM->second->initializeElement(getIndex())) {
      IM->first = true;
      IM->second.reset();
    }
    return;
  }

  // Field has its bit in an inline descriptor.
  assert(PointeeStorage.BS.Base != 0 &&
         "Only composite fields can be initialised");
  getInlineDesc()->IsInitialized = true;
}
494
/// Activates this field as the live member of its enclosing union(s), and
/// deactivates all sibling union members along the way up.
void Pointer::activate() const {
  // Field has its bit in an inline descriptor.
  assert(PointeeStorage.BS.Base != 0 &&
         "Only composite fields can be activated");

  // Globals have no inline descriptor to flip.
  if (isRoot() && PointeeStorage.BS.Base == sizeof(GlobalInlineDescriptor))
    return;
  // Fields not inside a union are always active; nothing to do.
  if (!getInlineDesc()->InUnion)
    return;

  auto activate = [](Pointer &P) -> void {
    P.getInlineDesc()->IsActive = true;
  };

  // Recursively clears IsActive on a field and all of its active
  // sub-fields (std::function is needed for the self-reference).
  std::function<void(Pointer &)> deactivate;
  deactivate = [&deactivate](Pointer &P) -> void {
    P.getInlineDesc()->IsActive = false;

    if (const Record *R = P.getRecord()) {
      for (const Record::Field &F : R->fields()) {
        Pointer FieldPtr = P.atField(F.Offset);
        if (FieldPtr.getInlineDesc()->IsActive)
          deactivate(FieldPtr);
      }
      // FIXME: Bases?
    }
  };

  // Walk from this field up through all enclosing unions.
  Pointer B = *this;
  while (!B.isRoot() && B.inUnion()) {
    activate(B);

    // When walking up the pointer chain, deactivate
    // all union child pointers that aren't on our path.
    Pointer Cur = B;
    B = B.getBase();
    if (const Record *BR = B.getRecord(); BR && BR->isUnion()) {
      for (const Record::Field &F : BR->fields()) {
        Pointer FieldPtr = B.atField(F.Offset);
        if (FieldPtr != Cur)
          deactivate(FieldPtr);
      }
    }
  }
}
540
/// Counterpart to activate(); currently a no-op.
void Pointer::deactivate() const {
  // TODO: this only appears in constructors, so nothing to deactivate.
}
544
/// Returns whether two pointers share the same allocation base.
/// NOTE: the order of the checks below matters; mixed integral/non-integral
/// pairs must be handled before the generic storage-kind comparison.
bool Pointer::hasSameBase(const Pointer &A, const Pointer &B) {
  // Two null pointers always have the same base.
  if (A.isZero() && B.isZero())
    return true;

  // Two pointers of the same non-block kind are considered same-base.
  if (A.isIntegralPointer() && B.isIntegralPointer())
    return true;
  if (A.isFunctionPointer() && B.isFunctionPointer())
    return true;
  if (A.isTypeidPointer() && B.isTypeidPointer())
    return true;

  // Exactly one side is integral: compare the source declarations/exprs.
  if (A.isIntegralPointer() || B.isIntegralPointer())
    return A.getSource() == B.getSource();

  if (A.StorageKind != B.StorageKind)
    return false;

  // Both are block pointers: same base means same block.
  return A.asBlockPointer().Pointee == B.asBlockPointer().Pointee;
}
565
566bool Pointer::pointToSameBlock(const Pointer &A, const Pointer &B) {
567 if (!A.isBlockPointer() || !B.isBlockPointer())
568 return false;
569 return A.block() == B.block();
570}
571
572bool Pointer::hasSameArray(const Pointer &A, const Pointer &B) {
573 return hasSameBase(A, B) &&
574 A.PointeeStorage.BS.Base == B.PointeeStorage.BS.Base &&
575 A.getFieldDesc()->IsArray;
576}
577
578bool Pointer::pointsToLiteral() const {
579 if (isZero() || !isBlockPointer())
580 return false;
581
582 if (block()->isDynamic())
583 return false;
584
585 const Expr *E = block()->getDescriptor()->asExpr();
586 return E && !isa<MaterializeTemporaryExpr, StringLiteral>(Val: E);
587}
588
589bool Pointer::pointsToStringLiteral() const {
590 if (isZero() || !isBlockPointer())
591 return false;
592
593 if (block()->isDynamic())
594 return false;
595
596 const Expr *E = block()->getDescriptor()->asExpr();
597 return E && isa<StringLiteral>(Val: E);
598}
599
/// Finds the "split point" of two pointers into the same block: the pair of
/// subobject pointers, one on each designator chain, whose common base is the
/// deepest shared ancestor. Returns std::nullopt if the pointers are in
/// different blocks or have no such split.
std::optional<std::pair<Pointer, Pointer>>
Pointer::computeSplitPoint(const Pointer &A, const Pointer &B) {
  if (!A.isBlockPointer() || !B.isBlockPointer())
    return std::nullopt;

  // Must be within the same allocation.
  if (A.asBlockPointer().Pointee != B.asBlockPointer().Pointee)
    return std::nullopt;
  if (A.isRoot() && B.isRoot())
    return std::nullopt;

  if (A == B)
    return std::make_pair(A, B);

  // Step one level up the designator: array elements go to their array,
  // everything else to its base.
  auto getBase = [](const Pointer &P) -> Pointer {
    if (P.isArrayElement())
      return P.expand().getArray();
    return P.getBase();
  };

  Pointer IterA = A;
  Pointer IterB = B;
  Pointer CurA = IterA;
  Pointer CurB = IterB;
  for (;;) {
    // Always walk up from the pointer with the deeper (larger) base, so both
    // sides converge toward the shared ancestor.
    if (IterA.asBlockPointer().Base > IterB.asBlockPointer().Base) {
      CurA = IterA;
      IterA = getBase(IterA);
    } else {
      CurB = IterB;
      IterB = getBase(IterB);
    }

    // Converged: the previous positions are the split point.
    if (IterA == IterB)
      return std::make_pair(CurA, CurB);

    if (IterA.isRoot() && IterB.isRoot())
      return std::nullopt;
  }

  llvm_unreachable("The loop above should've returned.");
}
641
/// Loads the value this pointer points to as an APValue rvalue of type
/// \p ResultType, recursively converting records, arrays, complex and vector
/// types. Returns std::nullopt if the pointee cannot be read.
std::optional<APValue> Pointer::toRValue(const Context &Ctx,
                                         QualType ResultType) const {
  const ASTContext &ASTCtx = Ctx.getASTContext();
  assert(!ResultType.isNull());
  // Method to recursively traverse composites.
  std::function<bool(QualType, const Pointer &, APValue &)> Composite;
  Composite = [&Composite, &Ctx, &ASTCtx](QualType Ty, const Pointer &Ptr,
                                          APValue &R) {
    // _Atomic(T) is read as its underlying value type.
    if (const auto *AT = Ty->getAs<AtomicType>())
      Ty = AT->getValueType();

    // Invalid pointers.
    if (Ptr.isDummy() || !Ptr.isLive() || !Ptr.isBlockPointer() ||
        Ptr.isPastEnd())
      return false;

    // Primitive values.
    if (std::optional<PrimType> T = Ctx.classify(Ty)) {
      TYPE_SWITCH(*T, R = Ptr.deref<T>().toAPValue(ASTCtx));
      return true;
    }

    if (const auto *RT = Ty->getAs<RecordType>()) {
      const auto *Record = Ptr.getRecord();
      assert(Record && "Missing record descriptor");

      bool Ok = true;
      if (RT->getDecl()->isUnion()) {
        // Unions: find the single active field and read just that one.
        const FieldDecl *ActiveField = nullptr;
        APValue Value;
        for (const auto &F : Record->fields()) {
          const Pointer &FP = Ptr.atField(F.Offset);
          QualType FieldTy = F.Decl->getType();
          if (FP.isActive()) {
            if (std::optional<PrimType> T = Ctx.classify(FieldTy)) {
              TYPE_SWITCH(*T, Value = FP.deref<T>().toAPValue(ASTCtx));
            } else {
              Ok &= Composite(FieldTy, FP, Value);
            }
            ActiveField = FP.getFieldDesc()->asFieldDecl();
            break;
          }
        }
        R = APValue(ActiveField, Value);
      } else {
        // Structs/classes: read all fields, then all bases, then (only at the
        // outermost level) virtual bases.
        unsigned NF = Record->getNumFields();
        unsigned NB = Record->getNumBases();
        unsigned NV = Ptr.isBaseClass() ? 0 : Record->getNumVirtualBases();

        R = APValue(APValue::UninitStruct(), NB, NF);

        for (unsigned I = 0; I < NF; ++I) {
          const Record::Field *FD = Record->getField(I);
          QualType FieldTy = FD->Decl->getType();
          const Pointer &FP = Ptr.atField(FD->Offset);
          APValue &Value = R.getStructField(I);

          if (std::optional<PrimType> T = Ctx.classify(FieldTy)) {
            TYPE_SWITCH(*T, Value = FP.deref<T>().toAPValue(ASTCtx));
          } else {
            Ok &= Composite(FieldTy, FP, Value);
          }
        }

        for (unsigned I = 0; I < NB; ++I) {
          const Record::Base *BD = Record->getBase(I);
          QualType BaseTy = Ctx.getASTContext().getRecordType(BD->Decl);
          const Pointer &BP = Ptr.atField(BD->Offset);
          Ok &= Composite(BaseTy, BP, R.getStructBase(I));
        }

        for (unsigned I = 0; I < NV; ++I) {
          const Record::Base *VD = Record->getVirtualBase(I);
          QualType VirtBaseTy = Ctx.getASTContext().getRecordType(VD->Decl);
          const Pointer &VP = Ptr.atField(VD->Offset);
          Ok &= Composite(VirtBaseTy, VP, R.getStructBase(NB + I));
        }
      }
      return Ok;
    }

    // Incomplete arrays have no elements to read.
    if (Ty->isIncompleteArrayType()) {
      R = APValue(APValue::UninitArray(), 0, 0);
      return true;
    }

    if (const auto *AT = Ty->getAsArrayTypeUnsafe()) {
      const size_t NumElems = Ptr.getNumElems();
      QualType ElemTy = AT->getElementType();
      R = APValue(APValue::UninitArray{}, NumElems, NumElems);

      bool Ok = true;
      for (unsigned I = 0; I < NumElems; ++I) {
        APValue &Slot = R.getArrayInitializedElt(I);
        const Pointer &EP = Ptr.atIndex(I);
        if (std::optional<PrimType> T = Ctx.classify(ElemTy)) {
          TYPE_SWITCH(*T, Slot = EP.deref<T>().toAPValue(ASTCtx));
        } else {
          // narrow() descends into the composite element before recursing.
          Ok &= Composite(ElemTy, EP.narrow(), Slot);
        }
      }
      return Ok;
    }

    // Complex types.
    if (const auto *CT = Ty->getAs<ComplexType>()) {
      QualType ElemTy = CT->getElementType();

      // Real part at index 0, imaginary part at index 1.
      if (ElemTy->isIntegerType()) {
        std::optional<PrimType> ElemT = Ctx.classify(ElemTy);
        assert(ElemT);
        INT_TYPE_SWITCH(*ElemT, {
          auto V1 = Ptr.atIndex(0).deref<T>();
          auto V2 = Ptr.atIndex(1).deref<T>();
          R = APValue(V1.toAPSInt(), V2.toAPSInt());
          return true;
        });
      } else if (ElemTy->isFloatingType()) {
        R = APValue(Ptr.atIndex(0).deref<Floating>().getAPFloat(),
                    Ptr.atIndex(1).deref<Floating>().getAPFloat());
        return true;
      }
      return false;
    }

    // Vector types.
    if (const auto *VT = Ty->getAs<VectorType>()) {
      assert(Ptr.getFieldDesc()->isPrimitiveArray());
      QualType ElemTy = VT->getElementType();
      PrimType ElemT = *Ctx.classify(ElemTy);

      SmallVector<APValue> Values;
      Values.reserve(VT->getNumElements());
      for (unsigned I = 0; I != VT->getNumElements(); ++I) {
        TYPE_SWITCH(ElemT, {
          Values.push_back(Ptr.atIndex(I).deref<T>().toAPValue(ASTCtx));
        });
      }

      assert(Values.size() == VT->getNumElements());
      R = APValue(Values.data(), Values.size());
      return true;
    }

    llvm_unreachable("invalid value to return");
  };

  // Invalid to read from.
  if (isDummy() || !isLive() || isPastEnd())
    return std::nullopt;

  // We can return these as rvalues, but we can't deref() them.
  if (isZero() || isIntegralPointer())
    return toAPValue(ASTCtx);

  // Just load primitive types.
  if (std::optional<PrimType> T = Ctx.classify(ResultType)) {
    TYPE_SWITCH(*T, return this->deref<T>().toAPValue(ASTCtx));
  }

  // Return the composite type.
  APValue Result;
  if (!Composite(ResultType, *this, Result))
    return std::nullopt;
  return Result;
}
808
809IntPointer IntPointer::atOffset(const ASTContext &ASTCtx,
810 unsigned Offset) const {
811 if (!this->Desc)
812 return *this;
813 const Record *R = this->Desc->ElemRecord;
814 if (!R)
815 return *this;
816
817 const Record::Field *F = nullptr;
818 for (auto &It : R->fields()) {
819 if (It.Offset == Offset) {
820 F = &It;
821 break;
822 }
823 }
824 if (!F)
825 return *this;
826
827 const FieldDecl *FD = F->Decl;
828 const ASTRecordLayout &Layout = ASTCtx.getASTRecordLayout(D: FD->getParent());
829 unsigned FieldIndex = FD->getFieldIndex();
830 uint64_t FieldOffset =
831 ASTCtx.toCharUnitsFromBits(BitSize: Layout.getFieldOffset(FieldNo: FieldIndex))
832 .getQuantity();
833 return IntPointer{.Desc: F->Desc, .Value: this->Value + FieldOffset};
834}
835
836IntPointer IntPointer::baseCast(const ASTContext &ASTCtx,
837 unsigned BaseOffset) const {
838 if (!Desc) {
839 assert(Value == 0);
840 return *this;
841 }
842 const Record *R = Desc->ElemRecord;
843 const Descriptor *BaseDesc = nullptr;
844
845 // This iterates over bases and checks for the proper offset. That's
846 // potentially slow but this case really shouldn't happen a lot.
847 for (const Record::Base &B : R->bases()) {
848 if (B.Offset == BaseOffset) {
849 BaseDesc = B.Desc;
850 break;
851 }
852 }
853 assert(BaseDesc);
854
855 // Adjust the offset value based on the information from the record layout.
856 const ASTRecordLayout &Layout = ASTCtx.getASTRecordLayout(D: R->getDecl());
857 CharUnits BaseLayoutOffset =
858 Layout.getBaseClassOffset(Base: cast<CXXRecordDecl>(Val: BaseDesc->asDecl()));
859
860 return {.Desc: BaseDesc, .Value: Value + BaseLayoutOffset.getQuantity()};
861}
862

// Source: clang/lib/AST/ByteCode/Pointer.cpp