1//===--- Pointer.cpp - Types for the constexpr VM ---------------*- C++ -*-===//
2//
3// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4// See https://llvm.org/LICENSE.txt for license information.
5// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6//
7//===----------------------------------------------------------------------===//
8
9#include "Pointer.h"
10#include "Boolean.h"
11#include "Context.h"
12#include "Floating.h"
13#include "Function.h"
14#include "Integral.h"
15#include "InterpBlock.h"
16#include "MemberPointer.h"
17#include "PrimType.h"
18#include "Record.h"
19#include "clang/AST/ExprCXX.h"
20#include "clang/AST/RecordLayout.h"
21
22using namespace clang;
23using namespace clang::interp;
24
/// Create a pointer to the start of a block's data, i.e. just past the
/// block's metadata. Both the base and the offset start at the metadata size.
Pointer::Pointer(Block *Pointee)
    : Pointer(Pointee, Pointee->getDescriptor()->getMetadataSize(),
              Pointee->getDescriptor()->getMetadataSize()) {}
28
/// Create a pointer into a block where base and offset coincide.
Pointer::Pointer(Block *Pointee, uint64_t BaseAndOffset)
    : Pointer(Pointee, BaseAndOffset, BaseAndOffset) {}
31
/// Copy constructor. Block pointers register themselves with their pointee
/// block so the block can invalidate them when it dies.
Pointer::Pointer(const Pointer &P)
    : Offset(P.Offset), PointeeStorage(P.PointeeStorage),
      StorageKind(P.StorageKind) {

  // Only block pointers participate in the block's pointer list.
  if (isBlockPointer() && PointeeStorage.BS.Pointee)
    PointeeStorage.BS.Pointee->addPointer(P: this);
}
39
/// Create a block pointer with an explicit base (start of the current
/// designator within the block) and offset. Registers with the block.
Pointer::Pointer(Block *Pointee, unsigned Base, uint64_t Offset)
    : Offset(Offset), StorageKind(Storage::Block) {
  // The base is either the special root marker or must be suitably aligned.
  assert((Base == RootPtrMark || Base % alignof(void *) == 0) && "wrong base");

  PointeeStorage.BS = {.Pointee: Pointee, .Base: Base};

  if (Pointee)
    Pointee->addPointer(P: this);
}
49
/// Move constructor. Unlike the copy constructor, this swaps the moved-from
/// pointer's slot in the block's pointer list for this one (replacePointer),
/// so the block never sees both pointers at once.
Pointer::Pointer(Pointer &&P)
    : Offset(P.Offset), PointeeStorage(P.PointeeStorage),
      StorageKind(P.StorageKind) {

  if (StorageKind == Storage::Block && PointeeStorage.BS.Pointee)
    PointeeStorage.BS.Pointee->replacePointer(Old: &P, New: this);
}
57
/// Destructor. Block pointers must deregister from their block; the block is
/// then given a chance to clean itself up (e.g. a dead block whose last
/// pointer just went away).
Pointer::~Pointer() {
  if (!isBlockPointer())
    return;

  if (Block *Pointee = PointeeStorage.BS.Pointee) {
    // Clear our pointee before cleanup() so the block does not see a stale
    // reference back to this pointer.
    Pointee->removePointer(P: this);
    PointeeStorage.BS.Pointee = nullptr;
    Pointee->cleanup();
  }
}
68
/// Copy assignment. Maintains the pointee block's pointer list: deregisters
/// from the old block (unless both sides point into the same block, in which
/// case only base/offset need updating) and registers with the new one.
void Pointer::operator=(const Pointer &P) {
  // If the current storage type is Block, we need to remove
  // this pointer from the block.
  if (isBlockPointer()) {
    // Fast path: same block on both sides, registration can stay as-is.
    if (P.isBlockPointer() && this->block() == P.block()) {
      Offset = P.Offset;
      PointeeStorage.BS.Base = P.PointeeStorage.BS.Base;
      return;
    }

    if (Block *Pointee = PointeeStorage.BS.Pointee) {
      Pointee->removePointer(P: this);
      PointeeStorage.BS.Pointee = nullptr;
      Pointee->cleanup();
    }
  }

  StorageKind = P.StorageKind;
  Offset = P.Offset;

  // Copy the storage payload matching the new kind and, for block pointers,
  // register with the new block.
  if (P.isBlockPointer()) {
    PointeeStorage.BS = P.PointeeStorage.BS;
    PointeeStorage.BS.Pointee = P.PointeeStorage.BS.Pointee;

    if (PointeeStorage.BS.Pointee)
      PointeeStorage.BS.Pointee->addPointer(P: this);
  } else if (P.isIntegralPointer()) {
    PointeeStorage.Int = P.PointeeStorage.Int;
  } else if (P.isFunctionPointer()) {
    PointeeStorage.Fn = P.PointeeStorage.Fn;
  } else if (P.isTypeidPointer()) {
    PointeeStorage.Typeid = P.PointeeStorage.Typeid;
  } else {
    assert(false && "Unhandled storage kind");
  }
}
105
/// Move assignment. Mirrors the copy assignment; the moved-from pointer keeps
/// its registration and deregisters itself in its own destructor.
void Pointer::operator=(Pointer &&P) {
  // If the current storage type is Block, we need to remove
  // this pointer from the block.
  if (isBlockPointer()) {
    // Fast path: same block on both sides, registration can stay as-is.
    if (P.isBlockPointer() && this->block() == P.block()) {
      Offset = P.Offset;
      PointeeStorage.BS.Base = P.PointeeStorage.BS.Base;
      return;
    }

    if (Block *Pointee = PointeeStorage.BS.Pointee) {
      // NOTE(review): this assert calls P.block(), which presumes P is a
      // block pointer; confirm non-block RHS values cannot reach this path.
      assert(P.block() != this->block());
      Pointee->removePointer(P: this);
      PointeeStorage.BS.Pointee = nullptr;
      Pointee->cleanup();
    }
  }

  StorageKind = P.StorageKind;
  Offset = P.Offset;

  if (P.isBlockPointer()) {
    PointeeStorage.BS = P.PointeeStorage.BS;
    PointeeStorage.BS.Pointee = P.PointeeStorage.BS.Pointee;

    if (PointeeStorage.BS.Pointee)
      PointeeStorage.BS.Pointee->addPointer(P: this);
  } else if (P.isIntegralPointer()) {
    PointeeStorage.Int = P.PointeeStorage.Int;
  } else if (P.isFunctionPointer()) {
    PointeeStorage.Fn = P.PointeeStorage.Fn;
  } else if (P.isTypeidPointer()) {
    PointeeStorage.Typeid = P.PointeeStorage.Typeid;
  } else {
    assert(false && "Unhandled storage kind");
  }
}
143
/// Lower this interpreter pointer to a clang APValue lvalue.
///
/// Null, integral, function and typeid pointers map directly onto the
/// corresponding APValue forms. Block pointers are lowered by determining the
/// lvalue base (the declaring ValueDecl, an expression, or a synthesized
/// dynamic-allocation base for new-expressions) and then walking from the
/// innermost designator outwards, accumulating both the byte offset and the
/// LValuePath entries.
APValue Pointer::toAPValue(const ASTContext &ASTCtx) const {
  llvm::SmallVector<APValue::LValuePathEntry, 5> Path;

  if (isZero())
    return APValue(static_cast<const Expr *>(nullptr), CharUnits::Zero(), Path,
                   /*IsOnePastEnd=*/false, /*IsNullPtr=*/true);
  if (isIntegralPointer())
    return APValue(static_cast<const Expr *>(nullptr),
                   CharUnits::fromQuantity(Quantity: asIntPointer().Value + this->Offset),
                   Path,
                   /*IsOnePastEnd=*/false, /*IsNullPtr=*/false);
  if (isFunctionPointer()) {
    const FunctionPointer &FP = asFunctionPointer();
    // Prefer the declaration as the base; fall back to the originating expr
    // (e.g. for functions only known via an expression).
    if (const FunctionDecl *FD = FP.getFunction()->getDecl())
      return APValue(FD, CharUnits::fromQuantity(Quantity: Offset), {},
                     /*OnePastTheEnd=*/false, /*IsNull=*/false);
    return APValue(FP.getFunction()->getExpr(), CharUnits::fromQuantity(Quantity: Offset),
                   {},
                   /*OnePastTheEnd=*/false, /*IsNull=*/false);
  }

  if (isTypeidPointer()) {
    TypeInfoLValue TypeInfo(PointeeStorage.Typeid.TypePtr);
    return APValue(
        APValue::LValueBase::getTypeInfo(
            LV: TypeInfo, TypeInfo: QualType(PointeeStorage.Typeid.TypeInfoType, 0)),
        CharUnits::Zero(), {},
        /*OnePastTheEnd=*/false, /*IsNull=*/false);
  }

  // Build the lvalue base from the block.
  const Descriptor *Desc = getDeclDesc();
  APValue::LValueBase Base;
  if (const auto *VD = Desc->asValueDecl())
    Base = VD;
  else if (const auto *E = Desc->asExpr()) {
    // Create a DynamicAlloc base of the right type.
    if (const auto *NewExpr = dyn_cast<CXXNewExpr>(Val: E)) {
      QualType AllocatedType;
      if (NewExpr->isArray()) {
        assert(Desc->isArray());
        APInt ArraySize(64, static_cast<uint64_t>(Desc->getNumElems()),
                        /*IsSigned=*/false);
        AllocatedType =
            ASTCtx.getConstantArrayType(EltTy: NewExpr->getAllocatedType(), ArySize: ArraySize,
                                        SizeExpr: nullptr, ASM: ArraySizeModifier::Normal, IndexTypeQuals: 0);
      } else {
        AllocatedType = NewExpr->getAllocatedType();
      }
      // FIXME: Suboptimal counting of dynamic allocations. Move this to Context
      // or InterpState?
      static int ReportedDynamicAllocs = 0;
      DynamicAllocLValue DA(ReportedDynamicAllocs++);
      Base = APValue::LValueBase::getDynamicAlloc(LV: DA, Type: AllocatedType);
    } else {
      Base = E;
    }
  } else
    llvm_unreachable("Invalid allocation type");

  // Unknown-size arrays carry no usable designator; emit base only.
  if (isUnknownSizeArray())
    return APValue(Base, CharUnits::Zero(), Path,
                   /*IsOnePastEnd=*/isOnePastEnd(), /*IsNullPtr=*/false);

  CharUnits Offset = CharUnits::Zero();

  auto getFieldOffset = [&](const FieldDecl *FD) -> CharUnits {
    // This shouldn't happen, but if it does, don't crash inside
    // getASTRecordLayout.
    if (FD->getParent()->isInvalidDecl())
      return CharUnits::Zero();
    const ASTRecordLayout &Layout = ASTCtx.getASTRecordLayout(D: FD->getParent());
    unsigned FieldIndex = FD->getFieldIndex();
    return ASTCtx.toCharUnitsFromBits(BitSize: Layout.getFieldOffset(FieldNo: FieldIndex));
  };

  // Lvalue references do not record a path in APValue.
  bool UsePath = true;
  if (const ValueDecl *VD = getDeclDesc()->asValueDecl();
      VD && VD->getType()->isLValueReferenceType())
    UsePath = false;

  // Build the path into the object.
  bool OnePastEnd = isOnePastEnd();
  Pointer Ptr = *this;
  while (Ptr.isField() || Ptr.isArrayElement()) {

    if (Ptr.isArrayRoot()) {
      // An array root may still be an array element itself.
      if (Ptr.isArrayElement()) {
        Ptr = Ptr.expand();
        const Descriptor *Desc = Ptr.getFieldDesc();
        unsigned Index = Ptr.getIndex();
        QualType ElemType = Desc->getElemQualType();
        Offset += (Index * ASTCtx.getTypeSizeInChars(T: ElemType));
        if (Ptr.getArray().getType()->isArrayType())
          Path.push_back(Elt: APValue::LValuePathEntry::ArrayIndex(Index));
        Ptr = Ptr.getArray();
      } else {
        const Descriptor *Desc = Ptr.getFieldDesc();
        const auto *Dcl = Desc->asDecl();
        Path.push_back(Elt: APValue::LValuePathEntry({Dcl, /*IsVirtual=*/false}));

        if (const auto *FD = dyn_cast_if_present<FieldDecl>(Val: Dcl))
          Offset += getFieldOffset(FD);

        Ptr = Ptr.getBase();
      }
    } else if (Ptr.isArrayElement()) {
      Ptr = Ptr.expand();
      const Descriptor *Desc = Ptr.getFieldDesc();
      unsigned Index;
      // A one-past-end element is encoded as index == number of elements;
      // once recorded in the path, the flag must not be applied again.
      if (Ptr.isOnePastEnd()) {
        Index = Ptr.getArray().getNumElems();
        OnePastEnd = false;
      } else
        Index = Ptr.getIndex();

      QualType ElemType = Desc->getElemQualType();
      if (const auto *RD = ElemType->getAsRecordDecl();
          RD && !RD->getDefinition()) {
        // Ignore this for the offset.
      } else {
        Offset += (Index * ASTCtx.getTypeSizeInChars(T: ElemType));
      }
      if (Ptr.getArray().getType()->isArrayType())
        Path.push_back(Elt: APValue::LValuePathEntry::ArrayIndex(Index));
      Ptr = Ptr.getArray();
    } else {
      const Descriptor *Desc = Ptr.getFieldDesc();
      bool IsVirtual = false;

      // Create a path entry for the field.
      if (const auto *BaseOrMember = Desc->asDecl()) {
        if (const auto *FD = dyn_cast<FieldDecl>(Val: BaseOrMember)) {
          Ptr = Ptr.getBase();
          Offset += getFieldOffset(FD);
        } else if (const auto *RD = dyn_cast<CXXRecordDecl>(Val: BaseOrMember)) {
          // Base-class subobject: pick the virtual or non-virtual layout
          // offset as appropriate.
          IsVirtual = Ptr.isVirtualBaseClass();
          Ptr = Ptr.getBase();
          const Record *BaseRecord = Ptr.getRecord();

          const ASTRecordLayout &Layout = ASTCtx.getASTRecordLayout(
              cast<CXXRecordDecl>(Val: BaseRecord->getDecl()));
          if (IsVirtual)
            Offset += Layout.getVBaseClassOffset(VBase: RD);
          else
            Offset += Layout.getBaseClassOffset(Base: RD);

        } else {
          Ptr = Ptr.getBase();
        }
        Path.push_back(Elt: APValue::LValuePathEntry({BaseOrMember, IsVirtual}));
        continue;
      }
      llvm_unreachable("Invalid field type");
    }
  }

  // We assemble the LValuePath starting from the innermost pointer to the
  // outermost one. SO in a.b.c, the first element in Path will refer to
  // the field 'c', while later code expects it to refer to 'a'.
  // Just invert the order of the elements.
  std::reverse(first: Path.begin(), last: Path.end());

  if (UsePath)
    return APValue(Base, Offset, Path, OnePastEnd);

  return APValue(Base, Offset, APValue::NoLValuePath());
}
313
/// Print a debug representation of this pointer to \p OS, prefixed by its
/// storage kind. Block pointers show block address, base, offset (or
/// "pastend") and block size.
void Pointer::print(llvm::raw_ostream &OS) const {
  switch (StorageKind) {
  case Storage::Block: {
    const Block *B = PointeeStorage.BS.Pointee;
    OS << "(Block) " << B << " {";

    if (isRoot())
      OS << "rootptr(" << PointeeStorage.BS.Base << "), ";
    else
      OS << PointeeStorage.BS.Base << ", ";

    if (isElementPastEnd())
      OS << "pastend, ";
    else
      OS << Offset << ", ";

    if (B)
      OS << B->getSize();
    else
      OS << "nullptr";
    OS << "}";
  } break;
  case Storage::Int:
    OS << "(Int) {";
    OS << PointeeStorage.Int.Value << " + " << Offset << ", "
       << PointeeStorage.Int.Desc;
    OS << "}";
    break;
  case Storage::Fn:
    OS << "(Fn) { " << asFunctionPointer().getFunction() << " + " << Offset
       << " }";
    break;
  case Storage::Typeid:
    // Last case; no break needed.
    OS << "(Typeid) { " << (const void *)asTypeidPointer().TypePtr << ", "
       << (const void *)asTypeidPointer().TypeInfoType << " + " << Offset
       << "}";
  }
}
352
/// Compute an integer that can be used to compare this pointer to
/// another one. The value is accumulated by walking from this pointer up to
/// its root, summing array-element and field offsets; children of a union
/// all compare at offset 0.
size_t Pointer::computeOffsetForComparison() const {
  // Non-block pointers compare by their raw offset.
  if (!isBlockPointer())
    return Offset;

  size_t Result = 0;
  Pointer P = *this;
  while (!P.isRoot()) {
    // An array root contributes no offset of its own.
    if (P.isArrayRoot()) {
      P = P.getBase();
      continue;
    }
    if (P.isArrayElement()) {
      P = P.expand();
      Result += (P.getIndex() * P.elemSize());
      P = P.getArray();
      continue;
    }

    if (const Record *R = P.getBase().getRecord(); R && R->isUnion()) {
      // Direct child of a union - all have offset 0.
      P = P.getBase();
      continue;
    }

    Result += P.getInlineDesc()->Offset;
    P = P.getBase();
  }

  return Result;
}
385
386std::string Pointer::toDiagnosticString(const ASTContext &Ctx) const {
387 if (isZero())
388 return "nullptr";
389
390 if (isIntegralPointer())
391 return (Twine("&(") + Twine(asIntPointer().Value + Offset) + ")").str();
392
393 if (isFunctionPointer())
394 return asFunctionPointer().toDiagnosticString(Ctx);
395
396 return toAPValue(ASTCtx: Ctx).getAsString(Ctx, Ty: getType());
397}
398
/// Check whether the storage this pointer designates has been initialized.
/// Globals track a single init state; primitive arrays consult their init
/// map; everything else reads the inline descriptor's IsInitialized bit.
bool Pointer::isInitialized() const {
  // Non-block pointers have no initialization state to track.
  if (!isBlockPointer())
    return true;

  // Root global: a GlobalInlineDescriptor at the block start records state.
  if (isRoot() && PointeeStorage.BS.Base == sizeof(GlobalInlineDescriptor)) {
    const GlobalInlineDescriptor &GD =
        *reinterpret_cast<const GlobalInlineDescriptor *>(block()->rawData());
    return GD.InitState == GlobalInitState::Initialized;
  }

  assert(PointeeStorage.BS.Pointee &&
         "Cannot check if null pointer was initialized");
  const Descriptor *Desc = getFieldDesc();
  assert(Desc);
  if (Desc->isPrimitiveArray()) {
    // Primitive global arrays don't use an init map.
    if (isStatic() && PointeeStorage.BS.Base == 0)
      return true;

    InitMapPtr &IM = getInitMap();

    // No init map yet means nothing was initialized.
    if (!IM)
      return false;

    // first == true marks the whole array as initialized.
    if (IM->first)
      return true;

    return IM->second->isElementInitialized(I: getIndex());
  }

  if (asBlockPointer().Base == 0)
    return true;

  // Field has its bit in an inline descriptor.
  return getInlineDesc()->IsInitialized;
}
434
/// Mark the storage this pointer designates as initialized.
/// Mirrors isInitialized(): globals flip their GlobalInitState, primitive
/// arrays update their init map (collapsing it once every element is set),
/// and other fields set the inline descriptor bit.
void Pointer::initialize() const {
  if (!isBlockPointer())
    return;

  assert(PointeeStorage.BS.Pointee && "Cannot initialize null pointer");

  if (isRoot() && PointeeStorage.BS.Base == sizeof(GlobalInlineDescriptor)) {
    GlobalInlineDescriptor &GD = *reinterpret_cast<GlobalInlineDescriptor *>(
        asBlockPointer().Pointee->rawData());
    GD.InitState = GlobalInitState::Initialized;
    return;
  }

  const Descriptor *Desc = getFieldDesc();
  assert(Desc);
  if (Desc->isPrimitiveArray()) {
    // Primitive global arrays don't have an initmap.
    if (isStatic() && PointeeStorage.BS.Base == 0)
      return;

    // Nothing to do for these.
    if (Desc->getNumElems() == 0)
      return;

    // Lazily create the init map on first element initialization.
    InitMapPtr &IM = getInitMap();
    if (!IM)
      IM =
          std::make_pair(x: false, y: std::make_shared<InitMap>(args: Desc->getNumElems()));

    assert(IM);

    // All initialized.
    if (IM->first)
      return;

    // Once the last element is set, drop the map and record "all done".
    if (IM->second->initializeElement(I: getIndex())) {
      IM->first = true;
      IM->second.reset();
    }
    return;
  }

  // Field has its bit in an inline descriptor.
  assert(PointeeStorage.BS.Base != 0 &&
         "Only composite fields can be initialised");
  getInlineDesc()->IsInitialized = true;
}
482
/// Make this pointer's field the active member of its enclosing union(s).
/// Deactivates the sibling fields of the innermost non-union ancestor and
/// activates every field on the path from this pointer up to that ancestor.
void Pointer::activate() const {
  // Field has its bit in an inline descriptor.
  assert(PointeeStorage.BS.Base != 0 &&
         "Only composite fields can be activated");

  // Roots of globals have no inline descriptor to flip.
  if (isRoot() && PointeeStorage.BS.Base == sizeof(GlobalInlineDescriptor))
    return;
  // Fields outside any union are always active; nothing to do.
  if (!getInlineDesc()->InUnion)
    return;

  auto activate = [](Pointer &P) -> void {
    P.getInlineDesc()->IsActive = true;
  };
  auto deactivate = [](Pointer &P) -> void {
    P.getInlineDesc()->IsActive = false;
  };

  // Unions might be nested etc., so find the topmost Pointer that's
  // not in a union anymore.
  Pointer UnionPtr = getBase();
  while (!UnionPtr.isRoot() && UnionPtr.inUnion())
    UnionPtr = UnionPtr.getBase();

  assert(UnionPtr.getFieldDesc()->isUnion());

  // Deactivate all sibling fields of the union except ourselves.
  const Record *UnionRecord = UnionPtr.getRecord();
  for (const Record::Field &F : UnionRecord->fields()) {
    Pointer FieldPtr = UnionPtr.atField(Off: F.Offset);
    if (FieldPtr == *this) {
    } else {
      deactivate(FieldPtr);
      // FIXME: Recurse.
    }
  }

  // Activate the chain from this field up to (but excluding) the union root.
  Pointer B = *this;
  while (B != UnionPtr) {
    activate(B);
    // FIXME: Need to de-activate other fields of parent records.
    B = B.getBase();
  }
}
525
/// Deactivate this pointer's field. Intentionally a no-op:
// TODO: this only appears in constructors, so nothing to deactivate.
void Pointer::deactivate() const {
  // TODO: this only appears in constructors, so nothing to deactivate.
}
529
530bool Pointer::hasSameBase(const Pointer &A, const Pointer &B) {
531 // Two null pointers always have the same base.
532 if (A.isZero() && B.isZero())
533 return true;
534
535 if (A.isIntegralPointer() && B.isIntegralPointer())
536 return true;
537 if (A.isFunctionPointer() && B.isFunctionPointer())
538 return true;
539 if (A.isTypeidPointer() && B.isTypeidPointer())
540 return true;
541
542 if (A.isIntegralPointer() || B.isIntegralPointer())
543 return A.getSource() == B.getSource();
544
545 if (A.StorageKind != B.StorageKind)
546 return false;
547
548 return A.asBlockPointer().Pointee == B.asBlockPointer().Pointee;
549}
550
551bool Pointer::pointToSameBlock(const Pointer &A, const Pointer &B) {
552 if (!A.isBlockPointer() || !B.isBlockPointer())
553 return false;
554 return A.block() == B.block();
555}
556
557bool Pointer::hasSameArray(const Pointer &A, const Pointer &B) {
558 return hasSameBase(A, B) &&
559 A.PointeeStorage.BS.Base == B.PointeeStorage.BS.Base &&
560 A.getFieldDesc()->IsArray;
561}
562
563bool Pointer::pointsToLiteral() const {
564 if (isZero() || !isBlockPointer())
565 return false;
566
567 if (block()->isDynamic())
568 return false;
569
570 const Expr *E = block()->getDescriptor()->asExpr();
571 return E && !isa<MaterializeTemporaryExpr, StringLiteral>(Val: E);
572}
573
574bool Pointer::pointsToStringLiteral() const {
575 if (isZero() || !isBlockPointer())
576 return false;
577
578 if (block()->isDynamic())
579 return false;
580
581 const Expr *E = block()->getDescriptor()->asExpr();
582 return E && isa<StringLiteral>(Val: E);
583}
584
/// Find the pair of subobject pointers at which the designators of \p A and
/// \p B diverge within the same block. Walks both designators upwards in
/// lock-step (advancing whichever currently has the deeper base) until the
/// bases meet; returns the children just below the meeting point, or
/// std::nullopt if the pointers do not share a block or are both roots.
std::optional<std::pair<Pointer, Pointer>>
Pointer::computeSplitPoint(const Pointer &A, const Pointer &B) {
  if (!A.isBlockPointer() || !B.isBlockPointer())
    return std::nullopt;

  if (A.asBlockPointer().Pointee != B.asBlockPointer().Pointee)
    return std::nullopt;
  if (A.isRoot() && B.isRoot())
    return std::nullopt;

  // Identical pointers split at themselves.
  if (A == B)
    return std::make_pair(x: A, y: B);

  // Step to the parent designator; array elements go to their array.
  auto getBase = [](const Pointer &P) -> Pointer {
    if (P.isArrayElement())
      return P.expand().getArray();
    return P.getBase();
  };

  Pointer IterA = A;
  Pointer IterB = B;
  Pointer CurA = IterA;
  Pointer CurB = IterB;
  for (;;) {
    // Advance whichever side is currently deeper in the block.
    if (IterA.asBlockPointer().Base > IterB.asBlockPointer().Base) {
      CurA = IterA;
      IterA = getBase(IterA);
    } else {
      CurB = IterB;
      IterB = getBase(IterB);
    }

    if (IterA == IterB)
      return std::make_pair(x&: CurA, y&: CurB);

    if (IterA.isRoot() && IterB.isRoot())
      return std::nullopt;
  }

  llvm_unreachable("The loop above should've returned.");
}
626
/// Load the value this pointer designates as an APValue rvalue of type
/// \p ResultType. Primitives are dereferenced directly; records, arrays,
/// complex and vector types are traversed recursively via the local
/// Composite lambda. Returns std::nullopt when the pointer is not readable
/// (dummy, dead, past-the-end) or a sub-load fails.
std::optional<APValue> Pointer::toRValue(const Context &Ctx,
                                         QualType ResultType) const {
  const ASTContext &ASTCtx = Ctx.getASTContext();
  assert(!ResultType.isNull());
  // Method to recursively traverse composites.
  std::function<bool(QualType, const Pointer &, APValue &)> Composite;
  Composite = [&Composite, &Ctx, &ASTCtx](QualType Ty, const Pointer &Ptr,
                                          APValue &R) {
    // Look through atomics: the value type is what we actually load.
    if (const auto *AT = Ty->getAs<AtomicType>())
      Ty = AT->getValueType();

    // Invalid pointers.
    if (Ptr.isDummy() || !Ptr.isLive() || !Ptr.isBlockPointer() ||
        Ptr.isPastEnd())
      return false;

    // Primitive values.
    if (std::optional<PrimType> T = Ctx.classify(T: Ty)) {
      TYPE_SWITCH(*T, R = Ptr.deref<T>().toAPValue(ASTCtx));
      return true;
    }

    if (const auto *RT = Ty->getAs<RecordType>()) {
      const auto *Record = Ptr.getRecord();
      assert(Record && "Missing record descriptor");

      bool Ok = true;
      if (RT->getDecl()->isUnion()) {
        // For a union, only the active field (if any) is materialized.
        const FieldDecl *ActiveField = nullptr;
        APValue Value;
        for (const auto &F : Record->fields()) {
          const Pointer &FP = Ptr.atField(Off: F.Offset);
          QualType FieldTy = F.Decl->getType();
          if (FP.isActive()) {
            if (std::optional<PrimType> T = Ctx.classify(T: FieldTy)) {
              TYPE_SWITCH(*T, Value = FP.deref<T>().toAPValue(ASTCtx));
            } else {
              Ok &= Composite(FieldTy, FP, Value);
            }
            ActiveField = FP.getFieldDesc()->asFieldDecl();
            break;
          }
        }
        R = APValue(ActiveField, Value);
      } else {
        unsigned NF = Record->getNumFields();
        unsigned NB = Record->getNumBases();
        // Virtual bases are only materialized for the most-derived object.
        unsigned NV = Ptr.isBaseClass() ? 0 : Record->getNumVirtualBases();

        R = APValue(APValue::UninitStruct(), NB, NF);

        for (unsigned I = 0; I < NF; ++I) {
          const Record::Field *FD = Record->getField(I);
          QualType FieldTy = FD->Decl->getType();
          const Pointer &FP = Ptr.atField(Off: FD->Offset);
          APValue &Value = R.getStructField(i: I);

          if (std::optional<PrimType> T = Ctx.classify(T: FieldTy)) {
            TYPE_SWITCH(*T, Value = FP.deref<T>().toAPValue(ASTCtx));
          } else {
            Ok &= Composite(FieldTy, FP, Value);
          }
        }

        for (unsigned I = 0; I < NB; ++I) {
          const Record::Base *BD = Record->getBase(I);
          QualType BaseTy = Ctx.getASTContext().getRecordType(Decl: BD->Decl);
          const Pointer &BP = Ptr.atField(Off: BD->Offset);
          Ok &= Composite(BaseTy, BP, R.getStructBase(i: I));
        }

        for (unsigned I = 0; I < NV; ++I) {
          const Record::Base *VD = Record->getVirtualBase(I);
          QualType VirtBaseTy = Ctx.getASTContext().getRecordType(Decl: VD->Decl);
          const Pointer &VP = Ptr.atField(Off: VD->Offset);
          Ok &= Composite(VirtBaseTy, VP, R.getStructBase(i: NB + I));
        }
      }
      return Ok;
    }

    // Incomplete arrays have no elements to load.
    if (Ty->isIncompleteArrayType()) {
      R = APValue(APValue::UninitArray(), 0, 0);
      return true;
    }

    if (const auto *AT = Ty->getAsArrayTypeUnsafe()) {
      const size_t NumElems = Ptr.getNumElems();
      QualType ElemTy = AT->getElementType();
      R = APValue(APValue::UninitArray{}, NumElems, NumElems);

      bool Ok = true;
      for (unsigned I = 0; I < NumElems; ++I) {
        APValue &Slot = R.getArrayInitializedElt(I);
        const Pointer &EP = Ptr.atIndex(Idx: I);
        if (std::optional<PrimType> T = Ctx.classify(T: ElemTy)) {
          TYPE_SWITCH(*T, Slot = EP.deref<T>().toAPValue(ASTCtx));
        } else {
          Ok &= Composite(ElemTy, EP.narrow(), Slot);
        }
      }
      return Ok;
    }

    // Complex types.
    if (const auto *CT = Ty->getAs<ComplexType>()) {
      QualType ElemTy = CT->getElementType();

      // Complex values are stored as two consecutive elements (real, imag).
      if (ElemTy->isIntegerType()) {
        std::optional<PrimType> ElemT = Ctx.classify(T: ElemTy);
        assert(ElemT);
        INT_TYPE_SWITCH(*ElemT, {
          auto V1 = Ptr.atIndex(0).deref<T>();
          auto V2 = Ptr.atIndex(1).deref<T>();
          R = APValue(V1.toAPSInt(), V2.toAPSInt());
          return true;
        });
      } else if (ElemTy->isFloatingType()) {
        R = APValue(Ptr.atIndex(Idx: 0).deref<Floating>().getAPFloat(),
                    Ptr.atIndex(Idx: 1).deref<Floating>().getAPFloat());
        return true;
      }
      return false;
    }

    // Vector types.
    if (const auto *VT = Ty->getAs<VectorType>()) {
      assert(Ptr.getFieldDesc()->isPrimitiveArray());
      QualType ElemTy = VT->getElementType();
      PrimType ElemT = *Ctx.classify(T: ElemTy);

      SmallVector<APValue> Values;
      Values.reserve(N: VT->getNumElements());
      for (unsigned I = 0; I != VT->getNumElements(); ++I) {
        TYPE_SWITCH(ElemT, {
          Values.push_back(Ptr.atIndex(I).deref<T>().toAPValue(ASTCtx));
        });
      }

      assert(Values.size() == VT->getNumElements());
      R = APValue(Values.data(), Values.size());
      return true;
    }

    llvm_unreachable("invalid value to return");
  };

  // Invalid to read from.
  if (isDummy() || !isLive() || isPastEnd())
    return std::nullopt;

  // We can return these as rvalues, but we can't deref() them.
  if (isZero() || isIntegralPointer())
    return toAPValue(ASTCtx);

  // Just load primitive types.
  if (std::optional<PrimType> T = Ctx.classify(T: ResultType)) {
    TYPE_SWITCH(*T, return this->deref<T>().toAPValue(ASTCtx));
  }

  // Return the composite type.
  APValue Result;
  if (!Composite(ResultType, *this, Result))
    return std::nullopt;
  return Result;
}
793
794IntPointer IntPointer::atOffset(const ASTContext &ASTCtx,
795 unsigned Offset) const {
796 if (!this->Desc)
797 return *this;
798 const Record *R = this->Desc->ElemRecord;
799 if (!R)
800 return *this;
801
802 const Record::Field *F = nullptr;
803 for (auto &It : R->fields()) {
804 if (It.Offset == Offset) {
805 F = &It;
806 break;
807 }
808 }
809 if (!F)
810 return *this;
811
812 const FieldDecl *FD = F->Decl;
813 const ASTRecordLayout &Layout = ASTCtx.getASTRecordLayout(D: FD->getParent());
814 unsigned FieldIndex = FD->getFieldIndex();
815 uint64_t FieldOffset =
816 ASTCtx.toCharUnitsFromBits(BitSize: Layout.getFieldOffset(FieldNo: FieldIndex))
817 .getQuantity();
818 return IntPointer{.Desc: F->Desc, .Value: this->Value + FieldOffset};
819}
820
/// Cast an integral pointer to one of its base classes, identified by the
/// bytecode base offset. The resulting value is adjusted by the base-class
/// offset taken from the AST record layout.
IntPointer IntPointer::baseCast(const ASTContext &ASTCtx,
                                unsigned BaseOffset) const {
  // A descriptor-less integral pointer must be the null value; no adjustment.
  if (!Desc) {
    assert(Value == 0);
    return *this;
  }
  // NOTE(review): ElemRecord is dereferenced below without a null check —
  // presumably callers only base-cast pointers to record types; confirm.
  const Record *R = Desc->ElemRecord;
  const Descriptor *BaseDesc = nullptr;

  // This iterates over bases and checks for the proper offset. That's
  // potentially slow but this case really shouldn't happen a lot.
  for (const Record::Base &B : R->bases()) {
    if (B.Offset == BaseOffset) {
      BaseDesc = B.Desc;
      break;
    }
  }
  assert(BaseDesc);

  // Adjust the offset value based on the information from the record layout.
  const ASTRecordLayout &Layout = ASTCtx.getASTRecordLayout(D: R->getDecl());
  CharUnits BaseLayoutOffset =
      Layout.getBaseClassOffset(Base: cast<CXXRecordDecl>(Val: BaseDesc->asDecl()));

  return {.Desc: BaseDesc, .Value: Value + BaseLayoutOffset.getQuantity()};
}
847

// Source: clang/lib/AST/ByteCode/Pointer.cpp