1//===------- Interp.cpp - Interpreter for the constexpr VM ------*- C++ -*-===//
2//
3// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4// See https://llvm.org/LICENSE.txt for license information.
5// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6//
7//===----------------------------------------------------------------------===//
8
9#include "Interp.h"
10#include "Compiler.h"
11#include "Function.h"
12#include "InterpFrame.h"
13#include "InterpShared.h"
14#include "InterpStack.h"
15#include "Opcode.h"
16#include "PrimType.h"
17#include "Program.h"
18#include "State.h"
19#include "clang/AST/ASTContext.h"
20#include "clang/AST/CXXInheritance.h"
21#include "clang/AST/DeclObjC.h"
22#include "clang/AST/Expr.h"
23#include "clang/AST/ExprCXX.h"
24#include "clang/Basic/DiagnosticSema.h"
25#include "clang/Basic/TargetInfo.h"
26#include "llvm/ADT/StringExtras.h"
27
28using namespace clang;
29using namespace clang::interp;
30
/// Opcode handler for returning a value from the bytecode interpreter.
/// Value returns are handled by EvalEmitter instead, so reaching this in
/// the interpreter proper is a hard (unrecoverable) error.
static bool RetValue(InterpState &S, CodePtr &Pt) {
  llvm::report_fatal_error(reason: "Interpreter cannot return values");
}
34
35//===----------------------------------------------------------------------===//
36// Jmp, Jt, Jf
37//===----------------------------------------------------------------------===//
38
/// Unconditional jump: advance the bytecode pointer by the relative
/// \p Offset. Always succeeds.
static bool Jmp(InterpState &S, CodePtr &PC, int32_t Offset) {
  PC += Offset;
  return true;
}
43
44static bool Jt(InterpState &S, CodePtr &PC, int32_t Offset) {
45 if (S.Stk.pop<bool>()) {
46 PC += Offset;
47 }
48 return true;
49}
50
51static bool Jf(InterpState &S, CodePtr &PC, int32_t Offset) {
52 if (!S.Stk.pop<bool>()) {
53 PC += Offset;
54 }
55 return true;
56}
57
58// https://github.com/llvm/llvm-project/issues/102513
59#if defined(_MSC_VER) && !defined(__clang__) && !defined(NDEBUG)
60#pragma optimize("", off)
61#endif
62// FIXME: We have the large switch over all opcodes here again, and in
63// Interpret().
/// Speculatively interpret the bytecode between this op and the matching
/// EndSpeculation marker, then push a 32-bit int (1/0) describing whether
/// the speculation produced a constant result. Presumably this backs
/// __builtin_constant_p-style queries -- confirm against the emitter.
// FIXME: We have the large switch over all opcodes here again, and in
// Interpret().
static bool BCP(InterpState &S, CodePtr &RealPC, int32_t Offset, PrimType PT) {
  // Remember the PC and the stack size so we can verify/restore them below.
  [[maybe_unused]] CodePtr PCBefore = RealPC;
  size_t StackSizeBefore = S.Stk.size();

  // Run a nested interpreter loop on a *copy* of the PC; RealPC itself must
  // remain untouched by the speculation.
  auto SpeculativeInterp = [&S, RealPC]() -> bool {
    const InterpFrame *StartFrame = S.Current;
    CodePtr PC = RealPC;

    for (;;) {
      auto Op = PC.read<Opcode>();
      if (Op == OP_EndSpeculation)
        return true;
      CodePtr OpPC = PC;

      switch (Op) {
#define GET_INTERP
#include "Opcodes.inc"
#undef GET_INTERP
      }
    }
    llvm_unreachable("We didn't see an EndSpeculation op?");
  };

  if (SpeculativeInterp()) {
    if (PT == PT_Ptr) {
      // Pointer results need an extra check before we may claim the
      // speculation yielded a constant.
      const auto &Ptr = S.Stk.pop<Pointer>();
      assert(S.Stk.size() == StackSizeBefore);
      S.Stk.push<Integral<32, true>>(
          Args: Integral<32, true>::from(Value: CheckBCPResult(S, Ptr)));
    } else {
      // Pop the result from the stack and return success.
      TYPE_SWITCH(PT, S.Stk.pop<T>(););
      assert(S.Stk.size() == StackSizeBefore);
      S.Stk.push<Integral<32, true>>(Args: Integral<32, true>::from(Value: 1));
    }
  } else {
    // Failed speculation is only tolerated in a constant context; otherwise
    // the whole evaluation is invalid.
    if (!S.inConstantContext())
      return Invalid(S, OpPC: RealPC);

    S.Stk.clearTo(NewSize: StackSizeBefore);
    S.Stk.push<Integral<32, true>>(Args: Integral<32, true>::from(Value: 0));
  }

  // RealPC should not have been modified.
  assert(*RealPC == *PCBefore);

  // Jump to the end label. This is a little trickier than just
  // RealPC += Offset, because our usual jump instructions don't have any
  // arguments, so the offset we get is a little too much and we need to
  // subtract the size of the PrimType argument again.
  int32_t ParamSize = align(Size: sizeof(PrimType));
  assert(Offset >= ParamSize);
  RealPC += Offset - ParamSize;

  [[maybe_unused]] CodePtr PCCopy = RealPC;
  assert(PCCopy.read<Opcode>() == OP_EndSpeculation);

  return true;
}
123// https://github.com/llvm/llvm-project/issues/102513
124#if defined(_MSC_VER) && !defined(__clang__) && !defined(NDEBUG)
125#pragma optimize("", on)
126#endif
127
128static void diagnoseMissingInitializer(InterpState &S, CodePtr OpPC,
129 const ValueDecl *VD) {
130 const SourceInfo &E = S.Current->getSource(PC: OpPC);
131 S.FFDiag(SI: E, DiagId: diag::note_constexpr_var_init_unknown, ExtraNotes: 1) << VD;
132 S.Note(Loc: VD->getLocation(), DiagId: diag::note_declared_at) << VD->getSourceRange();
133}
134
static void diagnoseNonConstVariable(InterpState &S, CodePtr OpPC,
                                     const ValueDecl *VD);
/// Diagnose a reference to a declaration whose value the interpreter does
/// not know (function parameters, non-const or uninitialized variables).
/// Always returns false so callers can `return diagnoseUnknownDecl(...)`.
static bool diagnoseUnknownDecl(InterpState &S, CodePtr OpPC,
                                const ValueDecl *D) {
  // This function tries pretty hard to produce a good diagnostic. Just skip
  // that if nobody will see it anyway.
  if (!S.diagnosing())
    return false;

  if (isa<ParmVarDecl>(Val: D)) {
    // Reference parameters get no note at all here.
    if (D->getType()->isReferenceType())
      return false;

    const SourceInfo &Loc = S.Current->getSource(PC: OpPC);
    if (S.getLangOpts().CPlusPlus11) {
      S.FFDiag(SI: Loc, DiagId: diag::note_constexpr_function_param_value_unknown) << D;
      S.Note(Loc: D->getLocation(), DiagId: diag::note_declared_at) << D->getSourceRange();
    } else {
      // Pre-C++11 only has the generic "invalid subexpression" note.
      S.FFDiag(SI: Loc);
    }
    return false;
  }

  if (!D->getType().isConstQualified()) {
    diagnoseNonConstVariable(S, OpPC, VD: D);
  } else if (const auto *VD = dyn_cast<VarDecl>(Val: D)) {
    if (!VD->getAnyInitializer()) {
      diagnoseMissingInitializer(S, OpPC, VD);
    } else {
      // Const-qualified and initialized, but the initializer itself is not
      // a constant expression.
      const SourceInfo &Loc = S.Current->getSource(PC: OpPC);
      S.FFDiag(SI: Loc, DiagId: diag::note_constexpr_var_init_non_constant, ExtraNotes: 1) << VD;
      S.Note(Loc: VD->getLocation(), DiagId: diag::note_declared_at);
    }
  }

  return false;
}
172
/// Emit the "read of non-constant variable" family of notes for \p VD,
/// choosing the note that matches the language mode and the variable's type.
static void diagnoseNonConstVariable(InterpState &S, CodePtr OpPC,
                                     const ValueDecl *VD) {
  if (!S.diagnosing())
    return;

  const SourceInfo &Loc = S.Current->getSource(PC: OpPC);
  // C only has the generic "invalid subexpression" note.
  if (!S.getLangOpts().CPlusPlus) {
    S.FFDiag(SI: Loc);
    return;
  }

  // A const variable without any initializer is really a missing-initializer
  // problem; diagnose it as such.
  if (const auto *VarD = dyn_cast<VarDecl>(Val: VD);
      VarD && VarD->getType().isConstQualified() &&
      !VarD->getAnyInitializer()) {
    diagnoseMissingInitializer(S, OpPC, VD);
    return;
  }

  // Rather random, but this is to match the diagnostic output of the current
  // interpreter.
  if (isa<ObjCIvarDecl>(Val: VD))
    return;

  if (VD->getType()->isIntegralOrEnumerationType()) {
    S.FFDiag(SI: Loc, DiagId: diag::note_constexpr_ltor_non_const_int, ExtraNotes: 1) << VD;
    S.Note(Loc: VD->getLocation(), DiagId: diag::note_declared_at);
    return;
  }

  // Non-integral type: pick the note wording by language mode.
  S.FFDiag(SI: Loc,
           DiagId: S.getLangOpts().CPlusPlus11 ? diag::note_constexpr_ltor_non_constexpr
                                        : diag::note_constexpr_ltor_non_integral,
           ExtraNotes: 1)
      << VD << VD->getType();
  S.Note(Loc: VD->getLocation(), DiagId: diag::note_declared_at);
}
209
/// Checks that a static temporary (a lifetime-extended
/// MaterializeTemporaryExpr) is actually usable in the current constant
/// evaluation; temporaries created in the same evaluation are always fine.
static bool CheckTemporary(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
                           AccessKinds AK) {
  if (auto ID = Ptr.getDeclID()) {
    if (!Ptr.isStaticTemporary())
      return true;

    const auto *MTE = dyn_cast_if_present<MaterializeTemporaryExpr>(
        Val: Ptr.getDeclDesc()->asExpr());
    if (!MTE)
      return true;

    // FIXME(perf): Since we do this check on every Load from a static
    // temporary, it might make sense to cache the value of the
    // isUsableInConstantExpressions call.
    if (!MTE->isUsableInConstantExpressions(Context: S.getASTContext()) &&
        Ptr.block()->getEvalID() != S.Ctx.getEvalID()) {
      const SourceInfo &E = S.Current->getSource(PC: OpPC);
      S.FFDiag(SI: E, DiagId: diag::note_constexpr_access_static_temporary, ExtraNotes: 1) << AK;
      S.Note(Loc: Ptr.getDeclLoc(), DiagId: diag::note_constexpr_temporary_here);
      return false;
    }
  }
  return true;
}
234
235static bool CheckGlobal(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
236 if (auto ID = Ptr.getDeclID()) {
237 if (!Ptr.isStatic())
238 return true;
239
240 if (S.P.getCurrentDecl() == ID)
241 return true;
242
243 S.FFDiag(Loc: S.Current->getLocation(PC: OpPC), DiagId: diag::note_constexpr_modify_global);
244 return false;
245 }
246 return true;
247}
248
249namespace clang {
250namespace interp {
/// Discard one call argument from the stack, using the classified primitive
/// type of \p Arg (pointers for everything non-primitive).
static void popArg(InterpState &S, const Expr *Arg) {
  PrimType Ty = S.getContext().classify(E: Arg).value_or(u: PT_Ptr);
  TYPE_SWITCH(Ty, S.Stk.discard<T>());
}
255
/// Remove a finished call's arguments from the stack: first any extra
/// variadic arguments (recovered from the call expression in the caller,
/// popped in reverse order), then the fixed parameters.
void cleanupAfterFunctionCall(InterpState &S, CodePtr OpPC,
                              const Function *Func) {
  assert(S.Current);
  assert(Func);

  if (S.Current->Caller && Func->isVariadic()) {
    // The CallExpr we're looking for is at the return PC of the current
    // function, i.e. in the caller.
    // This code path should be executed very rarely.
    unsigned NumVarArgs;
    const Expr *const *Args = nullptr;
    unsigned NumArgs = 0;
    const Expr *CallSite = S.Current->Caller->getExpr(PC: S.Current->getRetPC());
    if (const auto *CE = dyn_cast<CallExpr>(Val: CallSite)) {
      Args = CE->getArgs();
      NumArgs = CE->getNumArgs();
    } else if (const auto *CE = dyn_cast<CXXConstructExpr>(Val: CallSite)) {
      Args = CE->getArgs();
      NumArgs = CE->getNumArgs();
    } else
      assert(false && "Can't get arguments from that expression type");

    assert(NumArgs >= Func->getNumWrittenParams());
    // Operator calls contribute one extra written argument -- presumably the
    // object argument; TODO confirm against the bytecode emitter.
    NumVarArgs = NumArgs - (Func->getNumWrittenParams() +
                            isa<CXXOperatorCallExpr>(Val: CallSite));
    for (unsigned I = 0; I != NumVarArgs; ++I) {
      const Expr *A = Args[NumArgs - 1 - I];
      popArg(S, Arg: A);
    }
  }

  // And in any case, remove the fixed parameters (the non-variadic ones)
  // at the end.
  for (PrimType Ty : Func->args_reverse())
    TYPE_SWITCH(Ty, S.Stk.discard<T>());
}
292
293bool isConstexprUnknown(const Pointer &P) {
294 if (!P.isBlockPointer())
295 return false;
296
297 if (P.isDummy())
298 return isa_and_nonnull<ParmVarDecl>(Val: P.getDeclDesc()->asValueDecl());
299
300 return P.getDeclDesc()->IsConstexprUnknown;
301}
302
/// Decide whether the pointer result of a successful speculation counts as
/// a constant for BCP purposes. The order of these predicates matters:
/// dummy pointers are rejected before any of the accepting cases.
bool CheckBCPResult(InterpState &S, const Pointer &Ptr) {
  if (Ptr.isDummy())
    return false;
  if (Ptr.isZero())
    return true;
  if (Ptr.isFunctionPointer())
    return false;
  if (Ptr.isIntegralPointer())
    return true;
  if (Ptr.isTypeidPointer())
    return true;

  if (Ptr.getType()->isAnyComplexType())
    return true;

  // A pointer to the start of a string literal is constant; every other
  // expression-backed pointer is not.
  if (const Expr *Base = Ptr.getDeclDesc()->asExpr())
    return isa<StringLiteral>(Val: Base) && Ptr.getIndex() == 0;
  return false;
}
322
/// Checks that \p Ptr designates the active member along any unions on its
/// access path; otherwise diagnoses a read of an inactive union member,
/// naming the active member if one exists.
bool CheckActive(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
                 AccessKinds AK) {
  if (Ptr.isActive())
    return true;

  assert(Ptr.inUnion());
  assert(Ptr.isField() && Ptr.getField());

  // Walk up from Ptr towards the nearest enclosing inactive base. C trails
  // one step behind U, so C ends up being the inactive field inside U.
  Pointer U = Ptr.getBase();
  Pointer C = Ptr;
  while (!U.isRoot() && !U.isActive()) {
    // A little arbitrary, but this is what the current interpreter does.
    // See the AnonymousUnion test in test/AST/ByteCode/unions.cpp.
    // GCC's output is more similar to what we would get without
    // this condition.
    if (U.getRecord() && U.getRecord()->isAnonymousUnion())
      break;

    C = U;
    U = U.getBase();
  }
  assert(C.isField());

  // Consider:
  // union U {
  //   struct {
  //     int x;
  //     int y;
  //   } a;
  // }
  //
  // When activating x, we will also activate a. If we now try to read
  // from y, we will get to CheckActive, because y is not active. In that
  // case, our U will be a (not a union). We return here and let later code
  // handle this.
  if (!U.getFieldDesc()->isUnion())
    return true;

  // Get the inactive field descriptor.
  assert(!C.isActive());
  const FieldDecl *InactiveField = C.getField();
  assert(InactiveField);

  // Find the active field of the union.
  const Record *R = U.getRecord();
  assert(R && R->isUnion() && "Not a union");

  const FieldDecl *ActiveField = nullptr;
  for (const Record::Field &F : R->fields()) {
    const Pointer &Field = U.atField(Off: F.Offset);
    if (Field.isActive()) {
      ActiveField = Field.getField();
      break;
    }
  }

  const SourceInfo &Loc = S.Current->getSource(PC: OpPC);
  S.FFDiag(SI: Loc, DiagId: diag::note_constexpr_access_inactive_union_member)
      << AK << InactiveField << !ActiveField << ActiveField;
  return false;
}
384
/// Checks that a variable with external linkage may be accessed: it must be
/// initialized, or be the very declaration we are currently evaluating.
bool CheckExtern(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
  if (!Ptr.isExtern())
    return true;

  if (Ptr.isInitialized() ||
      (Ptr.getDeclDesc()->asVarDecl() == S.EvaluatingDecl))
    return true;

  // When only checking a potential constant expression, fail silently for
  // const externs -- they might be initialized at the point of actual use.
  if (S.checkingPotentialConstantExpression() && S.getLangOpts().CPlusPlus &&
      Ptr.isConst())
    return false;

  const auto *VD = Ptr.getDeclDesc()->asValueDecl();
  diagnoseNonConstVariable(S, OpPC, VD);
  return false;
}
401
402bool CheckArray(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
403 if (!Ptr.isUnknownSizeArray())
404 return true;
405 const SourceInfo &E = S.Current->getSource(PC: OpPC);
406 S.FFDiag(SI: E, DiagId: diag::note_constexpr_unsized_array_indexed);
407 return false;
408}
409
/// Checks that the pointer refers to an object whose lifetime has not ended:
/// diagnoses null pointers as well as dead (deleted / out-of-scope) objects.
bool CheckLive(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
               AccessKinds AK) {
  if (Ptr.isZero()) {
    const auto &Src = S.Current->getSource(PC: OpPC);

    // Null field access gets the subobject note; plain null access gets the
    // access-kind note.
    if (Ptr.isField())
      S.FFDiag(SI: Src, DiagId: diag::note_constexpr_null_subobject) << CSK_Field;
    else
      S.FFDiag(SI: Src, DiagId: diag::note_constexpr_access_null) << AK;

    return false;
  }

  if (!Ptr.isLive()) {
    const auto &Src = S.Current->getSource(PC: OpPC);

    if (Ptr.isDynamic()) {
      S.FFDiag(SI: Src, DiagId: diag::note_constexpr_access_deleted_object) << AK;
    } else if (!S.checkingPotentialConstantExpression()) {
      bool IsTemp = Ptr.isTemporary();
      S.FFDiag(SI: Src, DiagId: diag::note_constexpr_lifetime_ended, ExtraNotes: 1) << AK << !IsTemp;

      // Point at either the temporary or the declaration, as appropriate.
      if (IsTemp)
        S.Note(Loc: Ptr.getDeclLoc(), DiagId: diag::note_constexpr_temporary_here);
      else
        S.Note(Loc: Ptr.getDeclLoc(), DiagId: diag::note_declared_at);
    }

    return false;
  }

  return true;
}
443
/// Checks that the variable described by \p Desc may be read in a constant
/// expression; the rules differ by type (integral, pointer/reference, other)
/// and language mode.
bool CheckConstant(InterpState &S, CodePtr OpPC, const Descriptor *Desc) {
  assert(Desc);

  const auto *D = Desc->asVarDecl();
  if (!D || D == S.EvaluatingDecl || D->isConstexpr())
    return true;

  // If we're evaluating the initializer for a constexpr variable in C23, we may
  // only read other constexpr variables. Abort here since this one isn't
  // constexpr.
  if (const auto *VD = dyn_cast_if_present<VarDecl>(Val: S.EvaluatingDecl);
      VD && VD->isConstexpr() && S.getLangOpts().C23)
    return Invalid(S, OpPC);

  QualType T = D->getType();
  bool IsConstant = T.isConstant(Ctx: S.getASTContext());
  if (T->isIntegralOrEnumerationType()) {
    // Integral variables must be const-qualified to be readable.
    if (!IsConstant) {
      diagnoseNonConstVariable(S, OpPC, VD: D);
      return false;
    }
    return true;
  }

  if (IsConstant) {
    // Const but non-integral: allowed, but a CCE diagnostic is emitted in
    // C++ (wording depends on the language mode).
    if (S.getLangOpts().CPlusPlus) {
      S.CCEDiag(Loc: S.Current->getLocation(PC: OpPC),
                DiagId: S.getLangOpts().CPlusPlus11
                    ? diag::note_constexpr_ltor_non_constexpr
                    : diag::note_constexpr_ltor_non_integral,
                ExtraNotes: 1)
          << D << T;
      S.Note(Loc: D->getLocation(), DiagId: diag::note_declared_at);
    } else {
      S.CCEDiag(Loc: S.Current->getLocation(PC: OpPC));
    }
    return true;
  }

  if (T->isPointerOrReferenceType()) {
    // A non-const pointer/reference is fine in C++11 onwards if the pointee
    // type is const.
    if (!T->getPointeeType().isConstant(Ctx: S.getASTContext()) ||
        !S.getLangOpts().CPlusPlus11) {
      diagnoseNonConstVariable(S, OpPC, VD: D);
      return false;
    }
    return true;
  }

  diagnoseNonConstVariable(S, OpPC, VD: D);
  return false;
}
495
496static bool CheckConstant(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
497 if (!Ptr.isStatic() || !Ptr.isBlockPointer())
498 return true;
499 if (!Ptr.getDeclID())
500 return true;
501 return CheckConstant(S, OpPC, Desc: Ptr.getDeclDesc());
502}
503
504bool CheckNull(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
505 CheckSubobjectKind CSK) {
506 if (!Ptr.isZero())
507 return true;
508 const SourceInfo &Loc = S.Current->getSource(PC: OpPC);
509 S.FFDiag(SI: Loc, DiagId: diag::note_constexpr_null_subobject)
510 << CSK << S.Current->getRange(PC: OpPC);
511
512 return false;
513}
514
515bool CheckRange(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
516 AccessKinds AK) {
517 if (!Ptr.isOnePastEnd())
518 return true;
519 if (S.getLangOpts().CPlusPlus) {
520 const SourceInfo &Loc = S.Current->getSource(PC: OpPC);
521 S.FFDiag(SI: Loc, DiagId: diag::note_constexpr_access_past_end)
522 << AK << S.Current->getRange(PC: OpPC);
523 }
524 return false;
525}
526
527bool CheckRange(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
528 CheckSubobjectKind CSK) {
529 if (!Ptr.isElementPastEnd())
530 return true;
531 const SourceInfo &Loc = S.Current->getSource(PC: OpPC);
532 S.FFDiag(SI: Loc, DiagId: diag::note_constexpr_past_end_subobject)
533 << CSK << S.Current->getRange(PC: OpPC);
534 return false;
535}
536
537bool CheckSubobject(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
538 CheckSubobjectKind CSK) {
539 if (!Ptr.isOnePastEnd())
540 return true;
541
542 const SourceInfo &Loc = S.Current->getSource(PC: OpPC);
543 S.FFDiag(SI: Loc, DiagId: diag::note_constexpr_past_end_subobject)
544 << CSK << S.Current->getRange(PC: OpPC);
545 return false;
546}
547
/// Checks that a downcast by \p Offset bytes still lands inside the object's
/// data region (i.e. past the descriptor metadata); diagnoses an invalid
/// downcast otherwise.
bool CheckDowncast(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
                   uint32_t Offset) {
  uint32_t MinOffset = Ptr.getDeclDesc()->getMetadataSize();
  uint32_t PtrOffset = Ptr.getByteOffset();

  // We subtract Offset from PtrOffset. The result must be at least
  // MinOffset.
  if (Offset < PtrOffset && (PtrOffset - Offset) >= MinOffset)
    return true;

  const auto *E = cast<CastExpr>(Val: S.Current->getExpr(PC: OpPC));
  QualType TargetQT = E->getType()->getPointeeType();
  QualType MostDerivedQT = Ptr.getDeclPtr().getType();

  S.CCEDiag(E, DiagId: diag::note_constexpr_invalid_downcast)
      << MostDerivedQT << TargetQT;

  return false;
}
567
568bool CheckConst(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
569 assert(Ptr.isLive() && "Pointer is not live");
570 if (!Ptr.isConst() || Ptr.isMutable())
571 return true;
572
573 if (!Ptr.isBlockPointer())
574 return false;
575
576 // The This pointer is writable in constructors and destructors,
577 // even if isConst() returns true.
578 if (llvm::find(Range&: S.InitializingBlocks, Val: Ptr.block()))
579 return true;
580
581 const QualType Ty = Ptr.getType();
582 const SourceInfo &Loc = S.Current->getSource(PC: OpPC);
583 S.FFDiag(SI: Loc, DiagId: diag::note_constexpr_modify_const_type) << Ty;
584 return false;
585}
586
/// Checks that a mutable member may be read. Since C++14, that is allowed
/// when the containing object's lifetime began within this evaluation.
bool CheckMutable(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
  assert(Ptr.isLive() && "Pointer is not live");
  if (!Ptr.isMutable())
    return true;

  // In C++14 onwards, it is permitted to read a mutable member whose
  // lifetime began within the evaluation.
  if (S.getLangOpts().CPlusPlus14 &&
      Ptr.block()->getEvalID() == S.Ctx.getEvalID()) {
    // FIXME: This check is necessary because (of the way) we revisit
    // variables in Compiler.cpp:visitDeclRef. Revisiting a so far
    // unknown variable will get the same EvalID and we end up allowing
    // reads from mutable members of it.
    if (!S.inConstantContext() && isConstexprUnknown(P: Ptr))
      return false;
    return true;
  }

  const SourceInfo &Loc = S.Current->getSource(PC: OpPC);
  const FieldDecl *Field = Ptr.getField();
  S.FFDiag(SI: Loc, DiagId: diag::note_constexpr_access_mutable, ExtraNotes: 1) << AK_Read << Field;
  S.Note(Loc: Field->getLocation(), DiagId: diag::note_declared_at);
  return false;
}
611
/// Checks that \p Ptr does not access a volatile object; volatile accesses
/// are never constant expressions. In C++ the note names the field,
/// variable, or expression that introduced the volatile qualifier.
static bool CheckVolatile(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
                          AccessKinds AK) {
  assert(Ptr.isLive());

  if (!Ptr.isVolatile())
    return true;

  // C gets only the generic invalid-evaluation treatment.
  if (!S.getLangOpts().CPlusPlus)
    return Invalid(S, OpPC);

  // The reason why Ptr is volatile might be further up the hierarchy.
  // Find that pointer.
  Pointer P = Ptr;
  while (!P.isRoot()) {
    if (P.getType().isVolatileQualified())
      break;
    P = P.getBase();
  }

  // DiagKind selects the note wording: 2 = field, 1 = variable,
  // 0 = expression.
  const NamedDecl *ND = nullptr;
  int DiagKind;
  SourceLocation Loc;
  if (const auto *F = P.getField()) {
    DiagKind = 2;
    Loc = F->getLocation();
    ND = F;
  } else if (auto *VD = P.getFieldDesc()->asValueDecl()) {
    DiagKind = 1;
    Loc = VD->getLocation();
    ND = VD;
  } else {
    DiagKind = 0;
    if (const auto *E = P.getFieldDesc()->asExpr())
      Loc = E->getExprLoc();
  }

  S.FFDiag(Loc: S.Current->getLocation(PC: OpPC),
           DiagId: diag::note_constexpr_access_volatile_obj, ExtraNotes: 1)
      << AK << DiagKind << ND;
  S.Note(Loc, DiagId: diag::note_constexpr_volatile_here) << DiagKind;
  return false;
}
654
/// Checks that the object designated by \p Ptr has been initialized before
/// it is accessed; the diagnostic differs for constexpr/global variables,
/// self-reads of the declaration being evaluated, and everything else.
bool CheckInitialized(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
                      AccessKinds AK) {
  assert(Ptr.isLive());

  if (Ptr.isInitialized())
    return true;

  if (const auto *VD = Ptr.getDeclDesc()->asVarDecl();
      VD && (VD->isConstexpr() || VD->hasGlobalStorage())) {

    // Reading the declaration we are currently initializing (except via a
    // C++23 reference) is an error.
    if (VD == S.EvaluatingDecl &&
        !(S.getLangOpts().CPlusPlus23 && VD->getType()->isReferenceType())) {
      if (!S.getLangOpts().CPlusPlus14 &&
          !VD->getType().isConstant(Ctx: S.getASTContext())) {
        // Diagnose as non-const read.
        diagnoseNonConstVariable(S, OpPC, VD);
      } else {
        const SourceInfo &Loc = S.Current->getSource(PC: OpPC);
        // Diagnose as "read of object outside its lifetime".
        S.FFDiag(SI: Loc, DiagId: diag::note_constexpr_access_uninit)
            << AK << /*IsIndeterminate=*/false;
      }
      return false;
    }

    if (VD->getAnyInitializer()) {
      // Initialized, but the initializer wasn't a constant expression.
      const SourceInfo &Loc = S.Current->getSource(PC: OpPC);
      S.FFDiag(SI: Loc, DiagId: diag::note_constexpr_var_init_non_constant, ExtraNotes: 1) << VD;
      S.Note(Loc: VD->getLocation(), DiagId: diag::note_declared_at);
    } else {
      diagnoseMissingInitializer(S, OpPC, VD);
    }
    return false;
  }

  if (!S.checkingPotentialConstantExpression()) {
    S.FFDiag(SI: S.Current->getSource(PC: OpPC), DiagId: diag::note_constexpr_access_uninit)
        << AK << /*uninitialized=*/true << S.Current->getRange(PC: OpPC);
  }
  return false;
}
696
697static bool CheckLifetime(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
698 AccessKinds AK) {
699 if (Ptr.getLifetime() == Lifetime::Started)
700 return true;
701
702 if (!S.checkingPotentialConstantExpression()) {
703 S.FFDiag(SI: S.Current->getSource(PC: OpPC), DiagId: diag::note_constexpr_access_uninit)
704 << AK << /*uninitialized=*/false << S.Current->getRange(PC: OpPC);
705 }
706 return false;
707}
708
/// Checks that a global accessed via \p Ptr has been initialized; emits a
/// note only when the variable could plausibly have had a constant
/// initializer (or, for OpenCL pre-C++11, an ICE initializer).
bool CheckGlobalInitialized(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
  if (Ptr.isInitialized())
    return true;

  assert(S.getLangOpts().CPlusPlus);
  const auto *VD = cast<VarDecl>(Val: Ptr.getDeclDesc()->asValueDecl());
  if ((!VD->hasConstantInitialization() &&
       VD->mightBeUsableInConstantExpressions(C: S.getASTContext())) ||
      (S.getLangOpts().OpenCL && !S.getLangOpts().CPlusPlus11 &&
       !VD->hasICEInitializer(Context: S.getASTContext()))) {
    const SourceInfo &Loc = S.Current->getSource(PC: OpPC);
    S.FFDiag(SI: Loc, DiagId: diag::note_constexpr_var_init_non_constant, ExtraNotes: 1) << VD;
    S.Note(Loc: VD->getLocation(), DiagId: diag::note_declared_at);
  }
  return false;
}
725
726static bool CheckWeak(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
727 if (!Ptr.isWeak())
728 return true;
729
730 const auto *VD = Ptr.getDeclDesc()->asVarDecl();
731 assert(VD);
732 S.FFDiag(Loc: S.Current->getLocation(PC: OpPC), DiagId: diag::note_constexpr_var_init_weak)
733 << VD;
734 S.Note(Loc: VD->getLocation(), DiagId: diag::note_declared_at);
735
736 return false;
737}
738
/// Runs the full battery of checks required before loading a value through
/// \p Ptr. The order of the checks determines which diagnostic wins when
/// several would apply, so do not reorder them.
bool CheckLoad(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
               AccessKinds AK) {
  if (!CheckLive(S, OpPC, Ptr, AK))
    return false;
  if (!CheckExtern(S, OpPC, Ptr))
    return false;
  if (!CheckConstant(S, OpPC, Ptr))
    return false;
  if (!CheckDummy(S, OpPC, Ptr, AK))
    return false;
  if (!CheckRange(S, OpPC, Ptr, AK))
    return false;
  if (!CheckActive(S, OpPC, Ptr, AK))
    return false;
  if (!CheckLifetime(S, OpPC, Ptr, AK))
    return false;
  if (!CheckInitialized(S, OpPC, Ptr, AK))
    return false;
  if (!CheckTemporary(S, OpPC, Ptr, AK))
    return false;
  if (!CheckWeak(S, OpPC, Ptr))
    return false;
  if (!CheckMutable(S, OpPC, Ptr))
    return false;
  if (!CheckVolatile(S, OpPC, Ptr, AK))
    return false;
  return true;
}
767
/// This is not used by any of the opcodes directly. It's used by
/// EvalEmitter to do the final lvalue-to-rvalue conversion.
/// Same checks as CheckLoad (minus the volatile check), hard-wired to
/// AK_Read and in a slightly different order; the order is significant for
/// which diagnostic is emitted.
bool CheckFinalLoad(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
  if (!CheckLive(S, OpPC, Ptr, AK: AK_Read))
    return false;
  if (!CheckConstant(S, OpPC, Ptr))
    return false;

  if (!CheckDummy(S, OpPC, Ptr, AK: AK_Read))
    return false;
  if (!CheckExtern(S, OpPC, Ptr))
    return false;
  if (!CheckRange(S, OpPC, Ptr, AK: AK_Read))
    return false;
  if (!CheckActive(S, OpPC, Ptr, AK: AK_Read))
    return false;
  if (!CheckLifetime(S, OpPC, Ptr, AK: AK_Read))
    return false;
  if (!CheckInitialized(S, OpPC, Ptr, AK: AK_Read))
    return false;
  if (!CheckTemporary(S, OpPC, Ptr, AK: AK_Read))
    return false;
  if (!CheckWeak(S, OpPC, Ptr))
    return false;
  if (!CheckMutable(S, OpPC, Ptr))
    return false;
  return true;
}
796
/// Runs all checks required before storing a value through \p Ptr; the
/// check order determines which diagnostic is emitted first.
bool CheckStore(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
  if (!CheckLive(S, OpPC, Ptr, AK: AK_Assign))
    return false;
  if (!CheckDummy(S, OpPC, Ptr, AK: AK_Assign))
    return false;
  if (!CheckLifetime(S, OpPC, Ptr, AK: AK_Assign))
    return false;
  if (!CheckExtern(S, OpPC, Ptr))
    return false;
  if (!CheckRange(S, OpPC, Ptr, AK: AK_Assign))
    return false;
  if (!CheckGlobal(S, OpPC, Ptr))
    return false;
  if (!CheckConst(S, OpPC, Ptr))
    return false;
  // Writes to constexpr-unknown objects fail silently outside a constant
  // context.
  if (!S.inConstantContext() && isConstexprUnknown(P: Ptr))
    return false;
  return true;
}
816
817bool CheckInvoke(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
818 if (!CheckLive(S, OpPC, Ptr, AK: AK_MemberCall))
819 return false;
820 if (!Ptr.isDummy()) {
821 if (!CheckExtern(S, OpPC, Ptr))
822 return false;
823 if (!CheckRange(S, OpPC, Ptr, AK: AK_MemberCall))
824 return false;
825 }
826 return true;
827}
828
829bool CheckInit(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
830 if (!CheckLive(S, OpPC, Ptr, AK: AK_Assign))
831 return false;
832 if (!CheckRange(S, OpPC, Ptr, AK: AK_Assign))
833 return false;
834 return true;
835}
836
/// Checks that function \p F may be called in a constant expression and
/// produces a (fairly elaborate) diagnostic when it may not. Returns true
/// only for valid constexpr functions with a body (or lambda static
/// invokers); all other paths return false, with or without a note.
bool CheckCallable(InterpState &S, CodePtr OpPC, const Function *F) {

  // Virtual calls are only constant expressions from C++20 on.
  if (F->isVirtual() && !S.getLangOpts().CPlusPlus20) {
    const SourceLocation &Loc = S.Current->getLocation(PC: OpPC);
    S.CCEDiag(Loc, DiagId: diag::note_constexpr_virtual_call);
    return false;
  }

  // When checking a potential constant expression, only top-level calls are
  // explored; fail nested calls silently.
  if (S.checkingPotentialConstantExpression() && S.Current->getDepth() != 0)
    return false;

  if (F->isValid() && F->hasBody() && F->isConstexpr())
    return true;

  // Implicitly constexpr.
  if (F->isLambdaStaticInvoker())
    return true;

  // Bail out if the function declaration itself is invalid. We will
  // have produced a relevant diagnostic while parsing it, so just
  // note the problematic sub-expression.
  if (F->getDecl()->isInvalidDecl())
    return Invalid(S, OpPC);

  // Diagnose failed assertions specially.
  if (S.Current->getLocation(PC: OpPC).isMacroID() &&
      F->getDecl()->getIdentifier()) {
    // FIXME: Instead of checking for an implementation-defined function,
    // check and evaluate the assert() macro.
    StringRef Name = F->getDecl()->getName();
    bool AssertFailed =
        Name == "__assert_rtn" || Name == "__assert_fail" || Name == "_wassert";
    if (AssertFailed) {
      S.FFDiag(Loc: S.Current->getLocation(PC: OpPC),
               DiagId: diag::note_constexpr_assert_failed);
      return false;
    }
  }

  if (S.getLangOpts().CPlusPlus11) {
    const FunctionDecl *DiagDecl = F->getDecl();

    // Invalid decls have been diagnosed before.
    if (DiagDecl->isInvalidDecl())
      return false;

    // If this function is not constexpr because it is an inherited
    // non-constexpr constructor, diagnose that directly.
    const auto *CD = dyn_cast<CXXConstructorDecl>(Val: DiagDecl);
    if (CD && CD->isInheritingConstructor()) {
      const auto *Inherited = CD->getInheritedConstructor().getConstructor();
      if (!Inherited->isConstexpr())
        DiagDecl = CD = Inherited;
    }

    // Silently reject constructors of invalid classes. The invalid class
    // has been rejected elsewhere before.
    if (CD && CD->getParent()->isInvalidDecl())
      return false;

    // FIXME: If DiagDecl is an implicitly-declared special member function
    // or an inheriting constructor, we should be much more explicit about why
    // it's not constexpr.
    if (CD && CD->isInheritingConstructor()) {
      S.FFDiag(Loc: S.Current->getLocation(PC: OpPC),
               DiagId: diag::note_constexpr_invalid_inhctor, ExtraNotes: 1)
          << CD->getInheritedConstructor().getConstructor()->getParent();
      S.Note(Loc: DiagDecl->getLocation(), DiagId: diag::note_declared_at);
    } else {
      // Don't emit anything if the function isn't defined and we're checking
      // for a constant expression. It might be defined at the point we're
      // actually calling it.
      bool IsExtern = DiagDecl->getStorageClass() == SC_Extern;
      bool IsDefined = F->isDefined();
      if (!IsDefined && !IsExtern && DiagDecl->isConstexpr() &&
          S.checkingPotentialConstantExpression())
        return false;

      // If the declaration is defined, declared 'constexpr' _and_ has a body,
      // the below diagnostic doesn't add anything useful.
      if (DiagDecl->isDefined() && DiagDecl->isConstexpr() &&
          DiagDecl->hasBody())
        return false;

      S.FFDiag(Loc: S.Current->getLocation(PC: OpPC),
               DiagId: diag::note_constexpr_invalid_function, ExtraNotes: 1)
          << DiagDecl->isConstexpr() << (bool)CD << DiagDecl;

      // Point at the definition if there is one; otherwise the declaration.
      if (DiagDecl->getDefinition())
        S.Note(Loc: DiagDecl->getDefinition()->getLocation(),
               DiagId: diag::note_declared_at);
      else
        S.Note(Loc: DiagDecl->getLocation(), DiagId: diag::note_declared_at);
    }
  } else {
    S.FFDiag(Loc: S.Current->getLocation(PC: OpPC),
             DiagId: diag::note_invalid_subexpr_in_const_expr);
  }

  return false;
}
938
939bool CheckCallDepth(InterpState &S, CodePtr OpPC) {
940 if ((S.Current->getDepth() + 1) > S.getLangOpts().ConstexprCallDepth) {
941 S.FFDiag(SI: S.Current->getSource(PC: OpPC),
942 DiagId: diag::note_constexpr_depth_limit_exceeded)
943 << S.getLangOpts().ConstexprCallDepth;
944 return false;
945 }
946
947 return true;
948}
949
950bool CheckThis(InterpState &S, CodePtr OpPC, const Pointer &This) {
951 if (!This.isZero())
952 return true;
953
954 const Expr *E = S.Current->getExpr(PC: OpPC);
955 if (S.getLangOpts().CPlusPlus11) {
956 bool IsImplicit = false;
957 if (const auto *TE = dyn_cast<CXXThisExpr>(Val: E))
958 IsImplicit = TE->isImplicit();
959 S.FFDiag(E, DiagId: diag::note_constexpr_this) << IsImplicit;
960 } else {
961 S.FFDiag(E);
962 }
963
964 return false;
965}
966
/// Checks the status of a floating-point operation against the active
/// floating-point options: NaN results are UB to be noted, and outside a
/// constant context dynamic rounding modes / strict exception modes make
/// the evaluation non-constant.
bool CheckFloatResult(InterpState &S, CodePtr OpPC, const Floating &Result,
                      APFloat::opStatus Status, FPOptions FPO) {
  // [expr.pre]p4:
  //   If during the evaluation of an expression, the result is not
  //   mathematically defined [...], the behavior is undefined.
  // FIXME: C++ rules require us to not conform to IEEE 754 here.
  if (Result.isNan()) {
    const SourceInfo &E = S.Current->getSource(PC: OpPC);
    S.CCEDiag(SI: E, DiagId: diag::note_constexpr_float_arithmetic)
        << /*NaN=*/true << S.Current->getRange(PC: OpPC);
    return S.noteUndefinedBehavior();
  }

  // In a constant context, assume that any dynamic rounding mode or FP
  // exception state matches the default floating-point environment.
  if (S.inConstantContext())
    return true;

  if ((Status & APFloat::opInexact) &&
      FPO.getRoundingMode() == llvm::RoundingMode::Dynamic) {
    // Inexact result means that it depends on rounding mode. If the requested
    // mode is dynamic, the evaluation cannot be made in compile time.
    const SourceInfo &E = S.Current->getSource(PC: OpPC);
    S.FFDiag(SI: E, DiagId: diag::note_constexpr_dynamic_rounding);
    return false;
  }

  // Any non-OK status under strict FP semantics is not a constant.
  if ((Status != APFloat::opOK) &&
      (FPO.getRoundingMode() == llvm::RoundingMode::Dynamic ||
       FPO.getExceptionMode() != LangOptions::FPE_Ignore ||
       FPO.getAllowFEnvAccess())) {
    const SourceInfo &E = S.Current->getSource(PC: OpPC);
    S.FFDiag(SI: E, DiagId: diag::note_constexpr_float_arithmetic_strict);
    return false;
  }

  if ((Status & APFloat::opStatus::opInvalidOp) &&
      FPO.getExceptionMode() != LangOptions::FPE_Ignore) {
    const SourceInfo &E = S.Current->getSource(PC: OpPC);
    // There is no usefully definable result.
    S.FFDiag(SI: E);
    return false;
  }

  return true;
}
1013
1014bool CheckDynamicMemoryAllocation(InterpState &S, CodePtr OpPC) {
1015 if (S.getLangOpts().CPlusPlus20)
1016 return true;
1017
1018 const SourceInfo &E = S.Current->getSource(PC: OpPC);
1019 S.CCEDiag(SI: E, DiagId: diag::note_constexpr_new);
1020 return true;
1021}
1022
1023bool CheckNewDeleteForms(InterpState &S, CodePtr OpPC,
1024 DynamicAllocator::Form AllocForm,
1025 DynamicAllocator::Form DeleteForm, const Descriptor *D,
1026 const Expr *NewExpr) {
1027 if (AllocForm == DeleteForm)
1028 return true;
1029
1030 QualType TypeToDiagnose = D->getDataType(Ctx: S.getASTContext());
1031
1032 const SourceInfo &E = S.Current->getSource(PC: OpPC);
1033 S.FFDiag(SI: E, DiagId: diag::note_constexpr_new_delete_mismatch)
1034 << static_cast<int>(DeleteForm) << static_cast<int>(AllocForm)
1035 << TypeToDiagnose;
1036 S.Note(Loc: NewExpr->getExprLoc(), DiagId: diag::note_constexpr_dynamic_alloc_here)
1037 << NewExpr->getSourceRange();
1038 return false;
1039}
1040
1041bool CheckDeleteSource(InterpState &S, CodePtr OpPC, const Expr *Source,
1042 const Pointer &Ptr) {
1043 // Regular new type(...) call.
1044 if (isa_and_nonnull<CXXNewExpr>(Val: Source))
1045 return true;
1046 // operator new.
1047 if (const auto *CE = dyn_cast_if_present<CallExpr>(Val: Source);
1048 CE && CE->getBuiltinCallee() == Builtin::BI__builtin_operator_new)
1049 return true;
1050 // std::allocator.allocate() call
1051 if (const auto *MCE = dyn_cast_if_present<CXXMemberCallExpr>(Val: Source);
1052 MCE && MCE->getMethodDecl()->getIdentifier()->isStr(Str: "allocate"))
1053 return true;
1054
1055 // Whatever this is, we didn't heap allocate it.
1056 const SourceInfo &Loc = S.Current->getSource(PC: OpPC);
1057 S.FFDiag(SI: Loc, DiagId: diag::note_constexpr_delete_not_heap_alloc)
1058 << Ptr.toDiagnosticString(Ctx: S.getASTContext());
1059
1060 if (Ptr.isTemporary())
1061 S.Note(Loc: Ptr.getDeclLoc(), DiagId: diag::note_constexpr_temporary_here);
1062 else
1063 S.Note(Loc: Ptr.getDeclLoc(), DiagId: diag::note_declared_at);
1064 return false;
1065}
1066
/// We already know the given DeclRefExpr is invalid for some reason,
/// now figure out why and print appropriate diagnostics.
1069bool CheckDeclRef(InterpState &S, CodePtr OpPC, const DeclRefExpr *DR) {
1070 const ValueDecl *D = DR->getDecl();
1071 return diagnoseUnknownDecl(S, OpPC, D);
1072}
1073
/// A "dummy" pointer stands in for a variable whose value the interpreter
/// does not know (e.g. an extern or otherwise unevaluable declaration).
/// Non-dummy pointers pass; dummy pointers always fail, with a diagnostic
/// chosen by the access kind \p AK.
bool CheckDummy(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
                AccessKinds AK) {
  if (!Ptr.isDummy())
    return true;

  const Descriptor *Desc = Ptr.getDeclDesc();
  const ValueDecl *D = Desc->asValueDecl();
  // No declaration to point the user at: fail silently.
  if (!D)
    return false;

  // Reads (and read-modify-write ops) explain *why* the value is unknown.
  if (AK == AK_Read || AK == AK_Increment || AK == AK_Decrement)
    return diagnoseUnknownDecl(S, OpPC, D);

  // Writes/destruction get a modify-global note. NOTE(review): non-destroy
  // modifications before C++14 fail without any diagnostic here — confirm
  // that is intended.
  if (AK == AK_Destroy || S.getLangOpts().CPlusPlus14) {
    const SourceInfo &E = S.Current->getSource(PC: OpPC);
    S.FFDiag(SI: E, DiagId: diag::note_constexpr_modify_global);
  }
  return false;
}
1093
/// Check the arguments of a call against the callee's nonnull attributes.
/// \p ArgSize is the total byte size of the arguments already pushed on the
/// interp stack; it lets us peek at each argument in place without popping.
bool CheckNonNullArgs(InterpState &S, CodePtr OpPC, const Function *F,
                      const CallExpr *CE, unsigned ArgSize) {
  auto Args = ArrayRef(CE->getArgs(), CE->getNumArgs());
  auto NonNullArgs = collectNonNullArgs(F: F->getDecl(), Args);
  unsigned Offset = 0;
  unsigned Index = 0;
  for (const Expr *Arg : Args) {
    if (NonNullArgs[Index] && Arg->getType()->isPointerType()) {
      // Arguments were pushed left-to-right, so the first argument is the
      // deepest: peek at (total size - bytes already skipped).
      const Pointer &ArgPtr = S.Stk.peek<Pointer>(Offset: ArgSize - Offset);
      if (ArgPtr.isZero()) {
        const SourceLocation &Loc = S.Current->getLocation(PC: OpPC);
        S.CCEDiag(Loc, DiagId: diag::note_non_null_attribute_failed);
        return false;
      }
    }

    // Advance past this argument's slot; non-primitives live as pointers.
    Offset += align(Size: primSize(Type: S.Ctx.classify(E: Arg).value_or(u: PT_Ptr)));
    ++Index;
  }
  return true;
}
1115
/// Run the destructor for the record at \p BasePtr (described by \p Desc).
/// Diagnoses destroying the object whose destructor is currently executing
/// (double-destroy). Trivial destructors are a no-op.
static bool runRecordDestructor(InterpState &S, CodePtr OpPC,
                                const Pointer &BasePtr,
                                const Descriptor *Desc) {
  assert(Desc->isRecord());
  const Record *R = Desc->ElemRecord;
  assert(R);

  // If we're currently inside a destructor for this very object, destroying
  // it again is a double-destroy.
  if (Pointer::pointToSameBlock(A: BasePtr, B: S.Current->getThis()) &&
      S.Current->getFunction()->isDestructor()) {
    const SourceInfo &Loc = S.Current->getSource(PC: OpPC);
    S.FFDiag(SI: Loc, DiagId: diag::note_constexpr_double_destroy);
    return false;
  }

  // Destructor of this record.
  if (const CXXDestructorDecl *Dtor = R->getDestructor();
      Dtor && !Dtor->isTrivial()) {
    const Function *DtorFunc = S.getContext().getOrCreateFunction(FuncDecl: Dtor);
    if (!DtorFunc)
      return false;

    // The destructor receives the object as its 'this' argument.
    S.Stk.push<Pointer>(Args: BasePtr);
    if (!Call(S, OpPC, Func: DtorFunc, VarArgSize: 0))
      return false;
  }
  return true;
}
1143
/// Run destructors for the contents of block \p B (used before deallocating
/// it). Array elements are destroyed in reverse order, mirroring C++
/// destruction semantics.
static bool RunDestructors(InterpState &S, CodePtr OpPC, const Block *B) {
  assert(B);
  const Descriptor *Desc = B->getDescriptor();

  // Primitives (and arrays of primitives) have no destructors to run.
  if (Desc->isPrimitive() || Desc->isPrimitiveArray())
    return true;

  assert(Desc->isRecord() || Desc->isCompositeArray());

  if (Desc->isCompositeArray()) {
    unsigned N = Desc->getNumElems();
    if (N == 0)
      return true;
    const Descriptor *ElemDesc = Desc->ElemDesc;
    assert(ElemDesc->isRecord());

    // Destroy elements last-to-first. The signed loop index avoids the
    // unsigned wrap-around at 0.
    Pointer RP(const_cast<Block *>(B));
    for (int I = static_cast<int>(N) - 1; I >= 0; --I) {
      if (!runRecordDestructor(S, OpPC, BasePtr: RP.atIndex(Idx: I).narrow(), Desc: ElemDesc))
        return false;
    }
    return true;
  }

  assert(Desc->isRecord());
  return runRecordDestructor(S, OpPC, BasePtr: Pointer(const_cast<Block *>(B)), Desc);
}
1171
1172static bool hasVirtualDestructor(QualType T) {
1173 if (const CXXRecordDecl *RD = T->getAsCXXRecordDecl())
1174 if (const CXXDestructorDecl *DD = RD->getDestructor())
1175 return DD->isVirtual();
1176 return false;
1177}
1178
/// Implements delete / delete[] (and __builtin_operator_delete): pops the
/// pointer to delete, validates it (new/delete form mismatch, deleting a
/// base subobject or interior pointer, non-heap source, non-replaceable
/// virtual operator delete), runs destructors, then deallocates the block.
bool Free(InterpState &S, CodePtr OpPC, bool DeleteIsArrayForm,
          bool IsGlobalDelete) {
  if (!CheckDynamicMemoryAllocation(S, OpPC))
    return false;

  DynamicAllocator &Allocator = S.getAllocator();

  const Expr *Source = nullptr;
  const Block *BlockToDelete = nullptr;
  {
    // Extra scope for this so the block doesn't have this pointer
    // pointing to it when we destroy it.
    Pointer Ptr = S.Stk.pop<Pointer>();

    // Deleting nullptr is always fine.
    if (Ptr.isZero())
      return true;

    // Remove base casts so we see the most-derived object; keep the static
    // type around for the virtual-destructor check below.
    QualType InitialType = Ptr.getType();
    while (Ptr.isBaseClass())
      Ptr = Ptr.getBase();

    Source = Ptr.getDeclDesc()->asExpr();
    BlockToDelete = Ptr.block();

    // Check that new[]/delete[] or new/delete were used, not a mixture.
    const Descriptor *BlockDesc = BlockToDelete->getDescriptor();
    if (std::optional<DynamicAllocator::Form> AllocForm =
            Allocator.getAllocationForm(Source)) {
      DynamicAllocator::Form DeleteForm =
          DeleteIsArrayForm ? DynamicAllocator::Form::Array
                            : DynamicAllocator::Form::NonArray;
      if (!CheckNewDeleteForms(S, OpPC, AllocForm: *AllocForm, DeleteForm, D: BlockDesc,
                               NewExpr: Source))
        return false;
    }

    // For the non-array case, the types must match if the static type
    // does not have a virtual destructor.
    if (!DeleteIsArrayForm && Ptr.getType() != InitialType &&
        !hasVirtualDestructor(T: InitialType)) {
      S.FFDiag(SI: S.Current->getSource(PC: OpPC),
               DiagId: diag::note_constexpr_delete_base_nonvirt_dtor)
          << InitialType << Ptr.getType();
      return false;
    }

    // Only whole allocations may be deleted — not interior pointers,
    // one-past-the-end pointers, or array elements other than the first.
    if (!Ptr.isRoot() || Ptr.isOnePastEnd() ||
        (Ptr.isArrayElement() && Ptr.getIndex() != 0)) {
      const SourceInfo &Loc = S.Current->getSource(PC: OpPC);
      S.FFDiag(SI: Loc, DiagId: diag::note_constexpr_delete_subobject)
          << Ptr.toDiagnosticString(Ctx: S.getASTContext()) << Ptr.isOnePastEnd();
      return false;
    }

    if (!CheckDeleteSource(S, OpPC, Source, Ptr))
      return false;

    // For a class type with a virtual destructor, the selected operator delete
    // is the one looked up when building the destructor.
    if (!DeleteIsArrayForm && !IsGlobalDelete) {
      QualType AllocType = Ptr.getType();
      auto getVirtualOperatorDelete = [](QualType T) -> const FunctionDecl * {
        if (const CXXRecordDecl *RD = T->getAsCXXRecordDecl())
          if (const CXXDestructorDecl *DD = RD->getDestructor())
            return DD->isVirtual() ? DD->getOperatorDelete() : nullptr;
        return nullptr;
      };

      if (const FunctionDecl *VirtualDelete =
              getVirtualOperatorDelete(AllocType);
          VirtualDelete &&
          !VirtualDelete
               ->isUsableAsGlobalAllocationFunctionInConstantEvaluation()) {
        S.FFDiag(SI: S.Current->getSource(PC: OpPC),
                 DiagId: diag::note_constexpr_new_non_replaceable)
            << isa<CXXMethodDecl>(Val: VirtualDelete) << VirtualDelete;
        return false;
      }
    }
  }
  assert(Source);
  assert(BlockToDelete);

  // Invoke destructors before deallocating the memory.
  if (!RunDestructors(S, OpPC, B: BlockToDelete))
    return false;

  if (!Allocator.deallocate(Source, BlockToDelete, S)) {
    // Nothing has been deallocated, this must be a double-delete.
    const SourceInfo &Loc = S.Current->getSource(PC: OpPC);
    S.FFDiag(SI: Loc, DiagId: diag::note_constexpr_double_delete);
    return false;
  }

  return true;
}
1277
/// Warn (CCEDiag) about storing a value outside the representable range of
/// an unscoped enumeration. Skipped when the declaration being evaluated is
/// not constexpr.
void diagnoseEnumValue(InterpState &S, CodePtr OpPC, const EnumDecl *ED,
                       const APSInt &Value) {
  if (S.EvaluatingDecl && !S.EvaluatingDecl->isConstexpr())
    return;

  llvm::APInt Min;
  llvm::APInt Max;
  ED->getValueRange(Max, Min);
  // getValueRange() yields an exclusive upper bound; make it inclusive.
  --Max;

  if (ED->getNumNegativeBits() &&
      (Max.slt(RHS: Value.getSExtValue()) || Min.sgt(RHS: Value.getSExtValue()))) {
    // Enum with negative enumerators: compare and print as signed.
    const SourceLocation &Loc = S.Current->getLocation(PC: OpPC);
    S.CCEDiag(Loc, DiagId: diag::note_constexpr_unscoped_enum_out_of_range)
        << llvm::toString(I: Value, Radix: 10) << Min.getSExtValue() << Max.getSExtValue()
        << ED;
  } else if (!ED->getNumNegativeBits() && Max.ult(RHS: Value.getZExtValue())) {
    // All-nonnegative enum: compare and print as unsigned (Min is 0 here).
    const SourceLocation &Loc = S.Current->getLocation(PC: OpPC);
    S.CCEDiag(Loc, DiagId: diag::note_constexpr_unscoped_enum_out_of_range)
        << llvm::toString(I: Value, Radix: 10) << Min.getZExtValue() << Max.getZExtValue()
        << ED;
  }
}
1301
/// Diagnose use of a non-literal type \p T in a constant expression
/// (pre-C++23 only; C++23 dropped the literal-type requirement). Permits the
/// C++1y carve-out for constexpr constructors of the object being evaluated.
bool CheckLiteralType(InterpState &S, CodePtr OpPC, const Type *T) {
  assert(T);
  assert(!S.getLangOpts().CPlusPlus23);

  // C++1y: A constant initializer for an object o [...] may also invoke
  // constexpr constructors for o and its subobjects even if those objects
  // are of non-literal class types.
  //
  // C++11 missed this detail for aggregates, so classes like this:
  //   struct foo_t { union { int i; volatile int j; } u; };
  // are not (obviously) initializable like so:
  //   __attribute__((__require_constant_initialization__))
  //   static const foo_t x = {{0}};
  // because "i" is a subobject with non-literal initialization (due to the
  // volatile member of the union). See:
  //   http://www.open-std.org/jtc1/sc22/wg21/docs/cwg_active.html#1677
  // Therefore, we use the C++1y behavior.

  // Inside a constructor of the declaration currently being evaluated, the
  // carve-out above applies.
  if (S.Current->getFunction() && S.Current->getFunction()->isConstructor() &&
      S.Current->getThis().getDeclDesc()->asDecl() == S.EvaluatingDecl) {
    return true;
  }

  const Expr *E = S.Current->getExpr(PC: OpPC);
  if (S.getLangOpts().CPlusPlus11)
    S.FFDiag(E, DiagId: diag::note_constexpr_nonliteral) << E->getType();
  else
    S.FFDiag(E, DiagId: diag::note_invalid_subexpr_in_const_expr);
  return false;
}
1332
/// Shared implementation of GetPtrField/GetPtrFieldPop: validate \p Ptr and
/// push a pointer to the field at byte offset \p Off onto the stack.
static bool getField(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
                     uint32_t Off) {
  // Null checks only apply to C++ constant evaluation.
  if (S.getLangOpts().CPlusPlus && S.inConstantContext() &&
      !CheckNull(S, OpPC, Ptr, CSK: CSK_Field))
    return false;

  if (!CheckRange(S, OpPC, Ptr, CSK: CSK_Field))
    return false;
  if (!CheckArray(S, OpPC, Ptr))
    return false;
  if (!CheckSubobject(S, OpPC, Ptr, CSK: CSK_Field))
    return false;

  // Integral pointers carry no block; just adjust the raw offset.
  if (Ptr.isIntegralPointer()) {
    S.Stk.push<Pointer>(Args: Ptr.asIntPointer().atOffset(ASTCtx: S.getASTContext(), Offset: Off));
    return true;
  }

  if (!Ptr.isBlockPointer()) {
    // FIXME: The only time we (seem to) get here is when trying to access a
    // field of a typeid pointer. In that case, we're supposed to diagnose e.g.
    // `typeid(int).name`, but we currently diagnose `&typeid(int)`.
    S.FFDiag(SI: S.Current->getSource(PC: OpPC),
             DiagId: diag::note_constexpr_access_unreadable_object)
        << AK_Read << Ptr.toDiagnosticString(Ctx: S.getASTContext());
    return false;
  }

  // Out-of-bounds field offsets fail silently (callers diagnosed above).
  if ((Ptr.getByteOffset() + Off) >= Ptr.block()->getSize())
    return false;

  S.Stk.push<Pointer>(Args: Ptr.atField(Off));
  return true;
}
1367
1368bool GetPtrField(InterpState &S, CodePtr OpPC, uint32_t Off) {
1369 const auto &Ptr = S.Stk.peek<Pointer>();
1370 return getField(S, OpPC, Ptr, Off);
1371}
1372
1373bool GetPtrFieldPop(InterpState &S, CodePtr OpPC, uint32_t Off) {
1374 const auto &Ptr = S.Stk.pop<Pointer>();
1375 return getField(S, OpPC, Ptr, Off);
1376}
1377
/// Pre-call checks for invoking constructor \p Func on \p ThisPtr: rejects
/// invalid parent declarations and classes with virtual bases (which the
/// interpreter does not support).
static bool checkConstructor(InterpState &S, CodePtr OpPC, const Function *Func,
                             const Pointer &ThisPtr) {
  assert(Func->isConstructor());

  if (Func->getParentDecl()->isInvalidDecl())
    return false;

  const Descriptor *D = ThisPtr.getFieldDesc();
  // FIXME: I think this case is not 100% correct. E.g. a pointer into a
  // subobject of a composite array.
  if (!D->ElemRecord)
    return true;

  if (D->ElemRecord->getNumVirtualBases() == 0)
    return true;

  S.FFDiag(Loc: S.Current->getLocation(PC: OpPC), DiagId: diag::note_constexpr_virtual_base)
      << Func->getParentDecl();
  return false;
}
1398
1399bool CheckDestructor(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
1400 if (!CheckLive(S, OpPC, Ptr, AK: AK_Destroy))
1401 return false;
1402 if (!CheckTemporary(S, OpPC, Ptr, AK: AK_Destroy))
1403 return false;
1404 if (!CheckRange(S, OpPC, Ptr, AK: AK_Destroy))
1405 return false;
1406
1407 // Can't call a dtor on a global variable.
1408 if (Ptr.block()->isStatic()) {
1409 const SourceInfo &E = S.Current->getSource(PC: OpPC);
1410 S.FFDiag(SI: E, DiagId: diag::note_constexpr_modify_global);
1411 return false;
1412 }
1413 return CheckActive(S, OpPC, Ptr, AK: AK_Destroy);
1414}
1415
1416static void compileFunction(InterpState &S, const Function *Func) {
1417 Compiler<ByteCodeEmitter>(S.getContext(), S.P)
1418 .compileFunc(FuncDecl: Func->getDecl()->getMostRecentDecl(),
1419 Func: const_cast<Function *>(Func));
1420}
1421
/// Call a function with a variable argument list. Like Call(), but with a
/// reduced set of 'this'-pointer checks. The interp frame takes ownership of
/// itself once interpretation succeeds (hence the release()).
bool CallVar(InterpState &S, CodePtr OpPC, const Function *Func,
             uint32_t VarArgSize) {
  if (Func->hasThisPointer()) {
    size_t ArgSize = Func->getArgSize() + VarArgSize;
    // 'this' sits below the args (and below the RVO pointer if present).
    size_t ThisOffset = ArgSize - (Func->hasRVO() ? primSize(Type: PT_Ptr) : 0);
    const Pointer &ThisPtr = S.Stk.peek<Pointer>(Offset: ThisOffset);

    // If the current function is a lambda static invoker and
    // the function we're about to call is a lambda call operator,
    // skip the CheckInvoke, since the ThisPtr is a null pointer
    // anyway.
    if (!(S.Current->getFunction() &&
          S.Current->getFunction()->isLambdaStaticInvoker() &&
          Func->isLambdaCallOperator())) {
      if (!CheckInvoke(S, OpPC, Ptr: ThisPtr))
        return false;
    }

    // Member calls can't be checked as potential constant expressions here.
    if (S.checkingPotentialConstantExpression())
      return false;
  }

  // Bytecode is compiled lazily; make sure it exists before we push a frame.
  if (!Func->isFullyCompiled())
    compileFunction(S, Func);

  if (!CheckCallable(S, OpPC, F: Func))
    return false;

  if (!CheckCallDepth(S, OpPC))
    return false;

  auto NewFrame = std::make_unique<InterpFrame>(args&: S, args&: Func, args&: OpPC, args&: VarArgSize);
  InterpFrame *FrameBefore = S.Current;
  S.Current = NewFrame.get();

  // Note that we cannot assert(CallResult.hasValue()) here since
  // Ret() above only sets the APValue if the current frame doesn't
  // have a caller set.
  if (Interpret(S)) {
    NewFrame.release(); // Frame was delete'd already.
    assert(S.Current == FrameBefore);
    return true;
  }

  // Interpreting the function failed somehow. Reset to
  // previous state.
  S.Current = FrameBefore;
  return false;
}
/// Perform a non-virtual function call: validate the (optional) 'this'
/// pointer, lazily compile the callee, push a new interp frame, run the
/// interpreter, and restore the caller frame on failure. Constructor and
/// destructor calls additionally record the block being initialized.
bool Call(InterpState &S, CodePtr OpPC, const Function *Func,
          uint32_t VarArgSize) {
  assert(Func);
  // Failure path that also discards the already-pushed arguments.
  auto cleanup = [&]() -> bool {
    cleanupAfterFunctionCall(S, OpPC, Func);
    return false;
  };

  if (Func->hasThisPointer()) {
    size_t ArgSize = Func->getArgSize() + VarArgSize;
    // 'this' sits below the args (and below the RVO pointer if present).
    size_t ThisOffset = ArgSize - (Func->hasRVO() ? primSize(Type: PT_Ptr) : 0);

    const Pointer &ThisPtr = S.Stk.peek<Pointer>(Offset: ThisOffset);

    // C++23 [expr.const]p5.6
    // an invocation of a virtual function ([class.virtual]) for an object whose
    // dynamic type is constexpr-unknown;
    if (ThisPtr.isDummy() && Func->isVirtual())
      return false;

    // If the current function is a lambda static invoker and
    // the function we're about to call is a lambda call operator,
    // skip the CheckInvoke, since the ThisPtr is a null pointer
    // anyway.
    if (S.Current->getFunction() &&
        S.Current->getFunction()->isLambdaStaticInvoker() &&
        Func->isLambdaCallOperator()) {
      assert(ThisPtr.isZero());
    } else {
      if (!CheckInvoke(S, OpPC, Ptr: ThisPtr))
        return cleanup();
      // Ctors/dtors/copy-moves may legitimately run on inactive members.
      if (!Func->isConstructor() && !Func->isDestructor() &&
          !Func->isCopyOrMoveOperator() &&
          !CheckActive(S, OpPC, Ptr: ThisPtr, AK: AK_MemberCall))
        return false;
    }

    if (Func->isConstructor() && !checkConstructor(S, OpPC, Func, ThisPtr))
      return false;
    if (Func->isDestructor() && !CheckDestructor(S, OpPC, Ptr: ThisPtr))
      return false;

    // Track the object under (de)construction; popped again below.
    if (Func->isConstructor() || Func->isDestructor())
      S.InitializingBlocks.push_back(Elt: ThisPtr.block());
  }

  if (!Func->isFullyCompiled())
    compileFunction(S, Func);

  if (!CheckCallable(S, OpPC, F: Func))
    return cleanup();

  // FIXME: The isConstructor() check here is not always right. The current
  // constant evaluator is somewhat inconsistent in when it allows a function
  // call when checking for a constant expression.
  if (Func->hasThisPointer() && S.checkingPotentialConstantExpression() &&
      !Func->isConstructor())
    return cleanup();

  if (!CheckCallDepth(S, OpPC))
    return cleanup();

  auto NewFrame = std::make_unique<InterpFrame>(args&: S, args&: Func, args&: OpPC, args&: VarArgSize);
  InterpFrame *FrameBefore = S.Current;
  S.Current = NewFrame.get();

  // Immediate (consteval) callees run in a forced constant context.
  InterpStateCCOverride CCOverride(S, Func->isImmediate());
  // Note that we cannot assert(CallResult.hasValue()) here since
  // Ret() above only sets the APValue if the current frame doesn't
  // have a caller set.
  bool Success = Interpret(S);
  // Remove initializing block again.
  if (Func->isConstructor() || Func->isDestructor())
    S.InitializingBlocks.pop_back();

  if (!Success) {
    // Interpreting the function failed somehow. Reset to
    // previous state.
    S.Current = FrameBefore;
    return false;
  }

  NewFrame.release(); // Frame was delete'd already.
  assert(S.Current == FrameBefore);
  return true;
}
1557
/// Perform a virtual function call: determine the dynamic type of the 'this'
/// object, resolve the overriding function, adjust 'this' if needed, call
/// via Call(), and finally adjust covariant pointer/reference return values
/// back to the statically expected type.
bool CallVirt(InterpState &S, CodePtr OpPC, const Function *Func,
              uint32_t VarArgSize) {
  assert(Func->hasThisPointer());
  assert(Func->isVirtual());
  size_t ArgSize = Func->getArgSize() + VarArgSize;
  size_t ThisOffset = ArgSize - (Func->hasRVO() ? primSize(Type: PT_Ptr) : 0);
  Pointer &ThisPtr = S.Stk.peek<Pointer>(Offset: ThisOffset);
  const FunctionDecl *Callee = Func->getDecl();

  if (!Func->isFullyCompiled())
    compileFunction(S, Func);

  // C++2a [class.abstract]p6:
  //   the effect of making a virtual call to a pure virtual function [...] is
  //   undefined
  if (Callee->isPureVirtual()) {
    S.FFDiag(SI: S.Current->getSource(PC: OpPC), DiagId: diag::note_constexpr_pure_virtual_call,
             ExtraNotes: 1)
        << Callee;
    S.Note(Loc: Callee->getLocation(), DiagId: diag::note_declared_at);
    return false;
  }

  // The dynamic type is the type of the most-derived object 'this' points
  // into, found by stripping all base-class adjustments.
  const CXXRecordDecl *DynamicDecl = nullptr;
  {
    Pointer TypePtr = ThisPtr;
    while (TypePtr.isBaseClass())
      TypePtr = TypePtr.getBase();

    QualType DynamicType = TypePtr.getType();
    if (DynamicType->isPointerType() || DynamicType->isReferenceType())
      DynamicDecl = DynamicType->getPointeeCXXRecordDecl();
    else
      DynamicDecl = DynamicType->getAsCXXRecordDecl();
  }
  assert(DynamicDecl);

  const auto *StaticDecl = cast<CXXRecordDecl>(Val: Func->getParentDecl());
  const auto *InitialFunction = cast<CXXMethodDecl>(Val: Callee);
  const CXXMethodDecl *Overrider = S.getContext().getOverridingFunction(
      DynamicDecl, StaticDecl, InitialFunction);

  if (Overrider != InitialFunction) {
    // DR1872: An instantiated virtual constexpr function can't be called in a
    // constant expression (prior to C++20). We can still constant-fold such a
    // call.
    if (!S.getLangOpts().CPlusPlus20 && Overrider->isVirtual()) {
      const Expr *E = S.Current->getExpr(PC: OpPC);
      S.CCEDiag(E, DiagId: diag::note_constexpr_virtual_call) << E->getSourceRange();
    }

    Func = S.getContext().getOrCreateFunction(FuncDecl: Overrider);

    const CXXRecordDecl *ThisFieldDecl =
        ThisPtr.getFieldDesc()->getType()->getAsCXXRecordDecl();
    if (Func->getParentDecl()->isDerivedFrom(Base: ThisFieldDecl)) {
      // If the function we call is further DOWN the hierarchy than the
      // FieldDesc of our pointer, just go up the hierarchy of this field
      // the furthest we can go.
      while (ThisPtr.isBaseClass())
        ThisPtr = ThisPtr.getBase();
    }
  }

  if (!Call(S, OpPC, Func, VarArgSize))
    return false;

  // Covariant return types. The return type of Overrider is a pointer
  // or reference to a class type.
  if (Overrider != InitialFunction &&
      Overrider->getReturnType()->isPointerOrReferenceType() &&
      InitialFunction->getReturnType()->isPointerOrReferenceType()) {
    QualType OverriderPointeeType =
        Overrider->getReturnType()->getPointeeType();
    QualType InitialPointeeType =
        InitialFunction->getReturnType()->getPointeeType();
    // We've called Overrider above, but calling code expects us to return what
    // InitialFunction returned. According to the rules for covariant return
    // types, what InitialFunction returns needs to be a base class of what
    // Overrider returns. So, we need to do an upcast here.
    unsigned Offset = S.getContext().collectBaseOffset(
        BaseDecl: InitialPointeeType->getAsRecordDecl(),
        DerivedDecl: OverriderPointeeType->getAsRecordDecl());
    return GetPtrBasePop(S, OpPC, Off: Offset, /*IsNullOK=*/NullOK: true);
  }

  return true;
}
1646
1647bool CallBI(InterpState &S, CodePtr OpPC, const CallExpr *CE,
1648 uint32_t BuiltinID) {
1649 // A little arbitrary, but the current interpreter allows evaluation
1650 // of builtin functions in this mode, with some exceptions.
1651 if (BuiltinID == Builtin::BI__builtin_operator_new &&
1652 S.checkingPotentialConstantExpression())
1653 return false;
1654
1655 return InterpretBuiltin(S, OpPC, Call: CE, BuiltinID);
1656}
1657
/// Call through a function pointer popped from the stack: validate the
/// pointer (non-null, actually a function, not a block pointer), verify the
/// return type matches the call expression, check nonnull arguments, compute
/// the vararg size, then dispatch to Call()/CallVirt().
bool CallPtr(InterpState &S, CodePtr OpPC, uint32_t ArgSize,
             const CallExpr *CE) {
  const Pointer &Ptr = S.Stk.pop<Pointer>();

  if (Ptr.isZero()) {
    const auto *E = cast<CallExpr>(Val: S.Current->getExpr(PC: OpPC));
    S.FFDiag(E, DiagId: diag::note_constexpr_null_callee)
        << const_cast<Expr *>(E->getCallee()) << E->getSourceRange();
    return false;
  }

  if (!Ptr.isFunctionPointer())
    return Invalid(S, OpPC);

  const FunctionPointer &FuncPtr = Ptr.asFunctionPointer();
  const Function *F = FuncPtr.getFunction();
  assert(F);
  // Don't allow calling block pointers.
  if (!F->getDecl())
    return Invalid(S, OpPC);

  // This happens when the call expression has been cast to
  // something else, but we don't support that.
  if (S.Ctx.classify(T: F->getDecl()->getReturnType()) !=
      S.Ctx.classify(T: CE->getCallReturnType(Ctx: S.getASTContext())))
    return false;

  // Check argument nullability state.
  if (F->hasNonNullAttr()) {
    if (!CheckNonNullArgs(S, OpPC, F, CE, ArgSize))
      return false;
  }

  // Whatever exceeds the declared parameter size was passed as varargs.
  assert(ArgSize >= F->getWrittenArgSize());
  uint32_t VarArgSize = ArgSize - F->getWrittenArgSize();

  // We need to do this explicitly here since we don't have the necessary
  // information to do it automatically.
  if (F->isThisPointerExplicit())
    VarArgSize -= align(Size: primSize(Type: PT_Ptr));

  if (F->isVirtual())
    return CallVirt(S, OpPC, Func: F, VarArgSize);

  return Call(S, OpPC, Func: F, VarArgSize);
}
1704
/// Recursively mark the lifetime of \p Ptr and all of its subobjects as
/// started (records descend into fields, composite arrays into elements).
static void startLifetimeRecurse(const Pointer &Ptr) {
  if (const Record *R = Ptr.getRecord()) {
    Ptr.startLifetime();
    for (const Record::Field &Fi : R->fields())
      startLifetimeRecurse(Ptr: Ptr.atField(Off: Fi.Offset));
    return;
  }

  if (const Descriptor *FieldDesc = Ptr.getFieldDesc();
      FieldDesc->isCompositeArray()) {
    // Array roots themselves have no lifetime to start; only the elements do.
    assert(Ptr.getLifetime() == Lifetime::Started);
    for (unsigned I = 0; I != FieldDesc->getNumElems(); ++I)
      startLifetimeRecurse(Ptr: Ptr.atIndex(Idx: I).narrow());
    return;
  }

  // Primitive (or primitive-array) leaf.
  Ptr.startLifetime();
}
1723
1724bool StartLifetime(InterpState &S, CodePtr OpPC) {
1725 const auto &Ptr = S.Stk.peek<Pointer>();
1726 if (!CheckDummy(S, OpPC, Ptr, AK: AK_Destroy))
1727 return false;
1728 startLifetimeRecurse(Ptr: Ptr.narrow());
1729 return true;
1730}
1731
// FIXME: It might be better to do the recursing as part of the generated
// code for a destructor?
/// Recursively mark the lifetime of \p Ptr and all of its subobjects as
/// ended. Mirror image of startLifetimeRecurse().
static void endLifetimeRecurse(const Pointer &Ptr) {
  if (const Record *R = Ptr.getRecord()) {
    Ptr.endLifetime();
    for (const Record::Field &Fi : R->fields())
      endLifetimeRecurse(Ptr: Ptr.atField(Off: Fi.Offset));
    return;
  }

  if (const Descriptor *FieldDesc = Ptr.getFieldDesc();
      FieldDesc->isCompositeArray()) {
    // No endLifetime() for array roots.
    assert(Ptr.getLifetime() == Lifetime::Started);
    for (unsigned I = 0; I != FieldDesc->getNumElems(); ++I)
      endLifetimeRecurse(Ptr: Ptr.atIndex(Idx: I).narrow());
    return;
  }

  // Primitive (or primitive-array) leaf.
  Ptr.endLifetime();
}
1753
1754/// Ends the lifetime of the peek'd pointer.
1755bool EndLifetime(InterpState &S, CodePtr OpPC) {
1756 const auto &Ptr = S.Stk.peek<Pointer>();
1757 if (!CheckDummy(S, OpPC, Ptr, AK: AK_Destroy))
1758 return false;
1759 endLifetimeRecurse(Ptr: Ptr.narrow());
1760 return true;
1761}
1762
1763/// Ends the lifetime of the pop'd pointer.
1764bool EndLifetimePop(InterpState &S, CodePtr OpPC) {
1765 const auto &Ptr = S.Stk.pop<Pointer>();
1766 if (!CheckDummy(S, OpPC, Ptr, AK: AK_Destroy))
1767 return false;
1768 endLifetimeRecurse(Ptr: Ptr.narrow());
1769 return true;
1770}
1771
/// Validate a placement-new style construction into the storage the peek'd
/// pointer designates: the storage must be writable/constructible, and its
/// type and (array) size must be compatible with the allocated type of the
/// new-expression \p E. \p ArraySize, if present, is the evaluated array
/// bound of the new-expression.
bool CheckNewTypeMismatch(InterpState &S, CodePtr OpPC, const Expr *E,
                          std::optional<uint64_t> ArraySize) {
  const Pointer &Ptr = S.Stk.peek<Pointer>();

  // Similar to CheckStore(), but with the additional CheckTemporary() call and
  // the AccessKinds are different.
  if (!CheckTemporary(S, OpPC, Ptr, AK: AK_Construct))
    return false;
  if (!CheckLive(S, OpPC, Ptr, AK: AK_Construct))
    return false;
  if (!CheckDummy(S, OpPC, Ptr, AK: AK_Construct))
    return false;

  // CheckLifetime for this and all base pointers.
  for (Pointer P = Ptr;;) {
    if (!CheckLifetime(S, OpPC, Ptr: P, AK: AK_Construct))
      return false;

    if (P.isRoot())
      break;
    P = P.getBase();
  }
  if (!CheckExtern(S, OpPC, Ptr))
    return false;
  if (!CheckRange(S, OpPC, Ptr, AK: AK_Construct))
    return false;
  if (!CheckGlobal(S, OpPC, Ptr))
    return false;
  if (!CheckConst(S, OpPC, Ptr))
    return false;
  // Outside a constant context, constexpr-unknown storage is off-limits.
  if (!S.inConstantContext() && isConstexprUnknown(P: Ptr))
    return false;

  if (!InvalidNewDeleteExpr(S, OpPC, E))
    return false;

  const auto *NewExpr = cast<CXXNewExpr>(Val: E);
  QualType StorageType = Ptr.getFieldDesc()->getDataType(Ctx: S.getASTContext());
  const ASTContext &ASTCtx = S.getASTContext();
  // The effective allocated type: wrap in a constant array if an array bound
  // was evaluated.
  QualType AllocType;
  if (ArraySize) {
    AllocType = ASTCtx.getConstantArrayType(
        EltTy: NewExpr->getAllocatedType(),
        ArySize: APInt(64, static_cast<uint64_t>(*ArraySize), false), SizeExpr: nullptr,
        ASM: ArraySizeModifier::Normal, IndexTypeQuals: 0);
  } else {
    AllocType = NewExpr->getAllocatedType();
  }

  // Compare element counts (1 for non-array types) and element types.
  unsigned StorageSize = 1;
  unsigned AllocSize = 1;
  if (const auto *CAT = dyn_cast<ConstantArrayType>(Val&: AllocType))
    AllocSize = CAT->getZExtSize();
  if (const auto *CAT = dyn_cast<ConstantArrayType>(Val&: StorageType))
    StorageSize = CAT->getZExtSize();

  if (AllocSize > StorageSize ||
      !ASTCtx.hasSimilarType(T1: ASTCtx.getBaseElementType(QT: AllocType),
                             T2: ASTCtx.getBaseElementType(QT: StorageType))) {
    S.FFDiag(Loc: S.Current->getLocation(PC: OpPC),
             DiagId: diag::note_constexpr_placement_new_wrong_type)
        << StorageType << AllocType;
    return false;
  }

  // Can't activate fields in a union, unless the direct base is the union.
  if (Ptr.inUnion() && !Ptr.isActive() && !Ptr.getBase().getRecord()->isUnion())
    return CheckActive(S, OpPC, Ptr, AK: AK_Construct);

  return true;
}
1843
/// Diagnose a new/delete expression the interpreter cannot evaluate.
/// Returns true only for the placement-new cases that are actually permitted
/// (C++26, or inside an std:: function); otherwise diagnoses and returns
/// false.
bool InvalidNewDeleteExpr(InterpState &S, CodePtr OpPC, const Expr *E) {
  assert(E);

  if (const auto *NewExpr = dyn_cast<CXXNewExpr>(Val: E)) {
    const FunctionDecl *OperatorNew = NewExpr->getOperatorNew();

    if (NewExpr->getNumPlacementArgs() > 0) {
      // This is allowed pre-C++26, but only an std function.
      if (S.getLangOpts().CPlusPlus26 || S.Current->isStdFunction())
        return true;
      // Diagnose and fall through to the final 'return false'.
      S.FFDiag(SI: S.Current->getSource(PC: OpPC), DiagId: diag::note_constexpr_new_placement)
          << /*C++26 feature*/ 1 << E->getSourceRange();
    } else if (
        !OperatorNew
            ->isUsableAsGlobalAllocationFunctionInConstantEvaluation()) {
      S.FFDiag(SI: S.Current->getSource(PC: OpPC),
               DiagId: diag::note_constexpr_new_non_replaceable)
          << isa<CXXMethodDecl>(Val: OperatorNew) << OperatorNew;
      return false;
    } else if (!S.getLangOpts().CPlusPlus26 &&
               NewExpr->getNumPlacementArgs() == 1 &&
               !OperatorNew->isReservedGlobalPlacementOperator()) {
      // NOTE(review): this branch looks unreachable — getNumPlacementArgs()
      // == 1 implies > 0, which the first branch already consumed; and the
      // inner CPlusPlus26 check duplicates the condition above, leaving the
      // trailing 'return true' dead. Confirm the intended condition.
      if (!S.getLangOpts().CPlusPlus26) {
        S.FFDiag(SI: S.Current->getSource(PC: OpPC), DiagId: diag::note_constexpr_new_placement)
            << /*Unsupported*/ 0 << E->getSourceRange();
        return false;
      }
      return true;
    }
  } else {
    // Delete-expressions: only a replaceable global operator delete works.
    const auto *DeleteExpr = cast<CXXDeleteExpr>(Val: E);
    const FunctionDecl *OperatorDelete = DeleteExpr->getOperatorDelete();
    if (!OperatorDelete
             ->isUsableAsGlobalAllocationFunctionInConstantEvaluation()) {
      S.FFDiag(SI: S.Current->getSource(PC: OpPC),
               DiagId: diag::note_constexpr_new_non_replaceable)
          << isa<CXXMethodDecl>(Val: OperatorDelete) << OperatorDelete;
      return false;
    }
  }

  return false;
}
1887
1888bool handleFixedPointOverflow(InterpState &S, CodePtr OpPC,
1889 const FixedPoint &FP) {
1890 const Expr *E = S.Current->getExpr(PC: OpPC);
1891 if (S.checkingForUndefinedBehavior()) {
1892 S.getASTContext().getDiagnostics().Report(
1893 Loc: E->getExprLoc(), DiagID: diag::warn_fixedpoint_constant_overflow)
1894 << FP.toDiagnosticString(Ctx: S.getASTContext()) << E->getType();
1895 }
1896 S.CCEDiag(E, DiagId: diag::note_constexpr_overflow)
1897 << FP.toDiagnosticString(Ctx: S.getASTContext()) << E->getType();
1898 return S.noteUndefinedBehavior();
1899}
1900
1901bool InvalidShuffleVectorIndex(InterpState &S, CodePtr OpPC, uint32_t Index) {
1902 const SourceInfo &Loc = S.Current->getSource(PC: OpPC);
1903 S.FFDiag(SI: Loc,
1904 DiagId: diag::err_shufflevector_minus_one_is_undefined_behavior_constexpr)
1905 << Index;
1906 return false;
1907}
1908
1909bool CheckPointerToIntegralCast(InterpState &S, CodePtr OpPC,
1910 const Pointer &Ptr, unsigned BitWidth) {
1911 if (Ptr.isDummy())
1912 return false;
1913 if (Ptr.isFunctionPointer())
1914 return true;
1915
1916 const SourceInfo &E = S.Current->getSource(PC: OpPC);
1917 S.CCEDiag(SI: E, DiagId: diag::note_constexpr_invalid_cast)
1918 << 2 << S.getLangOpts().CPlusPlus << S.Current->getRange(PC: OpPC);
1919
1920 if (Ptr.isBlockPointer() && !Ptr.isZero()) {
1921 // Only allow based lvalue casts if they are lossless.
1922 if (S.getASTContext().getTargetInfo().getPointerWidth(AddrSpace: LangAS::Default) !=
1923 BitWidth)
1924 return Invalid(S, OpPC);
1925 }
1926 return true;
1927}
1928
1929bool CastPointerIntegralAP(InterpState &S, CodePtr OpPC, uint32_t BitWidth) {
1930 const Pointer &Ptr = S.Stk.pop<Pointer>();
1931
1932 if (!CheckPointerToIntegralCast(S, OpPC, Ptr, BitWidth))
1933 return false;
1934
1935 auto Result = S.allocAP<IntegralAP<false>>(BitWidth);
1936 Result.copy(V: APInt(BitWidth, Ptr.getIntegerRepresentation()));
1937
1938 S.Stk.push<IntegralAP<false>>(Args&: Result);
1939 return true;
1940}
1941
1942bool CastPointerIntegralAPS(InterpState &S, CodePtr OpPC, uint32_t BitWidth) {
1943 const Pointer &Ptr = S.Stk.pop<Pointer>();
1944
1945 if (!CheckPointerToIntegralCast(S, OpPC, Ptr, BitWidth))
1946 return false;
1947
1948 auto Result = S.allocAP<IntegralAP<true>>(BitWidth);
1949 Result.copy(V: APInt(BitWidth, Ptr.getIntegerRepresentation()));
1950
1951 S.Stk.push<IntegralAP<true>>(Args&: Result);
1952 return true;
1953}
1954
1955bool CheckBitCast(InterpState &S, CodePtr OpPC, bool HasIndeterminateBits,
1956 bool TargetIsUCharOrByte) {
1957 // This is always fine.
1958 if (!HasIndeterminateBits)
1959 return true;
1960
1961 // Indeterminate bits can only be bitcast to unsigned char or std::byte.
1962 if (TargetIsUCharOrByte)
1963 return true;
1964
1965 const Expr *E = S.Current->getExpr(PC: OpPC);
1966 QualType ExprType = E->getType();
1967 S.FFDiag(E, DiagId: diag::note_constexpr_bit_cast_indet_dest)
1968 << ExprType << S.getLangOpts().CharIsSigned << E->getSourceRange();
1969 return false;
1970}
1971
/// Pushes a type-info pointer for a statically-known type: \p TypePtr is the
/// referenced type and \p TypeInfoType the type of the type-info object.
bool GetTypeid(InterpState &S, CodePtr OpPC, const Type *TypePtr,
               const Type *TypeInfoType) {
  S.Stk.push<Pointer>(Args&: TypePtr, Args&: TypeInfoType);
  return true;
}
1977
/// Pops the operand pointer and pushes a type-info pointer for its
/// most-derived type. Fails for non-block pointers.
bool GetTypeidPtr(InterpState &S, CodePtr OpPC, const Type *TypeInfoType) {
  const auto &P = S.Stk.pop<Pointer>();

  if (!P.isBlockPointer())
    return false;

  // Pick the most-derived type.
  const Type *T = P.getDeclPtr().getType().getTypePtr();
  // ... unless we're currently constructing this object.
  // FIXME: We have a similar check to this in more places.
  if (S.Current->getFunction()) {
    // Walk up the call stack: if an active constructor/destructor frame is
    // operating on the same block, use that frame's class as the type
    // instead of the declared (most-derived) one.
    for (const InterpFrame *Frame = S.Current; Frame; Frame = Frame->Caller) {
      if (const Function *Func = Frame->getFunction();
          Func && (Func->isConstructor() || Func->isDestructor()) &&
          P.block() == Frame->getThis().block()) {
        T = Func->getParentDecl()->getTypeForDecl();
        break;
      }
    }
  }

  // Canonicalize and strip qualifiers before pushing.
  S.Stk.push<Pointer>(Args: T->getCanonicalTypeUnqualified().getTypePtr(),
                      Args&: TypeInfoType);
  return true;
}
2003
2004bool DiagTypeid(InterpState &S, CodePtr OpPC) {
2005 const auto *E = cast<CXXTypeidExpr>(Val: S.Current->getExpr(PC: OpPC));
2006 S.CCEDiag(E, DiagId: diag::note_constexpr_typeid_polymorphic)
2007 << E->getExprOperand()->getType()
2008 << E->getExprOperand()->getSourceRange();
2009 return false;
2010}
2011
/// Returns true if the two string-literal pointers could refer to
/// overlapping storage, taking their element offsets into account.
bool arePotentiallyOverlappingStringLiterals(const Pointer &LHS,
                                             const Pointer &RHS) {
  unsigned LHSOffset = LHS.getIndex();
  unsigned RHSOffset = RHS.getIndex();
  // Lengths in bytes, excluding the trailing null-terminator element.
  unsigned LHSLength = (LHS.getNumElems() - 1) * LHS.elemSize();
  unsigned RHSLength = (RHS.getNumElems() - 1) * RHS.elemSize();

  // Views over the raw character data of both literals.
  StringRef LHSStr((const char *)LHS.atIndex(Idx: 0).getRawAddress(), LHSLength);
  StringRef RHSStr((const char *)RHS.atIndex(Idx: 0).getRawAddress(), RHSLength);
  // Align the two views at the same logical position by dropping the leading
  // part of whichever string the pointer is further into. If the offset
  // difference exceeds the string's length, they cannot overlap.
  int32_t IndexDiff = RHSOffset - LHSOffset;
  if (IndexDiff < 0) {
    if (static_cast<int32_t>(LHSLength) < -IndexDiff)
      return false;
    LHSStr = LHSStr.drop_front(N: -IndexDiff);
  } else {
    if (static_cast<int32_t>(RHSLength) < IndexDiff)
      return false;
    RHSStr = RHSStr.drop_front(N: IndexDiff);
  }

  // NOTE(review): shorter/longer classification uses the original (pre-drop)
  // byte lengths, not the aligned view sizes — presumably intentional, but
  // worth confirming against the overlap rule this implements.
  unsigned ShorterCharWidth;
  StringRef Shorter;
  StringRef Longer;
  if (LHSLength < RHSLength) {
    ShorterCharWidth = LHS.elemSize();
    Shorter = LHSStr;
    Longer = RHSStr;
  } else {
    ShorterCharWidth = RHS.elemSize();
    Shorter = RHSStr;
    Longer = LHSStr;
  }

  // The null terminator isn't included in the string data, so check for it
  // manually. If the longer string doesn't have a null terminator where the
  // shorter string ends, they aren't potentially overlapping.
  for (unsigned NullByte : llvm::seq(Size: ShorterCharWidth)) {
    if (Shorter.size() + NullByte >= Longer.size())
      break;
    if (Longer[Shorter.size() + NullByte])
      return false;
  }
  // Overlap is possible iff the shorter string is a prefix of the longer
  // one at the aligned position.
  return Shorter == Longer.take_front(N: Shorter.size());
}
2056
2057static void copyPrimitiveMemory(InterpState &S, const Pointer &Ptr,
2058 PrimType T) {
2059
2060 if (T == PT_IntAPS) {
2061 auto &Val = Ptr.deref<IntegralAP<true>>();
2062 if (!Val.singleWord()) {
2063 uint64_t *NewMemory = new (S.P) uint64_t[Val.numWords()];
2064 Val.take(NewMemory);
2065 }
2066 } else if (T == PT_IntAP) {
2067 auto &Val = Ptr.deref<IntegralAP<false>>();
2068 if (!Val.singleWord()) {
2069 uint64_t *NewMemory = new (S.P) uint64_t[Val.numWords()];
2070 Val.take(NewMemory);
2071 }
2072 } else if (T == PT_Float) {
2073 auto &Val = Ptr.deref<Floating>();
2074 if (!Val.singleWord()) {
2075 uint64_t *NewMemory = new (S.P) uint64_t[Val.numWords()];
2076 Val.take(NewMemory);
2077 }
2078 }
2079}
2080
2081template <typename T>
2082static void copyPrimitiveMemory(InterpState &S, const Pointer &Ptr) {
2083 assert(needsAlloc<T>());
2084 auto &Val = Ptr.deref<T>();
2085 if (!Val.singleWord()) {
2086 uint64_t *NewMemory = new (S.P) uint64_t[Val.numWords()];
2087 Val.take(NewMemory);
2088 }
2089}
2090
2091static void finishGlobalRecurse(InterpState &S, const Pointer &Ptr) {
2092 if (const Record *R = Ptr.getRecord()) {
2093 for (const Record::Field &Fi : R->fields()) {
2094 if (Fi.Desc->isPrimitive()) {
2095 TYPE_SWITCH_ALLOC(Fi.Desc->getPrimType(), {
2096 copyPrimitiveMemory<T>(S, Ptr.atField(Fi.Offset));
2097 });
2098 copyPrimitiveMemory(S, Ptr: Ptr.atField(Off: Fi.Offset), T: Fi.Desc->getPrimType());
2099 } else
2100 finishGlobalRecurse(S, Ptr: Ptr.atField(Off: Fi.Offset));
2101 }
2102 return;
2103 }
2104
2105 if (const Descriptor *D = Ptr.getFieldDesc(); D && D->isArray()) {
2106 unsigned NumElems = D->getNumElems();
2107 if (NumElems == 0)
2108 return;
2109
2110 if (D->isPrimitiveArray()) {
2111 PrimType PT = D->getPrimType();
2112 if (!needsAlloc(T: PT))
2113 return;
2114 assert(NumElems >= 1);
2115 const Pointer EP = Ptr.atIndex(Idx: 0);
2116 bool AllSingleWord = true;
2117 TYPE_SWITCH_ALLOC(PT, {
2118 if (!EP.deref<T>().singleWord()) {
2119 copyPrimitiveMemory<T>(S, EP);
2120 AllSingleWord = false;
2121 }
2122 });
2123 if (AllSingleWord)
2124 return;
2125 for (unsigned I = 1; I != D->getNumElems(); ++I) {
2126 const Pointer EP = Ptr.atIndex(Idx: I);
2127 copyPrimitiveMemory(S, Ptr: EP, T: PT);
2128 }
2129 } else {
2130 assert(D->isCompositeArray());
2131 for (unsigned I = 0; I != D->getNumElems(); ++I) {
2132 const Pointer EP = Ptr.atIndex(Idx: I).narrow();
2133 finishGlobalRecurse(S, Ptr: EP);
2134 }
2135 }
2136 }
2137}
2138
2139bool FinishInitGlobal(InterpState &S, CodePtr OpPC) {
2140 const Pointer &Ptr = S.Stk.pop<Pointer>();
2141
2142 finishGlobalRecurse(S, Ptr);
2143 if (Ptr.canBeInitialized()) {
2144 Ptr.initialize();
2145 Ptr.activate();
2146 }
2147
2148 return true;
2149}
2150
2151// https://github.com/llvm/llvm-project/issues/102513
2152#if defined(_MSC_VER) && !defined(__clang__) && !defined(NDEBUG)
2153#pragma optimize("", off)
2154#endif
/// Main interpreter loop: executes the current frame's bytecode (and any
/// frames pushed by calls) until a generated op handler returns out of this
/// function. Returns whether evaluation succeeded.
bool Interpret(InterpState &S) {
  // The current stack frame when we started Interpret().
  // This is being used by the ops to determine whether
  // to return from this function and thus terminate
  // interpretation.
  const InterpFrame *StartFrame = S.Current;
  assert(!S.Current->isRoot());
  CodePtr PC = S.Current->getPC();

  // Empty program.
  if (!PC)
    return true;

  for (;;) {
    // Read the opcode; OpPC then points at the op's immediate operands.
    auto Op = PC.read<Opcode>();
    CodePtr OpPC = PC;

    switch (Op) {
      // One generated case per opcode; each dispatches to its handler and
      // advances PC past the operands.
#define GET_INTERP
#include "Opcodes.inc"
#undef GET_INTERP
    }
  }
}
2179// https://github.com/llvm/llvm-project/issues/102513
2180#if defined(_MSC_VER) && !defined(__clang__) && !defined(NDEBUG)
2181#pragma optimize("", on)
2182#endif
2183
2184} // namespace interp
2185} // namespace clang
2186

// source code of clang/lib/AST/ByteCode/Interp.cpp