//===-- Transfer.cpp --------------------------------------------*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file defines transfer functions that evaluate program statements and
// update an environment accordingly.
//
//===----------------------------------------------------------------------===//

#include "clang/Analysis/FlowSensitive/Transfer.h"
#include "clang/AST/Decl.h"
#include "clang/AST/DeclBase.h"
#include "clang/AST/DeclCXX.h"
#include "clang/AST/Expr.h"
#include "clang/AST/ExprCXX.h"
#include "clang/AST/OperationKinds.h"
#include "clang/AST/Stmt.h"
#include "clang/AST/StmtVisitor.h"
#include "clang/Analysis/FlowSensitive/ASTOps.h"
#include "clang/Analysis/FlowSensitive/AdornedCFG.h"
#include "clang/Analysis/FlowSensitive/DataflowAnalysisContext.h"
#include "clang/Analysis/FlowSensitive/DataflowEnvironment.h"
#include "clang/Analysis/FlowSensitive/NoopAnalysis.h"
#include "clang/Analysis/FlowSensitive/RecordOps.h"
#include "clang/Analysis/FlowSensitive/Value.h"
#include "clang/Basic/Builtins.h"
#include "clang/Basic/OperatorKinds.h"
#include "llvm/Support/Casting.h"
#include <assert.h>
#include <cassert>

#define DEBUG_TYPE "dataflow"

namespace clang {
namespace dataflow {

const Environment *StmtToEnvMap::getEnvironment(const Stmt &S) const {
  const CFGBlock *Block = ACFG.blockForStmt(S);
  if (Block == nullptr) {
    assert(false);
    return nullptr;
  }
  if (!ACFG.isBlockReachable(*Block))
    return nullptr;
  if (Block->getBlockID() == CurBlockID)
    return &CurState.Env;
  const auto &State = BlockToState[Block->getBlockID()];
  if (!State)
    return nullptr;
  return &State->Env;
}

static BoolValue &evaluateBooleanEquality(const Expr &LHS, const Expr &RHS,
                                          Environment &Env) {
  Value *LHSValue = Env.getValue(LHS);
  Value *RHSValue = Env.getValue(RHS);

  // When two unsupported values are compared, both are nullptr. Only supported
  // values should compare equal.
  if (LHSValue == RHSValue && LHSValue)
    return Env.getBoolLiteralValue(true);

  // Special case: `NullPtrLiteralExpr == itself`. When both sides are untyped
  // nullptr, they do not have an assigned Value, but they compare equal.
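  // For example, in `nullptr == nullptr`, neither operand has a `Value`, yet
  // the comparison is trivially true.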
  if (LHS.getType()->isNullPtrType() && RHS.getType()->isNullPtrType())
    return Env.getBoolLiteralValue(true);

  if (auto *LHSBool = dyn_cast_or_null<BoolValue>(LHSValue))
    if (auto *RHSBool = dyn_cast_or_null<BoolValue>(RHSValue))
      return Env.makeIff(*LHSBool, *RHSBool);

  if (auto *LHSPtr = dyn_cast_or_null<PointerValue>(LHSValue))
    if (auto *RHSPtr = dyn_cast_or_null<PointerValue>(RHSValue))
      // If the storage locations are the same, the pointers definitely compare
      // the same. If the storage locations are different, they may still
      // alias, so we fall through to the case below that returns an atom.
      if (&LHSPtr->getPointeeLoc() == &RHSPtr->getPointeeLoc())
        return Env.getBoolLiteralValue(true);

  return Env.makeAtomicBoolValue();
}

static BoolValue &unpackValue(BoolValue &V, Environment &Env) {
  if (auto *Top = llvm::dyn_cast<TopBoolValue>(&V)) {
    auto &A = Env.getDataflowAnalysisContext().arena();
    return A.makeBoolValue(A.makeAtomRef(Top->getAtom()));
  }
  return V;
}

// Unpacks the value (if any) associated with `E` and updates `E` to the new
// value, if any unpacking occurred. Also, does the lvalue-to-rvalue conversion,
// by skipping past the reference.
static Value *maybeUnpackLValueExpr(const Expr &E, Environment &Env) {
  auto *Loc = Env.getStorageLocation(E);
  if (Loc == nullptr)
    return nullptr;
  auto *Val = Env.getValue(*Loc);

  auto *B = dyn_cast_or_null<BoolValue>(Val);
  if (B == nullptr)
    return Val;

  auto &UnpackedVal = unpackValue(*B, Env);
  if (&UnpackedVal == Val)
    return Val;
  Env.setValue(*Loc, UnpackedVal);
  return &UnpackedVal;
}

static void propagateValue(const Expr &From, const Expr &To, Environment &Env) {
  if (From.getType()->isRecordType())
    return;
  if (auto *Val = Env.getValue(From))
    Env.setValue(To, *Val);
}

static void propagateStorageLocation(const Expr &From, const Expr &To,
                                     Environment &Env) {
  if (auto *Loc = Env.getStorageLocation(From))
    Env.setStorageLocation(To, *Loc);
}

// Propagates the value or storage location of `From` to `To` in cases where
// `From` may be either a glvalue or a prvalue. `To` must be a glvalue iff
// `From` is a glvalue.
static void propagateValueOrStorageLocation(const Expr &From, const Expr &To,
                                            Environment &Env) {
  assert(From.isGLValue() == To.isGLValue());
  if (From.isGLValue())
    propagateStorageLocation(From, To, Env);
  else
    propagateValue(From, To, Env);
}

namespace {

class TransferVisitor : public ConstStmtVisitor<TransferVisitor> {
public:
  TransferVisitor(const StmtToEnvMap &StmtToEnv, Environment &Env,
                  Environment::ValueModel &Model)
      : StmtToEnv(StmtToEnv), Env(Env), Model(Model) {}

  void VisitBinaryOperator(const BinaryOperator *S) {
    const Expr *LHS = S->getLHS();
    assert(LHS != nullptr);

    const Expr *RHS = S->getRHS();
    assert(RHS != nullptr);

    // Do compound assignments up-front, as there are so many of them and we
    // don't want to list all of them in the switch statement below.
    // To avoid generating unnecessary values, we don't create a new value but
    // instead leave it to the specific analysis to do this if desired.
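    // For example, for `X += 1` the whole expression is mapped to `X`'s
    // storage location, but no fresh value is created for it here.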
    if (S->isCompoundAssignmentOp())
      propagateStorageLocation(*S->getLHS(), *S, Env);

    switch (S->getOpcode()) {
    case BO_Assign: {
      auto *LHSLoc = Env.getStorageLocation(*LHS);
      if (LHSLoc == nullptr)
        break;

      auto *RHSVal = Env.getValue(*RHS);
      if (RHSVal == nullptr)
        break;

      // Assign a value to the storage location of the left-hand side.
      Env.setValue(*LHSLoc, *RHSVal);

      // Assign a storage location for the whole expression.
      Env.setStorageLocation(*S, *LHSLoc);
      break;
    }
    case BO_LAnd:
    case BO_LOr: {
      BoolValue &LHSVal = getLogicOperatorSubExprValue(*LHS);
      BoolValue &RHSVal = getLogicOperatorSubExprValue(*RHS);

      if (S->getOpcode() == BO_LAnd)
        Env.setValue(*S, Env.makeAnd(LHSVal, RHSVal));
      else
        Env.setValue(*S, Env.makeOr(LHSVal, RHSVal));
      break;
    }
    case BO_NE:
    case BO_EQ: {
      auto &LHSEqRHSValue = evaluateBooleanEquality(*LHS, *RHS, Env);
      Env.setValue(*S, S->getOpcode() == BO_EQ ? LHSEqRHSValue
                                               : Env.makeNot(LHSEqRHSValue));
      break;
    }
    case BO_Comma: {
      propagateValueOrStorageLocation(*RHS, *S, Env);
      break;
    }
    default:
      break;
    }
  }

  void VisitDeclRefExpr(const DeclRefExpr *S) {
    const ValueDecl *VD = S->getDecl();
    assert(VD != nullptr);

    // Some `DeclRefExpr`s aren't glvalues, so we can't associate them with a
    // `StorageLocation`, and there's also no sensible `Value` that we can
    // assign to them. Examples:
    // - Non-static member variables
    // - Non-static member functions
    //   Note: Member operators are an exception to this, but apparently only
    //   if the `DeclRefExpr` is used within the callee of a
    //   `CXXOperatorCallExpr`. In other cases, for example when applying the
    //   address-of operator, the `DeclRefExpr` is a prvalue.
    if (!S->isGLValue())
      return;

    auto *DeclLoc = Env.getStorageLocation(*VD);
    if (DeclLoc == nullptr)
      return;

    Env.setStorageLocation(*S, *DeclLoc);
  }

  void VisitDeclStmt(const DeclStmt *S) {
    // Group decls are converted into single decls in the CFG so the cast below
    // is safe.
    const auto &D = *cast<VarDecl>(S->getSingleDecl());

    ProcessVarDecl(D);
  }

  void ProcessVarDecl(const VarDecl &D) {
    // Static local vars are already initialized in `Environment`.
    if (D.hasGlobalStorage())
      return;

    // If this is the holding variable for a `BindingDecl`, we may already
    // have a storage location set up -- so check. (See also explanation below
    // where we process the `BindingDecl`.)
    if (D.getType()->isReferenceType() && Env.getStorageLocation(D) != nullptr)
      return;

    assert(Env.getStorageLocation(D) == nullptr);

    Env.setStorageLocation(D, Env.createObject(D));

    // `DecompositionDecl` must be handled after we've interpreted the loc
    // itself, because the binding expression refers back to the
    // `DecompositionDecl` (even though it has no written name).
    if (const auto *Decomp = dyn_cast<DecompositionDecl>(&D)) {
      // If VarDecl is a DecompositionDecl, evaluate each of its bindings. This
      // needs to be evaluated after initializing the values in the storage for
      // VarDecl, as the bindings refer to them.
      // FIXME: Add support for ArraySubscriptExpr.
      // FIXME: Consider adding AST nodes used in BindingDecls to the CFG.
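      // For example, for `auto [X, Y] = SomeStruct;` each `BindingDecl` is
      // bound to a `MemberExpr` on the `DecompositionDecl`, whereas tuple-like
      // decompositions use hidden holding variables (handled below).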
      for (const auto *B : Decomp->bindings()) {
        if (auto *ME = dyn_cast_or_null<MemberExpr>(B->getBinding())) {
          auto *DE = dyn_cast_or_null<DeclRefExpr>(ME->getBase());
          if (DE == nullptr)
            continue;

          // ME and its base haven't been visited because they aren't included
          // in the statements of the CFG basic block.
          VisitDeclRefExpr(DE);
          VisitMemberExpr(ME);

          if (auto *Loc = Env.getStorageLocation(*ME))
            Env.setStorageLocation(*B, *Loc);
        } else if (auto *VD = B->getHoldingVar()) {
          // Holding vars are used to back the `BindingDecl`s of tuple-like
          // types. The holding var declarations appear after the
          // `DecompositionDecl`, so we have to explicitly process them here
          // to know their storage location. They will be processed a second
          // time when we visit their `VarDecl`s, so we have code that protects
          // against this above.
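          // For example, in `auto [A, B] = std::pair(1, 2);` the bindings `A`
          // and `B` are each backed by a hidden holding `VarDecl` initialized
          // via `get<0>`/`get<1>`.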
          ProcessVarDecl(*VD);
          auto *VDLoc = Env.getStorageLocation(*VD);
          assert(VDLoc != nullptr);
          Env.setStorageLocation(*B, *VDLoc);
        }
      }
    }
  }

  void VisitImplicitCastExpr(const ImplicitCastExpr *S) {
    const Expr *SubExpr = S->getSubExpr();
    assert(SubExpr != nullptr);

    switch (S->getCastKind()) {
    case CK_IntegralToBoolean: {
      // This cast creates a new, boolean value from the integral value. We
      // model that with a fresh value in the environment, unless it's already
      // a boolean.
      if (auto *SubExprVal =
              dyn_cast_or_null<BoolValue>(Env.getValue(*SubExpr)))
        Env.setValue(*S, *SubExprVal);
      else
        // FIXME: If integer modeling is added, then update this code to create
        // the boolean based on the integer model.
        Env.setValue(*S, Env.makeAtomicBoolValue());
      break;
    }

    case CK_LValueToRValue: {
      // When an L-value is used as an R-value, it may result in sharing, so we
      // need to unpack any nested `Top`s.
      auto *SubExprVal = maybeUnpackLValueExpr(*SubExpr, Env);
      if (SubExprVal == nullptr)
        break;

      Env.setValue(*S, *SubExprVal);
      break;
    }

    case CK_IntegralCast:
      // FIXME: This cast creates a new integral value from the
      // subexpression. But, because we don't model integers, we don't
      // distinguish between this new value and the underlying one. If integer
      // modeling is added, then update this code to create a fresh location
      // and value.
    case CK_UncheckedDerivedToBase:
    case CK_ConstructorConversion:
    case CK_UserDefinedConversion:
      // FIXME: Add tests that exercise CK_UncheckedDerivedToBase,
      // CK_ConstructorConversion, and CK_UserDefinedConversion.
    case CK_NoOp: {
      // FIXME: Consider making `Environment::getStorageLocation` skip noop
      // expressions (this and other similar expressions in the file) instead
      // of assigning them storage locations.
      propagateValueOrStorageLocation(*SubExpr, *S, Env);
      break;
    }
    case CK_NullToPointer: {
      auto &NullPointerVal =
          Env.getOrCreateNullPointerValue(S->getType()->getPointeeType());
      Env.setValue(*S, NullPointerVal);
      break;
    }
    case CK_NullToMemberPointer:
      // FIXME: Implement pointers to members. For now, don't associate a value
      // with this expression.
      break;
    case CK_FunctionToPointerDecay: {
      StorageLocation *PointeeLoc = Env.getStorageLocation(*SubExpr);
      if (PointeeLoc == nullptr)
        break;

      Env.setValue(*S, Env.create<PointerValue>(*PointeeLoc));
      break;
    }
    case CK_BuiltinFnToFnPtr:
      // Despite its name, the result type of `BuiltinFnToFnPtr` is a function,
      // not a function pointer. In addition, builtin functions can only be
      // called directly; it is not legal to take their address. We therefore
      // don't need to create a value or storage location for them.
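      // (Such a cast typically appears on the callee of a direct builtin call,
      // e.g. `__builtin_expect(Cond, 1)`.)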
      break;
    default:
      break;
    }
  }

  void VisitUnaryOperator(const UnaryOperator *S) {
    const Expr *SubExpr = S->getSubExpr();
    assert(SubExpr != nullptr);

    switch (S->getOpcode()) {
    case UO_Deref: {
      const auto *SubExprVal = Env.get<PointerValue>(*SubExpr);
      if (SubExprVal == nullptr)
        break;

      Env.setStorageLocation(*S, SubExprVal->getPointeeLoc());
      break;
    }
    case UO_AddrOf: {
      // FIXME: Model pointers to members.
      if (S->getType()->isMemberPointerType())
        break;

      if (StorageLocation *PointeeLoc = Env.getStorageLocation(*SubExpr))
        Env.setValue(*S, Env.create<PointerValue>(*PointeeLoc));
      break;
    }
    case UO_LNot: {
      auto *SubExprVal = dyn_cast_or_null<BoolValue>(Env.getValue(*SubExpr));
      if (SubExprVal == nullptr)
        break;

      Env.setValue(*S, Env.makeNot(*SubExprVal));
      break;
    }
    case UO_PreInc:
    case UO_PreDec:
      // Propagate the storage location and clear out any value associated with
      // it (to represent the fact that the value has definitely changed).
      // To avoid generating unnecessary values, we leave it to the specific
      // analysis to create a new value if desired.
      propagateStorageLocation(*S->getSubExpr(), *S, Env);
      if (StorageLocation *Loc = Env.getStorageLocation(*S->getSubExpr()))
        Env.clearValue(*Loc);
      break;
    case UO_PostInc:
    case UO_PostDec:
      // Propagate the old value, then clear out any value associated with the
      // storage location (to represent the fact that the value has definitely
      // changed). See above for rationale.
      propagateValue(*S->getSubExpr(), *S, Env);
      if (StorageLocation *Loc = Env.getStorageLocation(*S->getSubExpr()))
        Env.clearValue(*Loc);
      break;
    default:
      break;
    }
  }

  void VisitCXXThisExpr(const CXXThisExpr *S) {
    auto *ThisPointeeLoc = Env.getThisPointeeStorageLocation();
    if (ThisPointeeLoc == nullptr)
      // Unions are not supported yet, and will not have a location for the
      // `this` expression's pointee.
      return;

    Env.setValue(*S, Env.create<PointerValue>(*ThisPointeeLoc));
  }

  void VisitCXXNewExpr(const CXXNewExpr *S) {
    if (Value *Val = Env.createValue(S->getType()))
      Env.setValue(*S, *Val);
  }

  void VisitCXXDeleteExpr(const CXXDeleteExpr *S) {
    // Empty method.
    // We consciously don't do anything on deletes. Diagnosing double deletes
    // (for example) should be done by a specific analysis, not by the
    // framework.
  }

  void VisitReturnStmt(const ReturnStmt *S) {
    if (!Env.getDataflowAnalysisContext().getOptions().ContextSensitiveOpts)
      return;

    auto *Ret = S->getRetValue();
    if (Ret == nullptr)
      return;

    if (Ret->isPRValue()) {
      if (Ret->getType()->isRecordType())
        return;

      auto *Val = Env.getValue(*Ret);
      if (Val == nullptr)
        return;

      // FIXME: Model NRVO.
      Env.setReturnValue(Val);
    } else {
      auto *Loc = Env.getStorageLocation(*Ret);
      if (Loc == nullptr)
        return;

      // FIXME: Model NRVO.
      Env.setReturnStorageLocation(Loc);
    }
  }

  void VisitMemberExpr(const MemberExpr *S) {
    ValueDecl *Member = S->getMemberDecl();
    assert(Member != nullptr);

    // FIXME: Consider assigning pointer values to function member expressions.
    if (Member->isFunctionOrFunctionTemplate())
      return;

    // FIXME: if/when we add support for modeling enums, use that support here.
    if (isa<EnumConstantDecl>(Member))
      return;

    if (auto *D = dyn_cast<VarDecl>(Member)) {
      if (D->hasGlobalStorage()) {
        auto *VarDeclLoc = Env.getStorageLocation(*D);
        if (VarDeclLoc == nullptr)
          return;

        Env.setStorageLocation(*S, *VarDeclLoc);
        return;
      }
    }

    RecordStorageLocation *BaseLoc = getBaseObjectLocation(*S, Env);
    if (BaseLoc == nullptr)
      return;

    auto *MemberLoc = BaseLoc->getChild(*Member);
    if (MemberLoc == nullptr)
      return;
    Env.setStorageLocation(*S, *MemberLoc);
  }

  void VisitCXXDefaultArgExpr(const CXXDefaultArgExpr *S) {
    const Expr *ArgExpr = S->getExpr();
    assert(ArgExpr != nullptr);
    propagateValueOrStorageLocation(*ArgExpr, *S, Env);

    if (S->isPRValue() && S->getType()->isRecordType()) {
      auto &Loc = Env.getResultObjectLocation(*S);
      Env.initializeFieldsWithValues(Loc);
    }
  }

  void VisitCXXDefaultInitExpr(const CXXDefaultInitExpr *S) {
    const Expr *InitExpr = S->getExpr();
    assert(InitExpr != nullptr);

    // If this is a prvalue of record type, the handler for `*InitExpr` (if one
    // exists) will initialize the result object; there is no value to
    // propagate here.
    if (S->getType()->isRecordType() && S->isPRValue())
      return;

    propagateValueOrStorageLocation(*InitExpr, *S, Env);
  }

  void VisitCXXConstructExpr(const CXXConstructExpr *S) {
    const CXXConstructorDecl *ConstructorDecl = S->getConstructor();
    assert(ConstructorDecl != nullptr);

    // `CXXConstructExpr` can have array type if default-initializing an array
    // of records. We don't handle this specifically beyond potentially
    // inlining the call.
    if (!S->getType()->isRecordType()) {
      transferInlineCall(S, ConstructorDecl);
      return;
    }

    RecordStorageLocation &Loc = Env.getResultObjectLocation(*S);

    if (ConstructorDecl->isCopyOrMoveConstructor()) {
      // It is permissible for a copy/move constructor to have additional
      // parameters as long as they have default arguments defined for them.
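      // For example, `S(const S &Other, int Flag = 0)` is still a copy
      // constructor.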
      assert(S->getNumArgs() != 0);

      const Expr *Arg = S->getArg(0);
      assert(Arg != nullptr);

      auto *ArgLoc = Env.get<RecordStorageLocation>(*Arg);
      if (ArgLoc == nullptr)
        return;

      // Even if the copy/move constructor call is elidable, we choose to copy
      // the record in all cases (which isn't wrong, just potentially not
      // optimal).
      copyRecord(*ArgLoc, Loc, Env);
      return;
    }

    Env.initializeFieldsWithValues(Loc, S->getType());

    transferInlineCall(S, ConstructorDecl);
  }

  void VisitCXXOperatorCallExpr(const CXXOperatorCallExpr *S) {
    if (S->getOperator() == OO_Equal) {
      assert(S->getNumArgs() == 2);

      const Expr *Arg0 = S->getArg(0);
      assert(Arg0 != nullptr);

      const Expr *Arg1 = S->getArg(1);
      assert(Arg1 != nullptr);

      // Evaluate only copy and move assignment operators.
      const auto *Method =
          dyn_cast_or_null<CXXMethodDecl>(S->getDirectCallee());
      if (!Method)
        return;
      if (!Method->isCopyAssignmentOperator() &&
          !Method->isMoveAssignmentOperator())
        return;

      RecordStorageLocation *LocSrc = nullptr;
      if (Arg1->isPRValue()) {
        LocSrc = &Env.getResultObjectLocation(*Arg1);
      } else {
        LocSrc = Env.get<RecordStorageLocation>(*Arg1);
      }
      auto *LocDst = Env.get<RecordStorageLocation>(*Arg0);

      if (LocSrc == nullptr || LocDst == nullptr)
        return;

      copyRecord(*LocSrc, *LocDst, Env);

      // The assignment operator can have an arbitrary return type. We model
      // the return value only if the return type is the same as or a base
      // class of the destination type.
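      // For example, the common `S &operator=(const S &)` is modeled, whereas
      // an operator returning an unrelated type is not.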
      if (S->getType().getCanonicalType().getUnqualifiedType() !=
          LocDst->getType().getCanonicalType().getUnqualifiedType()) {
        auto ReturnDecl = S->getType()->getAsCXXRecordDecl();
        auto DstDecl = LocDst->getType()->getAsCXXRecordDecl();
        if (ReturnDecl == nullptr || DstDecl == nullptr)
          return;
        if (!DstDecl->isDerivedFrom(ReturnDecl))
          return;
      }

      if (S->isGLValue())
        Env.setStorageLocation(*S, *LocDst);
      else
        copyRecord(*LocDst, Env.getResultObjectLocation(*S), Env);

      return;
    }

    // `CXXOperatorCallExpr` can be a prvalue. Call `VisitCallExpr()` to
    // initialize the prvalue's fields with values.
    VisitCallExpr(S);
  }

  void VisitCXXRewrittenBinaryOperator(const CXXRewrittenBinaryOperator *RBO) {
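    // Rewritten operators arise, e.g., when C++20 rewrites `a != b` in terms
    // of `operator==`; the semantic form carries the actual computation, so
    // forward its value.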
    propagateValue(*RBO->getSemanticForm(), *RBO, Env);
  }

  void VisitCallExpr(const CallExpr *S) {
    // Of clang's builtins, only `__builtin_expect` is handled explicitly,
    // since others (like trap, debugtrap, and unreachable) are handled by CFG
    // construction.
    if (S->isCallToStdMove()) {
      assert(S->getNumArgs() == 1);

      const Expr *Arg = S->getArg(0);
      assert(Arg != nullptr);

      auto *ArgLoc = Env.getStorageLocation(*Arg);
      if (ArgLoc == nullptr)
        return;

      Env.setStorageLocation(*S, *ArgLoc);
    } else if (S->getDirectCallee() != nullptr &&
               S->getDirectCallee()->getBuiltinID() ==
                   Builtin::BI__builtin_expect) {
      assert(S->getNumArgs() > 0);
      assert(S->getArg(0) != nullptr);
      auto *ArgVal = Env.getValue(*S->getArg(0));
      if (ArgVal == nullptr)
        return;
      Env.setValue(*S, *ArgVal);
    } else if (const FunctionDecl *F = S->getDirectCallee()) {
      transferInlineCall(S, F);

      // If this call produces a prvalue of record type, initialize its fields
      // with values.
      if (S->getType()->isRecordType() && S->isPRValue()) {
        RecordStorageLocation &Loc = Env.getResultObjectLocation(*S);
        Env.initializeFieldsWithValues(Loc);
      }
    }
  }

  void VisitMaterializeTemporaryExpr(const MaterializeTemporaryExpr *S) {
    const Expr *SubExpr = S->getSubExpr();
    assert(SubExpr != nullptr);

    StorageLocation &Loc = Env.createStorageLocation(*S);
    Env.setStorageLocation(*S, Loc);

    if (SubExpr->getType()->isRecordType())
      // Nothing else left to do -- we initialized the record when transferring
      // `SubExpr`.
      return;

    if (Value *SubExprVal = Env.getValue(*SubExpr))
      Env.setValue(Loc, *SubExprVal);
  }

  void VisitCXXBindTemporaryExpr(const CXXBindTemporaryExpr *S) {
    const Expr *SubExpr = S->getSubExpr();
    assert(SubExpr != nullptr);

    propagateValue(*SubExpr, *S, Env);
  }

  void VisitCXXStaticCastExpr(const CXXStaticCastExpr *S) {
    if (S->getCastKind() == CK_NoOp) {
      const Expr *SubExpr = S->getSubExpr();
      assert(SubExpr != nullptr);

      propagateValueOrStorageLocation(*SubExpr, *S, Env);
    }
  }

  void VisitConditionalOperator(const ConditionalOperator *S) {
    const Environment *TrueEnv = StmtToEnv.getEnvironment(*S->getTrueExpr());
    const Environment *FalseEnv = StmtToEnv.getEnvironment(*S->getFalseExpr());

    if (TrueEnv == nullptr || FalseEnv == nullptr) {
      // If the true or false branch is dead, we may not have an environment
      // for it. We could handle this specifically by forwarding the value or
      // location of the live branch, but this case is rare enough that this
      // probably isn't worth the additional complexity.
      return;
    }

    if (S->isGLValue()) {
      StorageLocation *TrueLoc =
          TrueEnv->getStorageLocation(*S->getTrueExpr());
      StorageLocation *FalseLoc =
          FalseEnv->getStorageLocation(*S->getFalseExpr());
      if (TrueLoc == FalseLoc && TrueLoc != nullptr)
        Env.setStorageLocation(*S, *TrueLoc);
    } else if (!S->getType()->isRecordType()) {
      // The conditional operator can evaluate to either of the values of the
      // two branches. To model this, join these two values together to yield
      // the result of the conditional operator.
      // Note: Most joins happen in `computeBlockInputState()`, but this case
      // is different:
      // - `computeBlockInputState()` (which in turn calls
      //   `Environment::join()`) joins values associated with the _same_
      //   expression or storage location, then associates the joined value
      //   with that expression or storage location. This join has nothing to
      //   do with transfer -- instead, it joins together the results of
      //   performing transfer on two different blocks.
      // - Here, we join values associated with _different_ expressions (the
      //   true and false branch), then associate the joined value with a third
      //   expression (the conditional operator itself). This join is what it
      //   means to perform transfer on the conditional operator.
      if (Value *Val = Environment::joinValues(
              S->getType(), TrueEnv->getValue(*S->getTrueExpr()), *TrueEnv,
              FalseEnv->getValue(*S->getFalseExpr()), *FalseEnv, Env, Model))
        Env.setValue(*S, *Val);
    }
  }

  void VisitInitListExpr(const InitListExpr *S) {
    QualType Type = S->getType();

    if (!Type->isRecordType()) {
      // Until array initialization is implemented, we skip arrays and don't
      // need to care about cases where `getNumInits() > 1`.
      if (!Type->isArrayType() && S->getNumInits() == 1)
        propagateValueOrStorageLocation(*S->getInit(0), *S, Env);
      return;
    }

    // If the initializer list is transparent, there's nothing to do.
    if (S->isSemanticForm() && S->isTransparent())
      return;

    RecordStorageLocation &Loc = Env.getResultObjectLocation(*S);

    // Initialization of base classes and fields of record type happens when
    // we visit the nested `CXXConstructExpr` or `InitListExpr` for that base
    // class or field. We therefore only need to deal with fields of
    // non-record type here.

    RecordInitListHelper InitListHelper(S);

    for (auto [Field, Init] : InitListHelper.field_inits()) {
      if (Field->getType()->isRecordType())
        continue;
      if (Field->getType()->isReferenceType()) {
        assert(Field->getType().getCanonicalType()->getPointeeType() ==
               Init->getType().getCanonicalType());
        Loc.setChild(*Field, &Env.createObject(Field->getType(), Init));
        continue;
      }
      assert(Field->getType().getCanonicalType().getUnqualifiedType() ==
             Init->getType().getCanonicalType().getUnqualifiedType());
      StorageLocation *FieldLoc = Loc.getChild(*Field);
      // Locations for non-reference fields must always be non-null.
      assert(FieldLoc != nullptr);
      Value *Val = Env.getValue(*Init);
      if (Val == nullptr && isa<ImplicitValueInitExpr>(Init) &&
          Init->getType()->isPointerType())
        Val = &Env.getOrCreateNullPointerValue(
            Init->getType()->getPointeeType());
      if (Val == nullptr)
        Val = Env.createValue(Field->getType());
      if (Val != nullptr)
        Env.setValue(*FieldLoc, *Val);
    }

    for (const auto &[FieldName, FieldLoc] : Loc.synthetic_fields()) {
      QualType FieldType = FieldLoc->getType();
      if (FieldType->isRecordType()) {
        Env.initializeFieldsWithValues(*cast<RecordStorageLocation>(FieldLoc));
      } else {
        if (Value *Val = Env.createValue(FieldType))
          Env.setValue(*FieldLoc, *Val);
      }
    }

    // FIXME: Implement array initialization.
  }

  void VisitCXXBoolLiteralExpr(const CXXBoolLiteralExpr *S) {
    Env.setValue(*S, Env.getBoolLiteralValue(S->getValue()));
  }

  void VisitIntegerLiteral(const IntegerLiteral *S) {
    Env.setValue(*S, Env.getIntLiteralValue(S->getValue()));
  }

  void VisitParenExpr(const ParenExpr *S) {
    // The CFG does not contain `ParenExpr` as top-level statements in basic
    // blocks, however manual traversal to sub-expressions may encounter them.
    // Redirect to the sub-expression.
    auto *SubExpr = S->getSubExpr();
    assert(SubExpr != nullptr);
    Visit(SubExpr);
  }

  void VisitExprWithCleanups(const ExprWithCleanups *S) {
    // The CFG does not contain `ExprWithCleanups` as top-level statements in
    // basic blocks, however manual traversal to sub-expressions may encounter
    // them. Redirect to the sub-expression.
    auto *SubExpr = S->getSubExpr();
    assert(SubExpr != nullptr);
    Visit(SubExpr);
  }

private:
  /// Returns the value for the sub-expression `SubExpr` of a logic operator.
  BoolValue &getLogicOperatorSubExprValue(const Expr &SubExpr) {
    // `SubExpr` and its parent logic operator might be part of different basic
    // blocks. We try to access the value that is assigned to `SubExpr` in the
    // corresponding environment.
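    // For example, in `A && B`, the CFG places `B` in a separate block that is
    // entered only when `A` is true.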
    if (const Environment *SubExprEnv = StmtToEnv.getEnvironment(SubExpr))
      if (auto *Val =
              dyn_cast_or_null<BoolValue>(SubExprEnv->getValue(SubExpr)))
        return *Val;

    // The sub-expression may lie within a basic block that isn't reachable,
    // even if we need it to evaluate the current (reachable) expression
    // (see https://discourse.llvm.org/t/70775). In this case, visit `SubExpr`
    // within the current environment and then try to get the value that gets
    // assigned to it.
    if (Env.getValue(SubExpr) == nullptr)
      Visit(&SubExpr);
    if (auto *Val = dyn_cast_or_null<BoolValue>(Env.getValue(SubExpr)))
      return *Val;

    // If the value of `SubExpr` is still unknown, we create a fresh symbolic
    // boolean value for it.
    return Env.makeAtomicBoolValue();
  }

  // If context sensitivity is enabled, try to analyze the body of the callee
  // `F` of `S`. The type `E` must be either `CallExpr` or `CXXConstructExpr`.
  template <typename E>
  void transferInlineCall(const E *S, const FunctionDecl *F) {
    const auto &Options = Env.getDataflowAnalysisContext().getOptions();
    if (!(Options.ContextSensitiveOpts &&
          Env.canDescend(Options.ContextSensitiveOpts->Depth, F)))
      return;

    const AdornedCFG *ACFG = Env.getDataflowAnalysisContext().getAdornedCFG(F);
    if (!ACFG)
      return;

    // FIXME: We don't support context-sensitive analysis of recursion, so
    // we should return early here if `F` is the same as the `FunctionDecl`
    // holding `S` itself.

    auto ExitBlock = ACFG->getCFG().getExit().getBlockID();

    auto CalleeEnv = Env.pushCall(S);

    // FIXME: Use the same analysis as the caller for the callee. Note,
    // though, that doing so would require support for changing the analysis's
    // ASTContext.
    auto Analysis = NoopAnalysis(ACFG->getDecl().getASTContext(),
                                 DataflowAnalysisOptions{Options});

    auto BlockToOutputState =
        dataflow::runDataflowAnalysis(*ACFG, Analysis, CalleeEnv);
    assert(BlockToOutputState);
    assert(ExitBlock < BlockToOutputState->size());

    auto &ExitState = (*BlockToOutputState)[ExitBlock];
    assert(ExitState);

    Env.popCall(S, ExitState->Env);
  }

  const StmtToEnvMap &StmtToEnv;
  Environment &Env;
  Environment::ValueModel &Model;
};

} // namespace

void transfer(const StmtToEnvMap &StmtToEnv, const Stmt &S, Environment &Env,
              Environment::ValueModel &Model) {
  TransferVisitor(StmtToEnv, Env, Model).Visit(&S);
}

} // namespace dataflow
} // namespace clang
