/*
 * Copyright 2017 WebAssembly Community Group participants
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef wasm_ir_effects_h
#define wasm_ir_effects_h

#include "ir/intrinsics.h"
#include "pass.h"
#include "wasm-traversal.h"

namespace wasm {

// Analyze various possible effects.

class EffectAnalyzer {
public:
  EffectAnalyzer(const PassOptions& passOptions, Module& module)
    : ignoreImplicitTraps(passOptions.ignoreImplicitTraps),
      trapsNeverHappen(passOptions.trapsNeverHappen),
      funcEffectsMap(passOptions.funcEffectsMap), module(module),
      features(module.features) {}

  EffectAnalyzer(const PassOptions& passOptions,
                 Module& module,
                 Expression* ast)
    : EffectAnalyzer(passOptions, module) {
    walk(ast);
  }

  EffectAnalyzer(const PassOptions& passOptions,
                 Module& module,
                 Function* func)
    : EffectAnalyzer(passOptions, module) {
    walk(func);
  }
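
  // Usage sketch (illustrative only, not part of this header's API): a pass
  // with access to the PassOptions and the Module can analyze an expression
  // and then query the results, e.g.
  //
  //   EffectAnalyzer effects(options, wasm, curr);
  //   if (!effects.hasSideEffects()) {
  //     // curr can be removed (e.g. replaced with a nop or dropped).
  //   }
  //
  // (options, wasm, and curr are hypothetical variables standing in for the
  // pass's PassOptions, Module, and current Expression*.)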

  bool ignoreImplicitTraps;
  bool trapsNeverHappen;
  std::shared_ptr<FuncEffectsMap> funcEffectsMap;
  Module& module;
  FeatureSet features;

  // Walk an expression and all its children.
  void walk(Expression* ast) {
    InternalAnalyzer(*this).walk(ast);
    post();
  }

  // Visit an expression, without any children.
  void visit(Expression* ast) {
    InternalAnalyzer(*this).visit(ast);
    post();
  }

  // Walk an entire function body. This will ignore effects that are not
  // noticeable from the perspective of the caller, that is, effects that are
  // only noticeable during the call, but "vanish" when the call stack is
  // unwound.
  void walk(Function* func) {
    walk(func->body);

    // We can ignore branching out of the function body - this can only be
    // a return, and that is only noticeable in the function, not outside.
    branchesOut = false;

    // When the function exits, changes to locals cannot be noticed any more.
    localsWritten.clear();
    localsRead.clear();
  }
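
  // For example (an illustrative sketch): for a function whose entire body is
  //
  //   (local.set 0 (i32.const 1))
  //
  // walking the Function reports no caller-visible effects, while walking the
  // body as a plain Expression would report a write to local 0.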

  // Core effect tracking

  // Definitely branches out of this expression, or does a return, etc.
  // breakTargets tracks individual targets, which we may eventually see are
  // internal, while this is set when we see something that will definitely
  // not be internal, or is otherwise special like an infinite loop (which
  // does not technically branch "out", but it does break the assumption of
  // control flow proceeding normally).
  bool branchesOut = false;
  bool calls = false;
  std::set<Index> localsRead;
  std::set<Index> localsWritten;
  std::set<Name> mutableGlobalsRead;
  std::set<Name> globalsWritten;
  bool readsMemory = false;
  bool writesMemory = false;
  bool readsTable = false;
  bool writesTable = false;
  // TODO: More specific type-based alias analysis, and not just at the
  // struct/array level.
  bool readsMutableStruct = false;
  bool writesStruct = false;
  bool readsArray = false;
  bool writesArray = false;
  // A trap, either from an unreachable instruction, or from an implicit trap
  // that we do not ignore (see below).
  //
  // Note that we ignore trap differences, so it is ok to reorder traps with
  // each other, but it is not ok to remove them or reorder them with other
  // effects in a noticeable way.
  //
  // Note also that we ignore *optional* runtime-specific traps: we only
  // consider as trapping something that will trap in *all* VMs, and *all* the
  // time. For example, a single allocation might trap in a VM in a particular
  // execution, if it happens to run out of memory just there, but that is not
  // enough for us to mark it as having a trap effect. (Note that not marking
  // each allocation as possibly trapping has the nice benefit of making it
  // possible to eliminate an allocation whose result is not captured.) OTOH,
  // we *do* mark a potentially infinite number of allocations as trapping, as
  // all VMs would trap eventually, and the same for potentially infinite
  // recursion, etc.
  bool trap = false;
  // A trap from an instruction like a load or div/rem, which may trap on
  // corner cases. If we do not ignore implicit traps then these are counted
  // as a trap.
  bool implicitTrap = false;
  // An atomic load/store/RMW/Cmpxchg or an operator that has a defined
  // ordering wrt atomics (e.g. memory.grow)
  bool isAtomic = false;
  bool throws_ = false;
  // The nested depth of try-catch_all. If an instruction that may throw is
  // inside an inner try-catch_all, we don't mark it as 'throws_', because it
  // will be caught by an inner catch_all. We only count 'try's with a
  // 'catch_all' because instructions within a 'try' without a 'catch_all' can
  // still throw outside of the try.
  size_t tryDepth = 0;
  // The nested depth of catch. This is necessary to track dangling pops.
  size_t catchDepth = 0;
  // Whether this expression contains 'pop's that are not enclosed in a
  // 'catch' body. For example, (drop (pop i32)) should set this to true.
  bool danglingPop = false;
  // Whether this code may "hang" and not eventually complete. An infinite
  // loop, or a continuation that is never continued, are examples of that.
  bool mayNotReturn = false;
  // Helper functions to check for various effect types

  bool accessesLocal() const {
    return localsRead.size() + localsWritten.size() > 0;
  }
  bool accessesMutableGlobal() const {
    return globalsWritten.size() + mutableGlobalsRead.size() > 0;
  }
  bool accessesMemory() const { return calls || readsMemory || writesMemory; }
  bool accessesTable() const { return calls || readsTable || writesTable; }
  bool accessesMutableStruct() const {
    return calls || readsMutableStruct || writesStruct;
  }
  bool accessesArray() const { return calls || readsArray || writesArray; }
  bool throws() const { return throws_ || !delegateTargets.empty(); }
  // Check whether this may transfer control flow to somewhere outside of this
  // expression (aside from just flowing out normally). That includes a break
  // or a throw (if the throw is not known to be caught inside this expression;
  // note that if the throw is not caught in this expression then it might be
  // caught in this function but outside of this expression, or it might not be
  // caught in the function at all, which would mean control flow cannot be
  // transferred inside the function, but this expression does not know that).
  bool transfersControlFlow() const {
    return branchesOut || throws() || hasExternalBreakTargets();
  }

  // Changes something in globally-stored state.
  bool writesGlobalState() const {
    return globalsWritten.size() || writesMemory || writesTable ||
           writesStruct || writesArray || isAtomic || calls;
  }
  bool readsMutableGlobalState() const {
    return mutableGlobalsRead.size() || readsMemory || readsTable ||
           readsMutableStruct || readsArray || isAtomic || calls;
  }

  bool hasNonTrapSideEffects() const {
    return localsWritten.size() > 0 || danglingPop || writesGlobalState() ||
           throws() || transfersControlFlow() || mayNotReturn;
  }

  bool hasSideEffects() const { return trap || hasNonTrapSideEffects(); }

  // Check if there are side effects, and they are of a kind that cannot be
  // removed by optimization passes.
  //
  // The difference between this and hasSideEffects is subtle, and only related
  // to trapsNeverHappen - if trapsNeverHappen then any trap we see is
  // removable by optimizations. In general, you should call hasSideEffects,
  // and only call this method if you are certain that it is a place that would
  // not perform an unsafe transformation with a trap. Specifically, if a pass
  // calls this and gets the result that there are no unremovable side effects,
  // then it must either
  //
  // 1. Remove any side effects present, if any, so they no longer exist.
  // 2. Keep the code exactly where it is.
  //
  // If instead of 1&2 a pass kept the side effect and also reordered the code
  // with other things, then that could be bad, as the side effect might have
  // been behind a condition that avoids it occurring.
  //
  // TODO: Go through the optimizer and use this in all places that do not move
  // code around.
  bool hasUnremovableSideEffects() const {
    return hasNonTrapSideEffects() || (trap && !trapsNeverHappen);
  }
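
  // For instance (an illustrative sketch of option 1 above): a pass in the
  // spirit of Vacuum might do
  //
  //   if (!EffectAnalyzer(options, wasm, curr).hasUnremovableSideEffects()) {
  //     // Replace curr with a nop, removing its (removable) effects with it.
  //   }
  //
  // which removes the side effects outright rather than reordering them past
  // other code. (options and wasm are hypothetical variables for the pass's
  // PassOptions and Module.)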

  bool hasAnything() const {
    return hasSideEffects() || accessesLocal() || readsMutableGlobalState();
  }

  // check if we break to anything external from ourselves
  bool hasExternalBreakTargets() const { return !breakTargets.empty(); }

  // Checks if these effects would invalidate another set of effects (e.g., if
  // we write, we invalidate someone that reads).
  //
  // This assumes the things whose effects we are comparing will both execute,
  // at least if neither of them transfers control flow away. That is, we
  // assume that there is no transfer of control flow *between* them: we are
  // comparing things that appear after each other, perhaps with some other
  // code in the middle, but that code does not transfer control flow. It is
  // not valid to call this method in other situations, like this:
  //
  // A
  // (br_if 0 (local.get 0)) ;; this may transfer control flow away
  // B
  //
  // Calling this method in that situation is invalid because only A may
  // execute and not B. The following are examples of situations where it is
  // valid to call this method:
  //
  // A
  // ;; nothing in between them at all
  // B
  //
  // A
  // (local.set 0 (i32.const 0)) ;; something in between without a possible
  //                             ;; control flow transfer
  // B
  //
  // That the things being compared both execute only matters in the case of
  // traps-never-happen: in that mode we can move traps, but only if doing so
  // would not make them start to appear when they did not. In the second
  // example we can't reorder A and B if B traps, but in the first example we
  // can reorder them even if B traps (even if A has a global effect like a
  // global.set, since we assume B does not trap in traps-never-happen).
  bool invalidates(const EffectAnalyzer& other) {
    if ((transfersControlFlow() && other.hasSideEffects()) ||
        (other.transfersControlFlow() && hasSideEffects()) ||
        ((writesMemory || calls) && other.accessesMemory()) ||
        ((other.writesMemory || other.calls) && accessesMemory()) ||
        ((writesTable || calls) && other.accessesTable()) ||
        ((other.writesTable || other.calls) && accessesTable()) ||
        ((writesStruct || calls) && other.accessesMutableStruct()) ||
        ((other.writesStruct || other.calls) && accessesMutableStruct()) ||
        ((writesArray || calls) && other.accessesArray()) ||
        ((other.writesArray || other.calls) && accessesArray()) ||
        (danglingPop || other.danglingPop)) {
      return true;
    }
    // All atomics are sequentially consistent for now, and ordered wrt other
    // memory references.
    if ((isAtomic && other.accessesMemory()) ||
        (other.isAtomic && accessesMemory())) {
      return true;
    }
    for (auto local : localsWritten) {
      if (other.localsRead.count(local) || other.localsWritten.count(local)) {
        return true;
      }
    }
    for (auto local : localsRead) {
      if (other.localsWritten.count(local)) {
        return true;
      }
    }
    if ((other.calls && accessesMutableGlobal()) ||
        (calls && other.accessesMutableGlobal())) {
      return true;
    }
    for (auto global : globalsWritten) {
      if (other.mutableGlobalsRead.count(global) ||
          other.globalsWritten.count(global)) {
        return true;
      }
    }
    for (auto global : mutableGlobalsRead) {
      if (other.globalsWritten.count(global)) {
        return true;
      }
    }
    // Note that the above includes disallowing the reordering of a trap with
    // an exception (as an exception can transfer control flow inside the
    // current function, so transfersControlFlow would be true) - while we
    // allow the reordering of traps with each other, we do not reorder
    // exceptions with anything.
    assert(!((trap && other.throws()) || (throws() && other.trap)));
    // We can't reorder an implicit trap in a way that could alter what global
    // state is modified. However, in trapsNeverHappen mode we assume traps do
    // not occur in practice, which lets us ignore this, at least in the case
    // that the code executes. As mentioned above, we assume that there is no
    // transfer of control flow between the things we are comparing, so all we
    // need to do is check for such transfers in them.
    if (!trapsNeverHappen || transfersControlFlow() ||
        other.transfersControlFlow()) {
      if ((trap && other.writesGlobalState()) ||
          (other.trap && writesGlobalState())) {
        return true;
      }
    }
    return false;
  }

  void mergeIn(const EffectAnalyzer& other) {
    branchesOut = branchesOut || other.branchesOut;
    calls = calls || other.calls;
    readsMemory = readsMemory || other.readsMemory;
    writesMemory = writesMemory || other.writesMemory;
    readsTable = readsTable || other.readsTable;
    writesTable = writesTable || other.writesTable;
    readsMutableStruct = readsMutableStruct || other.readsMutableStruct;
    writesStruct = writesStruct || other.writesStruct;
    readsArray = readsArray || other.readsArray;
    writesArray = writesArray || other.writesArray;
    trap = trap || other.trap;
    implicitTrap = implicitTrap || other.implicitTrap;
    trapsNeverHappen = trapsNeverHappen || other.trapsNeverHappen;
    isAtomic = isAtomic || other.isAtomic;
    throws_ = throws_ || other.throws_;
    danglingPop = danglingPop || other.danglingPop;
    for (auto i : other.localsRead) {
      localsRead.insert(i);
    }
    for (auto i : other.localsWritten) {
      localsWritten.insert(i);
    }
    for (auto i : other.mutableGlobalsRead) {
      mutableGlobalsRead.insert(i);
    }
    for (auto i : other.globalsWritten) {
      globalsWritten.insert(i);
    }
    for (auto i : other.breakTargets) {
      breakTargets.insert(i);
    }
    for (auto i : other.delegateTargets) {
      delegateTargets.insert(i);
    }
  }

  // The checks above happen after the node's children were processed, in the
  // order of execution. We must also check for control flow that happens
  // before the children, i.e., loops.
  bool checkPre(Expression* curr) {
    if (curr->is<Loop>()) {
      branchesOut = true;
      return true;
    }
    return false;
  }

  bool checkPost(Expression* curr) {
    visit(curr);
    if (curr->is<Loop>()) {
      branchesOut = true;
    }
    return hasAnything();
  }

  std::set<Name> breakTargets;
  std::set<Name> delegateTargets;

private:
  struct InternalAnalyzer
    : public PostWalker<InternalAnalyzer, OverriddenVisitor<InternalAnalyzer>> {

    EffectAnalyzer& parent;

    InternalAnalyzer(EffectAnalyzer& parent) : parent(parent) {}

    static void scan(InternalAnalyzer* self, Expression** currp) {
      Expression* curr = *currp;
      // We need to decrement try depth before catch starts, so handle it
      // separately
      if (curr->is<Try>()) {
        self->pushTask(doVisitTry, currp);
        self->pushTask(doEndCatch, currp);
        auto& catchBodies = curr->cast<Try>()->catchBodies;
        for (int i = int(catchBodies.size()) - 1; i >= 0; i--) {
          self->pushTask(scan, &catchBodies[i]);
        }
        self->pushTask(doStartCatch, currp);
        self->pushTask(scan, &curr->cast<Try>()->body);
        self->pushTask(doStartTry, currp);
        return;
      }
      PostWalker<InternalAnalyzer, OverriddenVisitor<InternalAnalyzer>>::scan(
        self, currp);
    }

    static void doStartTry(InternalAnalyzer* self, Expression** currp) {
      Try* curr = (*currp)->cast<Try>();
      // We only count 'try's with a 'catch_all' because instructions within a
      // 'try' without a 'catch_all' can still throw outside of the try.
      if (curr->hasCatchAll()) {
        self->parent.tryDepth++;
      }
    }

    static void doStartCatch(InternalAnalyzer* self, Expression** currp) {
      Try* curr = (*currp)->cast<Try>();
      // This is conservative. When an inner try-delegate targets the current
      // expression, even if the try-delegate's body can't throw, we consider
      // that the current expression can throw, for simplicity, unless the
      // current expression is inside a try-catch_all (which would catch the
      // exception). It is hard to figure out whether the original
      // try-delegate's body throws or not at this point.
      if (curr->name.is()) {
        if (self->parent.delegateTargets.count(curr->name) &&
            self->parent.tryDepth == 0) {
          self->parent.throws_ = true;
        }
        self->parent.delegateTargets.erase(curr->name);
      }
      // We only count 'try's with a 'catch_all' because instructions within a
      // 'try' without a 'catch_all' can still throw outside of the try.
      if (curr->hasCatchAll()) {
        assert(self->parent.tryDepth > 0 && "try depth cannot be negative");
        self->parent.tryDepth--;
      }
      self->parent.catchDepth++;
    }

    static void doEndCatch(InternalAnalyzer* self, Expression** currp) {
      assert(self->parent.catchDepth > 0 && "catch depth cannot be negative");
      self->parent.catchDepth--;
    }

    void visitBlock(Block* curr) {
      if (curr->name.is()) {
        parent.breakTargets.erase(curr->name); // these were internal breaks
      }
    }
    void visitIf(If* curr) {}
    void visitLoop(Loop* curr) {
      if (curr->name.is() && parent.breakTargets.erase(curr->name) > 0) {
        parent.mayNotReturn = true;
      }
    }
    void visitBreak(Break* curr) { parent.breakTargets.insert(curr->name); }
    void visitSwitch(Switch* curr) {
      for (auto name : curr->targets) {
        parent.breakTargets.insert(name);
      }
      parent.breakTargets.insert(curr->default_);
    }
    void visitCall(Call* curr) {
      // call.without.effects has no effects.
      if (Intrinsics(parent.module).isCallWithoutEffects(curr)) {
        return;
      }

      if (curr->isReturn) {
        parent.branchesOut = true;
      }

      if (parent.funcEffectsMap) {
        auto iter = parent.funcEffectsMap->find(curr->target);
        if (iter != parent.funcEffectsMap->end()) {
          // We have effect information for this call target, and can just use
          // that. The one change we may want to make is to remove throws_: if
          // the target function throws but we know the exception will be
          // caught anyhow, we can ignore it, the same as the code below for
          // the general path.
          const auto& targetEffects = iter->second;
          if (targetEffects.throws_ && parent.tryDepth > 0) {
            auto filteredEffects = targetEffects;
            filteredEffects.throws_ = false;
            parent.mergeIn(filteredEffects);
          } else {
            // Just merge in all the effects.
            parent.mergeIn(targetEffects);
          }
          return;
        }
      }

      parent.calls = true;
      // When EH is enabled, any call can throw.
      if (parent.features.hasExceptionHandling() && parent.tryDepth == 0) {
        parent.throws_ = true;
      }
    }
    void visitCallIndirect(CallIndirect* curr) {
      parent.calls = true;
      if (parent.features.hasExceptionHandling() && parent.tryDepth == 0) {
        parent.throws_ = true;
      }
      if (curr->isReturn) {
        parent.branchesOut = true;
      }
    }
    void visitLocalGet(LocalGet* curr) {
      parent.localsRead.insert(curr->index);
    }
    void visitLocalSet(LocalSet* curr) {
      parent.localsWritten.insert(curr->index);
    }
    void visitGlobalGet(GlobalGet* curr) {
      if (parent.module.getGlobal(curr->name)->mutable_ == Mutable) {
        parent.mutableGlobalsRead.insert(curr->name);
      }
    }
    void visitGlobalSet(GlobalSet* curr) {
      parent.globalsWritten.insert(curr->name);
    }
    void visitLoad(Load* curr) {
      parent.readsMemory = true;
      parent.isAtomic |= curr->isAtomic;
      parent.implicitTrap = true;
    }
    void visitStore(Store* curr) {
      parent.writesMemory = true;
      parent.isAtomic |= curr->isAtomic;
      parent.implicitTrap = true;
    }
    void visitAtomicRMW(AtomicRMW* curr) {
      parent.readsMemory = true;
      parent.writesMemory = true;
      parent.isAtomic = true;
      parent.implicitTrap = true;
    }
    void visitAtomicCmpxchg(AtomicCmpxchg* curr) {
      parent.readsMemory = true;
      parent.writesMemory = true;
      parent.isAtomic = true;
      parent.implicitTrap = true;
    }
    void visitAtomicWait(AtomicWait* curr) {
      parent.readsMemory = true;
      // AtomicWait doesn't strictly write memory, but it does modify the
      // waiters list associated with the specified address, which we can think
      // of as a write.
      parent.writesMemory = true;
      parent.isAtomic = true;
      parent.implicitTrap = true;
    }
    void visitAtomicNotify(AtomicNotify* curr) {
      // AtomicNotify doesn't strictly write memory, but it does modify the
      // waiters list associated with the specified address, which we can think
      // of as a write.
      parent.readsMemory = true;
      parent.writesMemory = true;
      parent.isAtomic = true;
      parent.implicitTrap = true;
    }
    void visitAtomicFence(AtomicFence* curr) {
      // AtomicFence should not be reordered with any memory operations, so we
      // set these to true.
      parent.readsMemory = true;
      parent.writesMemory = true;
      parent.isAtomic = true;
    }
    void visitSIMDExtract(SIMDExtract* curr) {}
    void visitSIMDReplace(SIMDReplace* curr) {}
    void visitSIMDShuffle(SIMDShuffle* curr) {}
    void visitSIMDTernary(SIMDTernary* curr) {}
    void visitSIMDShift(SIMDShift* curr) {}
    void visitSIMDLoad(SIMDLoad* curr) {
      parent.readsMemory = true;
      parent.implicitTrap = true;
    }
    void visitSIMDLoadStoreLane(SIMDLoadStoreLane* curr) {
      if (curr->isLoad()) {
        parent.readsMemory = true;
      } else {
        parent.writesMemory = true;
      }
      parent.implicitTrap = true;
    }
    void visitMemoryInit(MemoryInit* curr) {
      parent.writesMemory = true;
      parent.implicitTrap = true;
    }
    void visitDataDrop(DataDrop* curr) {
      // data.drop does not actually write memory, but it does alter the size
      // of a segment, which can be noticeable later by memory.init, so we need
      // to mark it as having a global side effect of some kind.
      parent.writesMemory = true;
      parent.implicitTrap = true;
    }
    void visitMemoryCopy(MemoryCopy* curr) {
      parent.readsMemory = true;
      parent.writesMemory = true;
      parent.implicitTrap = true;
    }
    void visitMemoryFill(MemoryFill* curr) {
      parent.writesMemory = true;
      parent.implicitTrap = true;
    }
    void visitConst(Const* curr) {}
    void visitUnary(Unary* curr) {
      switch (curr->op) {
        case TruncSFloat32ToInt32:
        case TruncSFloat32ToInt64:
        case TruncUFloat32ToInt32:
        case TruncUFloat32ToInt64:
        case TruncSFloat64ToInt32:
        case TruncSFloat64ToInt64:
        case TruncUFloat64ToInt32:
        case TruncUFloat64ToInt64: {
          parent.implicitTrap = true;
          break;
        }
        default: {
        }
      }
    }
    void visitBinary(Binary* curr) {
      switch (curr->op) {
        case DivSInt32:
        case DivUInt32:
        case RemSInt32:
        case RemUInt32:
        case DivSInt64:
        case DivUInt64:
        case RemSInt64:
        case RemUInt64: {
          // div and rem may trap implicitly only if the RHS is non-constant,
          // or is a constant equal to zero (or to -1, for signed division,
          // which can overflow). Remainder traps only on a zero divisor.
          if (auto* c = curr->right->dynCast<Const>()) {
            if (c->value.isZero()) {
              parent.implicitTrap = true;
            } else if ((curr->op == DivSInt32 || curr->op == DivSInt64) &&
                       c->value.getInteger() == -1LL) {
              parent.implicitTrap = true;
            }
          } else {
            parent.implicitTrap = true;
          }
          break;
        }
        default: {
        }
      }
    }
    void visitSelect(Select* curr) {}
    void visitDrop(Drop* curr) {}
    void visitReturn(Return* curr) { parent.branchesOut = true; }
    void visitMemorySize(MemorySize* curr) {
      // memory.size accesses the size of the memory, and thus can be modeled
      // as reading memory
      parent.readsMemory = true;
      // Atomics are sequentially consistent with memory.size.
      parent.isAtomic = true;
    }
    void visitMemoryGrow(MemoryGrow* curr) {
      // TODO: find out if calls is necessary here
      parent.calls = true;
      // memory.grow technically does a read-modify-write operation on the
      // memory size in the successful case, modifying the set of valid
      // addresses, and just a read operation in the failure case
      parent.readsMemory = true;
      parent.writesMemory = true;
      // Atomics are also sequentially consistent with memory.grow.
      parent.isAtomic = true;
    }
    void visitRefNull(RefNull* curr) {}
    void visitRefIsNull(RefIsNull* curr) {}
    void visitRefFunc(RefFunc* curr) {}
    void visitRefEq(RefEq* curr) {}
    void visitTableGet(TableGet* curr) {
      parent.readsTable = true;
      parent.implicitTrap = true;
    }
    void visitTableSet(TableSet* curr) {
      parent.writesTable = true;
      parent.implicitTrap = true;
    }
    void visitTableSize(TableSize* curr) { parent.readsTable = true; }
    void visitTableGrow(TableGrow* curr) {
      // table.grow technically does a read-modify-write operation on the
      // table size in the successful case, modifying the set of valid
      // indices, and just a read operation in the failure case
      parent.readsTable = true;
      parent.writesTable = true;
    }
    void visitTry(Try* curr) {
      if (curr->delegateTarget.is()) {
        parent.delegateTargets.insert(curr->delegateTarget);
      }
    }
    void visitThrow(Throw* curr) {
      if (parent.tryDepth == 0) {
        parent.throws_ = true;
      }
    }
    void visitRethrow(Rethrow* curr) {
      if (parent.tryDepth == 0) {
        parent.throws_ = true;
      }
      // traps when the arg is null
      parent.implicitTrap = true;
    }
    void visitNop(Nop* curr) {}
    void visitUnreachable(Unreachable* curr) { parent.trap = true; }
    void visitPop(Pop* curr) {
      if (parent.catchDepth == 0) {
        parent.danglingPop = true;
      }
    }
    void visitTupleMake(TupleMake* curr) {}
    void visitTupleExtract(TupleExtract* curr) {}
    void visitI31New(I31New* curr) {}
    void visitI31Get(I31Get* curr) {
      // traps when the ref is null
      if (curr->i31->type.isNullable()) {
        parent.implicitTrap = true;
      }
    }
    void visitCallRef(CallRef* curr) {
      if (curr->target->type.isNull()) {
        parent.trap = true;
        return;
      }
      parent.calls = true;
      if (parent.features.hasExceptionHandling() && parent.tryDepth == 0) {
        parent.throws_ = true;
      }
      if (curr->isReturn) {
        parent.branchesOut = true;
      }
      // traps when the call target is null
      if (curr->target->type.isNullable()) {
        parent.implicitTrap = true;
      }
    }
    void visitRefTest(RefTest* curr) {}
    void visitRefCast(RefCast* curr) {
      // Traps if the ref is not null and the cast fails.
      parent.implicitTrap = true;
    }
    void visitBrOn(BrOn* curr) { parent.breakTargets.insert(curr->name); }
    void visitStructNew(StructNew* curr) {}
    void visitStructGet(StructGet* curr) {
      if (curr->ref->type == Type::unreachable) {
        return;
      }
      if (curr->ref->type.isNull()) {
        parent.trap = true;
        return;
      }
      if (curr->ref->type.getHeapType()
            .getStruct()
            .fields[curr->index]
            .mutable_ == Mutable) {
        parent.readsMutableStruct = true;
      }
      // traps when the arg is null
      if (curr->ref->type.isNullable()) {
        parent.implicitTrap = true;
      }
    }
    void visitStructSet(StructSet* curr) {
      if (curr->ref->type.isNull()) {
        parent.trap = true;
        return;
      }
      parent.writesStruct = true;
      // traps when the arg is null
      if (curr->ref->type.isNullable()) {
        parent.implicitTrap = true;
      }
    }
    void visitArrayNew(ArrayNew* curr) {}
    void visitArrayNewData(ArrayNewData* curr) {
      // Traps on out of bounds access to segments or access to dropped
      // segments.
      parent.implicitTrap = true;
    }
    void visitArrayNewElem(ArrayNewElem* curr) {
      // Traps on out of bounds access to segments or access to dropped
      // segments.
      parent.implicitTrap = true;
    }
    void visitArrayNewFixed(ArrayNewFixed* curr) {}
    void visitArrayGet(ArrayGet* curr) {
      if (curr->ref->type.isNull()) {
        parent.trap = true;
        return;
      }
      parent.readsArray = true;
      // traps when the arg is null or the index is out of bounds
      parent.implicitTrap = true;
    }
    void visitArraySet(ArraySet* curr) {
      if (curr->ref->type.isNull()) {
        parent.trap = true;
        return;
      }
      parent.writesArray = true;
      // traps when the arg is null or the index is out of bounds
      parent.implicitTrap = true;
    }
    void visitArrayLen(ArrayLen* curr) {
      if (curr->ref->type.isNull()) {
        parent.trap = true;
        return;
      }
      // traps when the arg is null
      if (curr->ref->type.isNullable()) {
        parent.implicitTrap = true;
      }
    }
    void visitArrayCopy(ArrayCopy* curr) {
      if (curr->destRef->type.isNull() || curr->srcRef->type.isNull()) {
        parent.trap = true;
        return;
      }
      parent.readsArray = true;
      parent.writesArray = true;
      // traps when a ref is null, or when out of bounds.
      parent.implicitTrap = true;
    }
    void visitArrayFill(ArrayFill* curr) {
      if (curr->ref->type.isNull()) {
        parent.trap = true;
        return;
      }
      parent.writesArray = true;
      // Traps when the destination is null or when out of bounds.
      parent.implicitTrap = true;
    }
    template<typename ArrayInit> void visitArrayInit(ArrayInit* curr) {
      if (curr->ref->type.isNull()) {
        parent.trap = true;
        return;
      }
      parent.writesArray = true;
      // Traps when the destination is null, when out of bounds in source or
      // destination, or when the source segment has been dropped.
      parent.implicitTrap = true;
    }
    void visitArrayInitData(ArrayInitData* curr) { visitArrayInit(curr); }
    void visitArrayInitElem(ArrayInitElem* curr) { visitArrayInit(curr); }
    void visitRefAs(RefAs* curr) {
      if (curr->op == ExternInternalize || curr->op == ExternExternalize) {
        // These conversions are infallible.
        return;
      }
      // traps when the arg is not valid
      parent.implicitTrap = true;
      // Note: We could be more precise here and report the lack of a possible
      // trap if the input is non-nullable (and also of the right kind for
      // RefAsFunc etc.). However, we have optimization passes that will
      // remove a RefAs in such a case (in OptimizeInstructions, and also
      // Vacuum in trapsNeverHappen mode), so duplicating that code here would
      // only help until the next time those optimizations run. As a tradeoff,
      // we keep the code here simpler, but it does mean another optimization
      // cycle may be needed in some cases.
    }
    void visitStringNew(StringNew* curr) {
      // traps when out of bounds in linear memory or ref is null
      parent.implicitTrap = true;
      switch (curr->op) {
        case StringNewUTF8:
        case StringNewWTF8:
        case StringNewLossyUTF8:
        case StringNewWTF16:
          parent.readsMemory = true;
          break;
        case StringNewUTF8Array:
        case StringNewWTF8Array:
        case StringNewLossyUTF8Array:
        case StringNewWTF16Array:
          parent.readsArray = true;
          break;
        default: {
        }
      }
    }
    void visitStringConst(StringConst* curr) {}
    void visitStringMeasure(StringMeasure* curr) {
      // traps when ref is null.
      parent.implicitTrap = true;
    }
    void visitStringEncode(StringEncode* curr) {
      // traps when ref is null or we write out of bounds.
      parent.implicitTrap = true;
      switch (curr->op) {
        case StringEncodeUTF8:
        case StringEncodeLossyUTF8:
        case StringEncodeWTF8:
        case StringEncodeWTF16:
          parent.writesMemory = true;
          break;
        case StringEncodeUTF8Array:
        case StringEncodeLossyUTF8Array:
        case StringEncodeWTF8Array:
        case StringEncodeWTF16Array:
          parent.writesArray = true;
          break;
        default: {
        }
      }
    }
    void visitStringConcat(StringConcat* curr) {
      // traps when an input is null.
      parent.implicitTrap = true;
    }
    void visitStringEq(StringEq* curr) {}
    void visitStringAs(StringAs* curr) {
      // traps when ref is null.
      parent.implicitTrap = true;
    }
    void visitStringWTF8Advance(StringWTF8Advance* curr) {
      // traps when ref is null.
      parent.implicitTrap = true;
    }
    void visitStringWTF16Get(StringWTF16Get* curr) {
      // traps when ref is null.
      parent.implicitTrap = true;
    }
    void visitStringIterNext(StringIterNext* curr) {
      // traps when ref is null.
      parent.implicitTrap = true;
      // Modifies state in the iterator. We model that as accessing heap
      // memory in an array atm. TODO: consider adding a new effect type for
      // this (we added one for arrays because struct/array operations often
      // interleave, say with vtable accesses, but it's not clear adding
      // overhead to this class is worth it for string iters).
      parent.readsArray = true;
      parent.writesArray = true;
    }
    void visitStringIterMove(StringIterMove* curr) {
      // traps when ref is null.
      parent.implicitTrap = true;
      // See StringIterNext.
      parent.readsArray = true;
      parent.writesArray = true;
    }
    void visitStringSliceWTF(StringSliceWTF* curr) {
      // traps when ref is null.
      parent.implicitTrap = true;
    }
    void visitStringSliceIter(StringSliceIter* curr) {
      // traps when ref is null.
      parent.implicitTrap = true;
    }
  };

public:
  // Helpers

  // See the comment on invalidates() for the assumptions on the inputs here.
  static bool canReorder(const PassOptions& passOptions,
                         Module& module,
                         Expression* a,
                         Expression* b) {
    EffectAnalyzer aEffects(passOptions, module, a);
    EffectAnalyzer bEffects(passOptions, module, b);
    return !aEffects.invalidates(bEffects);
  }
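
  // For example (an illustrative sketch): a pass considering swapping the
  // operands of a Binary expression `bin` might check
  //
  //   if (EffectAnalyzer::canReorder(options, wasm, bin->left, bin->right)) {
  //     std::swap(bin->left, bin->right);
  //   }
  //
  // (options and wasm are hypothetical variables for the pass's PassOptions
  // and Module, and the swap must also be valid for the operator itself.)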

  // C-API

  enum SideEffects : uint32_t {
    None = 0,
    Branches = 1 << 0,
    Calls = 1 << 1,
    ReadsLocal = 1 << 2,
    WritesLocal = 1 << 3,
    ReadsGlobal = 1 << 4,
    WritesGlobal = 1 << 5,
    ReadsMemory = 1 << 6,
    WritesMemory = 1 << 7,
    ReadsTable = 1 << 8,
    WritesTable = 1 << 9,
    ImplicitTrap = 1 << 10,
    IsAtomic = 1 << 11,
    Throws = 1 << 12,
    DanglingPop = 1 << 13,
    TrapsNeverHappen = 1 << 14,
    Any = (1 << 15) - 1
  };
  uint32_t getSideEffects() const {
    uint32_t effects = 0;
    if (branchesOut || hasExternalBreakTargets()) {
      effects |= SideEffects::Branches;
    }
    if (calls) {
      effects |= SideEffects::Calls;
    }
    if (localsRead.size() > 0) {
      effects |= SideEffects::ReadsLocal;
    }
    if (localsWritten.size() > 0) {
      effects |= SideEffects::WritesLocal;
    }
    if (mutableGlobalsRead.size()) {
      effects |= SideEffects::ReadsGlobal;
    }
    if (globalsWritten.size() > 0) {
      effects |= SideEffects::WritesGlobal;
    }
    if (readsMemory) {
      effects |= SideEffects::ReadsMemory;
    }
    if (writesMemory) {
      effects |= SideEffects::WritesMemory;
    }
    if (readsTable) {
      effects |= SideEffects::ReadsTable;
    }
    if (writesTable) {
      effects |= SideEffects::WritesTable;
    }
    if (implicitTrap) {
      effects |= SideEffects::ImplicitTrap;
    }
    if (trapsNeverHappen) {
      effects |= SideEffects::TrapsNeverHappen;
    }
    if (isAtomic) {
      effects |= SideEffects::IsAtomic;
    }
    if (throws_) {
      effects |= SideEffects::Throws;
    }
    if (danglingPop) {
      effects |= SideEffects::DanglingPop;
    }
    return effects;
  }
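
  // For example (an illustrative sketch): C-API style users can test
  // individual bits of the returned mask, e.g.
  //
  //   uint32_t effects = analyzer.getSideEffects();
  //   if (effects & SideEffects::WritesMemory) {
  //     // ... handle memory-writing code ...
  //   }
  //
  // where `analyzer` is a hypothetical EffectAnalyzer instance.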

  // Ignores all forms of control flow transfers: breaks, returns, and
  // exceptions. (Note that traps are not considered relevant here - a trap
  // does not just transfer control flow, but can be seen as halting the
  // entire program.)
  //
  // This function matches transfersControlFlow(), that is, after calling this
  // method transfersControlFlow() will always return false.
  void ignoreControlFlowTransfers() {
    branchesOut = false;
    breakTargets.clear();
    throws_ = false;
    delegateTargets.clear();
    assert(!transfersControlFlow());
  }

private:
  void post() {
    assert(tryDepth == 0);

    if (ignoreImplicitTraps) {
      implicitTrap = false;
    } else if (implicitTrap) {
      trap = true;
    }
  }
};

// Calculate effects only on the node itself (shallowly), and not on
// children.
class ShallowEffectAnalyzer : public EffectAnalyzer {
public:
  ShallowEffectAnalyzer(const PassOptions& passOptions,
                        Module& module,
                        Expression* ast = nullptr)
    : EffectAnalyzer(passOptions, module) {
    if (ast) {
      visit(ast);
    }
  }
};
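
// For example (an illustrative sketch): for (i32.add (unreachable)
// (i32.const 1)), a full EffectAnalyzer reports a trap, coming from the
// unreachable child, while a ShallowEffectAnalyzer on the i32.add itself
// reports nothing, as the add has no effects of its own.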

} // namespace wasm

#endif // wasm_ir_effects_h