//===-- ThreadPlanStepOut.cpp ---------------------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//

#include "lldb/Target/ThreadPlanStepOut.h"
#include "lldb/Breakpoint/Breakpoint.h"
#include "lldb/Core/Value.h"
#include "lldb/Core/ValueObjectConstResult.h"
#include "lldb/Symbol/Block.h"
#include "lldb/Symbol/Function.h"
#include "lldb/Symbol/Symbol.h"
#include "lldb/Symbol/Type.h"
#include "lldb/Target/ABI.h"
#include "lldb/Target/Process.h"
#include "lldb/Target/RegisterContext.h"
#include "lldb/Target/StopInfo.h"
#include "lldb/Target/Target.h"
#include "lldb/Target/ThreadPlanStepOverRange.h"
#include "lldb/Target/ThreadPlanStepThrough.h"
#include "lldb/Utility/LLDBLog.h"
#include "lldb/Utility/Log.h"

#include <memory>

using namespace lldb;
using namespace lldb_private;

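// Default flag values for new step-out plans (applied by the constructor via
// SetFlagsToDefault() before the avoid-no-debug bit is adjusted).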
uint32_t ThreadPlanStepOut::s_default_flag_values = 0;

// ThreadPlanStepOut: Step out of the current frame
ThreadPlanStepOut::ThreadPlanStepOut(
    Thread &thread, SymbolContext *context, bool first_insn, bool stop_others,
    Vote report_stop_vote, Vote report_run_vote, uint32_t frame_idx,
    LazyBool step_out_avoids_code_without_debug_info,
    bool continue_to_next_branch, bool gather_return_value)
    : ThreadPlan(ThreadPlan::eKindStepOut, "Step out", thread,
                 report_stop_vote, report_run_vote),
      ThreadPlanShouldStopHere(this), m_step_from_insn(LLDB_INVALID_ADDRESS),
      m_return_bp_id(LLDB_INVALID_BREAK_ID),
      m_return_addr(LLDB_INVALID_ADDRESS), m_stop_others(stop_others),
      m_immediate_step_from_function(nullptr),
      m_calculate_return_value(gather_return_value) {
  Log *log = GetLog(LLDBLog::Step);
  SetFlagsToDefault();
  SetupAvoidNoDebug(step_out_avoids_code_without_debug_info);

  m_step_from_insn = thread.GetRegisterContext()->GetPC(0);

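  // The frame we step out to is the caller of the frame at frame_idx, i.e.
  // the next frame up the stack.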
  uint32_t return_frame_index = frame_idx + 1;
  StackFrameSP return_frame_sp(thread.GetStackFrameAtIndex(return_frame_index));
  StackFrameSP immediate_return_from_sp(thread.GetStackFrameAtIndex(frame_idx));

  if (!return_frame_sp || !immediate_return_from_sp)
    return; // we can't do anything here. ValidatePlan() will return false.

  // While stepping out, behave as if artificial frames are not present.
  while (return_frame_sp->IsArtificial()) {
    m_stepped_past_frames.push_back(return_frame_sp);

    ++return_frame_index;
    return_frame_sp = thread.GetStackFrameAtIndex(return_frame_index);

    // We never expect to see an artificial frame without a regular ancestor.
    // If this happens, log the issue and defensively refuse to step out.
    if (!return_frame_sp) {
      LLDB_LOG(log, "Can't step out of frame with artificial ancestors");
      return;
    }
  }

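  // Remember the stack IDs of the frame we are stepping out to and the frame
  // we are stepping out of; later they are compared against frame zero to
  // decide whether the step-out has finished.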
  m_step_out_to_id = return_frame_sp->GetStackID();
  m_immediate_step_from_id = immediate_return_from_sp->GetStackID();

  // If the frame directly below the one we are returning to is inlined, we
  // have to be a little more careful. It is non-trivial to determine the real
  // "return code address" for an inlined frame, so we have to work our way to
  // that frame and then step out.
  if (immediate_return_from_sp->IsInlined()) {
    if (frame_idx > 0) {
      // First queue a plan that gets us to this inlined frame, and when we get
      // there we'll queue a second plan that walks us out of this frame.
      m_step_out_to_inline_plan_sp = std::make_shared<ThreadPlanStepOut>(
          thread, nullptr, false, stop_others, eVoteNoOpinion, eVoteNoOpinion,
          frame_idx - 1, eLazyBoolNo, continue_to_next_branch);
      static_cast<ThreadPlanStepOut *>(m_step_out_to_inline_plan_sp.get())
          ->SetShouldStopHereCallbacks(nullptr, nullptr);
      m_step_out_to_inline_plan_sp->SetPrivate(true);
    } else {
      // If we're already at the inlined frame we're stepping through, then
      // just do that now.
      QueueInlinedStepPlan(false);
    }
  } else {
    // Find the return address and set a breakpoint there:
    // FIXME - can we do this more securely if we know first_insn?

    Address return_address(return_frame_sp->GetFrameCodeAddress());
    if (continue_to_next_branch) {
      SymbolContext return_address_sc;
      AddressRange range;
      Address return_address_decr_pc = return_address;
      if (return_address_decr_pc.GetOffset() > 0)
        return_address_decr_pc.Slide(-1);

      return_address_decr_pc.CalculateSymbolContext(
          &return_address_sc, lldb::eSymbolContextLineEntry);
      if (return_address_sc.line_entry.IsValid()) {
        const bool include_inlined_functions = false;
        range = return_address_sc.line_entry.GetSameLineContiguousAddressRange(
            include_inlined_functions);
        if (range.GetByteSize() > 0) {
          return_address = m_process.AdvanceAddressToNextBranchInstruction(
              return_address, range);
        }
      }
    }
    m_return_addr = return_address.GetLoadAddress(&m_process.GetTarget());

    if (m_return_addr == LLDB_INVALID_ADDRESS)
      return;

    // Perform some additional validation on the return address.
    uint32_t permissions = 0;
    if (!m_process.GetLoadAddressPermissions(m_return_addr, permissions)) {
      LLDB_LOGF(log, "ThreadPlanStepOut(%p): Return address (0x%" PRIx64
                ") permissions not found.", static_cast<void *>(this),
                m_return_addr);
    } else if (!(permissions & ePermissionsExecutable)) {
      m_constructor_errors.Printf("Return address (0x%" PRIx64
                                  ") did not point to executable memory.",
                                  m_return_addr);
      LLDB_LOGF(log, "ThreadPlanStepOut(%p): %s", static_cast<void *>(this),
                m_constructor_errors.GetData());
      return;
    }

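    // Set an internal breakpoint on the return address and restrict it to
    // this thread; hitting it tells us we have returned to the caller.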
    Breakpoint *return_bp =
        GetTarget().CreateBreakpoint(m_return_addr, true, false).get();

    if (return_bp != nullptr) {
      if (return_bp->IsHardware() && !return_bp->HasResolvedLocations())
        m_could_not_resolve_hw_bp = true;
      return_bp->SetThreadID(m_tid);
      m_return_bp_id = return_bp->GetID();
      return_bp->SetBreakpointKind("step-out");
    }

    if (immediate_return_from_sp) {
      const SymbolContext &sc =
          immediate_return_from_sp->GetSymbolContext(eSymbolContextFunction);
      if (sc.function) {
        m_immediate_step_from_function = sc.function;
      }
    }
  }
}

void ThreadPlanStepOut::SetupAvoidNoDebug(
    LazyBool step_out_avoids_code_without_debug_info) {
  bool avoid_nodebug = true;
  switch (step_out_avoids_code_without_debug_info) {
  case eLazyBoolYes:
    avoid_nodebug = true;
    break;
  case eLazyBoolNo:
    avoid_nodebug = false;
    break;
  case eLazyBoolCalculate:
    avoid_nodebug = GetThread().GetStepOutAvoidsNoDebug();
    break;
  }
  if (avoid_nodebug)
    GetFlags().Set(ThreadPlanShouldStopHere::eStepOutAvoidNoDebug);
  else
    GetFlags().Clear(ThreadPlanShouldStopHere::eStepOutAvoidNoDebug);
}

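// Once this plan is pushed onto the plan stack, queue whichever child plan
// the constructor prepared: either stepping out to the inlined frame first,
// or stepping through it directly.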
void ThreadPlanStepOut::DidPush() {
  Thread &thread = GetThread();
  if (m_step_out_to_inline_plan_sp)
    thread.QueueThreadPlan(m_step_out_to_inline_plan_sp, false);
  else if (m_step_through_inline_plan_sp)
    thread.QueueThreadPlan(m_step_through_inline_plan_sp, false);
}

ThreadPlanStepOut::~ThreadPlanStepOut() {
  if (m_return_bp_id != LLDB_INVALID_BREAK_ID)
    GetTarget().RemoveBreakpointByID(m_return_bp_id);
}

void ThreadPlanStepOut::GetDescription(Stream *s,
                                       lldb::DescriptionLevel level) {
  if (level == lldb::eDescriptionLevelBrief)
    s->Printf("step out");
  else {
    if (m_step_out_to_inline_plan_sp)
      s->Printf("Stepping out to inlined frame so we can walk through it.");
    else if (m_step_through_inline_plan_sp)
      s->Printf("Stepping out by stepping through inlined function.");
    else {
      s->Printf("Stepping out from ");
      Address tmp_address;
      if (tmp_address.SetLoadAddress(m_step_from_insn, &GetTarget())) {
        tmp_address.Dump(s, &m_process, Address::DumpStyleResolvedDescription,
                         Address::DumpStyleLoadAddress);
      } else {
        s->Printf("address 0x%" PRIx64 "", (uint64_t)m_step_from_insn);
      }

      // FIXME: find some useful way to present the m_return_id, since there
      // may be multiple copies of the same function on the stack.

      s->Printf(" returning to frame at ");
      if (tmp_address.SetLoadAddress(m_return_addr, &GetTarget())) {
        tmp_address.Dump(s, &m_process, Address::DumpStyleResolvedDescription,
                         Address::DumpStyleLoadAddress);
      } else {
        s->Printf("address 0x%" PRIx64 "", (uint64_t)m_return_addr);
      }

      if (level == eDescriptionLevelVerbose)
        s->Printf(" using breakpoint site %d", m_return_bp_id);
    }
  }

  if (m_stepped_past_frames.empty())
    return;

  s->Printf("\n");
  for (StackFrameSP frame_sp : m_stepped_past_frames) {
    s->Printf("Stepped out past: ");
    frame_sp->DumpUsingSettingsFormat(s);
  }
}

bool ThreadPlanStepOut::ValidatePlan(Stream *error) {
  if (m_step_out_to_inline_plan_sp)
    return m_step_out_to_inline_plan_sp->ValidatePlan(error);

  if (m_step_through_inline_plan_sp)
    return m_step_through_inline_plan_sp->ValidatePlan(error);

  if (m_could_not_resolve_hw_bp) {
    if (error)
      error->PutCString(
          "Could not create hardware breakpoint for thread plan.");
    return false;
  }

  if (m_return_bp_id == LLDB_INVALID_BREAK_ID) {
    if (error) {
      error->PutCString("Could not create return address breakpoint.");
      if (m_constructor_errors.GetSize() > 0) {
        error->PutCString(" ");
        error->PutCString(m_constructor_errors.GetString());
      }
    }
    return false;
  }

  return true;
}

bool ThreadPlanStepOut::DoPlanExplainsStop(Event *event_ptr) {
  // If the step out plan is done, then we just need to step through the
  // inlined frame.
  if (m_step_out_to_inline_plan_sp) {
    return m_step_out_to_inline_plan_sp->MischiefManaged();
  } else if (m_step_through_inline_plan_sp) {
    if (m_step_through_inline_plan_sp->MischiefManaged()) {
      CalculateReturnValue();
      SetPlanComplete();
      return true;
    } else
      return false;
  } else if (m_step_out_further_plan_sp) {
    return m_step_out_further_plan_sp->MischiefManaged();
  }

  // We don't explain signals or breakpoints (breakpoints that handle stepping
  // in or out will be handled by a child plan).

  StopInfoSP stop_info_sp = GetPrivateStopInfo();
  if (stop_info_sp) {
    StopReason reason = stop_info_sp->GetStopReason();
    if (reason == eStopReasonBreakpoint) {
      // If this is OUR breakpoint, we're fine, otherwise we don't know why
      // this happened...
      BreakpointSiteSP site_sp(
          m_process.GetBreakpointSiteList().FindByID(stop_info_sp->GetValue()));
      if (site_sp && site_sp->IsBreakpointAtThisSite(m_return_bp_id)) {
        bool done;

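        // StackID comparisons order frames by age: a younger (more deeply
        // nested) frame compares less than the older frame it will return to.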
        StackID frame_zero_id =
            GetThread().GetStackFrameAtIndex(0)->GetStackID();

        if (m_step_out_to_id == frame_zero_id)
          done = true;
        else if (m_step_out_to_id < frame_zero_id) {
          // Either we stepped past the breakpoint, or the stack ID calculation
          // was incorrect and we should probably stop.
          done = true;
        } else {
          done = (m_immediate_step_from_id < frame_zero_id);
        }

        if (done) {
          if (InvokeShouldStopHereCallback(eFrameCompareOlder, m_status)) {
            CalculateReturnValue();
            SetPlanComplete();
          }
        }

        // If there was only one owner, then we're done. But if we also hit
        // some user breakpoint on our way out, we should mark ourselves as
        // done, but also not claim to explain the stop, since it is more
        // important to report the user breakpoint than the step out
        // completion.

        if (site_sp->GetNumberOfConstituents() == 1)
          return true;
      }
      return false;
    } else if (IsUsuallyUnexplainedStopReason(reason))
      return false;
    else
      return true;
  }
  return true;
}

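// ShouldStop first lets any active child plan finish its work; once no child
// plan remains, it checks whether frame zero has reached (or passed) the frame
// we are stepping out to and consults ShouldStopHere before completing.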
bool ThreadPlanStepOut::ShouldStop(Event *event_ptr) {
  if (IsPlanComplete())
    return true;

  bool done = false;
  if (m_step_out_to_inline_plan_sp) {
    if (m_step_out_to_inline_plan_sp->MischiefManaged()) {
      // Now step through the inlined stack we are in:
      if (!QueueInlinedStepPlan(true)) {
        // If we can't queue a plan to do this, then just call ourselves done.
        m_step_out_to_inline_plan_sp.reset();
        SetPlanComplete(false);
        return true;
      } else
        done = true;
    } else
      return m_step_out_to_inline_plan_sp->ShouldStop(event_ptr);
  } else if (m_step_through_inline_plan_sp) {
    if (m_step_through_inline_plan_sp->MischiefManaged())
      done = true;
    else
      return m_step_through_inline_plan_sp->ShouldStop(event_ptr);
  } else if (m_step_out_further_plan_sp) {
    if (m_step_out_further_plan_sp->MischiefManaged())
      m_step_out_further_plan_sp.reset();
    else
      return m_step_out_further_plan_sp->ShouldStop(event_ptr);
  }

  if (!done) {
    StackID frame_zero_id = GetThread().GetStackFrameAtIndex(0)->GetStackID();
    done = !(frame_zero_id < m_step_out_to_id);
  }

  // The normal step out computations think we are done, so all we need to do
  // is consult ShouldStopHere, and we are done.

  if (done) {
    if (InvokeShouldStopHereCallback(eFrameCompareOlder, m_status)) {
      CalculateReturnValue();
      SetPlanComplete();
    } else {
      m_step_out_further_plan_sp =
          QueueStepOutFromHerePlan(m_flags, eFrameCompareOlder, m_status);
      done = false;
    }
  }

  return done;
}

bool ThreadPlanStepOut::StopOthers() { return m_stop_others; }

StateType ThreadPlanStepOut::GetPlanRunState() { return eStateRunning; }

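// The return-address breakpoint is only enabled while this plan is the one
// driving the resume; WillStop() disables it again whenever the thread stops.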
bool ThreadPlanStepOut::DoWillResume(StateType resume_state,
                                     bool current_plan) {
  if (m_step_out_to_inline_plan_sp || m_step_through_inline_plan_sp)
    return true;

  if (m_return_bp_id == LLDB_INVALID_BREAK_ID)
    return false;

  if (current_plan) {
    Breakpoint *return_bp = GetTarget().GetBreakpointByID(m_return_bp_id).get();
    if (return_bp != nullptr)
      return_bp->SetEnabled(true);
  }
  return true;
}

bool ThreadPlanStepOut::WillStop() {
  if (m_return_bp_id != LLDB_INVALID_BREAK_ID) {
    Breakpoint *return_bp = GetTarget().GetBreakpointByID(m_return_bp_id).get();
    if (return_bp != nullptr)
      return_bp->SetEnabled(false);
  }

  return true;
}

bool ThreadPlanStepOut::MischiefManaged() {
  if (IsPlanComplete()) {
    // Did I reach my breakpoint? If so I'm done.
    //
    // I also check the stack depth, since if we've blown past the breakpoint
    // for some reason and we're now stopping for some other reason altogether,
    // then we're done with this step out operation.

    Log *log = GetLog(LLDBLog::Step);
    if (log)
      LLDB_LOGF(log, "Completed step out plan.");
    if (m_return_bp_id != LLDB_INVALID_BREAK_ID) {
      GetTarget().RemoveBreakpointByID(m_return_bp_id);
      m_return_bp_id = LLDB_INVALID_BREAK_ID;
    }

    ThreadPlan::MischiefManaged();
    return true;
  } else {
    return false;
  }
}

bool ThreadPlanStepOut::QueueInlinedStepPlan(bool queue_now) {
  // Now figure out the range of this inlined block, and set up a "step through
  // range" plan for that. If we've been provided with a context, then use the
  // block in that context.
  Thread &thread = GetThread();
  StackFrameSP immediate_return_from_sp(thread.GetStackFrameAtIndex(0));
  if (!immediate_return_from_sp)
    return false;

  Log *log = GetLog(LLDBLog::Step);
  if (log) {
    StreamString s;
    immediate_return_from_sp->Dump(&s, true, false);
    LLDB_LOGF(log, "Queuing inlined frame to step past: %s.", s.GetData());
  }

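  // Find the inlined block containing the current frame; its address ranges
  // define the region the step-over-range plan below has to cover.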
  Block *from_block = immediate_return_from_sp->GetFrameBlock();
  if (from_block) {
    Block *inlined_block = from_block->GetContainingInlinedBlock();
    if (inlined_block) {
      size_t num_ranges = inlined_block->GetNumRanges();
      AddressRange inline_range;
      if (inlined_block->GetRangeAtIndex(0, inline_range)) {
        SymbolContext inlined_sc;
        inlined_block->CalculateSymbolContext(&inlined_sc);
        inlined_sc.target_sp = GetTarget().shared_from_this();
        RunMode run_mode =
            m_stop_others ? lldb::eOnlyThisThread : lldb::eAllThreads;
        const LazyBool avoid_no_debug = eLazyBoolNo;

        m_step_through_inline_plan_sp =
            std::make_shared<ThreadPlanStepOverRange>(
                thread, inline_range, inlined_sc, run_mode, avoid_no_debug);
        ThreadPlanStepOverRange *step_through_inline_plan_ptr =
            static_cast<ThreadPlanStepOverRange *>(
                m_step_through_inline_plan_sp.get());
        m_step_through_inline_plan_sp->SetPrivate(true);

        step_through_inline_plan_ptr->SetOkayToDiscard(true);
        StreamString errors;
        if (!step_through_inline_plan_ptr->ValidatePlan(&errors)) {
          // FIXME: Log this failure.
          // The plan is owned by m_step_through_inline_plan_sp, so drop the
          // shared pointer rather than deleting the raw pointer it manages.
          m_step_through_inline_plan_sp.reset();
          return false;
        }

        for (size_t i = 1; i < num_ranges; i++) {
          if (inlined_block->GetRangeAtIndex(i, inline_range))
            step_through_inline_plan_ptr->AddRange(inline_range);
        }

        if (queue_now)
          thread.QueueThreadPlan(m_step_through_inline_plan_sp, false);
        return true;
      }
    }
  }

  return false;
}

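// Once the step-out has finished, use the ABI to reconstruct the value the
// stepped-out function returned, based on its declared return type.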
void ThreadPlanStepOut::CalculateReturnValue() {
  if (m_return_valobj_sp)
    return;

  if (!m_calculate_return_value)
    return;

  if (m_immediate_step_from_function != nullptr) {
    CompilerType return_compiler_type =
        m_immediate_step_from_function->GetCompilerType()
            .GetFunctionReturnType();
    if (return_compiler_type) {
      lldb::ABISP abi_sp = m_process.GetABI();
      if (abi_sp)
        m_return_valobj_sp =
            abi_sp->GetReturnValueObject(GetThread(), return_compiler_type);
    }
  }
}

bool ThreadPlanStepOut::IsPlanStale() {
  // If we are still lower on the stack than the frame we are returning to,
  // then there's something for us to do. Otherwise, we're stale.

  StackID frame_zero_id = GetThread().GetStackFrameAtIndex(0)->GetStackID();
  return !(frame_zero_id < m_step_out_to_id);
}