//===-- ThreadPlanStepOut.cpp ---------------------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//

#include "lldb/Target/ThreadPlanStepOut.h"
#include "lldb/Breakpoint/Breakpoint.h"
#include "lldb/Core/Value.h"
#include "lldb/Symbol/Block.h"
#include "lldb/Symbol/Function.h"
#include "lldb/Symbol/Symbol.h"
#include "lldb/Symbol/Type.h"
#include "lldb/Target/ABI.h"
#include "lldb/Target/Process.h"
#include "lldb/Target/RegisterContext.h"
#include "lldb/Target/StopInfo.h"
#include "lldb/Target/Target.h"
#include "lldb/Target/ThreadPlanStepOverRange.h"
#include "lldb/Target/ThreadPlanStepThrough.h"
#include "lldb/Utility/LLDBLog.h"
#include "lldb/Utility/Log.h"
#include "lldb/ValueObject/ValueObjectConstResult.h"

#include <memory>

using namespace lldb;
using namespace lldb_private;

uint32_t ThreadPlanStepOut::s_default_flag_values = 0;

/// Computes the target frame this plan should step out to.
static StackFrameSP
ComputeTargetFrame(Thread &thread, uint32_t start_frame_idx,
                   std::vector<StackFrameSP> &skipped_frames) {
  uint32_t frame_idx = start_frame_idx + 1;
  StackFrameSP return_frame_sp = thread.GetStackFrameAtIndex(frame_idx);
  if (!return_frame_sp)
    return nullptr;

  while (return_frame_sp->IsArtificial() || return_frame_sp->IsHidden()) {
    skipped_frames.push_back(return_frame_sp);

    frame_idx++;
    return_frame_sp = thread.GetStackFrameAtIndex(frame_idx);

    // We never expect to see an artificial frame without a regular ancestor.
    // Defensively refuse to step out.
    if (!return_frame_sp) {
      LLDB_LOG(GetLog(LLDBLog::Step),
               "Can't step out of frame with artificial ancestors");
      return nullptr;
    }
  }
  return return_frame_sp;
}

// ThreadPlanStepOut: Step out of the current frame
ThreadPlanStepOut::ThreadPlanStepOut(
    Thread &thread, SymbolContext *context, bool first_insn, bool stop_others,
    Vote report_stop_vote, Vote report_run_vote, uint32_t frame_idx,
    LazyBool step_out_avoids_code_without_debug_info,
    bool continue_to_next_branch, bool gather_return_value)
    : ThreadPlan(ThreadPlan::eKindStepOut, "Step out", thread, report_stop_vote,
                 report_run_vote),
      ThreadPlanShouldStopHere(this), m_step_from_insn(LLDB_INVALID_ADDRESS),
      m_return_bp_id(LLDB_INVALID_BREAK_ID),
      m_return_addr(LLDB_INVALID_ADDRESS), m_stop_others(stop_others),
      m_immediate_step_from_function(nullptr),
      m_calculate_return_value(gather_return_value) {
  SetFlagsToDefault();
  SetupAvoidNoDebug(step_out_avoids_code_without_debug_info);

  m_step_from_insn = thread.GetRegisterContext()->GetPC(0);

  StackFrameSP return_frame_sp =
      ComputeTargetFrame(thread, frame_idx, m_stepped_past_frames);
  StackFrameSP immediate_return_from_sp(thread.GetStackFrameAtIndex(frame_idx));

  SetupReturnAddress(return_frame_sp, immediate_return_from_sp, frame_idx,
                     continue_to_next_branch);
}

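// Alternate constructor: step out of the frame at frame_idx. Unlike the
// constructor above, this one does not consult the avoid-no-debug setting and
// does not skip artificial or hidden parent frames.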
ThreadPlanStepOut::ThreadPlanStepOut(Thread &thread, bool stop_others,
                                     Vote report_stop_vote,
                                     Vote report_run_vote, uint32_t frame_idx,
                                     bool continue_to_next_branch,
                                     bool gather_return_value)
    : ThreadPlan(ThreadPlan::eKindStepOut, "Step out", thread, report_stop_vote,
                 report_run_vote),
      ThreadPlanShouldStopHere(this), m_return_bp_id(LLDB_INVALID_BREAK_ID),
      m_return_addr(LLDB_INVALID_ADDRESS), m_stop_others(stop_others),
      m_immediate_step_from_function(nullptr),
      m_calculate_return_value(gather_return_value) {
  SetFlagsToDefault();
  m_step_from_insn = thread.GetRegisterContext()->GetPC(0);

  StackFrameSP return_frame_sp = thread.GetStackFrameAtIndex(frame_idx + 1);
  StackFrameSP immediate_return_from_sp =
      thread.GetStackFrameAtIndex(frame_idx);

  SetupReturnAddress(return_frame_sp, immediate_return_from_sp, frame_idx,
                     continue_to_next_branch);
}

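// Compute the address we will return to and set a breakpoint there. If the
// frame we are stepping out of is inlined, prepare a child plan that steps
// out to (or through) the inlined frame instead of setting a breakpoint.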
void ThreadPlanStepOut::SetupReturnAddress(
    StackFrameSP return_frame_sp, StackFrameSP immediate_return_from_sp,
    uint32_t frame_idx, bool continue_to_next_branch) {
  if (!return_frame_sp || !immediate_return_from_sp)
    return; // we can't do anything here. ValidatePlan() will return false.

  m_step_out_to_id = return_frame_sp->GetStackID();
  m_immediate_step_from_id = immediate_return_from_sp->GetStackID();

  // If the frame directly below the one we are returning to is inlined, we
  // have to be a little more careful. It is non-trivial to determine the real
  // "return code address" for an inlined frame, so we have to work our way to
  // that frame and then step out.
  if (immediate_return_from_sp->IsInlined()) {
    if (frame_idx > 0) {
      // First queue a plan that gets us to this inlined frame, and when we get
      // there we'll queue a second plan that walks us out of this frame.
      m_step_out_to_inline_plan_sp = std::make_shared<ThreadPlanStepOut>(
          GetThread(), nullptr, false, m_stop_others, eVoteNoOpinion,
          eVoteNoOpinion, frame_idx - 1, eLazyBoolNo, continue_to_next_branch);
      static_cast<ThreadPlanStepOut *>(m_step_out_to_inline_plan_sp.get())
          ->SetShouldStopHereCallbacks(nullptr, nullptr);
      m_step_out_to_inline_plan_sp->SetPrivate(true);
    } else {
      // If we're already at the inlined frame we're stepping through, then
      // just do that now.
      QueueInlinedStepPlan(false);
    }
  } else {
    // Find the return address and set a breakpoint there:
    // FIXME - can we do this more securely if we know first_insn?

    Address return_address(return_frame_sp->GetFrameCodeAddress());
    if (continue_to_next_branch) {
      SymbolContext return_address_sc;
      AddressRange range;
      Address return_address_decr_pc = return_address;
      if (return_address_decr_pc.GetOffset() > 0)
        return_address_decr_pc.Slide(-1);

      return_address_decr_pc.CalculateSymbolContext(
          &return_address_sc, lldb::eSymbolContextLineEntry);
      if (return_address_sc.line_entry.IsValid()) {
        const bool include_inlined_functions = false;
        range = return_address_sc.line_entry.GetSameLineContiguousAddressRange(
            include_inlined_functions);
        if (range.GetByteSize() > 0) {
          return_address = m_process.AdvanceAddressToNextBranchInstruction(
              return_address, range);
        }
      }
    }
    m_return_addr = return_address.GetLoadAddress(&m_process.GetTarget());

    if (m_return_addr == LLDB_INVALID_ADDRESS)
      return;

    // Perform some additional validation on the return address.
    uint32_t permissions = 0;
    Log *log = GetLog(LLDBLog::Step);
    if (!m_process.GetLoadAddressPermissions(m_return_addr, permissions)) {
      LLDB_LOGF(log, "ThreadPlanStepOut(%p): Return address (0x%" PRIx64
                ") permissions not found.", static_cast<void *>(this),
                m_return_addr);
    } else if (!(permissions & ePermissionsExecutable)) {
      m_constructor_errors.Printf("Return address (0x%" PRIx64
                                  ") did not point to executable memory.",
                                  m_return_addr);
      LLDB_LOGF(log, "ThreadPlanStepOut(%p): %s", static_cast<void *>(this),
                m_constructor_errors.GetData());
      return;
    }

    Breakpoint *return_bp =
        GetTarget().CreateBreakpoint(m_return_addr, true, false).get();

    if (return_bp != nullptr) {
      if (return_bp->IsHardware() && !return_bp->HasResolvedLocations())
        m_could_not_resolve_hw_bp = true;
      return_bp->SetThreadID(m_tid);
      m_return_bp_id = return_bp->GetID();
      return_bp->SetBreakpointKind("step-out");
    }

    if (immediate_return_from_sp) {
      const SymbolContext &sc =
          immediate_return_from_sp->GetSymbolContext(eSymbolContextFunction);
      if (sc.function) {
        m_immediate_step_from_function = sc.function;
      }
    }
  }
}

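// Translate the avoid-no-debug setting (explicit yes/no or the thread's
// default) into the eStepOutAvoidNoDebug flag on this plan.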
void ThreadPlanStepOut::SetupAvoidNoDebug(
    LazyBool step_out_avoids_code_without_debug_info) {
  bool avoid_nodebug = true;
  switch (step_out_avoids_code_without_debug_info) {
  case eLazyBoolYes:
    avoid_nodebug = true;
    break;
  case eLazyBoolNo:
    avoid_nodebug = false;
    break;
  case eLazyBoolCalculate:
    avoid_nodebug = GetThread().GetStepOutAvoidsNoDebug();
    break;
  }
  if (avoid_nodebug)
    GetFlags().Set(ThreadPlanShouldStopHere::eStepOutAvoidNoDebug);
  else
    GetFlags().Clear(ThreadPlanShouldStopHere::eStepOutAvoidNoDebug);
}

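// When this plan is pushed, queue whichever child plan was prepared in the
// constructor: stepping out to, or through, an inlined frame.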
void ThreadPlanStepOut::DidPush() {
  Thread &thread = GetThread();
  if (m_step_out_to_inline_plan_sp)
    thread.QueueThreadPlan(m_step_out_to_inline_plan_sp, false);
  else if (m_step_through_inline_plan_sp)
    thread.QueueThreadPlan(m_step_through_inline_plan_sp, false);
}

ThreadPlanStepOut::~ThreadPlanStepOut() {
  if (m_return_bp_id != LLDB_INVALID_BREAK_ID)
    GetTarget().RemoveBreakpointByID(m_return_bp_id);
}

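// Describe the plan, either briefly ("step out") or with the source and
// return addresses plus any frames we stepped past.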
void ThreadPlanStepOut::GetDescription(Stream *s,
                                       lldb::DescriptionLevel level) {
  if (level == lldb::eDescriptionLevelBrief)
    s->Printf("step out");
  else {
    if (m_step_out_to_inline_plan_sp)
      s->Printf("Stepping out to inlined frame so we can walk through it.");
    else if (m_step_through_inline_plan_sp)
      s->Printf("Stepping out by stepping through inlined function.");
    else {
      s->Printf("Stepping out from ");
      Address tmp_address;
      if (tmp_address.SetLoadAddress(m_step_from_insn, &GetTarget())) {
        tmp_address.Dump(s, &m_process, Address::DumpStyleResolvedDescription,
                         Address::DumpStyleLoadAddress);
      } else {
        s->Printf("address 0x%" PRIx64 "", (uint64_t)m_step_from_insn);
      }

      // FIXME: find some useful way to present the m_return_id, since there
      // may be multiple copies of the same function on the stack.

      s->Printf(" returning to frame at ");
      if (tmp_address.SetLoadAddress(m_return_addr, &GetTarget())) {
        tmp_address.Dump(s, &m_process, Address::DumpStyleResolvedDescription,
                         Address::DumpStyleLoadAddress);
      } else {
        s->Printf("address 0x%" PRIx64 "", (uint64_t)m_return_addr);
      }

      if (level == eDescriptionLevelVerbose)
        s->Printf(" using breakpoint site %d", m_return_bp_id);
    }
  }

  if (m_stepped_past_frames.empty())
    return;

  s->Printf("\n");
  for (StackFrameSP frame_sp : m_stepped_past_frames) {
    s->Printf("Stepped out past: ");
    frame_sp->DumpUsingSettingsFormat(s);
  }
}

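// The plan is valid if its child plan is valid, or if we managed to set the
// return-address breakpoint; otherwise report why construction failed.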
bool ThreadPlanStepOut::ValidatePlan(Stream *error) {
  if (m_step_out_to_inline_plan_sp)
    return m_step_out_to_inline_plan_sp->ValidatePlan(error);

  if (m_step_through_inline_plan_sp)
    return m_step_through_inline_plan_sp->ValidatePlan(error);

  if (m_could_not_resolve_hw_bp) {
    if (error)
      error->PutCString(
          "Could not create hardware breakpoint for thread plan.");
    return false;
  }

  if (m_return_bp_id == LLDB_INVALID_BREAK_ID) {
    if (error) {
      error->PutCString("Could not create return address breakpoint.");
      if (m_constructor_errors.GetSize() > 0) {
        error->PutCString(" ");
        error->PutCString(m_constructor_errors.GetString());
      }
    }
    return false;
  }

  return true;
}

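// Decide whether this plan explains the current stop: it does when a child
// plan has finished, or when the stop is a hit on our return breakpoint at a
// frame old enough to mean the step out has completed.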
bool ThreadPlanStepOut::DoPlanExplainsStop(Event *event_ptr) {
  // If the step out plan is done, then we just need to step through the
  // inlined frame.
  if (m_step_out_to_inline_plan_sp) {
    return m_step_out_to_inline_plan_sp->MischiefManaged();
  } else if (m_step_through_inline_plan_sp) {
    if (m_step_through_inline_plan_sp->MischiefManaged()) {
      CalculateReturnValue();
      SetPlanComplete();
      return true;
    } else
      return false;
  } else if (m_step_out_further_plan_sp) {
    return m_step_out_further_plan_sp->MischiefManaged();
  }

  // We don't explain signals or breakpoints (breakpoints that handle stepping
  // in or out will be handled by a child plan).

  StopInfoSP stop_info_sp = GetPrivateStopInfo();
  if (stop_info_sp) {
    StopReason reason = stop_info_sp->GetStopReason();
    if (reason == eStopReasonBreakpoint) {
      // If this is OUR breakpoint, we're fine, otherwise we don't know why
      // this happened...
      BreakpointSiteSP site_sp(
          m_process.GetBreakpointSiteList().FindByID(stop_info_sp->GetValue()));
      if (site_sp && site_sp->IsBreakpointAtThisSite(m_return_bp_id)) {
        bool done;

        StackID frame_zero_id =
            GetThread().GetStackFrameAtIndex(0)->GetStackID();

        if (m_step_out_to_id == frame_zero_id)
          done = true;
        else if (m_step_out_to_id < frame_zero_id) {
          // Either we stepped past the breakpoint, or the stack ID calculation
          // was incorrect and we should probably stop.
          done = true;
        } else {
          done = (m_immediate_step_from_id < frame_zero_id);
        }

        if (done) {
          if (InvokeShouldStopHereCallback(eFrameCompareOlder, m_status)) {
            CalculateReturnValue();
            SetPlanComplete();
          }
        }

        // If there was only one owner, then we're done. But if we also hit
        // some user breakpoint on our way out, we should mark ourselves as
        // done, but also not claim to explain the stop, since it is more
        // important to report the user breakpoint than the step out
        // completion.

        if (site_sp->GetNumberOfConstituents() == 1)
          return true;
      }
      return false;
    } else if (IsUsuallyUnexplainedStopReason(reason))
      return false;
    else
      return true;
  }
  return true;
}

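// Decide whether to actually stop. Defer to any active child plan; once the
// step out itself is done, consult ShouldStopHere, and if that rejects the
// target frame, queue a plan that keeps stepping out from here.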
bool ThreadPlanStepOut::ShouldStop(Event *event_ptr) {
  if (IsPlanComplete())
    return true;

  bool done = false;
  if (m_step_out_to_inline_plan_sp) {
    if (m_step_out_to_inline_plan_sp->MischiefManaged()) {
      // Now step through the inlined stack we are in:
      if (QueueInlinedStepPlan(true)) {
        // If we can't queue a plan to do this, then just call ourselves done.
        m_step_out_to_inline_plan_sp.reset();
        SetPlanComplete(false);
        return true;
      } else
        done = true;
    } else
      return m_step_out_to_inline_plan_sp->ShouldStop(event_ptr);
  } else if (m_step_through_inline_plan_sp) {
    if (m_step_through_inline_plan_sp->MischiefManaged())
      done = true;
    else
      return m_step_through_inline_plan_sp->ShouldStop(event_ptr);
  } else if (m_step_out_further_plan_sp) {
    if (m_step_out_further_plan_sp->MischiefManaged())
      m_step_out_further_plan_sp.reset();
    else
      return m_step_out_further_plan_sp->ShouldStop(event_ptr);
  }

  if (!done) {
    StopInfoSP stop_info_sp = GetPrivateStopInfo();
    if (stop_info_sp &&
        stop_info_sp->GetStopReason() == eStopReasonBreakpoint) {
      StackID frame_zero_id =
          GetThread().GetStackFrameAtIndex(0)->GetStackID();
      done = !(frame_zero_id < m_step_out_to_id);
    }
  }

  // The normal step out computations think we are done, so all we need to do
  // is consult the ShouldStopHere callback, and we are done.

  if (done) {
    if (InvokeShouldStopHereCallback(eFrameCompareOlder, m_status)) {
      CalculateReturnValue();
      SetPlanComplete();
    } else {
      m_step_out_further_plan_sp =
          QueueStepOutFromHerePlan(m_flags, eFrameCompareOlder, m_status);
      done = false;
    }
  }

  return done;
}

bool ThreadPlanStepOut::StopOthers() { return m_stop_others; }

StateType ThreadPlanStepOut::GetPlanRunState() { return eStateRunning; }

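// Before resuming as the current plan, re-enable the return breakpoint; if we
// never managed to set one (and have no child plan), report failure.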
bool ThreadPlanStepOut::DoWillResume(StateType resume_state,
                                     bool current_plan) {
  if (m_step_out_to_inline_plan_sp || m_step_through_inline_plan_sp)
    return true;

  if (m_return_bp_id == LLDB_INVALID_BREAK_ID)
    return false;

  if (current_plan) {
    Breakpoint *return_bp = GetTarget().GetBreakpointByID(m_return_bp_id).get();
    if (return_bp != nullptr)
      return_bp->SetEnabled(true);
  }
  return true;
}

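// Whenever the process stops, disable the return breakpoint so it only fires
// while this plan is actually running.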
bool ThreadPlanStepOut::WillStop() {
  if (m_return_bp_id != LLDB_INVALID_BREAK_ID) {
    Breakpoint *return_bp = GetTarget().GetBreakpointByID(m_return_bp_id).get();
    if (return_bp != nullptr)
      return_bp->SetEnabled(false);
  }

  return true;
}

bool ThreadPlanStepOut::MischiefManaged() {
  if (IsPlanComplete()) {
    // Did I reach my breakpoint? If so I'm done.
    //
    // I also check the stack depth, since if we've blown past the breakpoint
    // for some reason and we're now stopping for some other reason altogether,
    // then we're done with this step out operation.

    Log *log = GetLog(LLDBLog::Step);
    if (log)
      LLDB_LOGF(log, "Completed step out plan.");
    if (m_return_bp_id != LLDB_INVALID_BREAK_ID) {
      GetTarget().RemoveBreakpointByID(m_return_bp_id);
      m_return_bp_id = LLDB_INVALID_BREAK_ID;
    }

    ThreadPlan::MischiefManaged();
    return true;
  } else {
    return false;
  }
}

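// Build (and optionally queue) a ThreadPlanStepOverRange covering all ranges
// of the inlined block we are stepping out of, so we walk through the inlined
// frame rather than trying to compute its return address.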
bool ThreadPlanStepOut::QueueInlinedStepPlan(bool queue_now) {
  // Now figure out the range of this inlined block, and set up a "step through
  // range" plan for that. If we've been provided with a context, then use the
  // block in that context.
  Thread &thread = GetThread();
  StackFrameSP immediate_return_from_sp(thread.GetStackFrameAtIndex(0));
  if (!immediate_return_from_sp)
    return false;

  Log *log = GetLog(LLDBLog::Step);
  if (log) {
    StreamString s;
    immediate_return_from_sp->Dump(&s, true, false);
    LLDB_LOGF(log, "Queuing inlined frame to step past: %s.", s.GetData());
  }

  Block *from_block = immediate_return_from_sp->GetFrameBlock();
  if (from_block) {
    Block *inlined_block = from_block->GetContainingInlinedBlock();
    if (inlined_block) {
      size_t num_ranges = inlined_block->GetNumRanges();
      AddressRange inline_range;
      if (inlined_block->GetRangeAtIndex(0, inline_range)) {
        SymbolContext inlined_sc;
        inlined_block->CalculateSymbolContext(&inlined_sc);
        inlined_sc.target_sp = GetTarget().shared_from_this();
        RunMode run_mode =
            m_stop_others ? lldb::eOnlyThisThread : lldb::eAllThreads;
        const LazyBool avoid_no_debug = eLazyBoolNo;

        m_step_through_inline_plan_sp =
            std::make_shared<ThreadPlanStepOverRange>(
                thread, inline_range, inlined_sc, run_mode, avoid_no_debug);
        ThreadPlanStepOverRange *step_through_inline_plan_ptr =
            static_cast<ThreadPlanStepOverRange *>(
                m_step_through_inline_plan_sp.get());
        m_step_through_inline_plan_sp->SetPrivate(true);

        step_through_inline_plan_ptr->SetOkayToDiscard(true);
        StreamString errors;
        if (!step_through_inline_plan_ptr->ValidatePlan(&errors)) {
          // FIXME: Log this failure.
          // The shared pointer owns the plan, so drop it rather than deleting
          // the raw pointer out from under it.
          m_step_through_inline_plan_sp.reset();
          return false;
        }

        for (size_t i = 1; i < num_ranges; i++) {
          if (inlined_block->GetRangeAtIndex(i, inline_range))
            step_through_inline_plan_ptr->AddRange(inline_range);
        }

        if (queue_now)
          thread.QueueThreadPlan(m_step_through_inline_plan_sp, false);
        return true;
      }
    }
  }

  return false;
}

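// If return-value capture was requested, use the ABI to read the return value
// of the function we just stepped out of once we have stopped in the caller.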
void ThreadPlanStepOut::CalculateReturnValue() {
  if (m_return_valobj_sp)
    return;

  if (!m_calculate_return_value)
    return;

  if (m_immediate_step_from_function != nullptr) {
    CompilerType return_compiler_type =
        m_immediate_step_from_function->GetCompilerType()
            .GetFunctionReturnType();
    if (return_compiler_type) {
      lldb::ABISP abi_sp = m_process.GetABI();
      if (abi_sp)
        m_return_valobj_sp =
            abi_sp->GetReturnValueObject(GetThread(), return_compiler_type);
    }
  }
}

bool ThreadPlanStepOut::IsPlanStale() {
  // If we are still lower on the stack than the frame we are returning to,
  // then there's something for us to do. Otherwise, we're stale.

  StackID frame_zero_id = GetThread().GetStackFrameAtIndex(0)->GetStackID();
  return !(frame_zero_id < m_step_out_to_id);
}