// Copyright (c) 2011, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.

#include "vm/runtime_entry.h"

#include <memory>

#include "platform/memory_sanitizer.h"
#include "platform/thread_sanitizer.h"
#include "vm/code_descriptors.h"
#include "vm/code_patcher.h"
#include "vm/compiler/api/deopt_id.h"
#include "vm/compiler/api/type_check_mode.h"
#include "vm/compiler/jit/compiler.h"
#include "vm/dart_api_impl.h"
#include "vm/dart_api_state.h"
#include "vm/dart_entry.h"
#include "vm/debugger.h"
#include "vm/double_conversion.h"
#include "vm/exceptions.h"
#include "vm/ffi_callback_metadata.h"
#include "vm/flags.h"
#include "vm/heap/verifier.h"
#include "vm/instructions.h"
#include "vm/kernel_isolate.h"
#include "vm/message.h"
#include "vm/message_handler.h"
#include "vm/object_store.h"
#include "vm/parser.h"
#include "vm/resolver.h"
#include "vm/service_isolate.h"
#include "vm/stack_frame.h"
#include "vm/symbols.h"
#include "vm/thread.h"
#include "vm/type_testing_stubs.h"
#include "vm/zone_text_buffer.h"

#if !defined(DART_PRECOMPILED_RUNTIME)
#include "vm/deopt_instructions.h"
#endif  // !defined(DART_PRECOMPILED_RUNTIME)

namespace dart {

static constexpr intptr_t kDefaultMaxSubtypeCacheEntries =
    SubtypeTestCache::MaxEntriesForCacheAllocatedFor(1000);
DEFINE_FLAG(
    int,
    max_subtype_cache_entries,
    kDefaultMaxSubtypeCacheEntries,
    "Maximum number of subtype cache entries (number of checks cached).");
DEFINE_FLAG(
    int,
    regexp_optimization_counter_threshold,
    1000,
    "RegExp's usage-counter value before it is optimized, -1 means never");
DEFINE_FLAG(int,
            reoptimization_counter_threshold,
            4000,
            "Counter threshold before a function gets reoptimized.");
DEFINE_FLAG(bool,
            stress_write_barrier_elimination,
            false,
            "Stress test write barrier elimination.");
DEFINE_FLAG(bool, trace_deoptimization, false, "Trace deoptimization");
DEFINE_FLAG(bool,
            trace_deoptimization_verbose,
            false,
            "Trace deoptimization verbose");

DECLARE_FLAG(int, max_deoptimization_counter_threshold);
DECLARE_FLAG(bool, trace_compiler);
DECLARE_FLAG(bool, trace_optimizing_compiler);
DECLARE_FLAG(int, max_polymorphic_checks);

DEFINE_FLAG(bool, trace_osr, false, "Trace attempts at on-stack replacement.");

DEFINE_FLAG(int, gc_every, 0, "Run major GC on every N stack overflow checks");
DEFINE_FLAG(int,
            stacktrace_every,
            0,
            "Compute debugger stacktrace on every N stack overflow checks");
DEFINE_FLAG(charp,
            stacktrace_filter,
            nullptr,
            "Compute stacktrace in named function on stack overflow checks");
DEFINE_FLAG(charp,
            deoptimize_filter,
            nullptr,
            "Deoptimize in named function on stack overflow checks");
DEFINE_FLAG(charp,
            deoptimize_on_runtime_call_name_filter,
            nullptr,
            "Runtime call name filter for --deoptimize-on-runtime-call-every.");

DEFINE_FLAG(bool,
            unopt_monomorphic_calls,
            true,
            "Enable specializing monomorphic calls from unoptimized code.");
DEFINE_FLAG(bool,
            unopt_megamorphic_calls,
            true,
            "Enable specializing megamorphic calls from unoptimized code.");
DEFINE_FLAG(bool,
            verbose_stack_overflow,
            false,
            "Print additional details about stack overflow.");

DECLARE_FLAG(int, reload_every);
DECLARE_FLAG(bool, reload_every_optimized);
DECLARE_FLAG(bool, reload_every_back_off);

DEFINE_RUNTIME_ENTRY(RangeError, 2) {
  const Instance& length = Instance::CheckedHandle(zone, arguments.ArgAt(0));
  const Instance& index = Instance::CheckedHandle(zone, arguments.ArgAt(1));
  if (!length.IsInteger()) {
    // Throw: new ArgumentError.value(length, "length", "is not an integer");
    const Array& args = Array::Handle(zone, Array::New(3));
    args.SetAt(0, length);
    args.SetAt(1, Symbols::Length());
    args.SetAt(2, String::Handle(zone, String::New("is not an integer")));
    Exceptions::ThrowByType(Exceptions::kArgumentValue, args);
  }
  if (!index.IsInteger()) {
    // Throw: new ArgumentError.value(index, "index", "is not an integer");
    const Array& args = Array::Handle(zone, Array::New(3));
    args.SetAt(0, index);
    args.SetAt(1, Symbols::Index());
    args.SetAt(2, String::Handle(zone, String::New("is not an integer")));
    Exceptions::ThrowByType(Exceptions::kArgumentValue, args);
  }
  // Throw: new RangeError.range(index, 0, length - 1, "length");
  const Array& args = Array::Handle(zone, Array::New(4));
  args.SetAt(0, index);
  args.SetAt(1, Integer::Handle(zone, Integer::New(0)));
  args.SetAt(
      2, Integer::Handle(
             zone, Integer::Cast(length).ArithmeticOp(
                       Token::kSUB, Integer::Handle(zone, Integer::New(1)))));
  args.SetAt(3, Symbols::Length());
  Exceptions::ThrowByType(Exceptions::kRange, args);
}

DEFINE_RUNTIME_ENTRY(WriteError, 0) {
  Exceptions::ThrowUnsupportedError("Cannot modify an unmodifiable list");
}

static void NullErrorHelper(Zone* zone,
                            const String& selector,
                            bool is_param_name = false) {
  if (is_param_name) {
    const String& error = String::Handle(
        selector.IsNull()
            ? String::New("argument value is null")
            : String::NewFormatted("argument value for '%s' is null",
                                   selector.ToCString()));
    Exceptions::ThrowArgumentError(error);
    return;
  }

  // If the selector is null, this must be a null check that wasn't due to a
  // method invocation, so it was due to the null check operator.
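  // For example (illustrative): evaluating `x!` while `x` holds null takes
  // this branch and produces "Null check operator used on a null value".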
  if (selector.IsNull()) {
    const Array& args = Array::Handle(zone, Array::New(4));
    args.SetAt(
        3, String::Handle(
               zone, String::New("Null check operator used on a null value")));
    Exceptions::ThrowByType(Exceptions::kType, args);
    return;
  }

  InvocationMirror::Kind kind = InvocationMirror::kMethod;
  if (Field::IsGetterName(selector)) {
    kind = InvocationMirror::kGetter;
  } else if (Field::IsSetterName(selector)) {
    kind = InvocationMirror::kSetter;
  }

  const Smi& invocation_type = Smi::Handle(
      zone,
      Smi::New(InvocationMirror::EncodeType(InvocationMirror::kDynamic, kind)));

  const Array& args = Array::Handle(zone, Array::New(7));
  args.SetAt(0, /* instance */ Object::null_object());
  args.SetAt(1, selector);
  args.SetAt(2, invocation_type);
  args.SetAt(3, /* func_type_args_length */ Object::smi_zero());
  args.SetAt(4, /* func_type_args */ Object::null_object());
  args.SetAt(5, /* func_args */ Object::null_object());
  args.SetAt(6, /* func_arg_names */ Object::null_object());
  Exceptions::ThrowByType(Exceptions::kNoSuchMethod, args);
}

static void DoThrowNullError(Isolate* isolate,
                             Thread* thread,
                             Zone* zone,
                             bool is_param) {
  DartFrameIterator iterator(thread,
                             StackFrameIterator::kNoCrossThreadIteration);
  const StackFrame* caller_frame = iterator.NextFrame();
  ASSERT(caller_frame->IsDartFrame());
  const Code& code = Code::Handle(zone, caller_frame->LookupDartCode());
  const uword pc_offset = caller_frame->pc() - code.PayloadStart();

  if (FLAG_shared_slow_path_triggers_gc) {
    isolate->group()->heap()->CollectAllGarbage(GCReason::kDebugging);
  }

  const CodeSourceMap& map =
      CodeSourceMap::Handle(zone, code.code_source_map());
  String& member_name = String::Handle(zone);
  if (!map.IsNull()) {
    CodeSourceMapReader reader(map, Array::null_array(),
                               Function::null_function());
    const intptr_t name_index = reader.GetNullCheckNameIndexAt(pc_offset);
    RELEASE_ASSERT(name_index >= 0);

    const ObjectPool& pool = ObjectPool::Handle(zone, code.GetObjectPool());
    member_name ^= pool.ObjectAt(name_index);
  } else {
    member_name = Symbols::OptimizedOut().ptr();
  }

  NullErrorHelper(zone, member_name, is_param);
}

DEFINE_RUNTIME_ENTRY(NullError, 0) {
  DoThrowNullError(isolate, thread, zone, /*is_param=*/false);
}

// Collects information about pointers within the top |kMaxSlotsCollected|
// slots on the stack.
// TODO(b/179632636) This code is added in an attempt to better understand
// b/179632636 and should be removed in the future.
void ReportImpossibleNullError(intptr_t cid,
                               StackFrame* caller_frame,
                               Thread* thread) {
  TextBuffer buffer(512);
  buffer.Printf("hit null error with cid %" Pd ", caller context: ", cid);

  const intptr_t kMaxSlotsCollected = 5;
  const auto slots = reinterpret_cast<ObjectPtr*>(caller_frame->sp());
  const intptr_t num_slots_in_frame =
      reinterpret_cast<ObjectPtr*>(caller_frame->fp()) - slots;
  const auto num_slots_to_collect =
      Utils::Minimum(kMaxSlotsCollected, num_slots_in_frame);
  bool comma = false;
  for (intptr_t i = 0; i < num_slots_to_collect; i++) {
    const ObjectPtr ptr = slots[i];
    buffer.Printf("%s[sp+%" Pd "] %" Pp "", comma ? ", " : "", i,
                  static_cast<uword>(ptr));
    if (ptr->IsHeapObject() &&
        (Dart::vm_isolate_group()->heap()->Contains(
             UntaggedObject::ToAddr(ptr)) ||
         thread->heap()->Contains(UntaggedObject::ToAddr(ptr)))) {
      buffer.Printf("(%" Pp ")", static_cast<uword>(ptr->untag()->tags_));
    }
    comma = true;
  }

  const char* message = buffer.buffer();
  FATAL("%s", message);
}

DEFINE_RUNTIME_ENTRY(DispatchTableNullError, 1) {
  const Smi& cid = Smi::CheckedHandle(zone, arguments.ArgAt(0));
  if (cid.Value() != kNullCid) {
    // We hit a null error, but the receiver itself is not null. This is most
    // likely memory corruption. Crash the VM, but provide some additional
    // information about the arguments on the stack.
    DartFrameIterator iterator(thread,
                               StackFrameIterator::kNoCrossThreadIteration);
    StackFrame* caller_frame = iterator.NextFrame();
    RELEASE_ASSERT(caller_frame->IsDartFrame());
    ReportImpossibleNullError(cid.Value(), caller_frame, thread);
  }
  DoThrowNullError(isolate, thread, zone, /*is_param=*/false);
}

DEFINE_RUNTIME_ENTRY(NullErrorWithSelector, 1) {
  const String& selector = String::CheckedHandle(zone, arguments.ArgAt(0));
  NullErrorHelper(zone, selector);
}

DEFINE_RUNTIME_ENTRY(NullCastError, 0) {
  NullErrorHelper(zone, String::null_string());
}

DEFINE_RUNTIME_ENTRY(ArgumentNullError, 0) {
  DoThrowNullError(isolate, thread, zone, /*is_param=*/true);
}

DEFINE_RUNTIME_ENTRY(ArgumentError, 1) {
  const Instance& value = Instance::CheckedHandle(zone, arguments.ArgAt(0));
  Exceptions::ThrowArgumentError(value);
}

DEFINE_RUNTIME_ENTRY(ArgumentErrorUnboxedInt64, 0) {
  // Unboxed value is passed through a dedicated slot in Thread.
  int64_t unboxed_value = arguments.thread()->unboxed_int64_runtime_arg();
  const Integer& value = Integer::Handle(zone, Integer::New(unboxed_value));
  Exceptions::ThrowArgumentError(value);
}

DEFINE_RUNTIME_ENTRY(DoubleToInteger, 1) {
  // Unboxed value is passed through a dedicated slot in Thread.
  double val = arguments.thread()->unboxed_double_runtime_arg();
  const Smi& recognized_kind = Smi::CheckedHandle(zone, arguments.ArgAt(0));
  switch (recognized_kind.Value()) {
    case MethodRecognizer::kDoubleToInteger:
      break;
    case MethodRecognizer::kDoubleFloorToInt:
      val = floor(val);
      break;
    case MethodRecognizer::kDoubleCeilToInt:
      val = ceil(val);
      break;
    default:
      UNREACHABLE();
  }
  arguments.SetReturn(Integer::Handle(zone, DoubleToInteger(zone, val)));
}

DEFINE_RUNTIME_ENTRY(IntegerDivisionByZeroException, 0) {
  const Array& args = Array::Handle(zone, Array::New(0));
  Exceptions::ThrowByType(Exceptions::kIntegerDivisionByZeroException, args);
}

static Heap::Space SpaceForRuntimeAllocation() {
  return FLAG_stress_write_barrier_elimination ? Heap::kOld : Heap::kNew;
}

// Allocation of a fixed length array of given element type.
// This runtime entry is never called for allocating a List of a generic type,
// because a prior runtime call instantiates the element type if necessary.
// Arg0: array length.
// Arg1: array type arguments, i.e. vector of 1 type, the element type.
// Return value: newly allocated array of length arg0.
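// Illustrative example (not tied to any one compilation): a list literal or
// constructor such as
//   final xs = List<int>.filled(n, 0);
// may reach this entry when the allocation stub takes its slow path, e.g.
// for an invalid length or when inline allocation fails.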
DEFINE_RUNTIME_ENTRY(AllocateArray, 2) {
  const Instance& length = Instance::CheckedHandle(zone, arguments.ArgAt(0));
  if (!length.IsInteger()) {
    // Throw: new ArgumentError.value(length, "length", "is not an integer");
    const Array& args = Array::Handle(zone, Array::New(3));
    args.SetAt(0, length);
    args.SetAt(1, Symbols::Length());
    args.SetAt(2, String::Handle(zone, String::New("is not an integer")));
    Exceptions::ThrowByType(Exceptions::kArgumentValue, args);
  }
  const int64_t len = Integer::Cast(length).AsInt64Value();
  if (len < 0) {
    // Throw: new RangeError.range(length, 0, Array::kMaxElements, "length");
    Exceptions::ThrowRangeError("length", Integer::Cast(length), 0,
                                Array::kMaxElements);
  }
  if (len > Array::kMaxElements) {
    Exceptions::ThrowOOM();
  }

  const Array& array = Array::Handle(
      zone,
      Array::New(static_cast<intptr_t>(len), SpaceForRuntimeAllocation()));
  arguments.SetReturn(array);
  TypeArguments& element_type =
      TypeArguments::CheckedHandle(zone, arguments.ArgAt(1));
  // An Array is raw or takes one type argument. However, its type argument
  // vector may be longer than 1 due to a type optimization reusing the type
  // argument vector of the instantiator.
  ASSERT(element_type.IsNull() ||
         (element_type.Length() >= 1 && element_type.IsInstantiated()));
  array.SetTypeArguments(element_type);  // May be null.
}

DEFINE_RUNTIME_ENTRY_NO_LAZY_DEOPT(AllocateDouble, 0) {
  if (FLAG_shared_slow_path_triggers_gc) {
    isolate->group()->heap()->CollectAllGarbage(GCReason::kDebugging);
  }
  arguments.SetReturn(Object::Handle(zone, Double::New(0.0)));
}

DEFINE_RUNTIME_ENTRY_NO_LAZY_DEOPT(BoxDouble, 0) {
  const double val = thread->unboxed_double_runtime_arg();
  arguments.SetReturn(Object::Handle(zone, Double::New(val)));
}

DEFINE_RUNTIME_ENTRY_NO_LAZY_DEOPT(BoxFloat32x4, 0) {
  const auto val = thread->unboxed_simd128_runtime_arg();
  arguments.SetReturn(Object::Handle(zone, Float32x4::New(val)));
}

DEFINE_RUNTIME_ENTRY_NO_LAZY_DEOPT(BoxFloat64x2, 0) {
  const auto val = thread->unboxed_simd128_runtime_arg();
  arguments.SetReturn(Object::Handle(zone, Float64x2::New(val)));
}

DEFINE_RUNTIME_ENTRY_NO_LAZY_DEOPT(AllocateMint, 0) {
  if (FLAG_shared_slow_path_triggers_gc) {
    isolate->group()->heap()->CollectAllGarbage(GCReason::kDebugging);
  }
  arguments.SetReturn(Object::Handle(zone, Integer::New(kMaxInt64)));
}

DEFINE_RUNTIME_ENTRY_NO_LAZY_DEOPT(AllocateFloat32x4, 0) {
  if (FLAG_shared_slow_path_triggers_gc) {
    isolate->group()->heap()->CollectAllGarbage(GCReason::kDebugging);
  }
  arguments.SetReturn(
      Object::Handle(zone, Float32x4::New(0.0, 0.0, 0.0, 0.0)));
}

DEFINE_RUNTIME_ENTRY_NO_LAZY_DEOPT(AllocateFloat64x2, 0) {
  if (FLAG_shared_slow_path_triggers_gc) {
    isolate->group()->heap()->CollectAllGarbage(GCReason::kDebugging);
  }
  arguments.SetReturn(Object::Handle(zone, Float64x2::New(0.0, 0.0)));
}

DEFINE_RUNTIME_ENTRY_NO_LAZY_DEOPT(AllocateInt32x4, 0) {
  if (FLAG_shared_slow_path_triggers_gc) {
    isolate->group()->heap()->CollectAllGarbage(GCReason::kDebugging);
  }
  arguments.SetReturn(Object::Handle(zone, Int32x4::New(0, 0, 0, 0)));
}

// Allocate typed data array of given class id and length.
// Arg0: class id.
// Arg1: number of elements.
// Return value: newly allocated typed data array.
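// Illustrative example: a typed data allocation such as
//   final bytes = Uint8List(1024);
// may land here when the corresponding allocation stub takes its slow path.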
DEFINE_RUNTIME_ENTRY(AllocateTypedData, 2) {
  const intptr_t cid = Smi::CheckedHandle(zone, arguments.ArgAt(0)).Value();
  const auto& length = Instance::CheckedHandle(zone, arguments.ArgAt(1));
  if (!length.IsInteger()) {
    const Array& args = Array::Handle(zone, Array::New(1));
    args.SetAt(0, length);
    Exceptions::ThrowByType(Exceptions::kArgument, args);
  }
  const int64_t len = Integer::Cast(length).AsInt64Value();
  const intptr_t max = TypedData::MaxElements(cid);
  if (len < 0) {
    Exceptions::ThrowRangeError("length", Integer::Cast(length), 0, max);
  } else if (len > max) {
    Exceptions::ThrowOOM();
  }
  const auto& typed_data =
      TypedData::Handle(zone, TypedData::New(cid, static_cast<intptr_t>(len)));
  arguments.SetReturn(typed_data);
}

// Helper returning the token position of the Dart caller.
static TokenPosition GetCallerLocation() {
  DartFrameIterator iterator(Thread::Current(),
                             StackFrameIterator::kNoCrossThreadIteration);
  StackFrame* caller_frame = iterator.NextFrame();
  ASSERT(caller_frame != nullptr);
  return caller_frame->GetTokenPos();
}

// The result of an invoke may be an unhandled exception, in which case we
// rethrow it.
static void ThrowIfError(const Object& result) {
  if (!result.IsNull() && result.IsError()) {
    Exceptions::PropagateError(Error::Cast(result));
  }
}

// Allocate a new object.
// Arg0: class of the object that needs to be allocated.
// Arg1: type arguments of the object that needs to be allocated.
// Return value: newly allocated object.
DEFINE_RUNTIME_ENTRY(AllocateObject, 2) {
  const Class& cls = Class::CheckedHandle(zone, arguments.ArgAt(0));
  ASSERT(cls.is_allocate_finalized());
  const Instance& instance = Instance::Handle(
      zone, Instance::NewAlreadyFinalized(cls, SpaceForRuntimeAllocation()));

  arguments.SetReturn(instance);
  if (cls.NumTypeArguments() == 0) {
    // No type arguments required for a non-parameterized type.
    ASSERT(Instance::CheckedHandle(zone, arguments.ArgAt(1)).IsNull());
  } else {
    const auto& type_arguments =
        TypeArguments::CheckedHandle(zone, arguments.ArgAt(1));
    // Unless null (for a raw type), the type argument vector may be longer
    // than necessary due to a type optimization reusing the type argument
    // vector of the instantiator.
    ASSERT(type_arguments.IsNull() ||
           (type_arguments.IsInstantiated() &&
            (type_arguments.Length() >= cls.NumTypeArguments())));
    instance.SetTypeArguments(type_arguments);
  }
}

DEFINE_LEAF_RUNTIME_ENTRY(uword /*ObjectPtr*/,
                          EnsureRememberedAndMarkingDeferred,
                          2,
                          uword /*ObjectPtr*/ object_in,
                          Thread* thread) {
  ObjectPtr object = static_cast<ObjectPtr>(object_in);
  // The allocation stubs will call this leaf method for newly allocated
  // old space objects.
  RELEASE_ASSERT(object->IsOldObject());

  // If we eliminate a generational write barrier on allocation of an object,
  // we need to ensure it's either a new-space object or it has been added to
  // the remembered set.
  //
  // NOTE: We use reinterpret_cast<>() instead of ::RawCast() to avoid handle
  // allocations in debug mode. Handle allocations in leaf runtimes can cause
  // memory leaks because they will allocate into a handle scope from the next
  // outermost runtime code (to which the generated Dart code might not return
  // in a long time).
  bool add_to_remembered_set = true;
  if (object->untag()->IsRemembered()) {
    // Objects must not be added to the remembered set twice because the
    // scavenger's visitor is not idempotent.
    // Might already be remembered because of type argument store in
    // AllocateArray or any field in CloneContext.
    add_to_remembered_set = false;
  } else if (object->IsArray()) {
    const intptr_t length = Array::LengthOf(static_cast<ArrayPtr>(object));
    add_to_remembered_set =
        compiler::target::WillAllocateNewOrRememberedArray(length);
  } else if (object->IsContext()) {
    const intptr_t num_context_variables =
        Context::NumVariables(static_cast<ContextPtr>(object));
    add_to_remembered_set =
        compiler::target::WillAllocateNewOrRememberedContext(
            num_context_variables);
  }

  if (add_to_remembered_set) {
    object->untag()->EnsureInRememberedSet(thread);
  }

  // For incremental write barrier elimination, we need to ensure that the
  // allocation ends up in the new space or else the object needs to be added
  // to the deferred marking stack so it will be [re]scanned.
  if (thread->is_marking()) {
    thread->DeferredMarkingStackAddObject(object);
  }

  return static_cast<uword>(object);
}
END_LEAF_RUNTIME_ENTRY

// Instantiate type.
// Arg0: uninstantiated type.
// Arg1: instantiator type arguments.
// Arg2: function type arguments.
// Return value: instantiated type.
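// Illustrative example: reading a reified type parameter, as in
//   class Box<T> { Type get t => T; }
// requires instantiating T from the receiver's instantiator type arguments
// (e.g. <int> for a Box<int>), which may be done through this entry when no
// faster path applies.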
DEFINE_RUNTIME_ENTRY(InstantiateType, 3) {
  AbstractType& type = AbstractType::CheckedHandle(zone, arguments.ArgAt(0));
  const TypeArguments& instantiator_type_arguments =
      TypeArguments::CheckedHandle(zone, arguments.ArgAt(1));
  const TypeArguments& function_type_arguments =
      TypeArguments::CheckedHandle(zone, arguments.ArgAt(2));
  ASSERT(!type.IsNull());
  ASSERT(instantiator_type_arguments.IsNull() ||
         instantiator_type_arguments.IsInstantiated());
  ASSERT(function_type_arguments.IsNull() ||
         function_type_arguments.IsInstantiated());
  type = type.InstantiateFrom(instantiator_type_arguments,
                              function_type_arguments, kAllFree, Heap::kOld);
  ASSERT(!type.IsNull() && type.IsInstantiated());
  arguments.SetReturn(type);
}

// Instantiate type arguments.
// Arg0: uninstantiated type arguments.
// Arg1: instantiator type arguments.
// Arg2: function type arguments.
// Return value: instantiated type arguments.
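// Illustrative example: allocating `<T>[]` inside
//   List<T> make<T>() => <T>[];
// requires instantiating the type argument vector <T> with the caller's
// function type arguments; the non-inlined, non-cached case may end up here.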
DEFINE_RUNTIME_ENTRY(InstantiateTypeArguments, 3) {
  TypeArguments& type_arguments =
      TypeArguments::CheckedHandle(zone, arguments.ArgAt(0));
  const TypeArguments& instantiator_type_arguments =
      TypeArguments::CheckedHandle(zone, arguments.ArgAt(1));
  const TypeArguments& function_type_arguments =
      TypeArguments::CheckedHandle(zone, arguments.ArgAt(2));
  ASSERT(!type_arguments.IsNull() && !type_arguments.IsInstantiated());
  ASSERT(instantiator_type_arguments.IsNull() ||
         instantiator_type_arguments.IsInstantiated());
  ASSERT(function_type_arguments.IsNull() ||
         function_type_arguments.IsInstantiated());
  // Code inlined in the caller should have optimized the case where the
  // instantiator can be reused as the type argument vector.
  ASSERT(!type_arguments.IsUninstantiatedIdentity());
  type_arguments = type_arguments.InstantiateAndCanonicalizeFrom(
      instantiator_type_arguments, function_type_arguments);
  ASSERT(type_arguments.IsNull() || type_arguments.IsInstantiated());
  arguments.SetReturn(type_arguments);
}

// Helper routine for tracing a subtype check.
static void PrintSubtypeCheck(const AbstractType& subtype,
                              const AbstractType& supertype,
                              const bool result) {
  DartFrameIterator iterator(Thread::Current(),
                             StackFrameIterator::kNoCrossThreadIteration);
  StackFrame* caller_frame = iterator.NextFrame();
  ASSERT(caller_frame != nullptr);

  LogBlock lb;
  THR_Print("SubtypeCheck: '%s' %d %s '%s' %d (pc: %#" Px ").\n",
            String::Handle(subtype.Name()).ToCString(),
            subtype.type_class_id(), result ? "is" : "is !",
            String::Handle(supertype.Name()).ToCString(),
            supertype.type_class_id(), caller_frame->pc());

  const Function& function =
      Function::Handle(caller_frame->LookupDartFunction());
  if (function.HasSavedArgumentsDescriptor()) {
    const auto& args_desc_array = Array::Handle(function.saved_args_desc());
    const ArgumentsDescriptor args_desc(args_desc_array);
    THR_Print(" -> Function %s [%s]\n", function.ToFullyQualifiedCString(),
              args_desc.ToCString());
  } else {
    THR_Print(" -> Function %s\n", function.ToFullyQualifiedCString());
  }
}

// Check that one type is a subtype of another, instantiating both first if
// necessary.
// Arg0: instantiator type arguments.
// Arg1: function type arguments.
// Arg2: type to be a subtype of the other.
// Arg3: type to be a supertype of the other.
// Arg4: variable name of the subtype parameter.
// No return value.
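// Illustrative example: a generic call such as `f<String>()` where
//   void f<T extends num>() {}
// may require a runtime bound check of String against num (e.g. in a dynamic
// call); a failing check throws a TypeError through this entry.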
DEFINE_RUNTIME_ENTRY(SubtypeCheck, 5) {
  const TypeArguments& instantiator_type_args =
      TypeArguments::CheckedHandle(zone, arguments.ArgAt(0));
  const TypeArguments& function_type_args =
      TypeArguments::CheckedHandle(zone, arguments.ArgAt(1));
  AbstractType& subtype = AbstractType::CheckedHandle(zone, arguments.ArgAt(2));
  AbstractType& supertype =
      AbstractType::CheckedHandle(zone, arguments.ArgAt(3));
  const String& dst_name = String::CheckedHandle(zone, arguments.ArgAt(4));

  ASSERT(!supertype.IsNull());
  ASSERT(!subtype.IsNull());

  // Now that AssertSubtype may be checking types only available at runtime,
  // we can't guarantee the supertype isn't the top type.
  if (supertype.IsTopTypeForSubtyping()) return;

  // The supertype or subtype may not be instantiated.
  if (AbstractType::InstantiateAndTestSubtype(
          &subtype, &supertype, instantiator_type_args, function_type_args)) {
    if (FLAG_trace_type_checks) {
      // The supertype and subtype are now instantiated. Subtype check passed.
      PrintSubtypeCheck(subtype, supertype, true);
    }
    return;
  }
  if (FLAG_trace_type_checks) {
    // The supertype and subtype are now instantiated. Subtype check failed.
    PrintSubtypeCheck(subtype, supertype, false);
  }

  // Throw a dynamic type error.
  const TokenPosition location = GetCallerLocation();
  Exceptions::CreateAndThrowTypeError(location, subtype, supertype, dst_name);
  UNREACHABLE();
}

// Allocate a new closure, initializing its function and context fields with
// the given arguments and all other fields to null.
// Arg0: function.
// Arg1: context.
// Return value: newly allocated closure.
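// Illustrative example: a closure expression such as
//   var add = (int x) => x + captured;
// allocates a Closure pointing at its function and at the context holding
// `captured`; the allocation stub's slow path may call into this entry.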
DEFINE_RUNTIME_ENTRY(AllocateClosure, 2) {
  const auto& function = Function::CheckedHandle(zone, arguments.ArgAt(0));
  const auto& context = Context::CheckedHandle(zone, arguments.ArgAt(1));
  const Closure& closure = Closure::Handle(
      zone,
      Closure::New(Object::null_type_arguments(),
                   Object::null_type_arguments(),
                   Object::null_type_arguments(), function, context,
                   SpaceForRuntimeAllocation()));
  arguments.SetReturn(closure);
}

// Allocate a new context large enough to hold the given number of variables.
// Arg0: number of variables.
// Return value: newly allocated context.
DEFINE_RUNTIME_ENTRY(AllocateContext, 1) {
  const Smi& num_variables = Smi::CheckedHandle(zone, arguments.ArgAt(0));
  const Context& context = Context::Handle(
      zone, Context::New(num_variables.Value(), SpaceForRuntimeAllocation()));
  arguments.SetReturn(context);
}

// Make a copy of the given context, including the values of the captured
// variables.
// Arg0: the context to be cloned.
// Return value: newly allocated context.
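// Illustrative example: a loop body that captures the loop variable, e.g.
//   for (var i = 0; i < n; i++) fs.add(() => i);
// needs a fresh copy of the context per iteration so that each closure
// observes its own `i`; such copies may be made through this entry.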
DEFINE_RUNTIME_ENTRY(CloneContext, 1) {
  const Context& ctx = Context::CheckedHandle(zone, arguments.ArgAt(0));
  Context& cloned_ctx = Context::Handle(
      zone, Context::New(ctx.num_variables(), SpaceForRuntimeAllocation()));
  cloned_ctx.set_parent(Context::Handle(zone, ctx.parent()));
  Object& inst = Object::Handle(zone);
  for (int i = 0; i < ctx.num_variables(); i++) {
    inst = ctx.At(i);
    cloned_ctx.SetAt(i, inst);
  }
  arguments.SetReturn(cloned_ctx);
}

// Allocate a new record instance.
// Arg0: record shape id.
// Return value: newly allocated record.
DEFINE_RUNTIME_ENTRY(AllocateRecord, 1) {
  const RecordShape shape(Smi::RawCast(arguments.ArgAt(0)));
  const Record& record =
      Record::Handle(zone, Record::New(shape, SpaceForRuntimeAllocation()));
  arguments.SetReturn(record);
}

// Allocate a new small record instance and initialize its fields.
// Arg0: record shape id.
// Arg1-Arg3: field values.
// Return value: newly allocated record.
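// Illustrative example: a record literal such as `(1, 'a')` or `(1, 2, 3)`
// may reach this entry on the allocation stub's slow path; as asserted
// below, only 2- and 3-field shapes use this specialized entry.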
DEFINE_RUNTIME_ENTRY(AllocateSmallRecord, 4) {
  const RecordShape shape(Smi::RawCast(arguments.ArgAt(0)));
  const auto& value0 = Instance::CheckedHandle(zone, arguments.ArgAt(1));
  const auto& value1 = Instance::CheckedHandle(zone, arguments.ArgAt(2));
  const auto& value2 = Instance::CheckedHandle(zone, arguments.ArgAt(3));
  const Record& record =
      Record::Handle(zone, Record::New(shape, SpaceForRuntimeAllocation()));
  const intptr_t num_fields = shape.num_fields();
  ASSERT(num_fields == 2 || num_fields == 3);
  record.SetFieldAt(0, value0);
  record.SetFieldAt(1, value1);
  if (num_fields > 2) {
    record.SetFieldAt(2, value2);
  }
  arguments.SetReturn(record);
}

// Allocate a SuspendState object.
// Arg0: frame size.
// Arg1: existing SuspendState object or function data.
// Return value: newly allocated object.
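// Illustrative sketch of when this runs: suspending at an `await` in an
// async function saves the live frame into a SuspendState; if the state
// object from an earlier suspension is too small for the current frame,
// a larger one may be allocated through this entry.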
DEFINE_RUNTIME_ENTRY(AllocateSuspendState, 2) {
  const intptr_t frame_size =
      Smi::CheckedHandle(zone, arguments.ArgAt(0)).Value();
  const Object& previous_state = Object::Handle(zone, arguments.ArgAt(1));
  SuspendState& result = SuspendState::Handle(zone);
  if (previous_state.IsSuspendState()) {
    const auto& suspend_state = SuspendState::Cast(previous_state);
    const auto& function_data =
        Instance::Handle(zone, suspend_state.function_data());
    ObjectStore* object_store = thread->isolate_group()->object_store();
    if (function_data.GetClassId() ==
        Class::Handle(zone, object_store->async_star_stream_controller())
            .id()) {
      // Reset _AsyncStarStreamController.asyncStarBody to null in order
      // to create a new callback closure during the next yield.
      // The new callback closure will capture the reallocated SuspendState.
      function_data.SetField(
          Field::Handle(
              zone,
              object_store->async_star_stream_controller_async_star_body()),
          Object::null_object());
    }
    result = SuspendState::New(frame_size, function_data,
                               SpaceForRuntimeAllocation());
    if (function_data.GetClassId() ==
        Class::Handle(zone, object_store->sync_star_iterator_class()).id()) {
      // Refresh _SyncStarIterator._state with the new SuspendState object.
      function_data.SetField(
          Field::Handle(zone, object_store->sync_star_iterator_state()),
          result);
    }
  } else {
    result = SuspendState::New(frame_size, Instance::Cast(previous_state),
                               SpaceForRuntimeAllocation());
  }
  arguments.SetReturn(result);
}

// Makes a copy of the given SuspendState object, including the payload frame.
// Arg0: the SuspendState object to be cloned.
// Return value: newly allocated object.
DEFINE_RUNTIME_ENTRY(CloneSuspendState, 1) {
  const SuspendState& src =
      SuspendState::CheckedHandle(zone, arguments.ArgAt(0));
  const SuspendState& dst = SuspendState::Handle(
      zone, SuspendState::Clone(thread, src, SpaceForRuntimeAllocation()));
  arguments.SetReturn(dst);
}

// Helper routine for tracing a type check.
static void PrintTypeCheck(const char* message,
                           const Instance& instance,
                           const AbstractType& type,
                           const TypeArguments& instantiator_type_arguments,
                           const TypeArguments& function_type_arguments,
                           const Bool& result) {
  DartFrameIterator iterator(Thread::Current(),
                             StackFrameIterator::kNoCrossThreadIteration);
  StackFrame* caller_frame = iterator.NextFrame();
  ASSERT(caller_frame != nullptr);

  const AbstractType& instance_type =
      AbstractType::Handle(instance.GetType(Heap::kNew));
  ASSERT(instance_type.IsInstantiated() ||
         (instance.IsClosure() && instance_type.IsInstantiated(kCurrentClass)));
  LogBlock lb;
  if (type.IsInstantiated()) {
    THR_Print("%s: '%s' %d %s '%s' %d (pc: %#" Px ").\n", message,
              String::Handle(instance_type.Name()).ToCString(),
              instance_type.type_class_id(),
              (result.ptr() == Bool::True().ptr()) ? "is" : "is !",
              String::Handle(type.Name()).ToCString(), type.type_class_id(),
              caller_frame->pc());
  } else {
    // Instantiate type before printing.
    const AbstractType& instantiated_type = AbstractType::Handle(
        type.InstantiateFrom(instantiator_type_arguments,
                             function_type_arguments, kAllFree, Heap::kOld));
    THR_Print("%s: '%s' %s '%s' instantiated from '%s' (pc: %#" Px ").\n",
              message, String::Handle(instance_type.Name()).ToCString(),
              (result.ptr() == Bool::True().ptr()) ? "is" : "is !",
              String::Handle(instantiated_type.Name()).ToCString(),
              String::Handle(type.Name()).ToCString(), caller_frame->pc());
  }
  const Function& function =
      Function::Handle(caller_frame->LookupDartFunction());
  if (function.HasSavedArgumentsDescriptor()) {
    const auto& args_desc_array = Array::Handle(function.saved_args_desc());
    const ArgumentsDescriptor args_desc(args_desc_array);
    THR_Print(" -> Function %s [%s]\n", function.ToFullyQualifiedCString(),
              args_desc.ToCString());
  } else {
    THR_Print(" -> Function %s\n", function.ToFullyQualifiedCString());
  }
}

#if defined(TARGET_ARCH_IA32)
static BoolPtr CheckHashBasedSubtypeTestCache(
    Zone* zone,
    Thread* thread,
    const Instance& instance,
    const AbstractType& destination_type,
    const TypeArguments& instantiator_type_arguments,
    const TypeArguments& function_type_arguments,
    const SubtypeTestCache& cache) {
  ASSERT(cache.IsHash());
  // Record instances are not added to the cache as they don't have a valid
  // key (the type of a record depends on the types of all its fields).
  if (instance.IsRecord()) return Bool::null();
  Class& instance_class = Class::Handle(zone);
  if (instance.IsSmi()) {
    instance_class = Smi::Class();
  } else {
    instance_class = instance.clazz();
  }
  // If the type is uninstantiated and refers to parent function type
  // parameters, the function_type_arguments have been canonicalized
  // when concatenated.
  auto& instance_class_id_or_signature = Object::Handle(zone);
  auto& instance_type_arguments = TypeArguments::Handle(zone);
  auto& instance_parent_function_type_arguments = TypeArguments::Handle(zone);
  auto& instance_delayed_type_arguments = TypeArguments::Handle(zone);
  if (instance_class.IsClosureClass()) {
    const auto& closure = Closure::Cast(instance);
    const auto& function = Function::Handle(zone, closure.function());
    instance_class_id_or_signature = function.signature();
    instance_type_arguments = closure.instantiator_type_arguments();
    instance_parent_function_type_arguments = closure.function_type_arguments();
    instance_delayed_type_arguments = closure.delayed_type_arguments();
  } else {
    instance_class_id_or_signature = Smi::New(instance_class.id());
    if (instance_class.NumTypeArguments() > 0) {
      instance_type_arguments = instance.GetTypeArguments();
    }
  }

  intptr_t index = -1;
  auto& result = Bool::Handle(zone);
  if (cache.HasCheck(instance_class_id_or_signature, destination_type,
                     instance_type_arguments, instantiator_type_arguments,
                     function_type_arguments,
                     instance_parent_function_type_arguments,
                     instance_delayed_type_arguments, &index, &result)) {
    return result.ptr();
  }

  return Bool::null();
}
#endif  // defined(TARGET_ARCH_IA32)

// This updates the type test cache, an array containing 8 elements:
// - instance class (or function if the instance is a closure)
// - instance type arguments (null if the instance class is not generic)
// - instantiator type arguments (null if the type is instantiated)
// - function type arguments (null if the type is instantiated)
// - instance parent function type arguments (null if instance is not a closure)
// - instance delayed type arguments (null if instance is not a closure)
// - destination type (null if the type was known at compile time)
// - test result
// It can be applied to classes with type arguments in which case it contains
// just the result of the class subtype test, not including the evaluation of
// type arguments.
// This operation is currently very slow (lookup of code is not efficient yet).
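// Illustrative example: once `x is Set<int>` has been answered for a given
// receiver class and type-argument combination, the tuple above is recorded
// so that the stubs can answer the same check later without re-entering the
// runtime.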
static void UpdateTypeTestCache(
    Zone* zone,
    Thread* thread,
    const Instance& instance,
    const AbstractType& destination_type,
    const TypeArguments& instantiator_type_arguments,
    const TypeArguments& function_type_arguments,
    const Bool& result,
    const SubtypeTestCache& new_cache) {
  ASSERT(!new_cache.IsNull());
  ASSERT(destination_type.IsCanonical());
  ASSERT(instantiator_type_arguments.IsCanonical());
  ASSERT(function_type_arguments.IsCanonical());
  if (instance.IsRecord()) {
    // Do not add record instances to the cache as they don't have a valid
    // key (the type of a record depends on the types of all its fields).
    if (FLAG_trace_type_checks) {
      THR_Print("Not updating subtype test cache for the record instance.\n");
    }
    return;
  }
  Class& instance_class = Class::Handle(zone);
  if (instance.IsSmi()) {
    instance_class = Smi::Class();
  } else {
    instance_class = instance.clazz();
  }
  // If the type is uninstantiated and refers to parent function type
  // parameters, the function_type_arguments have been canonicalized
  // when concatenated.
  auto& instance_class_id_or_signature = Object::Handle(zone);
  auto& instance_type_arguments = TypeArguments::Handle(zone);
  auto& instance_parent_function_type_arguments = TypeArguments::Handle(zone);
  auto& instance_delayed_type_arguments = TypeArguments::Handle(zone);
  if (instance_class.IsClosureClass()) {
    const auto& closure = Closure::Cast(instance);
    const auto& function = Function::Handle(zone, closure.function());
    instance_class_id_or_signature = function.signature();
    ASSERT(instance_class_id_or_signature.IsFunctionType());
    instance_type_arguments = closure.instantiator_type_arguments();
    instance_parent_function_type_arguments = closure.function_type_arguments();
    instance_delayed_type_arguments = closure.delayed_type_arguments();
    ASSERT(instance_class_id_or_signature.IsCanonical());
    ASSERT(instance_type_arguments.IsCanonical());
    ASSERT(instance_parent_function_type_arguments.IsCanonical());
    ASSERT(instance_delayed_type_arguments.IsCanonical());
  } else {
    instance_class_id_or_signature = Smi::New(instance_class.id());
    if (instance_class.NumTypeArguments() > 0) {
      instance_type_arguments = instance.GetTypeArguments();
      ASSERT(instance_type_arguments.IsCanonical());
    }
  }
  if (FLAG_trace_type_checks) {
    const auto& instance_class_name =
        String::Handle(zone, instance_class.Name());
    TextBuffer buffer(256);
    buffer.Printf(" Updating test cache %#" Px " with result %s for:\n",
                  static_cast<uword>(new_cache.ptr()), result.ToCString());
    if (instance.IsString()) {
      buffer.Printf(" instance: '%s'\n", instance.ToCString());
    } else {
      buffer.Printf(" instance: %s\n", instance.ToCString());
    }
    buffer.Printf(" class: %s (%" Pd ")\n", instance_class_name.ToCString(),
                  instance_class.id());
    buffer.Printf(
        " raw entry: [ %#" Px ", %#" Px ", %#" Px ", %#" Px ", %#" Px
        ", %#" Px ", %#" Px ", %#" Px " ]\n",
        static_cast<uword>(instance_class_id_or_signature.ptr()),
        static_cast<uword>(instance_type_arguments.ptr()),
        static_cast<uword>(instantiator_type_arguments.ptr()),
        static_cast<uword>(function_type_arguments.ptr()),
        static_cast<uword>(instance_parent_function_type_arguments.ptr()),
        static_cast<uword>(instance_delayed_type_arguments.ptr()),
        static_cast<uword>(destination_type.ptr()),
        static_cast<uword>(result.ptr()));
    THR_Print("%s", buffer.buffer());
  }
  {
    SafepointMutexLocker ml(
        thread->isolate_group()->subtype_test_cache_mutex());
    const intptr_t len = new_cache.NumberOfChecks();
    if (len >= FLAG_max_subtype_cache_entries) {
      if (FLAG_trace_type_checks) {
        THR_Print("Not updating subtype test cache as its length reached %d\n",
                  FLAG_max_subtype_cache_entries);
      }
      return;
    }
    intptr_t colliding_index = -1;
    auto& old_result = Bool::Handle(zone);
    if (new_cache.HasCheck(
            instance_class_id_or_signature, destination_type,
            instance_type_arguments, instantiator_type_arguments,
            function_type_arguments, instance_parent_function_type_arguments,
            instance_delayed_type_arguments, &colliding_index, &old_result)) {
      if (FLAG_trace_type_checks) {
        TextBuffer buffer(256);
        buffer.Printf(" Collision for test cache %#" Px " at index %" Pd ":\n",
                      static_cast<uword>(new_cache.ptr()), colliding_index);
        buffer.Printf(" entry: ");
        new_cache.WriteEntryToBuffer(zone, &buffer, colliding_index, " ");
        THR_Print("%s\n", buffer.buffer());
      }
      if (old_result.ptr() != result.ptr()) {
        FATAL("Existing subtype test cache entry has result %s, not %s",
              old_result.ToCString(), result.ToCString());
      }
      // Some other isolate might have updated the cache between the time the
      // entry was found missing and now.
      return;
    }
    const intptr_t new_index = new_cache.AddCheck(
        instance_class_id_or_signature, destination_type,
        instance_type_arguments, instantiator_type_arguments,
        function_type_arguments, instance_parent_function_type_arguments,
        instance_delayed_type_arguments, result);
    if (FLAG_trace_type_checks) {
      TextBuffer buffer(256);
      buffer.Printf(" Added new entry to test cache %#" Px " at index %" Pd
                    ":\n",
                    static_cast<uword>(new_cache.ptr()), new_index);
      buffer.Printf(" new entry: ");
      new_cache.WriteEntryToBuffer(zone, &buffer, new_index, " ");
      THR_Print("%s\n", buffer.buffer());
    }
  }
}

// Check that the given instance is an instance of the given type.
// The tested instance may be null, because a null test cannot always be
// inlined, e.g. 'null is T' yields true if T = Null, but false if T = bool.
// Arg0: instance being checked.
// Arg1: type.
// Arg2: type arguments of the instantiator of the type.
// Arg3: type arguments of the function of the type.
// Arg4: SubtypeTestCache.
// Return value: true or false.
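// Illustrative example: a test the compiler cannot fold away, such as
//   if (x is Map<String, int>) { ... }
// may end up in this entry when the type testing stubs cannot decide the
// check; the result is then recorded in the SubtypeTestCache for later use.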
DEFINE_RUNTIME_ENTRY(Instanceof, 5) {
  const Instance& instance = Instance::CheckedHandle(zone, arguments.ArgAt(0));
  const AbstractType& type =
      AbstractType::CheckedHandle(zone, arguments.ArgAt(1));
  const TypeArguments& instantiator_type_arguments =
      TypeArguments::CheckedHandle(zone, arguments.ArgAt(2));
  const TypeArguments& function_type_arguments =
      TypeArguments::CheckedHandle(zone, arguments.ArgAt(3));
  const SubtypeTestCache& cache =
      SubtypeTestCache::CheckedHandle(zone, arguments.ArgAt(4));
  ASSERT(type.IsFinalized());
  ASSERT(!type.IsDynamicType());  // No need to check assignment.
  ASSERT(!cache.IsNull());
#if defined(TARGET_ARCH_IA32)
  // Hash-based caches are still not handled by the stubs on IA32.
  if (cache.IsHash()) {
    const auto& result = Bool::Handle(
        zone, CheckHashBasedSubtypeTestCache(zone, thread, instance, type,
                                             instantiator_type_arguments,
                                             function_type_arguments, cache));
    if (!result.IsNull()) {
      // Early exit because an entry already exists in the cache.
      arguments.SetReturn(result);
      return;
    }
  }
#endif  // defined(TARGET_ARCH_IA32)
  const Bool& result = Bool::Get(instance.IsInstanceOf(
      type, instantiator_type_arguments, function_type_arguments));
  if (FLAG_trace_type_checks) {
    PrintTypeCheck("InstanceOf", instance, type, instantiator_type_arguments,
                   function_type_arguments, result);
  }
  UpdateTypeTestCache(zone, thread, instance, type, instantiator_type_arguments,
                      function_type_arguments, result, cache);
  arguments.SetReturn(result);
}

#if defined(TESTING)
// Used only in type_testing_stubs_test.cc. If DRT_TypeCheck is entered, then
// this flag is set to true.
bool TESTING_runtime_entered_on_TTS_invocation = false;
#endif

// Check that the type of the given instance is a subtype of the given type
// and can therefore be assigned.
// The tested instance may not be null, because a null test is always inlined.
// Arg0: instance being assigned.
// Arg1: type being assigned to.
// Arg2: type arguments of the instantiator of the type being assigned to.
// Arg3: type arguments of the function of the type being assigned to.
// Arg4: name of variable being assigned to.
// Arg5: SubtypeTestCache.
// Arg6: invocation mode (see TypeCheckMode).
// Return value: instance if a subtype, otherwise throw a TypeError.
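// Illustrative example: a checked assignment such as
//   num n = ...; int i = n as int;
// or an implicit parameter check in a dynamic call may invoke this entry
// when the type testing stub cannot decide the check on its own.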
1099 | DEFINE_RUNTIME_ENTRY(TypeCheck, 7) { |
1100 | const Instance& src_instance = |
1101 | Instance::CheckedHandle(zone, ptr: arguments.ArgAt(index: 0)); |
1102 | const AbstractType& dst_type = |
1103 | AbstractType::CheckedHandle(zone, ptr: arguments.ArgAt(index: 1)); |
1104 | const TypeArguments& instantiator_type_arguments = |
1105 | TypeArguments::CheckedHandle(zone, ptr: arguments.ArgAt(index: 2)); |
1106 | const TypeArguments& function_type_arguments = |
1107 | TypeArguments::CheckedHandle(zone, ptr: arguments.ArgAt(index: 3)); |
1108 | String& dst_name = String::Handle(zone); |
1109 | dst_name ^= arguments.ArgAt(index: 4); |
1110 | ASSERT(dst_name.IsNull() || dst_name.IsString()); |
1111 | |
1112 | SubtypeTestCache& cache = SubtypeTestCache::Handle(zone); |
1113 | cache ^= arguments.ArgAt(index: 5); |
1114 | ASSERT(cache.IsNull() || cache.IsSubtypeTestCache()); |
1115 | |
1116 | const TypeCheckMode mode = static_cast<TypeCheckMode>( |
1117 | Smi::CheckedHandle(zone, ptr: arguments.ArgAt(index: 6)).Value()); |
1118 | |
1119 | #if defined(TESTING) |
1120 | TESTING_runtime_entered_on_TTS_invocation = true; |
1121 | #endif |
1122 | |
1123 | #if defined(TARGET_ARCH_IA32) |
1124 | ASSERT(mode == kTypeCheckFromInline); |
1125 | // Hash-based caches are still not handled by the stubs on IA32. |
1126 | if (cache.IsHash()) { |
1127 | const auto& result = Bool::Handle( |
1128 | zone, CheckHashBasedSubtypeTestCache( |
1129 | zone, thread, src_instance, dst_type, |
1130 | instantiator_type_arguments, function_type_arguments, cache)); |
1131 | if (!result.IsNull()) { |
1132 | // Early exit because an entry already exists in the cache. |
1133 | arguments.SetReturn(result); |
1134 | return; |
1135 | } |
1136 | } |
1137 | #endif // defined(TARGET_ARCH_IA32) |
1138 | |
1139 | // These are guaranteed on the calling side. |
1140 | ASSERT(!dst_type.IsDynamicType()); |
1141 | ASSERT(!src_instance.IsNull() || |
1142 | isolate->group()->use_strict_null_safety_checks()); |
1143 | |
1144 | const bool is_instance_of = src_instance.IsAssignableTo( |
1145 | other: dst_type, other_instantiator_type_arguments: instantiator_type_arguments, other_function_type_arguments: function_type_arguments); |
1146 | |
1147 | if (FLAG_trace_type_checks) { |
1148 | PrintTypeCheck(message: "TypeCheck" , instance: src_instance, type: dst_type, |
1149 | instantiator_type_arguments, function_type_arguments, |
1150 | result: Bool::Get(value: is_instance_of)); |
1151 | } |
1152 | |
1153 | // Most paths through this runtime entry don't need to know what the |
1154 | // destination name was or if this was a dynamic assert assignable call, |
1155 | // so only walk the stack to find the stored destination name when necessary. |
1156 | auto resolve_dst_name = [&]() { |
1157 | if (!dst_name.IsNull()) return; |
1158 | #if !defined(TARGET_ARCH_IA32) |
1159 | // Can only come here from type testing stub. |
1160 | ASSERT(mode != kTypeCheckFromInline); |
1161 | |
1162 | // Grab the [dst_name] from the pool. It's stored at one pool slot after |
1163 | // the subtype-test-cache. |
1164 | DartFrameIterator iterator(thread, |
1165 | StackFrameIterator::kNoCrossThreadIteration); |
1166 | StackFrame* caller_frame = iterator.NextFrame(); |
1167 | const Code& caller_code = |
1168 | Code::Handle(zone, ptr: caller_frame->LookupDartCode()); |
1169 | const ObjectPool& pool = |
1170 | ObjectPool::Handle(zone, ptr: caller_code.GetObjectPool()); |
1171 | TypeTestingStubCallPattern tts_pattern(caller_frame->pc()); |
1172 | const intptr_t stc_pool_idx = tts_pattern.GetSubtypeTestCachePoolIndex(); |
1173 | const intptr_t dst_name_idx = stc_pool_idx + 1; |
1174 | dst_name ^= pool.ObjectAt(dst_name_idx); |
1175 | #else |
1176 | UNREACHABLE(); |
1177 | #endif |
1178 | }; |
1179 | |
1180 | if (!is_instance_of) { |
1181 | resolve_dst_name(); |
1182 | if (dst_name.ptr() == |
1183 | Symbols::dynamic_assert_assignable_stc_check().ptr()) { |
1184 | #if !defined(TARGET_ARCH_IA32) |
1185 | // Can only come here from type testing stub via dynamic AssertAssignable. |
1186 | ASSERT(mode != kTypeCheckFromInline); |
1187 | #endif |
1188 | // This was a dynamic closure call where the destination name was not |
1189 | // known at compile-time. Thus, fetch the original arguments and arguments |
1190 | // descriptor and re-do the type check in the runtime, which causes the |
1191 | // error with the proper destination name to be thrown. |
1192 | DartFrameIterator iterator(thread, |
1193 | StackFrameIterator::kNoCrossThreadIteration); |
1194 | StackFrame* caller_frame = iterator.NextFrame(); |
      const auto& dispatcher =
          Function::Handle(zone, caller_frame->LookupDartFunction());
      ASSERT(dispatcher.IsInvokeFieldDispatcher());
      const auto& orig_arguments_desc =
          Array::Handle(zone, dispatcher.saved_args_desc());
      const ArgumentsDescriptor args_desc(orig_arguments_desc);
      const intptr_t arg_count = args_desc.CountWithTypeArgs();
      const auto& orig_arguments = Array::Handle(zone, Array::New(arg_count));
      auto& obj = Object::Handle(zone);
      for (intptr_t i = 0; i < arg_count; i++) {
        obj = *reinterpret_cast<ObjectPtr*>(
            ParamAddress(caller_frame->fp(), arg_count - i));
        orig_arguments.SetAt(i, obj);
      }
      const auto& receiver = Closure::CheckedHandle(
          zone, orig_arguments.At(args_desc.FirstArgIndex()));
1211 | const auto& function = Function::Handle(zone, receiver.function()); |
1212 | const auto& result = Object::Handle( |
1213 | zone, function.DoArgumentTypesMatch(orig_arguments, args_desc)); |
1214 | if (result.IsError()) { |
1215 | Exceptions::PropagateError(Error::Cast(result)); |
1216 | } |
      // IsAssignableTo returned false, so we should have thrown a type
      // error in DoArgumentTypesMatch.
1219 | UNREACHABLE(); |
1220 | } |
1221 | |
1222 | ASSERT(!dst_name.IsNull()); |
1223 | // Throw a dynamic type error. |
1224 | const TokenPosition location = GetCallerLocation(); |
    const auto& src_type =
        AbstractType::Handle(zone, src_instance.GetType(Heap::kNew));
    auto& reported_type = AbstractType::Handle(zone, dst_type.ptr());
    if (!reported_type.IsInstantiated()) {
      // Instantiate dst_type before reporting the error.
      reported_type = reported_type.InstantiateFrom(
          instantiator_type_arguments, function_type_arguments, kAllFree,
          Heap::kNew);
    }
    Exceptions::CreateAndThrowTypeError(location, src_type, reported_type,
                                        dst_name);
1236 | UNREACHABLE(); |
1237 | } |
1238 | |
1239 | bool should_update_cache = true; |
1240 | #if !defined(TARGET_ARCH_IA32) |
1241 | bool would_update_cache_if_not_lazy = false; |
1242 | #if !defined(DART_PRECOMPILED_RUNTIME) |
1243 | // Checks against type parameters are done by loading the corresponding type |
1244 | // argument at runtime and calling the type argument's TTS. Thus, we install |
1245 | // specialized TTSes on the type argument, not the parameter itself. |
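  // For example, for a check against T of a generic class instantiated as
  // C<int>, the specialized TTS is installed on int, not on T itself.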
  auto& tts_type = AbstractType::Handle(zone, dst_type.ptr());
  if (tts_type.IsTypeParameter()) {
    const auto& param = TypeParameter::Cast(tts_type);
1249 | tts_type = param.GetFromTypeArguments(instantiator_type_arguments, |
1250 | function_type_arguments); |
1251 | } |
1252 | ASSERT(!tts_type.IsTypeParameter()); |
1253 | |
1254 | if (mode == kTypeCheckFromLazySpecializeStub) { |
1255 | if (FLAG_trace_type_checks) { |
      THR_Print(" Specializing type testing stub for %s\n",
                tts_type.ToCString());
    }
    const Code& code = Code::Handle(
        zone, TypeTestingStubGenerator::SpecializeStubFor(thread, tts_type));
1261 | tts_type.SetTypeTestingStub(code); |
1262 | |
1263 | // Only create the cache if we failed to create a specialized TTS and doing |
1264 | // the same check would cause an update to the cache. |
1265 | would_update_cache_if_not_lazy = |
1266 | (!src_instance.IsNull() && |
1267 | tts_type.type_test_stub() == |
1268 | StubCode::DefaultNullableTypeTest().ptr()) || |
1269 | tts_type.type_test_stub() == StubCode::DefaultTypeTest().ptr(); |
1270 | should_update_cache = would_update_cache_if_not_lazy && cache.IsNull(); |
1271 | } |
1272 | |
  // Since dst_type is not a top type or a type parameter, the only default
  // stubs it can use are DefaultTypeTest or DefaultNullableTypeTest.
1275 | if ((mode == kTypeCheckFromSlowStub) && |
1276 | (tts_type.type_test_stub() != StubCode::DefaultNullableTypeTest().ptr() && |
1277 | tts_type.type_test_stub() != StubCode::DefaultTypeTest().ptr())) { |
1278 | // The specialized type testing stub returned a false negative. That means |
1279 | // the specialization may have been generated using outdated cid ranges and |
1280 | // new classes appeared since the stub was generated. Try respecializing. |
1281 | if (FLAG_trace_type_checks) { |
      THR_Print(" Rebuilding type testing stub for %s\n",
                tts_type.ToCString());
1284 | } |
    const auto& old_code = Code::Handle(zone, tts_type.type_test_stub());
    const auto& new_code = Code::Handle(
        zone, TypeTestingStubGenerator::SpecializeStubFor(thread, tts_type));
1288 | ASSERT(old_code.ptr() != new_code.ptr()); |
1289 | // A specialized stub should always respecialize to a non-default stub. |
1290 | ASSERT(new_code.ptr() != StubCode::DefaultNullableTypeTest().ptr() && |
1291 | new_code.ptr() != StubCode::DefaultTypeTest().ptr()); |
    const auto& old_instructions =
        Instructions::Handle(old_code.instructions());
    const auto& new_instructions =
        Instructions::Handle(new_code.instructions());
1296 | // Check if specialization produced exactly the same sequence of |
1297 | // instructions. If it did, then we have a false negative, which can |
1298 | // happen in some cases involving uninstantiated types. In these cases, |
1299 | // update the cache, because the only case in which these false negatives |
1300 | // could possibly turn into true positives is with reloads, which clear |
1301 | // all the SubtypeTestCaches. |
    should_update_cache = old_instructions.Equals(new_instructions);
1303 | if (FLAG_trace_type_checks) { |
      THR_Print(" %s rebuilt type testing stub for %s\n",
                should_update_cache ? "Discarding" : "Installing",
                tts_type.ToCString());
1307 | } |
1308 | if (!should_update_cache) { |
1309 | tts_type.SetTypeTestingStub(new_code); |
1310 | } |
1311 | } |
1312 | #endif // !defined(DART_PRECOMPILED_RUNTIME) |
1313 | #endif // !defined(TARGET_ARCH_IA32) |
1314 | |
1315 | if (should_update_cache) { |
1316 | if (cache.IsNull()) { |
1317 | #if !defined(TARGET_ARCH_IA32) |
1318 | ASSERT(mode == kTypeCheckFromSlowStub || |
1319 | (mode == kTypeCheckFromLazySpecializeStub && |
1320 | would_update_cache_if_not_lazy)); |
1321 | // We lazily create [SubtypeTestCache] for those call sites which actually |
1322 | // need one and will patch the pool entry. |
1323 | DartFrameIterator iterator(thread, |
1324 | StackFrameIterator::kNoCrossThreadIteration); |
1325 | StackFrame* caller_frame = iterator.NextFrame(); |
      const Code& caller_code =
          Code::Handle(zone, caller_frame->LookupDartCode());
      const ObjectPool& pool =
          ObjectPool::Handle(zone, caller_code.GetObjectPool());
1330 | TypeTestingStubCallPattern tts_pattern(caller_frame->pc()); |
1331 | const intptr_t stc_pool_idx = tts_pattern.GetSubtypeTestCachePoolIndex(); |
1332 | // Ensure we do have a STC (lazily create it if not) and all threads use |
1333 | // the same STC. |
1334 | { |
1335 | SafepointMutexLocker ml(isolate->group()->subtype_test_cache_mutex()); |
1336 | cache ^= pool.ObjectAt<std::memory_order_acquire>(stc_pool_idx); |
1337 | if (cache.IsNull()) { |
1338 | resolve_dst_name(); |
1339 | // If this is a dynamic AssertAssignable check, then we must assume |
1340 | // all inputs may be needed, as the type may vary from call to call. |
1341 | const intptr_t num_inputs = |
1342 | dst_name.ptr() == |
1343 | Symbols::dynamic_assert_assignable_stc_check().ptr() |
1344 | ? SubtypeTestCache::kMaxInputs |
                  : SubtypeTestCache::UsedInputsForType(dst_type);
1346 | cache = SubtypeTestCache::New(num_inputs); |
1347 | pool.SetObjectAt<std::memory_order_release>(stc_pool_idx, cache); |
1348 | if (FLAG_trace_type_checks) { |
            THR_Print(" Installed new subtype test cache %#" Px " with %" Pd
                      " inputs at index %" Pd " of pool for %s\n",
1351 | static_cast<uword>(cache.ptr()), num_inputs, stc_pool_idx, |
1352 | caller_code.ToCString()); |
1353 | } |
1354 | } |
1355 | } |
1356 | #else |
1357 | UNREACHABLE(); |
1358 | #endif |
1359 | } |
1360 | |
    UpdateTypeTestCache(zone, thread, src_instance, dst_type,
                        instantiator_type_arguments, function_type_arguments,
                        Bool::True(), cache);
1364 | } |
1365 | |
1366 | arguments.SetReturn(src_instance); |
1367 | } |
1368 | |
// Report that the type of the given object is not bool in a conditional
// context. Throw an assertion error if the object is null (cf. boolean
// conversion in the language specification).
1372 | // Arg0: bad object. |
1373 | // Return value: none, throws TypeError or AssertionError. |
1374 | DEFINE_RUNTIME_ENTRY(NonBoolTypeError, 1) { |
1375 | const TokenPosition location = GetCallerLocation(); |
  const Instance& src_instance =
      Instance::CheckedHandle(zone, arguments.ArgAt(0));
1378 | |
1379 | if (src_instance.IsNull()) { |
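    // The argument slots filled in below are, by position: 0 = failed
    // assertion text, 1 = script url, 2 = line, 3 = column, 4 = error
    // message. (This is inferred from the values set here, not a documented
    // contract.)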
    const Array& args = Array::Handle(zone, Array::New(5));
    args.SetAt(
        0, String::Handle(
               zone,
               String::New(
                   "Failed assertion: boolean expression must not be null")));
1386 | |
1387 | // No source code for this assertion, set url to null. |
    args.SetAt(1, String::Handle(zone, String::null()));
    args.SetAt(2, Object::smi_zero());
    args.SetAt(3, Object::smi_zero());
    args.SetAt(4, String::Handle(zone, String::null()));

    Exceptions::ThrowByType(Exceptions::kAssertion, args);
1394 | UNREACHABLE(); |
1395 | } |
1396 | |
1397 | ASSERT(!src_instance.IsBool()); |
  const Type& bool_interface = Type::Handle(Type::BoolType());
  const AbstractType& src_type =
      AbstractType::Handle(zone, src_instance.GetType(Heap::kNew));
  Exceptions::CreateAndThrowTypeError(location, src_type, bool_interface,
                                      Symbols::BooleanExpression());
1403 | UNREACHABLE(); |
1404 | } |
1405 | |
1406 | DEFINE_RUNTIME_ENTRY(Throw, 1) { |
  const Instance& exception = Instance::CheckedHandle(zone, arguments.ArgAt(0));
1408 | Exceptions::Throw(thread, exception); |
1409 | } |
1410 | |
1411 | DEFINE_RUNTIME_ENTRY(ReThrow, 2) { |
  const Instance& exception = Instance::CheckedHandle(zone, arguments.ArgAt(0));
  const Instance& stacktrace =
      Instance::CheckedHandle(zone, arguments.ArgAt(1));
1415 | Exceptions::ReThrow(thread, exception, stacktrace); |
1416 | } |
1417 | |
1418 | // Patches static call in optimized code with the target's entry point. |
1419 | // Compiles target if necessary. |
1420 | DEFINE_RUNTIME_ENTRY(PatchStaticCall, 0) { |
1421 | #if !defined(DART_PRECOMPILED_RUNTIME) |
1422 | DartFrameIterator iterator(thread, |
1423 | StackFrameIterator::kNoCrossThreadIteration); |
1424 | StackFrame* caller_frame = iterator.NextFrame(); |
1425 | ASSERT(caller_frame != nullptr); |
  const Code& caller_code = Code::Handle(zone, caller_frame->LookupDartCode());
  ASSERT(!caller_code.IsNull());
  ASSERT(caller_code.is_optimized());
  const Function& target_function = Function::Handle(
      zone, caller_code.GetStaticCallTargetFunctionAt(caller_frame->pc()));
  const Code& target_code = Code::Handle(zone, target_function.EnsureHasCode());
1432 | // Before patching verify that we are not repeatedly patching to the same |
1433 | // target. |
  if (target_code.ptr() !=
      CodePatcher::GetStaticCallTargetAt(caller_frame->pc(), caller_code)) {
    GcSafepointOperationScope safepoint(thread);
    if (target_code.ptr() !=
        CodePatcher::GetStaticCallTargetAt(caller_frame->pc(), caller_code)) {
      CodePatcher::PatchStaticCallAt(caller_frame->pc(), caller_code,
                                     target_code);
      caller_code.SetStaticCallTargetCodeAt(caller_frame->pc(), target_code);
1442 | if (FLAG_trace_patching) { |
        THR_Print("PatchStaticCall: patching caller pc %#" Px
                  " to '%s' new entry point %#" Px " (%s)\n",
                  caller_frame->pc(), target_function.ToFullyQualifiedCString(),
                  target_code.EntryPoint(),
                  target_code.is_optimized() ? "optimized" : "unoptimized");
1449 | } |
1450 | } |
1451 | } |
1452 | arguments.SetReturn(target_code); |
1453 | #else |
1454 | UNREACHABLE(); |
1455 | #endif |
1456 | } |
1457 | |
1458 | #if defined(PRODUCT) || defined(DART_PRECOMPILED_RUNTIME) |
1459 | DEFINE_RUNTIME_ENTRY(BreakpointRuntimeHandler, 0) { |
1460 | UNREACHABLE(); |
1461 | return; |
1462 | } |
1463 | #else |
1464 | // Gets called from debug stub when code reaches a breakpoint |
1465 | // set on a runtime stub call. |
1466 | DEFINE_RUNTIME_ENTRY(BreakpointRuntimeHandler, 0) { |
1467 | DartFrameIterator iterator(thread, |
1468 | StackFrameIterator::kNoCrossThreadIteration); |
1469 | StackFrame* caller_frame = iterator.NextFrame(); |
1470 | ASSERT(caller_frame != nullptr); |
1471 | Code& orig_stub = Code::Handle(zone); |
  orig_stub =
      isolate->group()->debugger()->GetPatchedStubAddress(caller_frame->pc());
  const Error& error =
      Error::Handle(zone, isolate->debugger()->PauseBreakpoint());
  ThrowIfError(error);
1477 | arguments.SetReturn(orig_stub); |
1478 | } |
1479 | #endif |
1480 | |
1481 | DEFINE_RUNTIME_ENTRY(SingleStepHandler, 0) { |
1482 | #if defined(PRODUCT) || defined(DART_PRECOMPILED_RUNTIME) |
1483 | UNREACHABLE(); |
1484 | #else |
  const Error& error =
      Error::Handle(zone, isolate->debugger()->PauseStepping());
  ThrowIfError(error);
1488 | #endif |
1489 | } |
1490 | |
1491 | // An instance call of the form o.f(...) could not be resolved. Check if |
1492 | // there is a getter with the same name. If so, invoke it. If the value is |
1493 | // a closure, invoke it with the given arguments. If the value is a |
1494 | // non-closure, attempt to invoke "call" on it. |
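// For example, for `o.f(1)` where the class of `o` only defines a getter or
// field `f`, we resolve `get:f` and dispatch through an invoke-field
// dispatcher that invokes the value returned by the getter.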
1495 | static bool ResolveCallThroughGetter(const Class& receiver_class, |
1496 | const String& target_name, |
1497 | const String& demangled, |
1498 | const Array& arguments_descriptor, |
1499 | Function* result) { |
  const String& getter_name = String::Handle(Field::GetterName(demangled));
  const int kTypeArgsLen = 0;
  const int kNumArguments = 1;
  ArgumentsDescriptor args_desc(Array::Handle(
      ArgumentsDescriptor::NewBoxed(kTypeArgsLen, kNumArguments)));
  const Function& getter =
      Function::Handle(Resolver::ResolveDynamicForReceiverClass(
          receiver_class, getter_name, args_desc));
1508 | if (getter.IsNull() || getter.IsMethodExtractor()) { |
1509 | return false; |
1510 | } |
1511 | // We do this on the target_name, _not_ on the demangled name, so that |
1512 | // FlowGraphBuilder::BuildGraphOfInvokeFieldDispatcher can detect dynamic |
1513 | // calls from the dyn: tag on the name of the dispatcher. |
  const Function& target_function =
      Function::Handle(receiver_class.GetInvocationDispatcher(
          target_name, arguments_descriptor,
          UntaggedFunction::kInvokeFieldDispatcher, FLAG_lazy_dispatchers));
1518 | ASSERT(!target_function.IsNull() || !FLAG_lazy_dispatchers); |
1519 | if (FLAG_trace_ic) { |
    OS::PrintErr(
        "InvokeField IC miss: adding <%s> id:%" Pd " -> <%s>\n",
1522 | receiver_class.ToCString(), receiver_class.id(), |
1523 | target_function.IsNull() ? "null" : target_function.ToCString()); |
1524 | } |
1525 | *result = target_function.ptr(); |
1526 | return true; |
1527 | } |
1528 | |
1529 | // Handle other invocations (implicit closures, noSuchMethod). |
1530 | FunctionPtr InlineCacheMissHelper(const Class& receiver_class, |
1531 | const Array& args_descriptor, |
1532 | const String& target_name) { |
  // Create a demangled version of the target_name, if necessary. This is used
  // for the field getter in ResolveCallThroughGetter and as the target name
  // for the NoSuchMethod dispatcher (if needed).
1536 | const String* demangled = &target_name; |
  if (Function::IsDynamicInvocationForwarderName(target_name)) {
    demangled = &String::Handle(
        Function::DemangleDynamicInvocationForwarderName(target_name));
1540 | } |
  const bool is_getter = Field::IsGetterName(*demangled);
  Function& result = Function::Handle();
  if (is_getter ||
      !ResolveCallThroughGetter(receiver_class, target_name, *demangled,
                                args_descriptor, &result)) {
1546 | ArgumentsDescriptor desc(args_descriptor); |
    const Function& target_function =
        Function::Handle(receiver_class.GetInvocationDispatcher(
            *demangled, args_descriptor,
            UntaggedFunction::kNoSuchMethodDispatcher, FLAG_lazy_dispatchers));
1551 | if (FLAG_trace_ic) { |
      OS::PrintErr(
          "NoSuchMethod IC miss: adding <%s> id:%" Pd " -> <%s>\n",
1554 | receiver_class.ToCString(), receiver_class.id(), |
1555 | target_function.IsNull() ? "null" : target_function.ToCString()); |
1556 | } |
1557 | result = target_function.ptr(); |
1558 | } |
1559 | // May be null if --no-lazy-dispatchers, in which case dispatch will be |
1560 | // handled by NoSuchMethodFromCallStub. |
1561 | ASSERT(!result.IsNull() || !FLAG_lazy_dispatchers); |
1562 | return result.ptr(); |
1563 | } |
1564 | |
1565 | #if !defined(DART_PRECOMPILED_RUNTIME) |
1566 | static void TrySwitchInstanceCall(Thread* thread, |
1567 | StackFrame* caller_frame, |
1568 | const Code& caller_code, |
1569 | const Function& caller_function, |
1570 | const ICData& ic_data, |
1571 | const Function& target_function) { |
1572 | auto zone = thread->zone(); |
1573 | |
1574 | // Monomorphic/megamorphic calls only check the receiver CID. |
1575 | if (ic_data.NumArgsTested() != 1) return; |
1576 | |
1577 | ASSERT(ic_data.rebind_rule() == ICData::kInstance); |
1578 | |
1579 | // Monomorphic/megamorphic calls don't record exactness. |
1580 | if (ic_data.is_tracking_exactness()) return; |
1581 | |
1582 | #if !defined(PRODUCT) |
  // Monomorphic/megamorphic calls do not check the isolate's stepping flag.
1584 | if (thread->isolate()->has_attempted_stepping()) return; |
1585 | #endif |
1586 | |
1587 | // Monomorphic/megamorphic calls are only for unoptimized code. |
1588 | ASSERT(!caller_code.is_optimized()); |
1589 | |
  // Code is detached from its function. This will prevent us from resetting
  // the switchable call later because resets are function-based and because
  // the ic_data_array belongs to the function instead of the code. This
  // should only happen because of reload, but it sometimes happens with KBC
  // mixed mode, probably through a race between foreground and background
  // compilation.
1595 | if (caller_function.unoptimized_code() != caller_code.ptr()) { |
1596 | return; |
1597 | } |
1598 | #if !defined(PRODUCT) |
1599 | // Skip functions that contain breakpoints or when debugger is in single |
1600 | // stepping mode. |
  if (thread->isolate_group()->debugger()->IsDebugging(thread,
                                                       caller_function)) {
1603 | return; |
1604 | } |
1605 | #endif |
1606 | |
1607 | const intptr_t num_checks = ic_data.NumberOfChecks(); |
1608 | |
1609 | // Monomorphic call. |
1610 | if (FLAG_unopt_monomorphic_calls && (num_checks == 1)) { |
1611 | // A call site in the monomorphic state does not load the arguments |
1612 | // descriptor, so do not allow transition to this state if the callee |
1613 | // needs it. |
1614 | if (target_function.PrologueNeedsArgumentsDescriptor()) { |
1615 | return; |
1616 | } |
1617 | |
1618 | const Array& data = Array::Handle(zone, ic_data.entries()); |
1619 | const Code& target = Code::Handle(zone, target_function.EnsureHasCode()); |
    CodePatcher::PatchInstanceCallAt(caller_frame->pc(), caller_code, data,
                                     target);
1622 | if (FLAG_trace_ic) { |
      OS::PrintErr("Instance call at %" Px
                   " switching to monomorphic dispatch, %s\n",
1625 | caller_frame->pc(), ic_data.ToCString()); |
1626 | } |
1627 | return; // Success. |
1628 | } |
1629 | |
1630 | // Megamorphic call. |
1631 | if (FLAG_unopt_megamorphic_calls && |
1632 | (num_checks > FLAG_max_polymorphic_checks)) { |
1633 | const String& name = String::Handle(zone, ic_data.target_name()); |
1634 | const Array& descriptor = |
1635 | Array::Handle(zone, ic_data.arguments_descriptor()); |
1636 | const MegamorphicCache& cache = MegamorphicCache::Handle( |
1637 | zone, MegamorphicCacheTable::Lookup(thread, name, descriptor)); |
1638 | ic_data.set_is_megamorphic(true); |
    CodePatcher::PatchInstanceCallAt(caller_frame->pc(), caller_code, cache,
                                     StubCode::MegamorphicCall());
1641 | if (FLAG_trace_ic) { |
      OS::PrintErr("Instance call at %" Px
                   " switching to megamorphic dispatch, %s\n",
1644 | caller_frame->pc(), ic_data.ToCString()); |
1645 | } |
1646 | return; // Success. |
1647 | } |
1648 | } |
1649 | #endif // !defined(DART_PRECOMPILED_RUNTIME) |
1650 | |
// Performs the subtype test and returns one of the two constant functions
// based on the result.
1652 | static FunctionPtr ComputeTypeCheckTarget(const Instance& receiver, |
1653 | const AbstractType& type, |
1654 | const ArgumentsDescriptor& desc) { |
  const bool result = receiver.IsInstanceOf(
      type, Object::null_type_arguments(), Object::null_type_arguments());
1657 | const ObjectStore* store = IsolateGroup::Current()->object_store(); |
  const Function& target =
      Function::Handle(result ? store->simple_instance_of_true_function()
                              : store->simple_instance_of_false_function());
1661 | ASSERT(!target.IsNull()); |
1662 | return target.ptr(); |
1663 | } |
1664 | |
1665 | static FunctionPtr Resolve( |
1666 | Thread* thread, |
1667 | Zone* zone, |
1668 | const GrowableArray<const Instance*>& caller_arguments, |
1669 | const Class& receiver_class, |
1670 | const String& name, |
1671 | const Array& descriptor) { |
1672 | ASSERT(name.IsSymbol()); |
1673 | auto& target_function = Function::Handle(zone); |
1674 | ArgumentsDescriptor args_desc(descriptor); |
1675 | |
1676 | if (receiver_class.EnsureIsFinalized(thread) == Error::null()) { |
    target_function = Resolver::ResolveDynamicForReceiverClass(
        receiver_class, name, args_desc);
1679 | } |
1680 | if (caller_arguments.length() == 2 && |
1681 | target_function.ptr() == thread->isolate_group() |
1682 | ->object_store() |
1683 | ->simple_instance_of_function()) { |
    // Replace the target function with the constant function.
1685 | const AbstractType& type = AbstractType::Cast(*caller_arguments[1]); |
1686 | target_function = |
1687 | ComputeTypeCheckTarget(*caller_arguments[0], type, args_desc); |
1688 | } |
1689 | |
1690 | if (target_function.IsNull()) { |
    target_function = InlineCacheMissHelper(receiver_class, descriptor, name);
1692 | } |
1693 | if (target_function.IsNull()) { |
1694 | ASSERT(!FLAG_lazy_dispatchers); |
1695 | } |
1696 | |
1697 | return target_function.ptr(); |
1698 | } |
1699 | |
1700 | // Handles a static call in unoptimized code that has one argument type not |
1701 | // seen before. Compile the target if necessary and update the ICData. |
1702 | // Arg0: argument. |
1703 | // Arg1: IC data object. |
1704 | DEFINE_RUNTIME_ENTRY(StaticCallMissHandlerOneArg, 2) { |
  const Instance& arg = Instance::CheckedHandle(zone, arguments.ArgAt(0));
  const ICData& ic_data = ICData::CheckedHandle(zone, arguments.ArgAt(1));
1707 | // IC data for static call is prepopulated with the statically known target. |
1708 | ASSERT(ic_data.NumberOfChecksIs(1)); |
  const Function& target = Function::Handle(zone, ic_data.GetTargetAt(0));
  target.EnsureHasCode();
  ASSERT(!target.IsNull() && target.HasCode());
  ic_data.EnsureHasReceiverCheck(arg.GetClassId(), target, 1);
1713 | if (FLAG_trace_ic) { |
1714 | DartFrameIterator iterator(thread, |
1715 | StackFrameIterator::kNoCrossThreadIteration); |
1716 | StackFrame* caller_frame = iterator.NextFrame(); |
1717 | ASSERT(caller_frame != nullptr); |
    OS::PrintErr("StaticCallMissHandler at %#" Px " target %s (%" Pd ")\n",
1719 | caller_frame->pc(), target.ToCString(), arg.GetClassId()); |
1720 | } |
1721 | arguments.SetReturn(target); |
1722 | } |
1723 | |
1724 | // Handles a static call in unoptimized code that has two argument types not |
1725 | // seen before. Compile the target if necessary and update the ICData. |
1726 | // Arg0: argument 0. |
1727 | // Arg1: argument 1. |
1728 | // Arg2: IC data object. |
1729 | DEFINE_RUNTIME_ENTRY(StaticCallMissHandlerTwoArgs, 3) { |
  const Instance& arg0 = Instance::CheckedHandle(zone, arguments.ArgAt(0));
  const Instance& arg1 = Instance::CheckedHandle(zone, arguments.ArgAt(1));
  const ICData& ic_data = ICData::CheckedHandle(zone, arguments.ArgAt(2));
1733 | // IC data for static call is prepopulated with the statically known target. |
1734 | ASSERT(!ic_data.NumberOfChecksIs(0)); |
  const Function& target = Function::Handle(zone, ic_data.GetTargetAt(0));
1736 | target.EnsureHasCode(); |
1737 | GrowableArray<intptr_t> cids(2); |
1738 | cids.Add(arg0.GetClassId()); |
1739 | cids.Add(arg1.GetClassId()); |
  ic_data.EnsureHasCheck(cids, target);
1741 | if (FLAG_trace_ic) { |
1742 | DartFrameIterator iterator(thread, |
1743 | StackFrameIterator::kNoCrossThreadIteration); |
1744 | StackFrame* caller_frame = iterator.NextFrame(); |
1745 | ASSERT(caller_frame != nullptr); |
    OS::PrintErr("StaticCallMissHandler at %#" Px " target %s (%" Pd ", %" Pd
                 ")\n",
1748 | caller_frame->pc(), target.ToCString(), cids[0], cids[1]); |
1749 | } |
1750 | arguments.SetReturn(target); |
1751 | } |
1752 | |
1753 | #if defined(DART_PRECOMPILED_RUNTIME) |
1754 | |
1755 | static bool IsSingleTarget(IsolateGroup* isolate_group, |
1756 | Zone* zone, |
1757 | intptr_t lower_cid, |
1758 | intptr_t upper_cid, |
1759 | const Function& target, |
1760 | const String& name) { |
1761 | Class& cls = Class::Handle(zone); |
1762 | ClassTable* table = isolate_group->class_table(); |
1763 | Function& other_target = Function::Handle(zone); |
1764 | for (intptr_t cid = lower_cid; cid <= upper_cid; cid++) { |
1765 | if (!table->HasValidClassAt(cid)) continue; |
1766 | cls = table->At(cid); |
1767 | if (cls.is_abstract()) continue; |
1768 | if (!cls.is_allocated()) continue; |
1769 | other_target = Resolver::ResolveDynamicAnyArgs(zone, cls, name, |
1770 | /*allow_add=*/false); |
1771 | if (other_target.ptr() != target.ptr()) { |
1772 | return false; |
1773 | } |
1774 | } |
1775 | return true; |
1776 | } |
1777 | |
1778 | class SavedUnlinkedCallMapKeyEqualsTraits : public AllStatic { |
1779 | public: |
  static const char* Name() { return "SavedUnlinkedCallMapKeyEqualsTraits"; }
1781 | static bool ReportStats() { return false; } |
1782 | |
1783 | static bool IsMatch(const Object& key1, const Object& key2) { |
1784 | if (!key1.IsInteger() || !key2.IsInteger()) return false; |
1785 | return Integer::Cast(key1).Equals(Integer::Cast(key2)); |
1786 | } |
1787 | static uword Hash(const Object& key) { |
1788 | return Integer::Cast(key).CanonicalizeHash(); |
1789 | } |
1790 | }; |
1791 | |
1792 | using UnlinkedCallMap = UnorderedHashMap<SavedUnlinkedCallMapKeyEqualsTraits>; |
1793 | |
1794 | static void SaveUnlinkedCall(Zone* zone, |
1795 | Isolate* isolate, |
1796 | uword frame_pc, |
1797 | const UnlinkedCall& unlinked_call) { |
1798 | IsolateGroup* isolate_group = isolate->group(); |
1799 | |
1800 | SafepointMutexLocker ml(isolate_group->unlinked_call_map_mutex()); |
1801 | if (isolate_group->saved_unlinked_calls() == Array::null()) { |
1802 | const auto& initial_map = |
1803 | Array::Handle(zone, HashTables::New<UnlinkedCallMap>(16, Heap::kOld)); |
1804 | isolate_group->set_saved_unlinked_calls(initial_map); |
1805 | } |
1806 | |
1807 | UnlinkedCallMap unlinked_call_map(zone, |
1808 | isolate_group->saved_unlinked_calls()); |
1809 | const auto& pc = Integer::Handle(zone, Integer::NewFromUint64(frame_pc)); |
1810 | // Some other isolate might have updated unlinked_call_map[pc] too, but |
1811 | // their update should be identical to ours. |
1812 | const auto& new_or_old_value = UnlinkedCall::Handle( |
1813 | zone, UnlinkedCall::RawCast( |
1814 | unlinked_call_map.InsertOrGetValue(pc, unlinked_call))); |
1815 | RELEASE_ASSERT(new_or_old_value.ptr() == unlinked_call.ptr()); |
1816 | isolate_group->set_saved_unlinked_calls(unlinked_call_map.Release()); |
1817 | } |
1818 | |
1819 | static UnlinkedCallPtr LoadUnlinkedCall(Zone* zone, |
1820 | Isolate* isolate, |
1821 | uword pc) { |
1822 | IsolateGroup* isolate_group = isolate->group(); |
1823 | |
1824 | SafepointMutexLocker ml(isolate_group->unlinked_call_map_mutex()); |
1825 | ASSERT(isolate_group->saved_unlinked_calls() != Array::null()); |
1826 | UnlinkedCallMap unlinked_call_map(zone, |
1827 | isolate_group->saved_unlinked_calls()); |
1828 | |
1829 | const auto& pc_integer = Integer::Handle(zone, Integer::NewFromUint64(pc)); |
1830 | const auto& unlinked_call = UnlinkedCall::Cast( |
1831 | Object::Handle(zone, unlinked_call_map.GetOrDie(pc_integer))); |
1832 | isolate_group->set_saved_unlinked_calls(unlinked_call_map.Release()); |
1833 | return unlinked_call.ptr(); |
1834 | } |
1835 | |
1836 | // NOTE: Right now we never delete [UnlinkedCall] objects. They are needed while |
1837 | // a call site is in Unlinked/Monomorphic/MonomorphicSmiable/SingleTarget |
1838 | // states. |
1839 | // |
1840 | // Theoretically we could free the [UnlinkedCall] object once we transition the |
1841 | // call site to use ICData/MegamorphicCache, but that would require careful |
1842 | // coordination between the deleter and a possible concurrent reader. |
1843 | // |
1844 | // To simplify the code we decided not to do that atm (only a very small |
1845 | // fraction of callsites in AOT use switchable calls, the name/args-descriptor |
1846 | // objects are kept alive anyways -> there is little memory savings from |
1847 | // freeing the [UnlinkedCall] objects). |
1848 | |
1849 | #endif // defined(DART_PRECOMPILED_RUNTIME) |
1850 | |
1851 | enum class MissHandler { |
1852 | kInlineCacheMiss, |
1853 | kSwitchableCallMiss, |
1854 | kFixCallersTargetMonomorphic, |
1855 | }; |
1856 | |
1857 | // Handles updating of type feedback and possible patching of instance calls. |
1858 | // |
1859 | // It works in 3 separate steps: |
1860 | // - resolve the actual target |
1861 | // - update type feedback & (optionally) perform call site transition |
1862 | // - return the right values |
1863 | // |
1864 | // Depending on the JIT/AOT mode we obtain current and patch new (target, data) |
1865 | // differently: |
1866 | // |
1867 | // - JIT calls must be patched with CodePatcher::PatchInstanceCallAt() |
1868 | // - AOT calls must be patched with CodePatcher::PatchSwitchableCallAt() |
1869 | // |
1870 | // Independent of which miss handler was used or how we will return, we look at |
1871 | // current (target, data) and see if we need to transition the call site to a |
1872 | // new (target, data). We do this while holding `IG->patchable_call_mutex()`. |
1873 | // |
1874 | // Depending on which miss handler got called we might need to return |
1875 | // differently: |
1876 | // |
// - SwitchableCallMiss will get a (stub, data) return value
// - InlineCache*Miss will get the target function as return value
1879 | // |
1880 | class PatchableCallHandler { |
1881 | public: |
1882 | PatchableCallHandler(Thread* thread, |
1883 | const GrowableArray<const Instance*>& caller_arguments, |
1884 | MissHandler miss_handler, |
1885 | NativeArguments arguments, |
1886 | StackFrame* caller_frame, |
1887 | const Code& caller_code, |
1888 | const Function& caller_function) |
1889 | : isolate_(thread->isolate()), |
1890 | thread_(thread), |
1891 | zone_(thread->zone()), |
1892 | caller_arguments_(caller_arguments), |
1893 | miss_handler_(miss_handler), |
1894 | arguments_(arguments), |
1895 | caller_frame_(caller_frame), |
1896 | caller_code_(caller_code), |
1897 | caller_function_(caller_function), |
1898 | name_(String::Handle()), |
1899 | args_descriptor_(Array::Handle()) { |
    // We only have two-argument IC calls in JIT mode.
1901 | ASSERT(caller_arguments_.length() == 1 || !FLAG_precompiled_mode); |
1902 | } |
1903 | |
1904 | void ResolveSwitchAndReturn(const Object& data); |
1905 | |
1906 | private: |
1907 | FunctionPtr ResolveTargetFunction(const Object& data); |
1908 | |
1909 | #if defined(DART_PRECOMPILED_RUNTIME) |
1910 | void HandleMissAOT(const Object& old_data, |
1911 | uword old_entry, |
1912 | const Function& target_function); |
1913 | |
1914 | void DoUnlinkedCallAOT(const UnlinkedCall& unlinked, |
1915 | const Function& target_function); |
1916 | void DoMonomorphicMissAOT(const Object& old_data, |
1917 | const Function& target_function); |
1918 | void DoSingleTargetMissAOT(const SingleTargetCache& data, |
1919 | const Function& target_function); |
1920 | void DoICDataMissAOT(const ICData& data, const Function& target_function); |
1921 | bool CanExtendSingleTargetRange(const String& name, |
1922 | const Function& old_target, |
1923 | const Function& target_function, |
1924 | intptr_t* lower, |
1925 | intptr_t* upper); |
1926 | #else |
1927 | void HandleMissJIT(const Object& old_data, |
1928 | const Code& old_target, |
1929 | const Function& target_function); |
1930 | |
1931 | void DoMonomorphicMissJIT(const Object& old_data, |
1932 | const Function& target_function); |
1933 | void DoICDataMissJIT(const ICData& data, |
1934 | const Object& old_data, |
1935 | const Function& target_function); |
1936 | #endif // !defined(DART_PRECOMPILED_RUNTIME) |
1937 | void DoMegamorphicMiss(const MegamorphicCache& data, |
1938 | const Function& target_function); |
1939 | |
1940 | void UpdateICDataWithTarget(const ICData& ic_data, |
1941 | const Function& target_function); |
1942 | void TrySwitch(const ICData& ic_data, const Function& target_function); |
1943 | |
1944 | void ReturnAOT(const Code& stub, const Object& data); |
1945 | void ReturnJIT(const Code& stub, const Object& data, const Function& target); |
1946 | void ReturnJITorAOT(const Code& stub, |
1947 | const Object& data, |
1948 | const Function& target); |
1949 | |
1950 | const Instance& receiver() { return *caller_arguments_[0]; } |
1951 | |
1952 | bool should_consider_patching() { |
1953 | // In AOT we use switchable calls. |
1954 | if (FLAG_precompiled_mode) return true; |
1955 | |
1956 | // In JIT instance calls use a different calling sequence in unoptimized vs |
1957 | // optimized code (see [FlowGraphCompiler::EmitInstanceCallJIT] vs |
1958 | // [FlowGraphCompiler::EmitOptimizedInstanceCall]). |
1959 | // |
    // The [CodePatcher::GetInstanceCallAt] and
    // [CodePatcher::PatchInstanceCallAt] helpers only recognize the
    // unoptimized call pattern.
1962 | // |
1963 | // So we will not try to switch optimized instance calls. |
1964 | return !caller_code_.is_optimized(); |
1965 | } |
1966 | |
1967 | ICDataPtr NewICData(); |
1968 | ICDataPtr NewICDataWithTarget(intptr_t cid, const Function& target); |
1969 | |
1970 | Isolate* isolate_; |
1971 | Thread* thread_; |
1972 | Zone* zone_; |
1973 | const GrowableArray<const Instance*>& caller_arguments_; |
1974 | MissHandler miss_handler_; |
1975 | NativeArguments arguments_; |
1976 | StackFrame* caller_frame_; |
1977 | const Code& caller_code_; |
1978 | const Function& caller_function_; |
1979 | |
1980 | // Call-site information populated during resolution. |
1981 | String& name_; |
1982 | Array& args_descriptor_; |
1983 | bool is_monomorphic_hit_ = false; |
1984 | }; |
1985 | |
1986 | #if defined(DART_PRECOMPILED_RUNTIME) |
1987 | void PatchableCallHandler::DoUnlinkedCallAOT(const UnlinkedCall& unlinked, |
1988 | const Function& target_function) { |
1989 | const auto& ic_data = ICData::Handle( |
1990 | zone_, |
1991 | target_function.IsNull() |
1992 | ? NewICData() |
1993 | : NewICDataWithTarget(receiver().GetClassId(), target_function)); |
1994 | |
1995 | Object& object = Object::Handle(zone_, ic_data.ptr()); |
1996 | Code& code = Code::Handle(zone_, StubCode::ICCallThroughCode().ptr()); |
  // If the target function has optional parameters or is generic, its
  // prologue requires ARGS_DESC_REG to be populated. Yet the switchable calls
1999 | // do not populate that on the call site, which is why we don't transition |
2000 | // those call sites to monomorphic, but rather directly to call via stub |
2001 | // (which will populate the ARGS_DESC_REG from the ICData). |
2002 | // |
2003 | // Because of this we also don't generate monomorphic checks for those |
2004 | // functions. |
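  // In other words, the possible outcomes below are (a sketch): patch to a
  // raw cid plus the target's monomorphic entry when allowed, patch to a
  // MonomorphicSmiableCall plus its check stub otherwise, and leave the site
  // calling through the ICData stub when the target needs an arguments
  // descriptor.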
2005 | if (!target_function.IsNull() && |
2006 | !target_function.PrologueNeedsArgumentsDescriptor()) { |
2007 | // Patch to monomorphic call. |
2008 | ASSERT(target_function.HasCode()); |
2009 | const Code& target_code = |
2010 | Code::Handle(zone_, target_function.CurrentCode()); |
2011 | const Smi& expected_cid = |
2012 | Smi::Handle(zone_, Smi::New(receiver().GetClassId())); |
2013 | |
2014 | if (unlinked.can_patch_to_monomorphic()) { |
2015 | object = expected_cid.ptr(); |
2016 | code = target_code.ptr(); |
2017 | ASSERT(code.HasMonomorphicEntry()); |
2018 | } else { |
2019 | object = MonomorphicSmiableCall::New(expected_cid.Value(), target_code); |
2020 | code = StubCode::MonomorphicSmiableCheck().ptr(); |
2021 | } |
2022 | } |
2023 | CodePatcher::PatchSwitchableCallAt(caller_frame_->pc(), caller_code_, object, |
2024 | code); |
2025 | |
2026 | // Return the ICData. The miss stub will jump to continue in the IC lookup |
2027 | // stub. |
2028 | ReturnAOT(StubCode::ICCallThroughCode(), ic_data); |
2029 | } |
2030 | |
2031 | bool PatchableCallHandler::CanExtendSingleTargetRange( |
2032 | const String& name, |
2033 | const Function& old_target, |
2034 | const Function& target_function, |
2035 | intptr_t* lower, |
2036 | intptr_t* upper) { |
2037 | if (old_target.ptr() != target_function.ptr()) { |
2038 | return false; |
2039 | } |
2040 | intptr_t unchecked_lower, unchecked_upper; |
2041 | if (receiver().GetClassId() < *lower) { |
2042 | unchecked_lower = receiver().GetClassId(); |
2043 | unchecked_upper = *lower - 1; |
2044 | *lower = receiver().GetClassId(); |
2045 | } else { |
2046 | unchecked_upper = receiver().GetClassId(); |
2047 | unchecked_lower = *upper + 1; |
2048 | *upper = receiver().GetClassId(); |
2049 | } |
2050 | |
2051 | return IsSingleTarget(isolate_->group(), zone_, unchecked_lower, |
2052 | unchecked_upper, target_function, name); |
2053 | } |
2054 | #endif // defined(DART_PRECOMPILED_RUNTIME) |
2055 | |
2056 | #if defined(DART_PRECOMPILED_RUNTIME) |
2057 | void PatchableCallHandler::DoMonomorphicMissAOT( |
2058 | const Object& old_data, |
2059 | const Function& target_function) { |
2060 | classid_t old_expected_cid; |
2061 | if (old_data.IsSmi()) { |
2062 | old_expected_cid = Smi::Cast(old_data).Value(); |
2063 | } else { |
2064 | RELEASE_ASSERT(old_data.IsMonomorphicSmiableCall()); |
2065 | old_expected_cid = MonomorphicSmiableCall::Cast(old_data).expected_cid(); |
2066 | } |
2067 | const bool is_monomorphic_hit = old_expected_cid == receiver().GetClassId(); |
2068 | const auto& old_receiver_class = Class::Handle( |
2069 | zone_, isolate_->group()->class_table()->At(old_expected_cid)); |
2070 | const auto& old_target = Function::Handle( |
2071 | zone_, Resolve(thread_, zone_, caller_arguments_, old_receiver_class, |
2072 | name_, args_descriptor_)); |
2073 | |
2074 | const auto& ic_data = ICData::Handle( |
2075 | zone_, old_target.IsNull() |
2076 | ? NewICData() |
2077 | : NewICDataWithTarget(old_expected_cid, old_target)); |
2078 | |
2079 | if (is_monomorphic_hit) { |
    // The call site has just been updated to the monomorphic state with the
    // same class id - do nothing in that case: the stub will call through the
    // ic data.
2082 | ReturnAOT(StubCode::ICCallThroughCode(), ic_data); |
2083 | return; |
2084 | } |
2085 | |
2086 | intptr_t lower = old_expected_cid; |
2087 | intptr_t upper = old_expected_cid; |
2088 | if (CanExtendSingleTargetRange(name_, old_target, target_function, &lower, |
2089 | &upper)) { |
2090 | const SingleTargetCache& cache = |
2091 | SingleTargetCache::Handle(zone_, SingleTargetCache::New()); |
2092 | const Code& code = Code::Handle(zone_, target_function.CurrentCode()); |
2093 | cache.set_target(code); |
2094 | cache.set_entry_point(code.EntryPoint()); |
2095 | cache.set_lower_limit(lower); |
2096 | cache.set_upper_limit(upper); |
2097 | const Code& stub = StubCode::SingleTargetCall(); |
2098 | CodePatcher::PatchSwitchableCallAt(caller_frame_->pc(), caller_code_, cache, |
2099 | stub); |
2100 | // Return the ICData. The miss stub will jump to continue in the IC call |
2101 | // stub. |
2102 | ReturnAOT(StubCode::ICCallThroughCode(), ic_data); |
2103 | return; |
2104 | } |
2105 | |
2106 | // Patch to call through stub. |
2107 | const Code& stub = StubCode::ICCallThroughCode(); |
2108 | CodePatcher::PatchSwitchableCallAt(caller_frame_->pc(), caller_code_, ic_data, |
2109 | stub); |
2110 | |
2111 | // Return the ICData. The miss stub will jump to continue in the IC lookup |
2112 | // stub. |
2113 | ReturnAOT(stub, ic_data); |
2114 | } |
2115 | #endif // defined(DART_PRECOMPILED_RUNTIME) |
2116 | |
2117 | #if !defined(DART_PRECOMPILED_RUNTIME) |
2118 | void PatchableCallHandler::DoMonomorphicMissJIT( |
2119 | const Object& old_data, |
2120 | const Function& target_function) { |
2121 | // Monomorphic calls use the ICData::entries() as their data. |
  const auto& old_ic_data_entries = Array::Cast(old_data);
  // Any non-empty ICData::entries() has a backref to its ICData.
  const auto& ic_data =
      ICData::Handle(zone_, ICData::ICDataOfEntriesArray(old_ic_data_entries));
2126 | |
  // The target didn't change, so we can stay in the monomorphic state.
  if (ic_data.NumberOfChecksIs(1) &&
      (ic_data.GetReceiverClassIdAt(0) == receiver().GetClassId())) {
2130 | // No need to update ICData - it's already up-to-date. |
2131 | |
2132 | if (FLAG_trace_ic) { |
      OS::PrintErr("Instance call at %" Px
                   " updating code (old code was disabled)\n",
2135 | caller_frame_->pc()); |
2136 | } |
2137 | |
    // We stay in the monomorphic state, patch the code object and reload the
    // ICData entries array.
    const auto& code = Code::Handle(zone_, target_function.EnsureHasCode());
    const auto& data = Object::Handle(zone_, ic_data.entries());
    CodePatcher::PatchInstanceCallAt(caller_frame_->pc(), caller_code_, data,
                                     code);
    ReturnJIT(code, data, target_function);
2145 | return; |
2146 | } |
2147 | |
2148 | ASSERT(ic_data.NumArgsTested() == 1); |
2149 | const Code& stub = ic_data.is_tracking_exactness() |
2150 | ? StubCode::OneArgCheckInlineCacheWithExactnessCheck() |
2151 | : StubCode::OneArgCheckInlineCache(); |
2152 | if (FLAG_trace_ic) { |
    OS::PrintErr("Instance call at %" Px
                 " switching monomorphic to polymorphic dispatch, %s\n",
2155 | caller_frame_->pc(), ic_data.ToCString()); |
2156 | } |
  CodePatcher::PatchInstanceCallAt(caller_frame_->pc(), caller_code_, ic_data,
                                   stub);
2159 | |
2160 | ASSERT(caller_arguments_.length() == 1); |
2161 | UpdateICDataWithTarget(ic_data, target_function); |
2162 | ASSERT(should_consider_patching()); |
  TrySwitchInstanceCall(thread_, caller_frame_, caller_code_, caller_function_,
                        ic_data, target_function);
  ReturnJIT(stub, ic_data, target_function);
2166 | } |
2167 | #endif // !defined(DART_PRECOMPILED_RUNTIME) |
2168 | |
2169 | #if defined(DART_PRECOMPILED_RUNTIME) |
2170 | void PatchableCallHandler::DoSingleTargetMissAOT( |
2171 | const SingleTargetCache& data, |
2172 | const Function& target_function) { |
2173 | const Code& old_target_code = Code::Handle(zone_, data.target()); |
2174 | const Function& old_target = |
2175 | Function::Handle(zone_, Function::RawCast(old_target_code.owner())); |
2176 | |
2177 | // We lost the original ICData when we patched to the monomorphic case. |
2178 | const auto& ic_data = ICData::Handle( |
2179 | zone_, |
2180 | target_function.IsNull() |
2181 | ? NewICData() |
2182 | : NewICDataWithTarget(receiver().GetClassId(), target_function)); |
2183 | |
2184 | intptr_t lower = data.lower_limit(); |
2185 | intptr_t upper = data.upper_limit(); |
2186 | if (CanExtendSingleTargetRange(name_, old_target, target_function, &lower, |
2187 | &upper)) { |
2188 | data.set_lower_limit(lower); |
2189 | data.set_upper_limit(upper); |
2190 | // Return the ICData. The single target stub will jump to continue in the |
2191 | // IC call stub. |
2192 | ReturnAOT(StubCode::ICCallThroughCode(), ic_data); |
2193 | return; |
2194 | } |
2195 | |
2196 | // Call site is not single target, switch to call using ICData. |
2197 | const Code& stub = StubCode::ICCallThroughCode(); |
2198 | CodePatcher::PatchSwitchableCallAt(caller_frame_->pc(), caller_code_, ic_data, |
2199 | stub); |
2200 | |
2201 | // Return the ICData. The single target stub will jump to continue in the |
2202 | // IC call stub. |
2203 | ReturnAOT(stub, ic_data); |
2204 | } |
2205 | #endif // defined(DART_PRECOMPILED_RUNTIME) |
2206 | |
2207 | #if defined(DART_PRECOMPILED_RUNTIME) |
2208 | void PatchableCallHandler::DoICDataMissAOT(const ICData& ic_data, |
2209 | const Function& target_function) { |
2210 | const String& name = String::Handle(zone_, ic_data.target_name()); |
2211 | const Class& cls = Class::Handle(zone_, receiver().clazz()); |
2212 | ASSERT(!cls.IsNull()); |
2213 | const Array& descriptor = |
2214 | Array::CheckedHandle(zone_, ic_data.arguments_descriptor()); |
2215 | ArgumentsDescriptor args_desc(descriptor); |
2216 | if (FLAG_trace_ic || FLAG_trace_ic_miss_in_optimized) { |
    OS::PrintErr("ICData miss, class=%s, function<%" Pd ">=%s\n",
2218 | cls.ToCString(), args_desc.TypeArgsLen(), name.ToCString()); |
2219 | } |
2220 | |
2221 | if (target_function.IsNull()) { |
2222 | ReturnAOT(StubCode::NoSuchMethodDispatcher(), ic_data); |
2223 | return; |
2224 | } |
2225 | |
2226 | const intptr_t number_of_checks = ic_data.NumberOfChecks(); |
2227 | |
2228 | if ((number_of_checks == 0) && |
2229 | (!FLAG_precompiled_mode || ic_data.receiver_cannot_be_smi()) && |
2230 | !target_function.PrologueNeedsArgumentsDescriptor()) { |
2231 | // This call site is unlinked: transition to a monomorphic direct call. |
2232 | // Note we cannot do this if the target has optional parameters because |
2233 | // the monomorphic direct call does not load the arguments descriptor. |
2234 | // We cannot do this if we are still in the middle of precompiling because |
2235 | // the monomorphic case hides a live instance selector from the |
2236 | // treeshaker. |
2237 | const Code& target_code = |
2238 | Code::Handle(zone_, target_function.EnsureHasCode()); |
2239 | const Smi& expected_cid = |
2240 | Smi::Handle(zone_, Smi::New(receiver().GetClassId())); |
2241 | ASSERT(target_code.HasMonomorphicEntry()); |
2242 | CodePatcher::PatchSwitchableCallAt(caller_frame_->pc(), caller_code_, |
2243 | expected_cid, target_code); |
2244 | ReturnAOT(target_code, expected_cid); |
2245 | } else { |
2246 | ic_data.EnsureHasReceiverCheck(receiver().GetClassId(), target_function); |
2247 | if (number_of_checks > FLAG_max_polymorphic_checks) { |
2248 | // Switch to megamorphic call. |
2249 | const MegamorphicCache& cache = MegamorphicCache::Handle( |
2250 | zone_, MegamorphicCacheTable::Lookup(thread_, name, descriptor)); |
2251 | const Code& stub = StubCode::MegamorphicCall(); |
2252 | |
2253 | CodePatcher::PatchSwitchableCallAt(caller_frame_->pc(), caller_code_, |
2254 | cache, stub); |
2255 | ReturnAOT(stub, cache); |
2256 | } else { |
2257 | ReturnAOT(StubCode::ICCallThroughCode(), ic_data); |
2258 | } |
2259 | } |
2260 | } |
2261 | #endif // defined(DART_PRECOMPILED_RUNTIME) |
2262 | |
2263 | #if !defined(DART_PRECOMPILED_RUNTIME) |
2264 | void PatchableCallHandler::DoICDataMissJIT(const ICData& ic_data, |
2265 | const Object& old_code, |
2266 | const Function& target_function) { |
2267 | ASSERT(ic_data.NumArgsTested() == caller_arguments_.length()); |
2268 | |
2269 | if (ic_data.NumArgsTested() == 1) { |
2270 | ASSERT(old_code.ptr() == StubCode::OneArgCheckInlineCache().ptr() || |
2271 | old_code.ptr() == |
2272 | StubCode::OneArgCheckInlineCacheWithExactnessCheck().ptr() || |
2273 | old_code.ptr() == |
2274 | StubCode::OneArgOptimizedCheckInlineCache().ptr() || |
2275 | old_code.ptr() == |
2276 | StubCode::OneArgOptimizedCheckInlineCacheWithExactnessCheck() |
2277 | .ptr() || |
2278 | old_code.ptr() == StubCode::ICCallBreakpoint().ptr() || |
2279 | (old_code.IsNull() && !should_consider_patching())); |
2280 | UpdateICDataWithTarget(ic_data, target_function); |
2281 | if (should_consider_patching()) { |
      TrySwitchInstanceCall(thread_, caller_frame_, caller_code_,
                            caller_function_, ic_data, target_function);
2284 | } |
    const Code& stub = Code::Handle(
        zone_, ic_data.is_tracking_exactness()
                   ? StubCode::OneArgCheckInlineCacheWithExactnessCheck().ptr()
                   : StubCode::OneArgCheckInlineCache().ptr());
    ReturnJIT(stub, ic_data, target_function);
2290 | } else { |
2291 | ASSERT(old_code.ptr() == StubCode::TwoArgsCheckInlineCache().ptr() || |
2292 | old_code.ptr() == StubCode::SmiAddInlineCache().ptr() || |
2293 | old_code.ptr() == StubCode::SmiLessInlineCache().ptr() || |
2294 | old_code.ptr() == StubCode::SmiEqualInlineCache().ptr() || |
2295 | old_code.ptr() == |
2296 | StubCode::TwoArgsOptimizedCheckInlineCache().ptr() || |
2297 | old_code.ptr() == StubCode::ICCallBreakpoint().ptr() || |
2298 | (old_code.IsNull() && !should_consider_patching())); |
2299 | UpdateICDataWithTarget(ic_data, target_function); |
    ReturnJIT(StubCode::TwoArgsCheckInlineCache(), ic_data, target_function);
2301 | } |
2302 | } |
2303 | #endif // !defined(DART_PRECOMPILED_RUNTIME) |
2304 | |
2305 | void PatchableCallHandler::DoMegamorphicMiss(const MegamorphicCache& data, |
2306 | const Function& target_function) { |
  const String& name = String::Handle(zone_, data.target_name());
  const Class& cls = Class::Handle(zone_, receiver().clazz());
  ASSERT(!cls.IsNull());
  const Array& descriptor =
      Array::CheckedHandle(zone_, data.arguments_descriptor());
2312 | ArgumentsDescriptor args_desc(descriptor); |
2313 | if (FLAG_trace_ic || FLAG_trace_ic_miss_in_optimized) { |
    OS::PrintErr("Megamorphic miss, class=%s, function<%" Pd ">=%s\n",
2315 | cls.ToCString(), args_desc.TypeArgsLen(), name.ToCString()); |
2316 | } |
2317 | if (target_function.IsNull()) { |
    ReturnJITorAOT(StubCode::NoSuchMethodDispatcher(), data, target_function);
2319 | return; |
2320 | } |
2321 | |
2322 | // Insert function found into cache. |
  const Smi& class_id = Smi::Handle(zone_, Smi::New(cls.id()));
  data.EnsureContains(class_id, target_function);
  ReturnJITorAOT(StubCode::MegamorphicCall(), data, target_function);
2326 | } |
2327 | |
2328 | void PatchableCallHandler::UpdateICDataWithTarget( |
2329 | const ICData& ic_data, |
2330 | const Function& target_function) { |
2331 | if (target_function.IsNull()) return; |
2332 | |
  // If, upon return from the runtime, we will invoke the target directly, we
  // have to increment the call count here in the ICData.
  // If we instead only insert a new ICData entry and return to the IC stub,
  // which will call the target, the stub will take care of the increment.
2337 | const bool call_target_directly = |
2338 | miss_handler_ == MissHandler::kInlineCacheMiss; |
2339 | const intptr_t invocation_count = call_target_directly ? 1 : 0; |
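  // E.g. an InlineCacheMiss handler returns the target and the caller invokes
  // it directly, so the new entry starts with a count of 1; a
  // SwitchableCallMiss returns to the IC stub, which performs the call and
  // does the increment itself.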
2340 | |
2341 | if (caller_arguments_.length() == 1) { |
2342 | auto exactness = StaticTypeExactnessState::NotTracking(); |
2343 | #if !defined(DART_PRECOMPILED_RUNTIME) |
2344 | if (ic_data.is_tracking_exactness()) { |
      exactness = receiver().IsNull()
                      ? StaticTypeExactnessState::NotExact()
                      : StaticTypeExactnessState::Compute(
                            Type::Cast(AbstractType::Handle(
                                ic_data.receivers_static_type())),
                            receiver());
2351 | } |
2352 | #endif // !defined(DART_PRECOMPILED_RUNTIME) |
    ic_data.EnsureHasReceiverCheck(receiver().GetClassId(), target_function,
                                   invocation_count, exactness);
2355 | } else { |
2356 | GrowableArray<intptr_t> class_ids(caller_arguments_.length()); |
2357 | ASSERT(ic_data.NumArgsTested() == caller_arguments_.length()); |
2358 | for (intptr_t i = 0; i < caller_arguments_.length(); i++) { |
2359 | class_ids.Add(caller_arguments_[i]->GetClassId()); |
2360 | } |
    ic_data.EnsureHasCheck(class_ids, target_function, invocation_count);
2362 | } |
2363 | } |
2364 | |
2365 | void PatchableCallHandler::ReturnAOT(const Code& stub, const Object& data) { |
2366 | ASSERT(miss_handler_ == MissHandler::kSwitchableCallMiss); |
  arguments_.SetArgAt(0, stub);  // Second return value.
2368 | arguments_.SetReturn(data); |
2369 | } |
2370 | |
2371 | void PatchableCallHandler::ReturnJIT(const Code& stub, |
2372 | const Object& data, |
2373 | const Function& target) { |
2374 | // In JIT we can have two different miss handlers to which we return slightly |
2375 | // differently. |
2376 | switch (miss_handler_) { |
2377 | case MissHandler::kSwitchableCallMiss: { |
      arguments_.SetArgAt(0, stub);  // Second return value.
2379 | arguments_.SetReturn(data); |
2380 | break; |
2381 | } |
2382 | case MissHandler::kFixCallersTargetMonomorphic: { |
      arguments_.SetArgAt(1, data);  // Second return value.
2384 | arguments_.SetReturn(stub); |
2385 | break; |
2386 | } |
2387 | case MissHandler::kInlineCacheMiss: { |
2388 | arguments_.SetReturn(target); |
2389 | break; |
2390 | } |
2391 | } |
2392 | } |
2393 | |
2394 | void PatchableCallHandler::ReturnJITorAOT(const Code& stub, |
2395 | const Object& data, |
2396 | const Function& target) { |
2397 | #if defined(DART_PRECOMPILED_MODE) |
2398 | ReturnAOT(stub, data); |
2399 | #else |
2400 | ReturnJIT(stub, data, target); |
2401 | #endif |
2402 | } |
2403 | |
2404 | ICDataPtr PatchableCallHandler::NewICData() { |
  return ICData::New(caller_function_, name_, args_descriptor_, DeoptId::kNone,
                     /*num_args_tested=*/1, ICData::kInstance);
2407 | } |
2408 | |
2409 | ICDataPtr PatchableCallHandler::NewICDataWithTarget(intptr_t cid, |
2410 | const Function& target) { |
2411 | GrowableArray<intptr_t> cids(1); |
2412 | cids.Add(cid); |
  return ICData::NewWithCheck(caller_function_, name_, args_descriptor_,
                              DeoptId::kNone, /*num_args_tested=*/1,
                              ICData::kInstance, &cids, target);
2416 | } |
2417 | |
2418 | FunctionPtr PatchableCallHandler::ResolveTargetFunction(const Object& data) { |
2419 | switch (data.GetClassId()) { |
2420 | case kUnlinkedCallCid: { |
      const auto& unlinked_call = UnlinkedCall::Cast(data);
2422 | |
2423 | #if defined(DART_PRECOMPILED_RUNTIME) |
2424 | // When transitioning out of UnlinkedCall to other states (e.g. |
2425 | // Monomorphic, MonomorphicSmiable, SingleTarget) we lose |
2426 | // name/arg-descriptor in AOT mode and cannot recover it. |
2427 | // |
2428 | // Even if we could recover an old target function (which was missed) - |
2429 | // which we cannot in AOT bare mode - we can still lose the name due to a |
2430 | // dyn:* call site potentially targeting non-dyn:* targets. |
2431 | // |
2432 | // => We will therefore retain the unlinked call here. |
2433 | // |
2434 | // In JIT mode we always use ICData from the call site, which has the |
2435 | // correct name/args-descriptor. |
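      //
      // For example, a dynamic call site `(o as dynamic).foo()` uses the
      // mangled selector dyn:foo, while a previously recorded target may only
      // be known under the plain selector foo.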
      SaveUnlinkedCall(zone_, isolate_, caller_frame_->pc(), unlinked_call);
#endif  // defined(DART_PRECOMPILED_RUNTIME)

      name_ = unlinked_call.target_name();
      args_descriptor_ = unlinked_call.arguments_descriptor();
      break;
    }
    case kMonomorphicSmiableCallCid:
      FALL_THROUGH;
#if defined(DART_PRECOMPILED_RUNTIME)
    case kSmiCid:
      FALL_THROUGH;
    case kSingleTargetCacheCid: {
      const auto& unlinked_call = UnlinkedCall::Handle(
          zone_, LoadUnlinkedCall(zone_, isolate_, caller_frame_->pc()));
      name_ = unlinked_call.target_name();
      args_descriptor_ = unlinked_call.arguments_descriptor();
      break;
    }
#else
    case kArrayCid: {
      // Monomorphic calls use the ICData::entries() as their data.
      const auto& ic_data_entries = Array::Cast(data);
      // Any non-empty ICData::entries() has a backref to its ICData.
      const auto& ic_data =
          ICData::Handle(zone_, ICData::ICDataOfEntriesArray(ic_data_entries));
      args_descriptor_ = ic_data.arguments_descriptor();
      name_ = ic_data.target_name();
      break;
    }
#endif  // defined(DART_PRECOMPILED_RUNTIME)
    case kICDataCid:
      FALL_THROUGH;
    case kMegamorphicCacheCid: {
      const CallSiteData& call_site_data = CallSiteData::Cast(data);
      name_ = call_site_data.target_name();
      args_descriptor_ = call_site_data.arguments_descriptor();
      break;
    }
    default:
      UNREACHABLE();
  }
  const Class& cls = Class::Handle(zone_, receiver().clazz());
  return Resolve(thread_, zone_, caller_arguments_, cls, name_,
                 args_descriptor_);
}

void PatchableCallHandler::ResolveSwitchAndReturn(const Object& old_data) {
  // Find out the actual target (which can be time-consuming) without holding
  // any locks.
  const auto& target_function =
      Function::Handle(zone_, ResolveTargetFunction(old_data));

  auto& data = Object::Handle(zone_);

  // We ensure that any transition of a patchable call is done atomically and
  // that we always transition forward (e.g. Monomorphic -> Polymorphic).
  //
  // Mutators are only stopped if we actually need to patch a patchable call.
  // We may not do that if we e.g. just add one more check to an ICData.
  SafepointMutexLocker ml(thread_->isolate_group()->patchable_call_mutex());

#if defined(DART_PRECOMPILED_RUNTIME)
  data =
      CodePatcher::GetSwitchableCallDataAt(caller_frame_->pc(), caller_code_);
  uword target_entry = 0;
  DEBUG_ONLY(target_entry = CodePatcher::GetSwitchableCallTargetEntryAt(
                 caller_frame_->pc(), caller_code_));
  HandleMissAOT(data, target_entry, target_function);
#else
  auto& code = Code::Handle(zone_);
  if (should_consider_patching()) {
    code ^= CodePatcher::GetInstanceCallAt(caller_frame_->pc(), caller_code_,
                                           &data);
  } else {
    ASSERT(old_data.IsICData() || old_data.IsMegamorphicCache());
    data = old_data.ptr();
  }
  HandleMissJIT(data, code, target_function);
#endif
}

#if defined(DART_PRECOMPILED_RUNTIME)

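// A switchable call site in AOT only moves forward through its states
// (unlinked -> monomorphic -> single target / ICData -> megamorphic); the
// Do*Miss* handlers below implement these transitions.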
void PatchableCallHandler::HandleMissAOT(const Object& old_data,
                                         uword old_entry,
                                         const Function& target_function) {
  switch (old_data.GetClassId()) {
    case kUnlinkedCallCid:
      ASSERT(old_entry ==
             StubCode::SwitchableCallMiss().MonomorphicEntryPoint());
      DoUnlinkedCallAOT(UnlinkedCall::Cast(old_data), target_function);
      break;
    case kMonomorphicSmiableCallCid:
      ASSERT(old_entry ==
             StubCode::MonomorphicSmiableCheck().MonomorphicEntryPoint());
      FALL_THROUGH;
    case kSmiCid:
      DoMonomorphicMissAOT(old_data, target_function);
      break;
    case kSingleTargetCacheCid:
      ASSERT(old_entry == StubCode::SingleTargetCall().MonomorphicEntryPoint());
      DoSingleTargetMissAOT(SingleTargetCache::Cast(old_data), target_function);
      break;
    case kICDataCid:
      ASSERT(old_entry ==
             StubCode::ICCallThroughCode().MonomorphicEntryPoint());
      DoICDataMissAOT(ICData::Cast(old_data), target_function);
      break;
    case kMegamorphicCacheCid:
      ASSERT(old_entry == StubCode::MegamorphicCall().MonomorphicEntryPoint());
      DoMegamorphicMiss(MegamorphicCache::Cast(old_data), target_function);
      break;
    default:
      UNREACHABLE();
  }
}

#else

void PatchableCallHandler::HandleMissJIT(const Object& old_data,
                                         const Code& old_code,
                                         const Function& target_function) {
  switch (old_data.GetClassId()) {
    case kArrayCid:
      // ICData three-element array: Smi(receiver CID), Smi(count),
      // Function(target). It is the Array from ICData::entries_.
      DoMonomorphicMissJIT(old_data, target_function);
      break;
    case kICDataCid:
      DoICDataMissJIT(ICData::Cast(old_data), old_code, target_function);
      break;
    case kMegamorphicCacheCid:
      ASSERT(old_code.ptr() == StubCode::MegamorphicCall().ptr() ||
             (old_code.IsNull() && !should_consider_patching()));
      DoMegamorphicMiss(MegamorphicCache::Cast(old_data), target_function);
      break;
    default:
      UNREACHABLE();
  }
}
#endif  // defined(DART_PRECOMPILED_RUNTIME)

static void InlineCacheMissHandler(Thread* thread,
                                   Zone* zone,
                                   const GrowableArray<const Instance*>& args,
                                   const ICData& ic_data,
                                   NativeArguments native_arguments) {
#if !defined(DART_PRECOMPILED_RUNTIME)
  DartFrameIterator iterator(thread,
                             StackFrameIterator::kNoCrossThreadIteration);
  StackFrame* caller_frame = iterator.NextFrame();
  const auto& caller_code = Code::Handle(zone, caller_frame->LookupDartCode());
  const auto& caller_function =
      Function::Handle(zone, caller_frame->LookupDartFunction());

  PatchableCallHandler handler(thread, args, MissHandler::kInlineCacheMiss,
                               native_arguments, caller_frame, caller_code,
                               caller_function);

  handler.ResolveSwitchAndReturn(ic_data);
#else
  UNREACHABLE();
#endif  // !defined(DART_PRECOMPILED_RUNTIME)
}

// Handles inline cache misses by updating the IC data array of the call site.
// Arg0: Receiver object.
// Arg1: IC data object.
// Returns: target function with compiled code or null.
// Modifies the instance call to hold the updated IC data array.
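// Illustrative Dart (not VM code): the first execution of a call such as
//   a.toString();
// misses its inline cache and lands here; the receiver's class id is then
// added to the ICData so later calls on the same class hit the cache.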
DEFINE_RUNTIME_ENTRY(InlineCacheMissHandlerOneArg, 2) {
  const Instance& receiver = Instance::CheckedHandle(zone, arguments.ArgAt(0));
  const ICData& ic_data = ICData::CheckedHandle(zone, arguments.ArgAt(1));
  RELEASE_ASSERT(!FLAG_precompiled_mode);
  GrowableArray<const Instance*> args(1);
  args.Add(&receiver);
  InlineCacheMissHandler(thread, zone, args, ic_data, arguments);
}

// Handles inline cache misses by updating the IC data array of the call site.
// Arg0: Receiver object.
// Arg1: Argument after receiver.
// Arg2: IC data object.
// Returns: target function with compiled code or null.
// Modifies the instance call to hold the updated IC data array.
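// Illustrative Dart (not VM code): an instance call whose inline cache tests
// two arguments, e.g. the receiver and first argument of a binary operation
// such as
//   a + b;
// is routed through this two-argument variant on a miss.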
DEFINE_RUNTIME_ENTRY(InlineCacheMissHandlerTwoArgs, 3) {
  const Instance& receiver = Instance::CheckedHandle(zone, arguments.ArgAt(0));
  const Instance& other = Instance::CheckedHandle(zone, arguments.ArgAt(1));
  const ICData& ic_data = ICData::CheckedHandle(zone, arguments.ArgAt(2));
  RELEASE_ASSERT(!FLAG_precompiled_mode);
  GrowableArray<const Instance*> args(2);
  args.Add(&receiver);
  args.Add(&other);
  InlineCacheMissHandler(thread, zone, args, ic_data, arguments);
}

// Handles the first use of an instance call.
// Arg1: Receiver.
// Arg0: Stub out.
// Returns: the ICData used to continue with the call.
DEFINE_RUNTIME_ENTRY(SwitchableCallMiss, 2) {
  const Instance& receiver = Instance::CheckedHandle(zone, arguments.ArgAt(1));

  StackFrameIterator iterator(ValidationPolicy::kDontValidateFrames, thread,
                              StackFrameIterator::kNoCrossThreadIteration);
  StackFrame* exit_frame = iterator.NextFrame();
  ASSERT(exit_frame->IsExitFrame());
  StackFrame* miss_handler_frame = iterator.NextFrame();
  // This runtime entry can be called either from the miss stub or from the
  // switchable_call_miss "dart" stub/function set up in
  // [MegamorphicCacheTable::InitMissHandler].
  ASSERT(miss_handler_frame->IsStubFrame() ||
         miss_handler_frame->IsDartFrame());
  StackFrame* caller_frame = iterator.NextFrame();
  ASSERT(caller_frame->IsDartFrame());
  const Code& caller_code = Code::Handle(zone, caller_frame->LookupDartCode());
  const Function& caller_function =
      Function::Handle(zone, caller_frame->LookupDartFunction());

  auto& old_data = Object::Handle(zone);
#if defined(DART_PRECOMPILED_RUNTIME)
  old_data =
      CodePatcher::GetSwitchableCallDataAt(caller_frame->pc(), caller_code);
#else
  CodePatcher::GetInstanceCallAt(caller_frame->pc(), caller_code, &old_data);
#endif

  GrowableArray<const Instance*> caller_arguments(1);
  caller_arguments.Add(&receiver);
  PatchableCallHandler handler(thread, caller_arguments,
                               MissHandler::kSwitchableCallMiss, arguments,
                               caller_frame, caller_code, caller_function);
  handler.ResolveSwitchAndReturn(old_data);
}

// Used to find the correct receiver and function to invoke or to fall back to
// invoking noSuchMethod when lazy dispatchers are disabled. Returns the
// result of the invocation or an Error.
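// Illustrative Dart (not VM code) for the call-through-getter case:
//   class A { Function get foo => () => 42; }
//   (A() as dynamic).foo();
// There is no method `foo`, but the getter `foo` returns a closure, so the
// call is carried out as (a.foo).call().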
static ObjectPtr InvokeCallThroughGetterOrNoSuchMethod(
    Thread* thread,
    Zone* zone,
    const Instance& receiver,
    const String& target_name,
    const Array& orig_arguments,
    const Array& orig_arguments_desc) {
  ASSERT(!FLAG_lazy_dispatchers);
  const bool is_dynamic_call =
      Function::IsDynamicInvocationForwarderName(target_name);
  String& demangled_target_name = String::Handle(zone, target_name.ptr());
  if (is_dynamic_call) {
    demangled_target_name =
        Function::DemangleDynamicInvocationForwarderName(target_name);
  }

  Class& cls = Class::Handle(zone, receiver.clazz());
  Function& function = Function::Handle(zone);

  // Dart distinguishes getters from regular methods, allows their calls to
  // mix with conversions, and its selectors are independent of arity. So do a
  // zigzagged lookup to see whether this call failed because of an arity
  // mismatch, the need for a conversion, or because there really is no such
  // method.

  const bool is_getter = Field::IsGetterName(demangled_target_name);
  if (is_getter) {
    // Tear-off of a method: o.foo (o.get:foo) failed, so closurize o.foo()
    // if it exists.
    const auto& function_name =
        String::Handle(zone, Field::NameFromGetter(demangled_target_name));
    while (!cls.IsNull()) {
      // We don't generate dyn:* forwarders for method extractors, so there is
      // no need to try to find a dyn:get:foo first (see the assertion below).
      if (function.IsNull()) {
        if (cls.EnsureIsFinalized(thread) == Error::null()) {
          function = Resolver::ResolveDynamicFunction(zone, cls, function_name);
        }
      }
      if (!function.IsNull()) {
#if !defined(DART_PRECOMPILED_RUNTIME)
        ASSERT(!kernel::NeedsDynamicInvocationForwarder(Function::Handle(
            function.GetMethodExtractor(demangled_target_name))));
#endif
        const Function& closure_function =
            Function::Handle(zone, function.ImplicitClosureFunction());
        const Object& result = Object::Handle(
            zone, closure_function.ImplicitInstanceClosure(receiver));
        return result.ptr();
      }
      cls = cls.SuperClass();
    }

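    // A dynamic getter access on a record, e.g. (r as dynamic).x, yields the
    // record field of that name (if present).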
    if (receiver.IsRecord()) {
      const Record& record = Record::Cast(receiver);
      const intptr_t field_index =
          record.GetFieldIndexByName(thread, function_name);
      if (field_index >= 0) {
        return record.FieldAt(field_index);
      }
    }

    // Fall through for noSuchMethod.
  } else {
    // Call through field: o.foo(...) failed, so invoke noSuchMethod if foo
    // exists but has the wrong number of arguments, or try (o.foo).call(...).

    if ((target_name.ptr() == Symbols::call().ptr()) && receiver.IsClosure()) {
      // Special case: closures are implemented with a call getter instead of
      // a call method, and with lazy dispatchers the
      // field-invocation-dispatcher would perform the closure call.
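      // E.g. for a closure c, the call c() has no `call` method to dispatch
      // to, so the closure is invoked directly here.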
      return DartEntry::InvokeClosure(thread, orig_arguments,
                                      orig_arguments_desc);
    }

    // Dynamic call sites have to use the dynamic getter as well (if it was
    // created).
    const auto& getter_name =
        String::Handle(zone, Field::GetterName(demangled_target_name));
    const auto& dyn_getter_name = String::Handle(
        zone, is_dynamic_call
                  ? Function::CreateDynamicInvocationForwarderName(getter_name)
                  : getter_name.ptr());
    ArgumentsDescriptor args_desc(orig_arguments_desc);
    while (!cls.IsNull()) {
      // If there is a function with the target name but mismatched arguments
      // we need to call `receiver.noSuchMethod()`.
      if (cls.EnsureIsFinalized(thread) == Error::null()) {
        function = Resolver::ResolveDynamicFunction(zone, cls, target_name);
      }
      if (!function.IsNull()) {
        ASSERT(!function.AreValidArguments(args_desc, nullptr));
        break;  // mismatch, invoke noSuchMethod
      }
      if (is_dynamic_call) {
        function =
            Resolver::ResolveDynamicFunction(zone, cls, demangled_target_name);
        if (!function.IsNull()) {
          ASSERT(!function.AreValidArguments(args_desc, nullptr));
          break;  // mismatch, invoke noSuchMethod
        }
      }

      // If there is a getter we need to call through it.
      if (is_dynamic_call) {
        function = Resolver::ResolveDynamicFunction(zone, cls, dyn_getter_name);
      }
      if (function.IsNull()) {
        function = Resolver::ResolveDynamicFunction(zone, cls, getter_name);
      }
      if (!function.IsNull()) {
        const Array& getter_arguments = Array::Handle(Array::New(1));
        getter_arguments.SetAt(0, receiver);
        const Object& getter_result = Object::Handle(
            zone, DartEntry::InvokeFunction(function, getter_arguments));
        if (getter_result.IsError()) {
          return getter_result.ptr();
        }
        ASSERT(getter_result.IsNull() || getter_result.IsInstance());

        orig_arguments.SetAt(args_desc.FirstArgIndex(), getter_result);
        return DartEntry::InvokeClosure(thread, orig_arguments,
                                        orig_arguments_desc);
      }
      cls = cls.SuperClass();
    }

    if (receiver.IsRecord()) {
      const Record& record = Record::Cast(receiver);
      const intptr_t field_index =
          record.GetFieldIndexByName(thread, demangled_target_name);
      if (field_index >= 0) {
        const Object& getter_result =
            Object::Handle(zone, record.FieldAt(field_index));
        ASSERT(getter_result.IsNull() || getter_result.IsInstance());
        orig_arguments.SetAt(args_desc.FirstArgIndex(), getter_result);
        return DartEntry::InvokeClosure(thread, orig_arguments,
                                        orig_arguments_desc);
      }
    }
  }

  const Object& result = Object::Handle(
      zone,
      DartEntry::InvokeNoSuchMethod(thread, receiver, demangled_target_name,
                                    orig_arguments, orig_arguments_desc));
  return result.ptr();
}

// Invoke the appropriate noSuchMethod, or the closure obtained from a getter.
// Arg0: receiver
// Arg1: ICData or MegamorphicCache
// Arg2: arguments descriptor array
// Arg3: arguments array
DEFINE_RUNTIME_ENTRY(NoSuchMethodFromCallStub, 4) {
  ASSERT(!FLAG_lazy_dispatchers);
  const Instance& receiver = Instance::CheckedHandle(zone, arguments.ArgAt(0));
  const Object& ic_data_or_cache = Object::Handle(zone, arguments.ArgAt(1));
  const Array& orig_arguments_desc =
      Array::CheckedHandle(zone, arguments.ArgAt(2));
  const Array& orig_arguments = Array::CheckedHandle(zone, arguments.ArgAt(3));
  String& target_name = String::Handle(zone);
  if (ic_data_or_cache.IsICData()) {
    target_name = ICData::Cast(ic_data_or_cache).target_name();
  } else {
    ASSERT(ic_data_or_cache.IsMegamorphicCache());
    target_name = MegamorphicCache::Cast(ic_data_or_cache).target_name();
  }

  const auto& result =
      Object::Handle(zone, InvokeCallThroughGetterOrNoSuchMethod(
                               thread, zone, receiver, target_name,
                               orig_arguments, orig_arguments_desc));
  ThrowIfError(result);
  arguments.SetReturn(result);
}

// Invoke the appropriate noSuchMethod function.
// Arg0: receiver
// Arg1: function
// Arg2: arguments descriptor array.
// Arg3: arguments array.
DEFINE_RUNTIME_ENTRY(NoSuchMethodFromPrologue, 4) {
  const Instance& receiver = Instance::CheckedHandle(zone, arguments.ArgAt(0));
  const Function& function = Function::CheckedHandle(zone, arguments.ArgAt(1));
  const Array& orig_arguments_desc =
      Array::CheckedHandle(zone, arguments.ArgAt(2));
  const Array& orig_arguments = Array::CheckedHandle(zone, arguments.ArgAt(3));

  String& orig_function_name = String::Handle(zone);
  if ((function.kind() == UntaggedFunction::kClosureFunction) ||
      (function.kind() == UntaggedFunction::kImplicitClosureFunction)) {
    // For closures the function name is always 'call'. Replace it with the
    // name of the closurized function so that the exception contains more
    // relevant information.
    orig_function_name = function.QualifiedUserVisibleName();
  } else {
    orig_function_name = function.name();
  }

  const Object& result = Object::Handle(
      zone, DartEntry::InvokeNoSuchMethod(thread, receiver, orig_function_name,
                                          orig_arguments, orig_arguments_desc));
  ThrowIfError(result);
  arguments.SetReturn(result);
}

#if !defined(PRODUCT) && !defined(DART_PRECOMPILED_RUNTIME)
// The following code is used to stress test
//  - deoptimization
//  - debugger stack tracing
//  - garbage collection
//  - hot reload
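//
// Typical usage (illustrative): running with e.g. --gc_every=7 triggers a
// major GC on every 7th stack-overflow check; --deoptimize_every,
// --stacktrace_every and --reload_every behave analogously for the other
// stress modes.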
static void HandleStackOverflowTestCases(Thread* thread) {
  auto isolate = thread->isolate();
  auto isolate_group = thread->isolate_group();

  if (FLAG_shared_slow_path_triggers_gc) {
    isolate->group()->heap()->CollectAllGarbage(GCReason::kDebugging);
  }

  bool do_deopt = false;
  bool do_stacktrace = false;
  bool do_reload = false;
  bool do_gc = false;
  const intptr_t isolate_reload_every =
      isolate->group()->reload_every_n_stack_overflow_checks();
  if ((FLAG_deoptimize_every > 0) || (FLAG_stacktrace_every > 0) ||
      (FLAG_gc_every > 0) || (isolate_reload_every > 0)) {
    if (!Isolate::IsSystemIsolate(isolate)) {
      // TODO(turnidge): To make --deoptimize_every and
      // --stacktrace-every faster we could move this increment/test to
      // the generated code.
      int32_t count = thread->IncrementAndGetStackOverflowCount();
      if (FLAG_deoptimize_every > 0 && (count % FLAG_deoptimize_every) == 0) {
        do_deopt = true;
      }
      if (FLAG_stacktrace_every > 0 && (count % FLAG_stacktrace_every) == 0) {
        do_stacktrace = true;
      }
      if (FLAG_gc_every > 0 && (count % FLAG_gc_every) == 0) {
        do_gc = true;
      }
      if ((isolate_reload_every > 0) && (count % isolate_reload_every) == 0) {
        do_reload = isolate->group()->CanReload();
      }
    }
  }
  if ((FLAG_deoptimize_filter != nullptr) ||
      (FLAG_stacktrace_filter != nullptr) || (FLAG_reload_every != 0)) {
    DartFrameIterator iterator(thread,
                               StackFrameIterator::kNoCrossThreadIteration);
    StackFrame* frame = iterator.NextFrame();
    ASSERT(frame != nullptr);
    Code& code = Code::Handle();
    Function& function = Function::Handle();
    code = frame->LookupDartCode();
    ASSERT(!code.IsNull());
    function = code.function();
    ASSERT(!function.IsNull());
    const char* function_name = nullptr;
    if ((FLAG_deoptimize_filter != nullptr) ||
        (FLAG_stacktrace_filter != nullptr)) {
      function_name = function.ToFullyQualifiedCString();
      ASSERT(function_name != nullptr);
    }
    if (!code.IsNull()) {
      if (!code.is_optimized() && FLAG_reload_every_optimized) {
        // Don't do the reload if we aren't inside optimized code.
        do_reload = false;
      }
      if (code.is_optimized() && FLAG_deoptimize_filter != nullptr &&
          strstr(function_name, FLAG_deoptimize_filter) != nullptr &&
          !function.ForceOptimize()) {
        OS::PrintErr("*** Forcing deoptimization (%s)\n",
                     function.ToFullyQualifiedCString());
        do_deopt = true;
      }
    }
    if (FLAG_stacktrace_filter != nullptr &&
        strstr(function_name, FLAG_stacktrace_filter) != nullptr) {
      OS::PrintErr("*** Computing stacktrace (%s)\n",
                   function.ToFullyQualifiedCString());
      do_stacktrace = true;
    }
  }
  if (do_deopt) {
    // TODO(turnidge): Consider using DeoptimizeAt instead.
    DeoptimizeFunctionsOnStack();
  }
  if (do_reload) {
    // Maybe adjust the rate of future reloads.
    isolate_group->MaybeIncreaseReloadEveryNStackOverflowChecks();

    // Issue a reload.
    const char* script_uri = isolate_group->source()->script_uri;
    JSONStream js;
    const bool success =
        isolate_group->ReloadSources(&js, /*force_reload=*/true, script_uri);
    if (!success) {
      FATAL("*** Isolate reload failed:\n%s\n", js.ToCString());
    }
  }
  if (do_stacktrace) {
    String& var_name = String::Handle();
    Instance& var_value = Instance::Handle();
    DebuggerStackTrace* stack = isolate->debugger()->StackTrace();
    intptr_t num_frames = stack->Length();
    for (intptr_t i = 0; i < num_frames; i++) {
      ActivationFrame* frame = stack->FrameAt(i);
      int num_vars = 0;
      // Variable locations and number are unknown when precompiling.
#if !defined(DART_PRECOMPILED_RUNTIME)
      if (!frame->function().ForceOptimize()) {
        // Ensure that we have unoptimized code.
        frame->function().EnsureHasCompiledUnoptimizedCode();
        num_vars = frame->NumLocalVariables();
      }
#endif
      TokenPosition unused = TokenPosition::kNoSource;
      for (intptr_t v = 0; v < num_vars; v++) {
        frame->VariableAt(v, &var_name, &unused, &unused, &unused, &var_value);
      }
    }
    if (FLAG_stress_async_stacks) {
      DebuggerStackTrace::CollectAsyncAwaiters();
    }
  }
  if (do_gc) {
    isolate->group()->heap()->CollectAllGarbage(GCReason::kDebugging);
  }
}
#endif  // !defined(PRODUCT) && !defined(DART_PRECOMPILED_RUNTIME)

#if !defined(DART_PRECOMPILED_RUNTIME)
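// On-stack replacement (OSR): when a long-running loop keeps a frame in
// unoptimized code, the VM compiles an optimized version that can be entered
// at the loop's back-edge and switches the running frame over to it.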
static void HandleOSRRequest(Thread* thread) {
  auto isolate_group = thread->isolate_group();
  ASSERT(isolate_group->use_osr());
  DartFrameIterator iterator(thread,
                             StackFrameIterator::kNoCrossThreadIteration);
  StackFrame* frame = iterator.NextFrame();
  ASSERT(frame != nullptr);
  const Code& code = Code::ZoneHandle(frame->LookupDartCode());
  ASSERT(!code.IsNull());
  ASSERT(!code.is_optimized());
  const Function& function = Function::Handle(code.function());
  ASSERT(!function.IsNull());

  // If the code of the frame does not match the function's unoptimized code,
  // we bail out since the code was reset by an isolate reload.
  if (code.ptr()