// Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.

#include "vm/object.h"

#include <memory>

#include "compiler/method_recognizer.h"
#include "include/dart_api.h"
#include "lib/integers.h"
#include "lib/stacktrace.h"
#include "platform/assert.h"
#include "platform/text_buffer.h"
#include "platform/unaligned.h"
#include "platform/unicode.h"
#include "vm/bit_vector.h"
#include "vm/bootstrap.h"
#include "vm/canonical_tables.h"
#include "vm/class_finalizer.h"
#include "vm/closure_functions_cache.h"
#include "vm/code_comments.h"
#include "vm/code_descriptors.h"
#include "vm/code_observers.h"
#include "vm/compiler/assembler/disassembler.h"
#include "vm/compiler/jit/compiler.h"
#include "vm/compiler/runtime_api.h"
#include "vm/cpu.h"
#include "vm/dart.h"
#include "vm/dart_api_state.h"
#include "vm/dart_entry.h"
#include "vm/datastream.h"
#include "vm/debugger.h"
#include "vm/deopt_instructions.h"
#include "vm/double_conversion.h"
#include "vm/elf.h"
#include "vm/exceptions.h"
#include "vm/growable_array.h"
#include "vm/hash.h"
#include "vm/hash_table.h"
#include "vm/heap/become.h"
#include "vm/heap/heap.h"
#include "vm/heap/sampler.h"
#include "vm/heap/weak_code.h"
#include "vm/image_snapshot.h"
#include "vm/isolate_reload.h"
#include "vm/kernel.h"
#include "vm/kernel_binary.h"
#include "vm/kernel_isolate.h"
#include "vm/kernel_loader.h"
#include "vm/log.h"
#include "vm/native_symbol.h"
#include "vm/object_graph.h"
#include "vm/object_store.h"
#include "vm/os.h"
#include "vm/parser.h"
#include "vm/profiler.h"
#include "vm/regexp.h"
#include "vm/resolver.h"
#include "vm/reusable_handles.h"
#include "vm/reverse_pc_lookup_cache.h"
#include "vm/runtime_entry.h"
#include "vm/scopes.h"
#include "vm/stack_frame.h"
#include "vm/stub_code.h"
#include "vm/symbols.h"
#include "vm/tags.h"
#include "vm/thread_registry.h"
#include "vm/timeline.h"
#include "vm/type_testing_stubs.h"
#include "vm/zone_text_buffer.h"

#if !defined(DART_PRECOMPILED_RUNTIME)
#include "vm/compiler/aot/precompiler.h"
#include "vm/compiler/assembler/assembler.h"
#include "vm/compiler/backend/code_statistics.h"
#include "vm/compiler/compiler_state.h"
#include "vm/compiler/frontend/kernel_fingerprints.h"
#include "vm/compiler/frontend/kernel_translation_helper.h"
#include "vm/compiler/intrinsifier.h"
#endif  // !defined(DART_PRECOMPILED_RUNTIME)

namespace dart {

DEFINE_FLAG(uint64_t,
            huge_method_cutoff_in_code_size,
            200000,
            "Huge method cutoff in unoptimized code size (in bytes).");
DEFINE_FLAG(
    bool,
    show_internal_names,
    false,
    "Show names of internal classes (e.g. \"OneByteString\") in error messages "
    "instead of showing the corresponding interface names (e.g. \"String\"). "
    "Also show legacy nullability in type names.");
DEFINE_FLAG(bool, use_lib_cache, false, "Use library name cache");
DEFINE_FLAG(bool, use_exp_cache, false, "Use library exported name cache");

DEFINE_FLAG(bool,
            remove_script_timestamps_for_test,
            false,
            "Remove script timestamps to allow for deterministic testing.");

DECLARE_FLAG(bool, dual_map_code);
DECLARE_FLAG(bool, intrinsify);
DECLARE_FLAG(bool, trace_deoptimization);
DECLARE_FLAG(bool, trace_deoptimization_verbose);
DECLARE_FLAG(bool, trace_reload);
DECLARE_FLAG(bool, write_protect_code);
DECLARE_FLAG(bool, precompiled_mode);
DECLARE_FLAG(int, max_polymorphic_checks);
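
// Prefixes used in the mangled names of getters ("get:foo"), setters
// ("set:foo"), and implicit field initializers ("init:foo").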
static const char* const kGetterPrefix = "get:";
static const intptr_t kGetterPrefixLength = strlen(kGetterPrefix);
static const char* const kSetterPrefix = "set:";
static const intptr_t kSetterPrefixLength = strlen(kSetterPrefix);
static const char* const kInitPrefix = "init:";
static const intptr_t kInitPrefixLength = strlen(kInitPrefix);

// A cache of VM heap allocated preinitialized empty ic data entry arrays.
ArrayPtr ICData::cached_icdata_arrays_[kCachedICDataArrayCount];

cpp_vtable Object::builtin_vtables_[kNumPredefinedCids] = {};

// These are initialized to a value that will force an illegal memory access if
// they are being used.
#if defined(RAW_NULL)
#error RAW_NULL should not be defined.
#endif
#define RAW_NULL static_cast<uword>(kHeapObjectTag)
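
// Evaluates its argument once and returns the result from the enclosing
// function if it is a real error (i.e. not Error::null()).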
#define CHECK_ERROR(error) \
  { \
    ErrorPtr err = (error); \
    if (err != Error::null()) { \
      return err; \
    } \
  }

#define DEFINE_SHARED_READONLY_HANDLE(Type, name) \
  Type* Object::name##_ = nullptr;
SHARED_READONLY_HANDLES_LIST(DEFINE_SHARED_READONLY_HANDLE)
#undef DEFINE_SHARED_READONLY_HANDLE

ObjectPtr Object::null_ = static_cast<ObjectPtr>(RAW_NULL);
BoolPtr Object::true_ = static_cast<BoolPtr>(RAW_NULL);
BoolPtr Object::false_ = static_cast<BoolPtr>(RAW_NULL);
ClassPtr Object::class_class_ = static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::dynamic_class_ = static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::void_class_ = static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::type_parameters_class_ = static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::type_arguments_class_ = static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::patch_class_class_ = static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::function_class_ = static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::closure_data_class_ = static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::ffi_trampoline_data_class_ = static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::field_class_ = static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::script_class_ = static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::library_class_ = static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::namespace_class_ = static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::kernel_program_info_class_ = static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::code_class_ = static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::instructions_class_ = static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::instructions_section_class_ = static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::instructions_table_class_ = static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::object_pool_class_ = static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::pc_descriptors_class_ = static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::code_source_map_class_ = static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::compressed_stackmaps_class_ = static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::var_descriptors_class_ = static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::exception_handlers_class_ = static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::context_class_ = static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::context_scope_class_ = static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::sentinel_class_ = static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::singletargetcache_class_ = static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::unlinkedcall_class_ = static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::monomorphicsmiablecall_class_ =
    static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::icdata_class_ = static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::megamorphic_cache_class_ = static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::subtypetestcache_class_ = static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::loadingunit_class_ = static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::api_error_class_ = static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::language_error_class_ = static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::unhandled_exception_class_ = static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::unwind_error_class_ = static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::weak_serialization_reference_class_ =
    static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::weak_array_class_ = static_cast<ClassPtr>(RAW_NULL);

static void AppendSubString(BaseTextBuffer* buffer,
                            const char* name,
                            intptr_t start_pos,
                            intptr_t len) {
  buffer->Printf("%.*s", static_cast<int>(len), &name[start_pos]);
}

// Used to define setters and getters for untagged object fields that are
// defined with the WSR_COMPRESSED_POINTER_FIELD macro. See
// PRECOMPILER_WSR_FIELD_DECLARATION in object.h for more information.
#if defined(DART_PRECOMPILER)
#define PRECOMPILER_WSR_FIELD_DEFINITION(Class, Type, Name) \
  Type##Ptr Class::Name() const { \
    return Type::RawCast(WeakSerializationReference::Unwrap(untag()->Name())); \
  }
#else
#define PRECOMPILER_WSR_FIELD_DEFINITION(Class, Type, Name) \
  void Class::set_##Name(const Type& value) const { \
    untag()->set_##Name(value.ptr()); \
  }
#endif

PRECOMPILER_WSR_FIELD_DEFINITION(ClosureData, Function, parent_function)
PRECOMPILER_WSR_FIELD_DEFINITION(Function, FunctionType, signature)

#undef PRECOMPILER_WSR_FIELD_DEFINITION
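
// The "##__VA_ARGS__" form below (which swallows the trailing comma when the
// macro is invoked with no variadic arguments) is a GNU extension, so a
// separate definition is used for MSVC.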
#if defined(_MSC_VER)
#define TRACE_TYPE_CHECKS_VERBOSE(format, ...) \
  if (FLAG_trace_type_checks_verbose) { \
    OS::PrintErr(format, __VA_ARGS__); \
  }
#else
#define TRACE_TYPE_CHECKS_VERBOSE(format, ...) \
  if (FLAG_trace_type_checks_verbose) { \
    OS::PrintErr(format, ##__VA_ARGS__); \
  }
#endif

// Remove private keys, but retain getter/setter/constructor/mixin manglings.
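// For example, "_MyClass@6328321.named@6328321" becomes "_MyClass.named":
// each '@' and the run of digits that follows it is dropped.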
StringPtr String::RemovePrivateKey(const String& name) {
  ASSERT(name.IsOneByteString());
  GrowableArray<uint8_t> without_key(name.Length());
  intptr_t i = 0;
  while (i < name.Length()) {
    while (i < name.Length()) {
      uint8_t c = name.CharAt(i++);
      if (c == '@') break;
      without_key.Add(c);
    }
    while (i < name.Length()) {
      uint8_t c = name.CharAt(i);
      if ((c < '0') || (c > '9')) break;
      i++;
    }
  }

  return String::FromLatin1(without_key.data(), without_key.length());
}

// Takes a vm internal name and makes it suitable for external user.
//
// Examples:
//
// Internal getter and setter prefixes are changed:
//
//   get:foo -> foo
//   set:foo -> foo=
//
// Private name mangling is removed, possibly multiple times:
//
//   _ReceivePortImpl@709387912 -> _ReceivePortImpl
//   _ReceivePortImpl@709387912._internal@709387912 ->
//       _ReceivePortImpl._internal
//   _C@6328321&_E@6328321&_F@6328321 -> _C&_E&_F
//
// The trailing . on the default constructor name is dropped:
//
//   List. -> List
//
// And so forth:
//
//   get:foo@6328321 -> foo
//   _MyClass@6328321. -> _MyClass
//   _MyClass@6328321.named -> _MyClass.named
//
// For extension methods the following demangling is done:
//
//   ext|func -> ext.func (instance extension method)
//   ext|get#prop -> ext.prop (instance extension getter)
//   ext|set#prop -> ext.prop= (instance extension setter)
//   ext|sfunc -> ext.sfunc (static extension method)
//   get:ext|sprop -> ext.sprop (static extension getter)
//   set:ext|sprop -> ext.sprop= (static extension setter)
//
const char* String::ScrubName(const String& name, bool is_extension) {
  Thread* thread = Thread::Current();
  NoSafepointScope no_safepoint(thread);
  Zone* zone = thread->zone();
  ZoneTextBuffer printer(zone);

#if !defined(DART_PRECOMPILED_RUNTIME)
  if (name.Equals(Symbols::TopLevel())) {
    // Name of invisible top-level class.
    return "";
  }
#endif  // !defined(DART_PRECOMPILED_RUNTIME)

  const char* cname = name.ToCString();
  ASSERT(strlen(cname) == static_cast<size_t>(name.Length()));
  const intptr_t name_len = name.Length();
  // First remove all private name mangling, and if 'is_extension' is true,
  // substitute the first '|' character with '.'.
  intptr_t start_pos = 0;
  intptr_t sum_segment_len = 0;
  for (intptr_t i = 0; i < name_len; i++) {
    if ((cname[i] == '@') && ((i + 1) < name_len) && (cname[i + 1] >= '0') &&
        (cname[i + 1] <= '9')) {
      // Append the current segment to the unmangled name.
      const intptr_t segment_len = i - start_pos;
      sum_segment_len += segment_len;
      AppendSubString(&printer, cname, start_pos, segment_len);
      // Advance until past the name mangling. The private keys are only
      // numbers so we skip until the first non-number.
      i++;  // Skip the '@'.
      while ((i < name.Length()) && (name.CharAt(i) >= '0') &&
             (name.CharAt(i) <= '9')) {
        i++;
      }
      start_pos = i;
      i--;  // Account for for-loop increment.
    } else if (is_extension && cname[i] == '|') {
      // Append the current segment to the unmangled name.
      const intptr_t segment_len = i - start_pos;
      AppendSubString(&printer, cname, start_pos, segment_len);
      // Append the '.' character (replaces '|' with '.').
      AppendSubString(&printer, ".", 0, 1);
      start_pos = i + 1;
      // Account for length of segments added so far.
      sum_segment_len += (segment_len + 1);
    }
  }

  const char* unmangled_name = nullptr;
  if (start_pos == 0) {
    // No name unmangling needed, reuse the name that was passed in.
    unmangled_name = cname;
    sum_segment_len = name_len;
  } else if (name.Length() != start_pos) {
    // Append the last segment.
    const intptr_t segment_len = name.Length() - start_pos;
    sum_segment_len += segment_len;
    AppendSubString(&printer, cname, start_pos, segment_len);
  }
  if (unmangled_name == nullptr) {
    // Merge unmangled_segments.
    unmangled_name = printer.buffer();
  }

  printer.Clear();
  intptr_t start = 0;
  intptr_t len = sum_segment_len;
  bool is_setter = false;
  if (is_extension) {
    // First scan till we see the '.' character.
    for (intptr_t i = 0; i < len; i++) {
      if (unmangled_name[i] == '.') {
        intptr_t slen = i + 1;
        intptr_t plen = slen - start;
        AppendSubString(&printer, unmangled_name, start, plen);
        unmangled_name += slen;
        len -= slen;
        break;
      } else if (unmangled_name[i] == ':') {
        if (start != 0) {
          // Reset and break.
          start = 0;
          is_setter = false;
          break;
        }
        if (unmangled_name[0] == 's') {
          is_setter = true;
        }
        start = i + 1;
      }
    }
  }
  intptr_t dot_pos = -1;  // Position of '.' in the name, if any.
  start = 0;
  for (intptr_t i = start; i < len; i++) {
    if (unmangled_name[i] == ':' ||
        (is_extension && unmangled_name[i] == '#')) {
      if (start != 0) {
        // Reset and break.
        start = 0;
        dot_pos = -1;
        break;
      }
      ASSERT(start == 0);  // Only one : is possible in getters or setters.
      if (unmangled_name[0] == 's') {
        ASSERT(!is_setter);
        is_setter = true;
      }
      start = i + 1;
    } else if (unmangled_name[i] == '.') {
      if (dot_pos != -1) {
        // Reset and break.
        start = 0;
        dot_pos = -1;
        break;
      }
      ASSERT(dot_pos == -1);  // Only one dot is supported.
      dot_pos = i;
    }
  }

  if (!is_extension && (start == 0) && (dot_pos == -1)) {
    // This unmangled_name is fine as it is.
    return unmangled_name;
  }

  // Drop the trailing dot if needed.
  intptr_t end = ((dot_pos + 1) == len) ? dot_pos : len;

  intptr_t substr_len = end - start;
  AppendSubString(&printer, unmangled_name, start, substr_len);
  if (is_setter) {
    const char* equals = Symbols::Equals().ToCString();
    const intptr_t equals_len = strlen(equals);
    AppendSubString(&printer, equals, 0, equals_len);
  }

  return printer.buffer();
}

StringPtr String::ScrubNameRetainPrivate(const String& name,
                                         bool is_extension) {
#if !defined(DART_PRECOMPILED_RUNTIME)
  intptr_t len = name.Length();
  intptr_t start = 0;
  intptr_t at_pos = -1;  // Position of '@' in the name, if any.
  bool is_setter = false;

  String& result = String::Handle();

  // If extension, strip out the leading prefix, e.g. "ext|func" would strip
  // out "ext|".
  if (is_extension) {
    // First scan till we see the '|' character.
    for (intptr_t i = 0; i < len; i++) {
      if (name.CharAt(i) == '|') {
        result = String::SubString(name, start, (i - start));
        result = String::Concat(result, Symbols::Dot());
        start = i + 1;
        break;
      } else if (name.CharAt(i) == ':') {
        if (start != 0) {
          // Reset and break.
          start = 0;
          is_setter = false;
          break;
        }
        if (name.CharAt(0) == 's') {
          is_setter = true;
        }
        start = i + 1;
      }
    }
  }

  for (intptr_t i = start; i < len; i++) {
    if (name.CharAt(i) == ':' || (is_extension && name.CharAt(i) == '#')) {
      // Only one : is possible in getters or setters.
      ASSERT(is_extension || start == 0);
      if (name.CharAt(start) == 's') {
        is_setter = true;
      }
      start = i + 1;
    } else if (name.CharAt(i) == '@') {
      // Setters should have only one @ so we know where to put the =.
      ASSERT(!is_setter || (at_pos == -1));
      at_pos = i;
    }
  }

  if (start == 0) {
    // This unmangled_name is fine as it is.
    return name.ptr();
  }

  if (is_extension) {
    const String& fname =
        String::Handle(String::SubString(name, start, (len - start)));
    result = String::Concat(result, fname);
  } else {
    result = String::SubString(name, start, (len - start));
  }

  if (is_setter) {
    // Setters need to end with '='.
    if (at_pos == -1) {
      return String::Concat(result, Symbols::Equals());
    } else {
      const String& pre_at =
          String::Handle(String::SubString(result, 0, at_pos - 4));
      const String& post_at =
          String::Handle(String::SubString(name, at_pos, len - at_pos));
      result = String::Concat(pre_at, Symbols::Equals());
      result = String::Concat(result, post_at);
    }
  }

  return result.ptr();
#endif  // !defined(DART_PRECOMPILED_RUNTIME)
  return name.ptr();  // In AOT, return argument unchanged.
}

template <typename type>
static bool IsSpecialCharacter(type value) {
  return ((value == '"') || (value == '\n') || (value == '\f') ||
          (value == '\b') || (value == '\t') || (value == '\v') ||
          (value == '\r') || (value == '\\') || (value == '$'));
}

static inline bool IsAsciiNonprintable(int32_t c) {
  return ((0 <= c) && (c < 32)) || (c == 127);
}
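
// Returns how many extra bytes the escaped form of |c| occupies compared to
// the raw character: e.g. '\n' becomes the two bytes "\n" (1 extra), and an
// ASCII control character such as 0x01 becomes the four bytes "\x01"
// (3 extra).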
static int32_t EscapeOverhead(int32_t c) {
  if (IsSpecialCharacter(c)) {
    return 1;  // 1 additional byte for the backslash.
  } else if (IsAsciiNonprintable(c)) {
    return 3;  // 3 additional bytes to encode c as \x00.
  }
  return 0;
}

template <typename type>
static type SpecialCharacter(type value) {
  if (value == '"') {
    return '"';
  } else if (value == '\n') {
    return 'n';
  } else if (value == '\f') {
    return 'f';
  } else if (value == '\b') {
    return 'b';
  } else if (value == '\t') {
    return 't';
  } else if (value == '\v') {
    return 'v';
  } else if (value == '\r') {
    return 'r';
  } else if (value == '\\') {
    return '\\';
  } else if (value == '$') {
    return '$';
  }
  UNREACHABLE();
  return '\0';
}

void Object::InitNullAndBool(IsolateGroup* isolate_group) {
  // Should only be run by the vm isolate.
  ASSERT(isolate_group == Dart::vm_isolate_group());
  Thread* thread = Thread::Current();
  auto heap = isolate_group->heap();

  // TODO(iposva): NoSafepointScope needs to be added here.
  ASSERT(class_class() == null_);

  // Allocate and initialize the null instance.
  // 'null_' must be the first object allocated as it is used in allocation to
  // clear the pointer fields of objects.
  {
    uword address =
        heap->Allocate(thread, Instance::InstanceSize(), Heap::kOld);
    null_ = static_cast<InstancePtr>(address + kHeapObjectTag);
    InitializeObjectVariant<Instance>(address, kNullCid);
    null_->untag()->SetCanonical();
  }

  // Allocate and initialize the bool instances.
  // These must be allocated such that at kBoolValueBitPosition, the address
  // of true is 0 and the address of false is 1, and their addresses are
  // otherwise identical.
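  // In other words, false_ must end up at (true_ | kBoolValueMask) and both
  // booleans at fixed offsets (kTrueOffsetFromNull/kFalseOffsetFromNull) from
  // null_; the asserts at the end of this function check that layout.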
  {
    // Allocate a dummy bool object to give true the desired alignment.
    uword address = heap->Allocate(thread, Bool::InstanceSize(), Heap::kOld);
    InitializeObject<Bool>(address);
    static_cast<BoolPtr>(address + kHeapObjectTag)->untag()->value_ = false;
  }
  {
    // Allocate true.
    uword address = heap->Allocate(thread, Bool::InstanceSize(), Heap::kOld);
    true_ = static_cast<BoolPtr>(address + kHeapObjectTag);
    InitializeObject<Bool>(address);
    true_->untag()->value_ = true;
    true_->untag()->SetCanonical();
  }
  {
    // Allocate false.
    uword address = heap->Allocate(thread, Bool::InstanceSize(), Heap::kOld);
    false_ = static_cast<BoolPtr>(address + kHeapObjectTag);
    InitializeObject<Bool>(address);
    false_->untag()->value_ = false;
    false_->untag()->SetCanonical();
  }

  // Check that the objects have been allocated at appropriate addresses.
  ASSERT(static_cast<uword>(true_) ==
         static_cast<uword>(null_) + kTrueOffsetFromNull);
  ASSERT(static_cast<uword>(false_) ==
         static_cast<uword>(null_) + kFalseOffsetFromNull);
  ASSERT((static_cast<uword>(true_) & kBoolValueMask) == 0);
  ASSERT((static_cast<uword>(false_) & kBoolValueMask) != 0);
  ASSERT(static_cast<uword>(false_) ==
         (static_cast<uword>(true_) | kBoolValueMask));
  ASSERT((static_cast<uword>(null_) & kBoolVsNullMask) == 0);
  ASSERT((static_cast<uword>(true_) & kBoolVsNullMask) != 0);
  ASSERT((static_cast<uword>(false_) & kBoolVsNullMask) != 0);
}

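// Populates builtin_vtables_ with the C++ vtable of the handle class that
// corresponds to each predefined class id, so that handles created for raw
// objects can be given the matching virtual dispatch table for their cid.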
void Object::InitVtables() {
  {
    Object fake_handle;
    builtin_vtables_[kObjectCid] = fake_handle.vtable();
  }

#define INIT_VTABLE(clazz) \
  { \
    clazz fake_handle; \
    builtin_vtables_[k##clazz##Cid] = fake_handle.vtable(); \
  }
  CLASS_LIST_NO_OBJECT_NOR_STRING_NOR_ARRAY_NOR_MAP(INIT_VTABLE)
  INIT_VTABLE(GrowableObjectArray)
#undef INIT_VTABLE

#define INIT_VTABLE(clazz) \
  { \
    Map fake_handle; \
    builtin_vtables_[k##clazz##Cid] = fake_handle.vtable(); \
  }
  CLASS_LIST_MAPS(INIT_VTABLE)
#undef INIT_VTABLE

#define INIT_VTABLE(clazz) \
  { \
    Set fake_handle; \
    builtin_vtables_[k##clazz##Cid] = fake_handle.vtable(); \
  }
  CLASS_LIST_SETS(INIT_VTABLE)
#undef INIT_VTABLE

#define INIT_VTABLE(clazz) \
  { \
    Array fake_handle; \
    builtin_vtables_[k##clazz##Cid] = fake_handle.vtable(); \
  }
  CLASS_LIST_FIXED_LENGTH_ARRAYS(INIT_VTABLE)
#undef INIT_VTABLE

#define INIT_VTABLE(clazz) \
  { \
    String fake_handle; \
    builtin_vtables_[k##clazz##Cid] = fake_handle.vtable(); \
  }
  CLASS_LIST_STRINGS(INIT_VTABLE)
#undef INIT_VTABLE

  {
    Instance fake_handle;
    builtin_vtables_[kFfiNativeTypeCid] = fake_handle.vtable();
  }

#define INIT_VTABLE(clazz) \
  { \
    Instance fake_handle; \
    builtin_vtables_[kFfi##clazz##Cid] = fake_handle.vtable(); \
  }
  CLASS_LIST_FFI_TYPE_MARKER(INIT_VTABLE)
#undef INIT_VTABLE

  {
    Instance fake_handle;
    builtin_vtables_[kFfiNativeFunctionCid] = fake_handle.vtable();
  }

  {
    Pointer fake_handle;
    builtin_vtables_[kPointerCid] = fake_handle.vtable();
  }

  {
    DynamicLibrary fake_handle;
    builtin_vtables_[kDynamicLibraryCid] = fake_handle.vtable();
  }

#define INIT_VTABLE(clazz) \
  { \
    TypedData fake_internal_handle; \
    builtin_vtables_[kTypedData##clazz##Cid] = fake_internal_handle.vtable(); \
    TypedDataView fake_view_handle; \
    builtin_vtables_[kTypedData##clazz##ViewCid] = fake_view_handle.vtable(); \
    builtin_vtables_[kUnmodifiableTypedData##clazz##ViewCid] = \
        fake_view_handle.vtable(); \
    ExternalTypedData fake_external_handle; \
    builtin_vtables_[kExternalTypedData##clazz##Cid] = \
        fake_external_handle.vtable(); \
  }
  CLASS_LIST_TYPED_DATA(INIT_VTABLE)
#undef INIT_VTABLE

  {
    TypedDataView fake_handle;
    builtin_vtables_[kByteDataViewCid] = fake_handle.vtable();
    builtin_vtables_[kUnmodifiableByteDataViewCid] = fake_handle.vtable();
  }

  {
    Instance fake_handle;
    builtin_vtables_[kByteBufferCid] = fake_handle.vtable();
    builtin_vtables_[kNullCid] = fake_handle.vtable();
    builtin_vtables_[kDynamicCid] = fake_handle.vtable();
    builtin_vtables_[kVoidCid] = fake_handle.vtable();
    builtin_vtables_[kNeverCid] = fake_handle.vtable();
  }
}

void Object::Init(IsolateGroup* isolate_group) {
  // Should only be run by the vm isolate.
  ASSERT(isolate_group == Dart::vm_isolate_group());
  Heap* heap = isolate_group->heap();
  Thread* thread = Thread::Current();
  ASSERT(thread != nullptr);
  // Ensure lock checks in setters are happy.
  SafepointWriteRwLocker ml(thread, isolate_group->program_lock());

  InitVtables();

// Allocate the read only object handles here.
#define INITIALIZE_SHARED_READONLY_HANDLE(Type, name) \
  name##_ = Type::ReadOnlyHandle();
  SHARED_READONLY_HANDLES_LIST(INITIALIZE_SHARED_READONLY_HANDLE)
#undef INITIALIZE_SHARED_READONLY_HANDLE

  *null_object_ = Object::null();
  *null_class_ = Class::null();
  *null_array_ = Array::null();
  *null_string_ = String::null();
  *null_instance_ = Instance::null();
  *null_function_ = Function::null();
  *null_function_type_ = FunctionType::null();
  *null_record_type_ = RecordType::null();
  *null_type_arguments_ = TypeArguments::null();
  *null_closure_ = Closure::null();
  *empty_type_arguments_ = TypeArguments::null();
  *null_abstract_type_ = AbstractType::null();
  *null_compressed_stackmaps_ = CompressedStackMaps::null();
  *bool_true_ = true_;
  *bool_false_ = false_;

  // Initialize the empty array and empty instantiations cache array handles to
  // null_ in order to be able to check if the empty and zero arrays were
  // allocated (RAW_NULL is not available).
  *empty_array_ = Array::null();
  *empty_instantiations_cache_array_ = Array::null();
  *empty_subtype_test_cache_array_ = Array::null();

  Class& cls = Class::Handle();

  // Allocate and initialize the class class.
  {
    intptr_t size = Class::InstanceSize();
    uword address = heap->Allocate(thread, size, Heap::kOld);
    class_class_ = static_cast<ClassPtr>(address + kHeapObjectTag);
    InitializeObject<Class>(address);

    Class fake;
    // Initialization from Class::New<Class>.
    // Directly set ptr_ to break a circular dependency: SetRaw will attempt
    // to lookup class class in the class table where it is not registered yet.
    cls.ptr_ = class_class_;
    ASSERT(builtin_vtables_[kClassCid] == fake.vtable());
    cls.set_instance_size(
        Class::InstanceSize(),
        compiler::target::RoundedAllocationSize(RTN::Class::InstanceSize()));
    const intptr_t host_next_field_offset = Class::NextFieldOffset();
    const intptr_t target_next_field_offset = RTN::Class::NextFieldOffset();
    cls.set_next_field_offset(host_next_field_offset,
                              target_next_field_offset);
    cls.set_id(Class::kClassId);
    cls.set_state_bits(0);
    cls.set_is_allocate_finalized();
    cls.set_is_declaration_loaded();
    cls.set_is_type_finalized();
    cls.set_type_arguments_field_offset_in_words(Class::kNoTypeArguments,
                                                 RTN::Class::kNoTypeArguments);
    cls.set_num_type_arguments_unsafe(0);
    cls.set_num_native_fields(0);
    cls.InitEmptyFields();
    isolate_group->class_table()->Register(cls);
  }

  // Allocate and initialize the null class.
  cls = Class::New<Instance, RTN::Instance>(kNullCid, isolate_group);
  cls.set_num_type_arguments_unsafe(0);
  isolate_group->object_store()->set_null_class(cls);

  // Allocate and initialize Never class.
  cls = Class::New<Instance, RTN::Instance>(kNeverCid, isolate_group);
  cls.set_num_type_arguments_unsafe(0);
  cls.set_is_allocate_finalized();
  cls.set_is_declaration_loaded();
  cls.set_is_type_finalized();
  isolate_group->object_store()->set_never_class(cls);

  // Allocate and initialize the free list element class.
  cls = Class::New<FreeListElement::FakeInstance,
                   RTN::FreeListElement::FakeInstance>(kFreeListElement,
                                                       isolate_group);
  cls.set_num_type_arguments_unsafe(0);
  cls.set_is_allocate_finalized();
  cls.set_is_declaration_loaded();
  cls.set_is_type_finalized();

  // Allocate and initialize the forwarding corpse class.
  cls = Class::New<ForwardingCorpse::FakeInstance,
                   RTN::ForwardingCorpse::FakeInstance>(kForwardingCorpse,
                                                        isolate_group);
  cls.set_num_type_arguments_unsafe(0);
  cls.set_is_allocate_finalized();
  cls.set_is_declaration_loaded();
  cls.set_is_type_finalized();

  // Allocate and initialize Sentinel class.
  cls = Class::New<Sentinel, RTN::Sentinel>(isolate_group);
  sentinel_class_ = cls.ptr();

  // Allocate and initialize the sentinel values.
  {
    *sentinel_ ^= Sentinel::New();
    *transition_sentinel_ ^= Sentinel::New();
  }

  // Allocate and initialize optimizing compiler constants.
  {
    *unknown_constant_ ^= Sentinel::New();
    *non_constant_ ^= Sentinel::New();
    *optimized_out_ ^= Sentinel::New();
  }

  // Allocate the remaining VM internal classes.
  cls = Class::New<TypeParameters, RTN::TypeParameters>(isolate_group);
  type_parameters_class_ = cls.ptr();

  cls = Class::New<TypeArguments, RTN::TypeArguments>(isolate_group);
  type_arguments_class_ = cls.ptr();

  cls = Class::New<PatchClass, RTN::PatchClass>(isolate_group);
  patch_class_class_ = cls.ptr();

  cls = Class::New<Function, RTN::Function>(isolate_group);
  function_class_ = cls.ptr();

  cls = Class::New<ClosureData, RTN::ClosureData>(isolate_group);
  closure_data_class_ = cls.ptr();

  cls = Class::New<FfiTrampolineData, RTN::FfiTrampolineData>(isolate_group);
  ffi_trampoline_data_class_ = cls.ptr();

  cls = Class::New<Field, RTN::Field>(isolate_group);
  field_class_ = cls.ptr();

  cls = Class::New<Script, RTN::Script>(isolate_group);
  script_class_ = cls.ptr();

  cls = Class::New<Library, RTN::Library>(isolate_group);
  library_class_ = cls.ptr();

  cls = Class::New<Namespace, RTN::Namespace>(isolate_group);
  namespace_class_ = cls.ptr();

  cls = Class::New<KernelProgramInfo, RTN::KernelProgramInfo>(isolate_group);
  kernel_program_info_class_ = cls.ptr();

  cls = Class::New<Code, RTN::Code>(isolate_group);
  code_class_ = cls.ptr();

  cls = Class::New<Instructions, RTN::Instructions>(isolate_group);
  instructions_class_ = cls.ptr();

  cls =
      Class::New<InstructionsSection, RTN::InstructionsSection>(isolate_group);
  instructions_section_class_ = cls.ptr();

  cls = Class::New<InstructionsTable, RTN::InstructionsTable>(isolate_group);
  instructions_table_class_ = cls.ptr();

  cls = Class::New<ObjectPool, RTN::ObjectPool>(isolate_group);
  object_pool_class_ = cls.ptr();

  cls = Class::New<PcDescriptors, RTN::PcDescriptors>(isolate_group);
  pc_descriptors_class_ = cls.ptr();

  cls = Class::New<CodeSourceMap, RTN::CodeSourceMap>(isolate_group);
  code_source_map_class_ = cls.ptr();

  cls =
      Class::New<CompressedStackMaps, RTN::CompressedStackMaps>(isolate_group);
  compressed_stackmaps_class_ = cls.ptr();

  cls =
      Class::New<LocalVarDescriptors, RTN::LocalVarDescriptors>(isolate_group);
  var_descriptors_class_ = cls.ptr();

  cls = Class::New<ExceptionHandlers, RTN::ExceptionHandlers>(isolate_group);
  exception_handlers_class_ = cls.ptr();

  cls = Class::New<Context, RTN::Context>(isolate_group);
  context_class_ = cls.ptr();

  cls = Class::New<ContextScope, RTN::ContextScope>(isolate_group);
  context_scope_class_ = cls.ptr();

  cls = Class::New<SingleTargetCache, RTN::SingleTargetCache>(isolate_group);
  singletargetcache_class_ = cls.ptr();

  cls = Class::New<UnlinkedCall, RTN::UnlinkedCall>(isolate_group);
  unlinkedcall_class_ = cls.ptr();

  cls = Class::New<MonomorphicSmiableCall, RTN::MonomorphicSmiableCall>(
      isolate_group);
  monomorphicsmiablecall_class_ = cls.ptr();

  cls = Class::New<ICData, RTN::ICData>(isolate_group);
  icdata_class_ = cls.ptr();

  cls = Class::New<MegamorphicCache, RTN::MegamorphicCache>(isolate_group);
  megamorphic_cache_class_ = cls.ptr();

  cls = Class::New<SubtypeTestCache, RTN::SubtypeTestCache>(isolate_group);
  subtypetestcache_class_ = cls.ptr();

  cls = Class::New<LoadingUnit, RTN::LoadingUnit>(isolate_group);
  loadingunit_class_ = cls.ptr();

  cls = Class::New<ApiError, RTN::ApiError>(isolate_group);
  api_error_class_ = cls.ptr();

  cls = Class::New<LanguageError, RTN::LanguageError>(isolate_group);
  language_error_class_ = cls.ptr();

  cls = Class::New<UnhandledException, RTN::UnhandledException>(isolate_group);
  unhandled_exception_class_ = cls.ptr();

  cls = Class::New<UnwindError, RTN::UnwindError>(isolate_group);
  unwind_error_class_ = cls.ptr();

  cls = Class::New<WeakSerializationReference, RTN::WeakSerializationReference>(
      isolate_group);
  weak_serialization_reference_class_ = cls.ptr();

  cls = Class::New<WeakArray, RTN::WeakArray>(isolate_group);
  weak_array_class_ = cls.ptr();

  ASSERT(class_class() != null_);

  // Pre-allocate classes in the vm isolate so that we can for example create a
  // symbol table and populate it with some frequently used strings as symbols.
  cls = Class::New<Array, RTN::Array>(isolate_group);
  isolate_group->object_store()->set_array_class(cls);
  cls.set_type_arguments_field_offset(Array::type_arguments_offset(),
                                      RTN::Array::type_arguments_offset());
  cls.set_num_type_arguments_unsafe(1);
  cls = Class::New<Array, RTN::Array>(kImmutableArrayCid, isolate_group);
  isolate_group->object_store()->set_immutable_array_class(cls);
  cls.set_type_arguments_field_offset(Array::type_arguments_offset(),
                                      RTN::Array::type_arguments_offset());
  cls.set_num_type_arguments_unsafe(1);
  // In order to be able to canonicalize arguments descriptors early.
  cls.set_is_prefinalized();
  cls =
      Class::New<GrowableObjectArray, RTN::GrowableObjectArray>(isolate_group);
  isolate_group->object_store()->set_growable_object_array_class(cls);
  cls.set_type_arguments_field_offset(
      GrowableObjectArray::type_arguments_offset(),
      RTN::GrowableObjectArray::type_arguments_offset());
  cls.set_num_type_arguments_unsafe(1);
  cls = Class::NewStringClass(kOneByteStringCid, isolate_group);
  isolate_group->object_store()->set_one_byte_string_class(cls);
  cls = Class::NewStringClass(kTwoByteStringCid, isolate_group);
  isolate_group->object_store()->set_two_byte_string_class(cls);
  cls = Class::New<Mint, RTN::Mint>(isolate_group);
  isolate_group->object_store()->set_mint_class(cls);
  cls = Class::New<Double, RTN::Double>(isolate_group);
  isolate_group->object_store()->set_double_class(cls);
  cls = Class::New<Float32x4, RTN::Float32x4>(isolate_group);
  isolate_group->object_store()->set_float32x4_class(cls);
  cls = Class::New<Float64x2, RTN::Float64x2>(isolate_group);
  isolate_group->object_store()->set_float64x2_class(cls);
  cls = Class::New<Int32x4, RTN::Int32x4>(isolate_group);
  isolate_group->object_store()->set_int32x4_class(cls);

  // Ensure that class kExternalTypedDataUint8ArrayCid is registered as we
  // need it when reading in the token stream of bootstrap classes in the VM
  // isolate.
  Class::NewExternalTypedDataClass(kExternalTypedDataUint8ArrayCid,
                                   isolate_group);

  // Needed for object pools of VM isolate stubs.
  Class::NewTypedDataClass(kTypedDataInt8ArrayCid, isolate_group);

  // Allocate and initialize the empty_array instance.
  {
    uword address = heap->Allocate(thread, Array::InstanceSize(0), Heap::kOld);
    InitializeObjectVariant<Array>(address, kImmutableArrayCid, 0);
    Array::initializeHandle(empty_array_,
                            static_cast<ArrayPtr>(address + kHeapObjectTag));
    empty_array_->untag()->set_length(Smi::New(0));
    empty_array_->SetCanonical();
  }

  Smi& smi = Smi::Handle();
  // Allocate and initialize the empty instantiations cache array instance,
  // which contains metadata as the first element and a sentinel value
  // at the start of the first entry.
  {
    const intptr_t array_size =
        TypeArguments::Cache::kHeaderSize + TypeArguments::Cache::kEntrySize;
    uword address =
        heap->Allocate(thread, Array::InstanceSize(array_size), Heap::kOld);
    InitializeObjectVariant<Array>(address, kImmutableArrayCid, array_size);
    Array::initializeHandle(empty_instantiations_cache_array_,
                            static_cast<ArrayPtr>(address + kHeapObjectTag));
    empty_instantiations_cache_array_->untag()->set_length(
        Smi::New(array_size));
    // The empty cache has no occupied entries and is not a hash-based cache.
    smi = Smi::New(0);
    empty_instantiations_cache_array_->SetAt(
        TypeArguments::Cache::kMetadataIndex, smi);
    // Make the first (and only) entry unoccupied by setting its first element
    // to the sentinel value.
    smi = TypeArguments::Cache::Sentinel();
    InstantiationsCacheTable table(*empty_instantiations_cache_array_);
    table.At(0).Set<TypeArguments::Cache::kSentinelIndex>(smi);
    // The other contents of the array are immaterial.
    empty_instantiations_cache_array_->SetCanonical();
  }

  // Allocate and initialize the empty subtype test cache array instance,
  // which contains a single unoccupied entry.
  {
    const intptr_t array_size = SubtypeTestCache::kTestEntryLength;
    uword address =
        heap->Allocate(thread, Array::InstanceSize(array_size), Heap::kOld);
    InitializeObjectVariant<Array>(address, kImmutableArrayCid, array_size);
    Array::initializeHandle(empty_subtype_test_cache_array_,
                            static_cast<ArrayPtr>(address + kHeapObjectTag));
    empty_subtype_test_cache_array_->untag()->set_length(Smi::New(array_size));
    // Make the first (and only) entry unoccupied by setting its first element
    // to the null value.
    empty_subtype_test_cache_array_->SetAt(
        SubtypeTestCache::kInstanceCidOrSignature, Object::null_object());
    smi = TypeArguments::Cache::Sentinel();
    SubtypeTestCacheTable table(*empty_subtype_test_cache_array_);
    table.At(0).Set<SubtypeTestCache::kInstanceCidOrSignature>(
        Object::null_object());
    // The other contents of the array are immaterial.
    empty_subtype_test_cache_array_->SetCanonical();
  }

  // Allocate and initialize the canonical empty context scope object.
  {
    uword address =
        heap->Allocate(thread, ContextScope::InstanceSize(0), Heap::kOld);
    InitializeObject<ContextScope>(address, 0);
    ContextScope::initializeHandle(
        empty_context_scope_,
        static_cast<ContextScopePtr>(address + kHeapObjectTag));
    empty_context_scope_->StoreNonPointer(
        &empty_context_scope_->untag()->num_variables_, 0);
    empty_context_scope_->StoreNonPointer(
        &empty_context_scope_->untag()->is_implicit_, true);
    empty_context_scope_->SetCanonical();
  }

  // Allocate and initialize the canonical empty object pool object.
  {
    uword address =
        heap->Allocate(thread, ObjectPool::InstanceSize(0), Heap::kOld);
    InitializeObject<ObjectPool>(address, 0);
    ObjectPool::initializeHandle(
        empty_object_pool_,
        static_cast<ObjectPoolPtr>(address + kHeapObjectTag));
    empty_object_pool_->StoreNonPointer(&empty_object_pool_->untag()->length_,
                                        0);
    empty_object_pool_->SetCanonical();
  }

  // Allocate and initialize the empty_compressed_stackmaps instance.
  {
    const intptr_t instance_size = CompressedStackMaps::InstanceSize(0);
    uword address = heap->Allocate(thread, instance_size, Heap::kOld);
    InitializeObject<CompressedStackMaps>(address, 0);
    CompressedStackMaps::initializeHandle(
        empty_compressed_stackmaps_,
        static_cast<CompressedStackMapsPtr>(address + kHeapObjectTag));
    empty_compressed_stackmaps_->untag()->payload()->set_flags_and_size(0);
    empty_compressed_stackmaps_->SetCanonical();
  }

  // Allocate and initialize the empty_descriptors instance.
  {
    uword address =
        heap->Allocate(thread, PcDescriptors::InstanceSize(0), Heap::kOld);
    InitializeObject<PcDescriptors>(address, 0);
    PcDescriptors::initializeHandle(
        empty_descriptors_,
        static_cast<PcDescriptorsPtr>(address + kHeapObjectTag));
    empty_descriptors_->StoreNonPointer(&empty_descriptors_->untag()->length_,
                                        0);
    empty_descriptors_->SetCanonical();
  }

  // Allocate and initialize the canonical empty variable descriptor object.
  {
    uword address = heap->Allocate(
        thread, LocalVarDescriptors::InstanceSize(0), Heap::kOld);
    InitializeObject<LocalVarDescriptors>(address, 0);
    LocalVarDescriptors::initializeHandle(
        empty_var_descriptors_,
        static_cast<LocalVarDescriptorsPtr>(address + kHeapObjectTag));
    empty_var_descriptors_->StoreNonPointer(
        &empty_var_descriptors_->untag()->num_entries_, 0);
    empty_var_descriptors_->SetCanonical();
  }

  // Allocate and initialize the canonical empty exception handler info object.
  // The vast majority of all functions do not contain an exception handler
  // and can share this canonical descriptor.
  {
    uword address =
        heap->Allocate(thread, ExceptionHandlers::InstanceSize(0), Heap::kOld);
    InitializeObject<ExceptionHandlers>(address, 0);
    ExceptionHandlers::initializeHandle(
        empty_exception_handlers_,
        static_cast<ExceptionHandlersPtr>(address + kHeapObjectTag));
    empty_exception_handlers_->StoreNonPointer(
        &empty_exception_handlers_->untag()->packed_fields_, 0);
    empty_exception_handlers_->SetCanonical();
  }

  // Empty exception handlers for async/async* functions.
  {
    uword address =
        heap->Allocate(thread, ExceptionHandlers::InstanceSize(0), Heap::kOld);
    InitializeObject<ExceptionHandlers>(address, 0);
    ExceptionHandlers::initializeHandle(
        empty_async_exception_handlers_,
        static_cast<ExceptionHandlersPtr>(address + kHeapObjectTag));
    empty_async_exception_handlers_->StoreNonPointer(
        &empty_async_exception_handlers_->untag()->packed_fields_,
        UntaggedExceptionHandlers::AsyncHandlerBit::update(true, 0));
    empty_async_exception_handlers_->SetCanonical();
  }

  // Allocate and initialize the canonical empty type arguments object.
  {
    uword address =
        heap->Allocate(thread, TypeArguments::InstanceSize(0), Heap::kOld);
    InitializeObject<TypeArguments>(address, 0);
    TypeArguments::initializeHandle(
        empty_type_arguments_,
        static_cast<TypeArgumentsPtr>(address + kHeapObjectTag));
    empty_type_arguments_->untag()->set_length(Smi::New(0));
    empty_type_arguments_->untag()->set_hash(Smi::New(0));
    empty_type_arguments_->ComputeHash();
    empty_type_arguments_->SetCanonical();
  }

  // The VM isolate snapshot object table is initialized to an empty array
  // as we do not have any VM isolate snapshot at this time.
  *vm_isolate_snapshot_object_table_ = Object::empty_array().ptr();

  cls = Class::New<Instance, RTN::Instance>(kDynamicCid, isolate_group);
  cls.set_is_abstract();
  cls.set_num_type_arguments_unsafe(0);
  cls.set_is_allocate_finalized();
  cls.set_is_declaration_loaded();
  cls.set_is_type_finalized();
  dynamic_class_ = cls.ptr();

  cls = Class::New<Instance, RTN::Instance>(kVoidCid, isolate_group);
  cls.set_num_type_arguments_unsafe(0);
  cls.set_is_allocate_finalized();
  cls.set_is_declaration_loaded();
  cls.set_is_type_finalized();
  void_class_ = cls.ptr();

  cls = Class::New<Type, RTN::Type>(isolate_group);
  cls.set_is_allocate_finalized();
  cls.set_is_declaration_loaded();
  cls.set_is_type_finalized();

  cls = Class::New<FunctionType, RTN::FunctionType>(isolate_group);
  cls.set_is_allocate_finalized();
  cls.set_is_declaration_loaded();
  cls.set_is_type_finalized();

  cls = Class::New<RecordType, RTN::RecordType>(isolate_group);
  cls.set_is_allocate_finalized();
  cls.set_is_declaration_loaded();
  cls.set_is_type_finalized();

  cls = dynamic_class_;
  *dynamic_type_ =
      Type::New(cls, Object::null_type_arguments(), Nullability::kNullable);
  dynamic_type_->SetIsFinalized();
  dynamic_type_->ComputeHash();
  dynamic_type_->SetCanonical();

  cls = void_class_;
  *void_type_ =
      Type::New(cls, Object::null_type_arguments(), Nullability::kNullable);
  void_type_->SetIsFinalized();
  void_type_->ComputeHash();
  void_type_->SetCanonical();

  // Since TypeArguments objects are passed as function arguments, make them
  // behave as Dart instances, although they are just VM objects.
  // Note that we cannot set the super type to ObjectType, which does not live
  // in the vm isolate. See special handling in Class::SuperClass().
  cls = type_arguments_class_;
  cls.set_interfaces(Object::empty_array());
  cls.SetFields(Object::empty_array());
  cls.SetFunctions(Object::empty_array());

  cls = Class::New<Bool, RTN::Bool>(isolate_group);
  isolate_group->object_store()->set_bool_class(cls);

  *smi_illegal_cid_ = Smi::New(kIllegalCid);
  *smi_zero_ = Smi::New(0);

  String& error_str = String::Handle();
  error_str = String::New(
      "Callbacks into the Dart VM are currently prohibited. Either there are "
      "outstanding pointers from Dart_TypedDataAcquireData that have not been "
      "released with Dart_TypedDataReleaseData, or a finalizer is running.",
      Heap::kOld);
  *no_callbacks_error_ = ApiError::New(error_str, Heap::kOld);
  error_str = String::New(
      "No api calls are allowed while unwind is in progress", Heap::kOld);
  *unwind_in_progress_error_ = UnwindError::New(error_str, Heap::kOld);
  error_str = String::New("SnapshotWriter Error", Heap::kOld);
  *snapshot_writer_error_ =
      LanguageError::New(error_str, Report::kError, Heap::kOld);
  error_str = String::New("Branch offset overflow", Heap::kOld);
  *branch_offset_error_ =
      LanguageError::New(error_str, Report::kBailout, Heap::kOld);
  error_str = String::New("Speculative inlining failed", Heap::kOld);
  *speculative_inlining_error_ =
      LanguageError::New(error_str, Report::kBailout, Heap::kOld);
  error_str = String::New("Background Compilation Failed", Heap::kOld);
  *background_compilation_error_ =
      LanguageError::New(error_str, Report::kBailout, Heap::kOld);
  error_str = String::New("Out of memory", Heap::kOld);
  *out_of_memory_error_ =
      LanguageError::New(error_str, Report::kError, Heap::kOld);

  // Allocate the parameter types and names for synthetic getters.
  *synthetic_getter_parameter_types_ = Array::New(1, Heap::kOld);
  synthetic_getter_parameter_types_->SetAt(0, Object::dynamic_type());
  *synthetic_getter_parameter_names_ = Array::New(1, Heap::kOld);
  // Fill in synthetic_getter_parameter_names_ later, after symbols are
  // initialized (in Object::FinalizeVMIsolate). The
  // synthetic_getter_parameter_names_ object needs to be created earlier, as
  // the VM isolate snapshot reader references it before
  // Object::FinalizeVMIsolate.

  // Some thread fields need to be reinitialized as null constants have not
  // been initialized until now.
  thread->ClearStickyError();

  ASSERT(!null_object_->IsSmi());
  ASSERT(!null_class_->IsSmi());
  ASSERT(null_class_->IsClass());
  ASSERT(!null_array_->IsSmi());
  ASSERT(null_array_->IsArray());
  ASSERT(!null_string_->IsSmi());
  ASSERT(null_string_->IsString());
  ASSERT(!null_instance_->IsSmi());
  ASSERT(null_instance_->IsInstance());
  ASSERT(!null_function_->IsSmi());
  ASSERT(null_function_->IsFunction());
  ASSERT(!null_function_type_->IsSmi());
  ASSERT(null_function_type_->IsFunctionType());
  ASSERT(!null_record_type_->IsSmi());
  ASSERT(null_record_type_->IsRecordType());
  ASSERT(!null_type_arguments_->IsSmi());
  ASSERT(null_type_arguments_->IsTypeArguments());
  ASSERT(!null_compressed_stackmaps_->IsSmi());
  ASSERT(null_compressed_stackmaps_->IsCompressedStackMaps());
  ASSERT(!empty_array_->IsSmi());
  ASSERT(empty_array_->IsArray());
  ASSERT(!empty_instantiations_cache_array_->IsSmi());
  ASSERT(empty_instantiations_cache_array_->IsArray());
  ASSERT(!empty_subtype_test_cache_array_->IsSmi());
  ASSERT(empty_subtype_test_cache_array_->IsArray());
  ASSERT(!empty_type_arguments_->IsSmi());
  ASSERT(empty_type_arguments_->IsTypeArguments());
  ASSERT(!empty_context_scope_->IsSmi());
  ASSERT(empty_context_scope_->IsContextScope());
  ASSERT(!empty_compressed_stackmaps_->IsSmi());
  ASSERT(empty_compressed_stackmaps_->IsCompressedStackMaps());
  ASSERT(!empty_descriptors_->IsSmi());
  ASSERT(empty_descriptors_->IsPcDescriptors());
  ASSERT(!empty_var_descriptors_->IsSmi());
  ASSERT(empty_var_descriptors_->IsLocalVarDescriptors());
  ASSERT(!empty_exception_handlers_->IsSmi());
  ASSERT(empty_exception_handlers_->IsExceptionHandlers());
  ASSERT(!empty_async_exception_handlers_->IsSmi());
  ASSERT(empty_async_exception_handlers_->IsExceptionHandlers());
  ASSERT(!sentinel_->IsSmi());
  ASSERT(sentinel_->IsSentinel());
  ASSERT(!transition_sentinel_->IsSmi());
  ASSERT(transition_sentinel_->IsSentinel());
  ASSERT(!unknown_constant_->IsSmi());
  ASSERT(unknown_constant_->IsSentinel());
  ASSERT(!non_constant_->IsSmi());
  ASSERT(non_constant_->IsSentinel());
  ASSERT(!optimized_out_->IsSmi());
  ASSERT(optimized_out_->IsSentinel());
  ASSERT(!bool_true_->IsSmi());
  ASSERT(bool_true_->IsBool());
  ASSERT(!bool_false_->IsSmi());
  ASSERT(bool_false_->IsBool());
  ASSERT(smi_illegal_cid_->IsSmi());
  ASSERT(smi_zero_->IsSmi());
  ASSERT(!no_callbacks_error_->IsSmi());
  ASSERT(no_callbacks_error_->IsApiError());
  ASSERT(!unwind_in_progress_error_->IsSmi());
  ASSERT(unwind_in_progress_error_->IsUnwindError());
  ASSERT(!snapshot_writer_error_->IsSmi());
  ASSERT(snapshot_writer_error_->IsLanguageError());
  ASSERT(!branch_offset_error_->IsSmi());
  ASSERT(branch_offset_error_->IsLanguageError());
  ASSERT(!speculative_inlining_error_->IsSmi());
  ASSERT(speculative_inlining_error_->IsLanguageError());
  ASSERT(!background_compilation_error_->IsSmi());
  ASSERT(background_compilation_error_->IsLanguageError());
  ASSERT(!out_of_memory_error_->IsSmi());
  ASSERT(out_of_memory_error_->IsLanguageError());
  ASSERT(!vm_isolate_snapshot_object_table_->IsSmi());
  ASSERT(vm_isolate_snapshot_object_table_->IsArray());
  ASSERT(!synthetic_getter_parameter_types_->IsSmi());
  ASSERT(synthetic_getter_parameter_types_->IsArray());
  ASSERT(!synthetic_getter_parameter_names_->IsSmi());
  ASSERT(synthetic_getter_parameter_names_->IsArray());
}

void Object::FinishInit(IsolateGroup* isolate_group) {
  // The type testing stubs we initialize in AbstractType objects for the
  // canonical type of kDynamicCid/kVoidCid need to be set in this
  // method, which is called after StubCode::InitOnce().
  Code& code = Code::Handle();

  code = TypeTestingStubGenerator::DefaultCodeForType(*dynamic_type_);
  dynamic_type_->InitializeTypeTestingStubNonAtomic(code);

  code = TypeTestingStubGenerator::DefaultCodeForType(*void_type_);
  void_type_->InitializeTypeTestingStubNonAtomic(code);
}

void Object::Cleanup() {
  null_ = static_cast<ObjectPtr>(RAW_NULL);
  true_ = static_cast<BoolPtr>(RAW_NULL);
  false_ = static_cast<BoolPtr>(RAW_NULL);
  class_class_ = static_cast<ClassPtr>(RAW_NULL);
  dynamic_class_ = static_cast<ClassPtr>(RAW_NULL);
  void_class_ = static_cast<ClassPtr>(RAW_NULL);
  type_parameters_class_ = static_cast<ClassPtr>(RAW_NULL);
  type_arguments_class_ = static_cast<ClassPtr>(RAW_NULL);
  patch_class_class_ = static_cast<ClassPtr>(RAW_NULL);
  function_class_ = static_cast<ClassPtr>(RAW_NULL);
  closure_data_class_ = static_cast<ClassPtr>(RAW_NULL);
  ffi_trampoline_data_class_ = static_cast<ClassPtr>(RAW_NULL);
  field_class_ = static_cast<ClassPtr>(RAW_NULL);
  script_class_ = static_cast<ClassPtr>(RAW_NULL);
  library_class_ = static_cast<ClassPtr>(RAW_NULL);
  namespace_class_ = static_cast<ClassPtr>(RAW_NULL);
  kernel_program_info_class_ = static_cast<ClassPtr>(RAW_NULL);
  code_class_ = static_cast<ClassPtr>(RAW_NULL);
  instructions_class_ = static_cast<ClassPtr>(RAW_NULL);
  instructions_section_class_ = static_cast<ClassPtr>(RAW_NULL);
  instructions_table_class_ = static_cast<ClassPtr>(RAW_NULL);
  object_pool_class_ = static_cast<ClassPtr>(RAW_NULL);
  pc_descriptors_class_ = static_cast<ClassPtr>(RAW_NULL);
  code_source_map_class_ = static_cast<ClassPtr>(RAW_NULL);
  compressed_stackmaps_class_ = static_cast<ClassPtr>(RAW_NULL);
  var_descriptors_class_ = static_cast<ClassPtr>(RAW_NULL);
  exception_handlers_class_ = static_cast<ClassPtr>(RAW_NULL);
  context_class_ = static_cast<ClassPtr>(RAW_NULL);
  context_scope_class_ = static_cast<ClassPtr>(RAW_NULL);
  singletargetcache_class_ = static_cast<ClassPtr>(RAW_NULL);
  unlinkedcall_class_ = static_cast<ClassPtr>(RAW_NULL);
  monomorphicsmiablecall_class_ = static_cast<ClassPtr>(RAW_NULL);
  icdata_class_ = static_cast<ClassPtr>(RAW_NULL);
  megamorphic_cache_class_ = static_cast<ClassPtr>(RAW_NULL);
  subtypetestcache_class_ = static_cast<ClassPtr>(RAW_NULL);
  loadingunit_class_ = static_cast<ClassPtr>(RAW_NULL);
  api_error_class_ = static_cast<ClassPtr>(RAW_NULL);
  language_error_class_ = static_cast<ClassPtr>(RAW_NULL);
  unhandled_exception_class_ = static_cast<ClassPtr>(RAW_NULL);
  unwind_error_class_ = static_cast<ClassPtr>(RAW_NULL);
}

1405// An object visitor which will mark all visited objects. This is used to
1406// premark all objects in the vm_isolate_ heap. Also precalculates hash
1407// codes so that we can get the identity hash code of objects in the read-
1408// only VM isolate.
1409class FinalizeVMIsolateVisitor : public ObjectVisitor {
1410 public:
1411 FinalizeVMIsolateVisitor()
1412#if defined(HASH_IN_OBJECT_HEADER)
1413 : counter_(1337)
1414#endif
1415 {
1416 }
1417
1418 void VisitObject(ObjectPtr obj) {
1419 // Free list elements should never be marked.
1420 ASSERT(!obj->untag()->IsMarked());
1421 // No forwarding corpses in the VM isolate.
1422 ASSERT(!obj->IsForwardingCorpse());
1423 if (!obj->IsFreeListElement()) {
1424 obj->untag()->SetMarkBitUnsynchronized();
1425 Object::FinalizeReadOnlyObject(object: obj);
1426#if defined(HASH_IN_OBJECT_HEADER)
1427 // These objects end up in the read-only VM isolate which is shared
1428 // between isolates, so we have to prepopulate them with identity hash
1429 // codes, since we can't add hash codes later.
1430 if (Object::GetCachedHash(obj) == 0) {
1431 // Some classes have identity hash codes that depend on their contents,
1432 // not per object.
1433 ASSERT(!obj->IsStringInstance());
1434 if (obj == Object::null()) {
1435 Object::SetCachedHashIfNotSet(obj, hash: kNullIdentityHash);
1436 } else if (obj == Object::bool_true().ptr()) {
1437 Object::SetCachedHashIfNotSet(obj, hash: kTrueIdentityHash);
1438 } else if (obj == Object::bool_false().ptr()) {
1439 Object::SetCachedHashIfNotSet(obj, hash: kFalseIdentityHash);
1440 } else if (!obj->IsMint() && !obj->IsDouble()) {
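          // Assign the remaining objects identity hashes from a running
          // counter; 0 is reserved to mean "hash not yet set", so it is
          // skipped below.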
          counter_ += 2011;  // The year Dart was announced and a prime.
          counter_ &= 0x3fffffff;
          if (counter_ == 0) counter_++;
          Object::SetCachedHashIfNotSet(obj, counter_);
        }
      }
#endif
#if !defined(DART_PRECOMPILED_RUNTIME)
      if (obj->IsClass()) {
        // Won't be able to update read-only VM isolate classes if implementors
        // are discovered later.
        static_cast<ClassPtr>(obj)->untag()->implementor_cid_ = kDynamicCid;
      }
#endif
    }
  }

 private:
#if defined(HASH_IN_OBJECT_HEADER)
  int32_t counter_;
#endif
};

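// Helper for FinalizeVMIsolate below: assigns the given symbol as the name of
// one of the VM-internal singleton classes.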
#define SET_CLASS_NAME(class_name, name)                                       \
  cls = class_name##_class();                                                  \
  cls.set_name(Symbols::name());

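// Runs on the VM isolate once it is fully set up: names all VM singleton
// classes, then premarks every object in the VM isolate heap so that the heap
// can be shared read-only by all isolate groups.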
void Object::FinalizeVMIsolate(IsolateGroup* isolate_group) {
  // Should only be run by the vm isolate.
  ASSERT(isolate_group == Dart::vm_isolate_group());

  // Finish initialization of synthetic_getter_parameter_names_, which was
  // started in Object::InitOnce().
  synthetic_getter_parameter_names_->SetAt(0, Symbols::This());

  // Set up names for all VM singleton classes.
  Class& cls = Class::Handle();

  SET_CLASS_NAME(class, Class);
  SET_CLASS_NAME(dynamic, Dynamic);
  SET_CLASS_NAME(void, Void);
  SET_CLASS_NAME(type_parameters, TypeParameters);
  SET_CLASS_NAME(type_arguments, TypeArguments);
  SET_CLASS_NAME(patch_class, PatchClass);
  SET_CLASS_NAME(function, Function);
  SET_CLASS_NAME(closure_data, ClosureData);
  SET_CLASS_NAME(ffi_trampoline_data, FfiTrampolineData);
  SET_CLASS_NAME(field, Field);
  SET_CLASS_NAME(script, Script);
  SET_CLASS_NAME(library, LibraryClass);
  SET_CLASS_NAME(namespace, Namespace);
  SET_CLASS_NAME(kernel_program_info, KernelProgramInfo);
  SET_CLASS_NAME(weak_serialization_reference, WeakSerializationReference);
  SET_CLASS_NAME(weak_array, WeakArray);
  SET_CLASS_NAME(code, Code);
  SET_CLASS_NAME(instructions, Instructions);
  SET_CLASS_NAME(instructions_section, InstructionsSection);
  SET_CLASS_NAME(instructions_table, InstructionsTable);
  SET_CLASS_NAME(object_pool, ObjectPool);
  SET_CLASS_NAME(code_source_map, CodeSourceMap);
  SET_CLASS_NAME(pc_descriptors, PcDescriptors);
  SET_CLASS_NAME(compressed_stackmaps, CompressedStackMaps);
  SET_CLASS_NAME(var_descriptors, LocalVarDescriptors);
  SET_CLASS_NAME(exception_handlers, ExceptionHandlers);
  SET_CLASS_NAME(context, Context);
  SET_CLASS_NAME(context_scope, ContextScope);
  SET_CLASS_NAME(sentinel, Sentinel);
  SET_CLASS_NAME(singletargetcache, SingleTargetCache);
  SET_CLASS_NAME(unlinkedcall, UnlinkedCall);
  SET_CLASS_NAME(monomorphicsmiablecall, MonomorphicSmiableCall);
  SET_CLASS_NAME(icdata, ICData);
  SET_CLASS_NAME(megamorphic_cache, MegamorphicCache);
  SET_CLASS_NAME(subtypetestcache, SubtypeTestCache);
  SET_CLASS_NAME(loadingunit, LoadingUnit);
  SET_CLASS_NAME(api_error, ApiError);
  SET_CLASS_NAME(language_error, LanguageError);
  SET_CLASS_NAME(unhandled_exception, UnhandledException);
  SET_CLASS_NAME(unwind_error, UnwindError);

  // Set up names for classes which are also pre-allocated in the vm isolate.
  cls = isolate_group->object_store()->array_class();
  cls.set_name(Symbols::_List());
  cls = isolate_group->object_store()->one_byte_string_class();
  cls.set_name(Symbols::OneByteString());
  cls = isolate_group->object_store()->never_class();
  cls.set_name(Symbols::Never());

  // Set up names for the pseudo-classes for free list elements and forwarding
  // corpses. Mainly this makes VM debugging easier.
  cls = isolate_group->class_table()->At(kFreeListElement);
  cls.set_name(Symbols::FreeListElement());
  cls = isolate_group->class_table()->At(kForwardingCorpse);
  cls.set_name(Symbols::ForwardingCorpse());

#if defined(DART_PRECOMPILER)
  const auto& function =
      Function::Handle(StubCode::UnknownDartCode().function());
  function.set_name(Symbols::OptimizedOut());
#endif  // defined(DART_PRECOMPILER)

  {
    ASSERT(isolate_group == Dart::vm_isolate_group());
    Thread* thread = Thread::Current();
    WritableVMIsolateScope scope(thread);
    HeapIterationScope iteration(thread);
    FinalizeVMIsolateVisitor premarker;
    ASSERT(isolate_group->heap()->UsedInWords(Heap::kNew) == 0);
    iteration.IterateOldObjectsNoImagePages(&premarker);
    // Make the VM isolate read-only again after setting all objects as marked.
    // Note objects in image pages are already pre-marked.
  }
}

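// Precomputes string hashes (they cannot be added once the heap is read-only)
// and zeroes the unused padding between each variable-length object's used
// size and its allocated size, so the trailing bytes are well-defined.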
void Object::FinalizeReadOnlyObject(ObjectPtr object) {
  NoSafepointScope no_safepoint;
  intptr_t cid = object->GetClassId();
  if (cid == kOneByteStringCid) {
    OneByteStringPtr str = static_cast<OneByteStringPtr>(object);
    if (String::GetCachedHash(str) == 0) {
      intptr_t hash = String::Hash(str);
      String::SetCachedHashIfNotSet(str, hash);
    }
    intptr_t size = OneByteString::UnroundedSize(str);
    ASSERT(size <= str->untag()->HeapSize());
    memset(reinterpret_cast<void*>(UntaggedObject::ToAddr(str) + size), 0,
           str->untag()->HeapSize() - size);
  } else if (cid == kTwoByteStringCid) {
    TwoByteStringPtr str = static_cast<TwoByteStringPtr>(object);
    if (String::GetCachedHash(str) == 0) {
      intptr_t hash = String::Hash(str);
      String::SetCachedHashIfNotSet(str, hash);
    }
    ASSERT(String::GetCachedHash(str) != 0);
    intptr_t size = TwoByteString::UnroundedSize(str);
    ASSERT(size <= str->untag()->HeapSize());
    memset(reinterpret_cast<void*>(UntaggedObject::ToAddr(str) + size), 0,
           str->untag()->HeapSize() - size);
  } else if (cid == kExternalOneByteStringCid) {
    ExternalOneByteStringPtr str =
        static_cast<ExternalOneByteStringPtr>(object);
    if (String::GetCachedHash(str) == 0) {
      intptr_t hash = String::Hash(str);
      String::SetCachedHashIfNotSet(str, hash);
    }
  } else if (cid == kExternalTwoByteStringCid) {
    ExternalTwoByteStringPtr str =
        static_cast<ExternalTwoByteStringPtr>(object);
    if (String::GetCachedHash(str) == 0) {
      intptr_t hash = String::Hash(str);
      String::SetCachedHashIfNotSet(str, hash);
    }
  } else if (cid == kCodeSourceMapCid) {
    CodeSourceMapPtr map = CodeSourceMap::RawCast(object);
    intptr_t size = CodeSourceMap::UnroundedSize(map);
    ASSERT(size <= map->untag()->HeapSize());
    memset(reinterpret_cast<void*>(UntaggedObject::ToAddr(map) + size), 0,
           map->untag()->HeapSize() - size);
  } else if (cid == kCompressedStackMapsCid) {
    CompressedStackMapsPtr maps = CompressedStackMaps::RawCast(object);
    intptr_t size = CompressedStackMaps::UnroundedSize(maps);
    ASSERT(size <= maps->untag()->HeapSize());
    memset(reinterpret_cast<void*>(UntaggedObject::ToAddr(maps) + size), 0,
           maps->untag()->HeapSize() - size);
  } else if (cid == kPcDescriptorsCid) {
    PcDescriptorsPtr desc = PcDescriptors::RawCast(object);
    intptr_t size = PcDescriptors::UnroundedSize(desc);
    ASSERT(size <= desc->untag()->HeapSize());
    memset(reinterpret_cast<void*>(UntaggedObject::ToAddr(desc) + size), 0,
           desc->untag()->HeapSize() - size);
  }
}

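// Installs the table of objects referenced by the VM isolate snapshot; only
// the VM isolate itself is allowed to do this.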
void Object::set_vm_isolate_snapshot_object_table(const Array& table) {
  ASSERT(Isolate::Current() == Dart::vm_isolate());
  *vm_isolate_snapshot_object_table_ = table.ptr();
}

// Make unused space in an object whose type has been transformed safe
// for traversing during GC.
// The unused part of the transformed object is marked as a TypedDataInt8Array
// object.
void Object::MakeUnusedSpaceTraversable(const Object& obj,
                                        intptr_t original_size,
                                        intptr_t used_size) {
  ASSERT(Thread::Current()->no_safepoint_scope_depth() > 0);
  ASSERT(!obj.IsNull());
  ASSERT(original_size >= used_size);
  if (original_size > used_size) {
    intptr_t leftover_size = original_size - used_size;

    uword addr = UntaggedObject::ToAddr(obj.ptr()) + used_size;
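    // The leftover space becomes either a variable-length TypedDataInt8Array
    // filler or, if only a bare header fits, a plain instance-sized filler.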
    if (leftover_size >= TypedData::InstanceSize(0)) {
      // Update the leftover space as a TypedDataInt8Array object.
      TypedDataPtr raw =
          static_cast<TypedDataPtr>(UntaggedObject::FromAddr(addr));
      uword new_tags =
          UntaggedObject::ClassIdTag::update(kTypedDataInt8ArrayCid, 0);
      new_tags = UntaggedObject::SizeTag::update(leftover_size, new_tags);
      const bool is_old = obj.ptr()->IsOldObject();
      new_tags = UntaggedObject::OldBit::update(is_old, new_tags);
      new_tags = UntaggedObject::OldAndNotMarkedBit::update(is_old, new_tags);
      new_tags =
          UntaggedObject::OldAndNotRememberedBit::update(is_old, new_tags);
      new_tags = UntaggedObject::NewBit::update(!is_old, new_tags);
      // On architectures with a relaxed memory model, the concurrent marker
      // may observe the write of the filler object's header before observing
      // the new array length, and so treat it as a pointer. Ensure it is a Smi
      // so the marker won't dereference it.
      ASSERT((new_tags & kSmiTagMask) == kSmiTag);
      raw->untag()->tags_ = new_tags;

      intptr_t leftover_len = (leftover_size - TypedData::InstanceSize(0));
      ASSERT(TypedData::InstanceSize(leftover_len) == leftover_size);
      raw->untag()->set_length(Smi::New(leftover_len));
      raw->untag()->RecomputeDataField();
    } else {
      // Update the leftover space as a basic object.
      ASSERT(leftover_size == Object::InstanceSize());
      ObjectPtr raw = static_cast<ObjectPtr>(UntaggedObject::FromAddr(addr));
      uword new_tags = UntaggedObject::ClassIdTag::update(kInstanceCid, 0);
      new_tags = UntaggedObject::SizeTag::update(leftover_size, new_tags);
      const bool is_old = obj.ptr()->IsOldObject();
      new_tags = UntaggedObject::OldBit::update(is_old, new_tags);
      new_tags = UntaggedObject::OldAndNotMarkedBit::update(is_old, new_tags);
      new_tags =
          UntaggedObject::OldAndNotRememberedBit::update(is_old, new_tags);
      new_tags = UntaggedObject::NewBit::update(!is_old, new_tags);
      // On architectures with a relaxed memory model, the concurrent marker
      // may observe the write of the filler object's header before observing
      // the new array length, and so treat it as a pointer. Ensure it is a Smi
      // so the marker won't dereference it.
      ASSERT((new_tags & kSmiTagMask) == kSmiTag);
      raw->untag()->tags_ = new_tags;
    }
  }
}

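// Debug-only sanity check: every predefined class with a valid entry in the
// class table must have a C++ handle vtable registered, while the illegal and
// pseudo class ids deliberately have none.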
void Object::VerifyBuiltinVtables() {
#if defined(DEBUG)
  ASSERT(builtin_vtables_[kIllegalCid] == 0);
  ASSERT(builtin_vtables_[kFreeListElement] == 0);
  ASSERT(builtin_vtables_[kForwardingCorpse] == 0);
  ClassTable* table = IsolateGroup::Current()->class_table();
  for (intptr_t cid = kObjectCid; cid < kNumPredefinedCids; cid++) {
    if (table->HasValidClassAt(cid)) {
      ASSERT(builtin_vtables_[cid] != 0);
    }
  }
#endif
}

void Object::RegisterClass(const Class& cls,
                           const String& name,
                           const Library& lib) {
  ASSERT(name.Length() > 0);
  ASSERT(name.CharAt(0) != '_');
  cls.set_name(name);
  lib.AddClass(cls);
}

void Object::RegisterPrivateClass(const Class& cls,
                                  const String& public_class_name,
                                  const Library& lib) {
  ASSERT(public_class_name.Length() > 0);
  ASSERT(public_class_name.CharAt(0) == '_');
  String& str = String::Handle();
  str = lib.PrivateName(public_class_name);
  cls.set_name(str);
  lib.AddClass(cls);
}

// Initialize a new isolate from source or from a snapshot.
//
// There are three possibilities:
//   1. Running a Kernel binary. This function will bootstrap from the KERNEL
//      file.
//   2. There is no vm snapshot. This function will bootstrap from source.
//   3. There is a vm snapshot. The caller should initialize from the snapshot.
//
// A non-null kernel argument indicates (1).
// A nullptr kernel indicates (2) or (3).
ErrorPtr Object::Init(IsolateGroup* isolate_group,
                      const uint8_t* kernel_buffer,
                      intptr_t kernel_buffer_size) {
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  ASSERT(isolate_group == thread->isolate_group());
  TIMELINE_DURATION(thread, Isolate, "Object::Init");

#if defined(DART_PRECOMPILED_RUNTIME)
  const bool bootstrapping = false;
#else
  const bool is_kernel = (kernel_buffer != nullptr);
  const bool bootstrapping =
      (Dart::vm_snapshot_kind() == Snapshot::kNone) || is_kernel;
#endif  // defined(DART_PRECOMPILED_RUNTIME)

  if (bootstrapping) {
#if !defined(DART_PRECOMPILED_RUNTIME)
    // Object::Init version when we are bootstrapping from source or from a
    // Kernel binary.
    // This will initialize the isolate group's object_store, shared by all
    // isolates running in the isolate group.
    ObjectStore* object_store = isolate_group->object_store();
    SafepointWriteRwLocker ml(thread, isolate_group->program_lock());

    Class& cls = Class::Handle(zone);
    Type& type = Type::Handle(zone);
    Array& array = Array::Handle(zone);
    WeakArray& weak_array = WeakArray::Handle(zone);
    Library& lib = Library::Handle(zone);
    TypeArguments& type_args = TypeArguments::Handle(zone);

    // All RawArray fields will be initialized to an empty array, so initialize
    // the array class first.
    cls = Class::New<Array, RTN::Array>(isolate_group);
    ASSERT(object_store->array_class() == Class::null());
    object_store->set_array_class(cls);

    // VM classes that are parameterized (Array, ImmutableArray,
    // GrowableObjectArray, Map, ConstMap, Set, ConstSet) are also
    // pre-finalized, so CalculateFieldOffsets() is not called and we need to
    // set the offset of their type_arguments_ field, which is explicitly
    // declared in their respective Raw* classes.
    cls.set_type_arguments_field_offset(Array::type_arguments_offset(),
                                        RTN::Array::type_arguments_offset());
    cls.set_num_type_arguments_unsafe(1);

    // Set up the growable object array class (has to be done after the array
    // class is set up, as one of its fields is an array object).
    cls = Class::New<GrowableObjectArray, RTN::GrowableObjectArray>(
        isolate_group);
    object_store->set_growable_object_array_class(cls);
    cls.set_type_arguments_field_offset(
        GrowableObjectArray::type_arguments_offset(),
        RTN::GrowableObjectArray::type_arguments_offset());
    cls.set_num_type_arguments_unsafe(1);

    // Initialize hash set for regexp_table_.
    const intptr_t kInitialCanonicalRegExpSize = 4;
    weak_array = HashTables::New<CanonicalRegExpSet>(
        kInitialCanonicalRegExpSize, Heap::kOld);
    object_store->set_regexp_table(weak_array);

    // Initialize hash set for canonical types.
    const intptr_t kInitialCanonicalTypeSize = 16;
    array = HashTables::New<CanonicalTypeSet>(kInitialCanonicalTypeSize,
                                              Heap::kOld);
    object_store->set_canonical_types(array);

    // Initialize hash set for canonical function types.
    const intptr_t kInitialCanonicalFunctionTypeSize = 16;
    array = HashTables::New<CanonicalFunctionTypeSet>(
        kInitialCanonicalFunctionTypeSize, Heap::kOld);
    object_store->set_canonical_function_types(array);

    // Initialize hash set for canonical record types.
    const intptr_t kInitialCanonicalRecordTypeSize = 16;
    array = HashTables::New<CanonicalRecordTypeSet>(
        kInitialCanonicalRecordTypeSize, Heap::kOld);
    object_store->set_canonical_record_types(array);

    // Initialize hash set for canonical type parameters.
    const intptr_t kInitialCanonicalTypeParameterSize = 4;
    array = HashTables::New<CanonicalTypeParameterSet>(
        kInitialCanonicalTypeParameterSize, Heap::kOld);
    object_store->set_canonical_type_parameters(array);

    // Initialize hash set for canonical_type_arguments_.
    const intptr_t kInitialCanonicalTypeArgumentsSize = 4;
    array = HashTables::New<CanonicalTypeArgumentsSet>(
        kInitialCanonicalTypeArgumentsSize, Heap::kOld);
    object_store->set_canonical_type_arguments(array);

    // Setup type class early in the process.
    const Class& type_cls =
        Class::Handle(zone, Class::New<Type, RTN::Type>(isolate_group));
    const Class& function_type_cls = Class::Handle(
        zone, Class::New<FunctionType, RTN::FunctionType>(isolate_group));
    const Class& record_type_cls = Class::Handle(
        zone, Class::New<RecordType, RTN::RecordType>(isolate_group));
    const Class& type_parameter_cls = Class::Handle(
        zone, Class::New<TypeParameter, RTN::TypeParameter>(isolate_group));
    const Class& library_prefix_cls = Class::Handle(
        zone, Class::New<LibraryPrefix, RTN::LibraryPrefix>(isolate_group));

    // Pre-allocate the OneByteString class needed by the symbol table.
    cls = Class::NewStringClass(kOneByteStringCid, isolate_group);
    object_store->set_one_byte_string_class(cls);

    // Pre-allocate the TwoByteString class needed by the symbol table.
    cls = Class::NewStringClass(kTwoByteStringCid, isolate_group);
    object_store->set_two_byte_string_class(cls);

    // Setup the symbol table for the symbols created in the isolate.
    Symbols::SetupSymbolTable(isolate_group);

    // Set up the libraries array before initializing the core library.
    const GrowableObjectArray& libraries =
        GrowableObjectArray::Handle(zone, GrowableObjectArray::New(Heap::kOld));
    object_store->set_libraries(libraries);

    // Pre-register the core library.
    Library::InitCoreLibrary(isolate_group);

    // Basic infrastructure has been set up, initialize the class dictionary.
    const Library& core_lib = Library::Handle(zone, Library::CoreLibrary());
    ASSERT(!core_lib.IsNull());

    const GrowableObjectArray& pending_classes =
        GrowableObjectArray::Handle(zone, GrowableObjectArray::New());
    object_store->set_pending_classes(pending_classes);

    // Now that the symbol table is initialized and that the core dictionary
    // as well as the core implementation dictionary have been set up,
    // preallocate the remaining classes and register them by name in the
    // dictionaries.
    String& name = String::Handle(zone);
    cls = object_store->array_class();  // Was allocated above.
    RegisterPrivateClass(cls, Symbols::_List(), core_lib);
    pending_classes.Add(cls);
    // We cannot use NewNonParameterizedType(), because Array is
    // parameterized. Warning: class _List has not been patched yet. Its
    // declared number of type parameters is still 0. It will become 1 after
    // patching. The array type allocated below represents the raw type _List
    // and not _List<E> as we could expect. Use with caution.
    type = Type::New(Class::Handle(zone, cls.ptr()),
                     Object::null_type_arguments(), Nullability::kNonNullable);
    type.SetIsFinalized();
    type ^= type.Canonicalize(thread);
    object_store->set_array_type(type);

    cls = object_store->growable_object_array_class();  // Was allocated above.
    RegisterPrivateClass(cls, Symbols::_GrowableList(), core_lib);
    pending_classes.Add(cls);

    cls = Class::New<Array, RTN::Array>(kImmutableArrayCid, isolate_group);
    object_store->set_immutable_array_class(cls);
    cls.set_type_arguments_field_offset(Array::type_arguments_offset(),
                                        RTN::Array::type_arguments_offset());
    cls.set_num_type_arguments_unsafe(1);
    ASSERT(object_store->immutable_array_class() !=
           object_store->array_class());
    cls.set_is_prefinalized();
    RegisterPrivateClass(cls, Symbols::_ImmutableList(), core_lib);
    pending_classes.Add(cls);

    cls = object_store->one_byte_string_class();  // Was allocated above.
    RegisterPrivateClass(cls, Symbols::OneByteString(), core_lib);
    pending_classes.Add(cls);

    cls = object_store->two_byte_string_class();  // Was allocated above.
    RegisterPrivateClass(cls, Symbols::TwoByteString(), core_lib);
    pending_classes.Add(cls);

    cls = Class::NewStringClass(kExternalOneByteStringCid, isolate_group);
    object_store->set_external_one_byte_string_class(cls);
    RegisterPrivateClass(cls, Symbols::ExternalOneByteString(), core_lib);
    pending_classes.Add(cls);

    cls = Class::NewStringClass(kExternalTwoByteStringCid, isolate_group);
    object_store->set_external_two_byte_string_class(cls);
    RegisterPrivateClass(cls, Symbols::ExternalTwoByteString(), core_lib);
    pending_classes.Add(cls);

    // Pre-register the isolate library so the native class implementations can
    // be hooked up before compiling it.
    Library& isolate_lib = Library::Handle(
        zone, Library::LookupLibrary(thread, Symbols::DartIsolate()));
    if (isolate_lib.IsNull()) {
      isolate_lib = Library::NewLibraryHelper(Symbols::DartIsolate(), true);
      isolate_lib.SetLoadRequested();
      isolate_lib.Register(thread);
    }
    object_store->set_bootstrap_library(ObjectStore::kIsolate, isolate_lib);
    ASSERT(!isolate_lib.IsNull());
    ASSERT(isolate_lib.ptr() == Library::IsolateLibrary());

    cls = Class::New<Capability, RTN::Capability>(isolate_group);
    RegisterPrivateClass(cls, Symbols::_Capability(), isolate_lib);
    pending_classes.Add(cls);

    cls = Class::New<ReceivePort, RTN::ReceivePort>(isolate_group);
    RegisterPrivateClass(cls, Symbols::_RawReceivePort(), isolate_lib);
    pending_classes.Add(cls);

    cls = Class::New<SendPort, RTN::SendPort>(isolate_group);
    RegisterPrivateClass(cls, Symbols::_SendPort(), isolate_lib);
    pending_classes.Add(cls);

    cls = Class::New<TransferableTypedData, RTN::TransferableTypedData>(
        isolate_group);
    RegisterPrivateClass(cls, Symbols::_TransferableTypedDataImpl(),
                         isolate_lib);
    pending_classes.Add(cls);

    const Class& stacktrace_cls = Class::Handle(
        zone, Class::New<StackTrace, RTN::StackTrace>(isolate_group));
    RegisterPrivateClass(stacktrace_cls, Symbols::_StackTrace(), core_lib);
    pending_classes.Add(stacktrace_cls);
    // Super type set below, after Object is allocated.

    cls = Class::New<RegExp, RTN::RegExp>(isolate_group);
    RegisterPrivateClass(cls, Symbols::_RegExp(), core_lib);
    pending_classes.Add(cls);

    // Initialize the base interfaces used by the core VM classes.

    // Allocate and initialize the pre-allocated classes in the core library.
    // The script and token index of these pre-allocated classes is set up when
    // the corelib script is compiled.
    cls = Class::New<Instance, RTN::Instance>(kInstanceCid, isolate_group);
    object_store->set_object_class(cls);
    cls.set_name(Symbols::Object());
    cls.set_num_type_arguments_unsafe(0);
    cls.set_is_prefinalized();
    cls.set_is_const();
    core_lib.AddClass(cls);
    pending_classes.Add(cls);
    type = Type::NewNonParameterizedType(cls);
    ASSERT(type.IsCanonical());
    object_store->set_object_type(type);
    type = type.ToNullability(Nullability::kLegacy, Heap::kOld);
    ASSERT(type.IsCanonical());
    object_store->set_legacy_object_type(type);
    type = type.ToNullability(Nullability::kNonNullable, Heap::kOld);
    ASSERT(type.IsCanonical());
    object_store->set_non_nullable_object_type(type);
    type = type.ToNullability(Nullability::kNullable, Heap::kOld);
    ASSERT(type.IsCanonical());
    object_store->set_nullable_object_type(type);

    cls = Class::New<Bool, RTN::Bool>(isolate_group);
    object_store->set_bool_class(cls);
    RegisterClass(cls, Symbols::Bool(), core_lib);
    pending_classes.Add(cls);

    cls = Class::New<Instance, RTN::Instance>(kNullCid, isolate_group);
    object_store->set_null_class(cls);
    cls.set_num_type_arguments_unsafe(0);
    cls.set_is_prefinalized();
    RegisterClass(cls, Symbols::Null(), core_lib);
    pending_classes.Add(cls);

    cls = Class::New<Instance, RTN::Instance>(kNeverCid, isolate_group);
    cls.set_num_type_arguments_unsafe(0);
    cls.set_is_allocate_finalized();
    cls.set_is_declaration_loaded();
    cls.set_is_type_finalized();
    cls.set_name(Symbols::Never());
    object_store->set_never_class(cls);

    ASSERT(!library_prefix_cls.IsNull());
    RegisterPrivateClass(library_prefix_cls, Symbols::_LibraryPrefix(),
                         core_lib);
    pending_classes.Add(library_prefix_cls);

    RegisterPrivateClass(type_cls, Symbols::_Type(), core_lib);
    pending_classes.Add(type_cls);

    RegisterPrivateClass(function_type_cls, Symbols::_FunctionType(), core_lib);
    pending_classes.Add(function_type_cls);

    RegisterPrivateClass(record_type_cls, Symbols::_RecordType(), core_lib);
    pending_classes.Add(record_type_cls);

    RegisterPrivateClass(type_parameter_cls, Symbols::_TypeParameter(),
                         core_lib);
    pending_classes.Add(type_parameter_cls);

    cls = Class::New<Integer, RTN::Integer>(isolate_group);
    object_store->set_integer_implementation_class(cls);
    RegisterPrivateClass(cls, Symbols::_IntegerImplementation(), core_lib);
    pending_classes.Add(cls);

    cls = Class::New<Smi, RTN::Smi>(isolate_group);
    object_store->set_smi_class(cls);
    RegisterPrivateClass(cls, Symbols::_Smi(), core_lib);
    pending_classes.Add(cls);

    cls = Class::New<Mint, RTN::Mint>(isolate_group);
    object_store->set_mint_class(cls);
    RegisterPrivateClass(cls, Symbols::_Mint(), core_lib);
    pending_classes.Add(cls);

    cls = Class::New<Double, RTN::Double>(isolate_group);
    object_store->set_double_class(cls);
    RegisterPrivateClass(cls, Symbols::_Double(), core_lib);
    pending_classes.Add(cls);

    // Class that represents the Dart class _Closure and C++ class Closure.
    cls = Class::New<Closure, RTN::Closure>(isolate_group);
    object_store->set_closure_class(cls);
    RegisterPrivateClass(cls, Symbols::_Closure(), core_lib);
    pending_classes.Add(cls);

    cls = Class::New<Record, RTN::Record>(isolate_group);
    RegisterPrivateClass(cls, Symbols::_Record(), core_lib);
    pending_classes.Add(cls);

    cls = Class::New<WeakProperty, RTN::WeakProperty>(isolate_group);
    object_store->set_weak_property_class(cls);
    RegisterPrivateClass(cls, Symbols::_WeakProperty(), core_lib);

    cls = Class::New<WeakReference, RTN::WeakReference>(isolate_group);
    cls.set_type_arguments_field_offset(
        WeakReference::type_arguments_offset(),
        RTN::WeakReference::type_arguments_offset());
    cls.set_num_type_arguments_unsafe(1);
    object_store->set_weak_reference_class(cls);
    RegisterPrivateClass(cls, Symbols::_WeakReference(), core_lib);

    // Pre-register the mirrors library so we can place the vm class
    // MirrorReference there rather than the core library.
    lib = Library::LookupLibrary(thread, Symbols::DartMirrors());
    if (lib.IsNull()) {
      lib = Library::NewLibraryHelper(Symbols::DartMirrors(), true);
      lib.SetLoadRequested();
      lib.Register(thread);
    }
    object_store->set_bootstrap_library(ObjectStore::kMirrors, lib);
    ASSERT(!lib.IsNull());
    ASSERT(lib.ptr() == Library::MirrorsLibrary());

    cls = Class::New<MirrorReference, RTN::MirrorReference>(isolate_group);
    RegisterPrivateClass(cls, Symbols::_MirrorReference(), lib);

    // Pre-register the collection library so we can place the vm class
    // Map there rather than the core library.
    lib = Library::LookupLibrary(thread, Symbols::DartCollection());
    if (lib.IsNull()) {
      lib = Library::NewLibraryHelper(Symbols::DartCollection(), true);
      lib.SetLoadRequested();
      lib.Register(thread);
    }
    object_store->set_bootstrap_library(ObjectStore::kCollection, lib);
    ASSERT(!lib.IsNull());
    ASSERT(lib.ptr() == Library::CollectionLibrary());
    cls = Class::New<Map, RTN::Map>(isolate_group);
    object_store->set_map_impl_class(cls);
    cls.set_type_arguments_field_offset(Map::type_arguments_offset(),
                                        RTN::Map::type_arguments_offset());
    cls.set_num_type_arguments_unsafe(2);
    RegisterPrivateClass(cls, Symbols::_Map(), lib);
    pending_classes.Add(cls);

    cls = Class::New<Map, RTN::Map>(kConstMapCid, isolate_group);
    object_store->set_const_map_impl_class(cls);
    cls.set_type_arguments_field_offset(Map::type_arguments_offset(),
                                        RTN::Map::type_arguments_offset());
    cls.set_num_type_arguments_unsafe(2);
    cls.set_is_prefinalized();
    RegisterPrivateClass(cls, Symbols::_ConstMap(), lib);
    pending_classes.Add(cls);

    cls = Class::New<Set, RTN::Set>(isolate_group);
    object_store->set_set_impl_class(cls);
    cls.set_type_arguments_field_offset(Set::type_arguments_offset(),
                                        RTN::Set::type_arguments_offset());
    cls.set_num_type_arguments_unsafe(1);
    RegisterPrivateClass(cls, Symbols::_Set(), lib);
    pending_classes.Add(cls);

    cls = Class::New<Set, RTN::Set>(kConstSetCid, isolate_group);
    object_store->set_const_set_impl_class(cls);
    cls.set_type_arguments_field_offset(Set::type_arguments_offset(),
                                        RTN::Set::type_arguments_offset());
    cls.set_num_type_arguments_unsafe(1);
    cls.set_is_prefinalized();
    RegisterPrivateClass(cls, Symbols::_ConstSet(), lib);
    pending_classes.Add(cls);

    // Pre-register the async library so we can place the vm class
    // FutureOr there rather than the core library.
    lib = Library::LookupLibrary(thread, Symbols::DartAsync());
    if (lib.IsNull()) {
      lib = Library::NewLibraryHelper(Symbols::DartAsync(), true);
      lib.SetLoadRequested();
      lib.Register(thread);
    }
    object_store->set_bootstrap_library(ObjectStore::kAsync, lib);
    ASSERT(!lib.IsNull());
    ASSERT(lib.ptr() == Library::AsyncLibrary());
    cls = Class::New<FutureOr, RTN::FutureOr>(isolate_group);
    cls.set_type_arguments_field_offset(FutureOr::type_arguments_offset(),
                                        RTN::FutureOr::type_arguments_offset());
    cls.set_num_type_arguments_unsafe(1);
    RegisterClass(cls, Symbols::FutureOr(), lib);
    pending_classes.Add(cls);
    object_store->set_future_or_class(cls);

    cls = Class::New<SuspendState, RTN::SuspendState>(isolate_group);
    RegisterPrivateClass(cls, Symbols::_SuspendState(), lib);
    pending_classes.Add(cls);

    // Pre-register the developer library so we can place the vm class
    // UserTag there rather than the core library.
    lib = Library::LookupLibrary(thread, Symbols::DartDeveloper());
    if (lib.IsNull()) {
      lib = Library::NewLibraryHelper(Symbols::DartDeveloper(), true);
      lib.SetLoadRequested();
      lib.Register(thread);
    }
    object_store->set_bootstrap_library(ObjectStore::kDeveloper, lib);
    ASSERT(!lib.IsNull());
    ASSERT(lib.ptr() == Library::DeveloperLibrary());
    cls = Class::New<UserTag, RTN::UserTag>(isolate_group);
    RegisterPrivateClass(cls, Symbols::_UserTag(), lib);
    pending_classes.Add(cls);

    // Set up some default native field classes which can be extended for
    // specifying native fields in dart classes.
    Library::InitNativeWrappersLibrary(isolate_group, is_kernel);
    ASSERT(object_store->native_wrappers_library() != Library::null());

    // Pre-register the typed_data library so the native class implementations
    // can be hooked up before compiling it.
    lib = Library::LookupLibrary(thread, Symbols::DartTypedData());
    if (lib.IsNull()) {
      lib = Library::NewLibraryHelper(Symbols::DartTypedData(), true);
      lib.SetLoadRequested();
      lib.Register(thread);
    }
    object_store->set_bootstrap_library(ObjectStore::kTypedData, lib);
    ASSERT(!lib.IsNull());
    ASSERT(lib.ptr() == Library::TypedDataLibrary());
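// For each typed-data element type (Int8, Uint8, ...), allocate its class and
// register it under its private name in dart:typed_data.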
#define REGISTER_TYPED_DATA_CLASS(clazz)                                       \
  cls = Class::NewTypedDataClass(kTypedData##clazz##ArrayCid, isolate_group);  \
  RegisterPrivateClass(cls, Symbols::_##clazz##List(), lib);

    DART_CLASS_LIST_TYPED_DATA(REGISTER_TYPED_DATA_CLASS);
#undef REGISTER_TYPED_DATA_CLASS
#define REGISTER_TYPED_DATA_VIEW_CLASS(clazz)                                  \
  cls =                                                                        \
      Class::NewTypedDataViewClass(kTypedData##clazz##ViewCid, isolate_group); \
  RegisterPrivateClass(cls, Symbols::_##clazz##View(), lib);                   \
  pending_classes.Add(cls);                                                    \
  cls = Class::NewUnmodifiableTypedDataViewClass(                              \
      kUnmodifiableTypedData##clazz##ViewCid, isolate_group);                  \
  RegisterPrivateClass(cls, Symbols::_Unmodifiable##clazz##View(), lib);       \
  pending_classes.Add(cls);

    CLASS_LIST_TYPED_DATA(REGISTER_TYPED_DATA_VIEW_CLASS);

    cls = Class::NewTypedDataViewClass(kByteDataViewCid, isolate_group);
    RegisterPrivateClass(cls, Symbols::_ByteDataView(), lib);
    pending_classes.Add(cls);
    cls = Class::NewUnmodifiableTypedDataViewClass(kUnmodifiableByteDataViewCid,
                                                   isolate_group);
    RegisterPrivateClass(cls, Symbols::_UnmodifiableByteDataView(), lib);
    pending_classes.Add(cls);

#undef REGISTER_TYPED_DATA_VIEW_CLASS
#define REGISTER_EXT_TYPED_DATA_CLASS(clazz)                                   \
  cls = Class::NewExternalTypedDataClass(kExternalTypedData##clazz##Cid,       \
                                         isolate_group);                       \
  RegisterPrivateClass(cls, Symbols::_External##clazz(), lib);

    cls = Class::New<Instance, RTN::Instance>(kByteBufferCid, isolate_group,
                                              /*register_class=*/false);
    cls.set_instance_size(0, 0);
    cls.set_next_field_offset(-kWordSize, -compiler::target::kWordSize);
    isolate_group->class_table()->Register(cls);
    RegisterPrivateClass(cls, Symbols::_ByteBuffer(), lib);
    pending_classes.Add(cls);

    CLASS_LIST_TYPED_DATA(REGISTER_EXT_TYPED_DATA_CLASS);
#undef REGISTER_EXT_TYPED_DATA_CLASS
    // Register Float32x4, Int32x4, and Float64x2 in the object store.
    cls = Class::New<Float32x4, RTN::Float32x4>(isolate_group);
    RegisterPrivateClass(cls, Symbols::_Float32x4(), lib);
    pending_classes.Add(cls);
    object_store->set_float32x4_class(cls);

    cls = Class::New<Instance, RTN::Instance>(kIllegalCid, isolate_group,
                                              /*register_class=*/true,
                                              /*is_abstract=*/true);
    RegisterClass(cls, Symbols::Float32x4(), lib);
    cls.set_num_type_arguments_unsafe(0);
    cls.set_is_prefinalized();
    type = Type::NewNonParameterizedType(cls);
    object_store->set_float32x4_type(type);

    cls = Class::New<Int32x4, RTN::Int32x4>(isolate_group);
    RegisterPrivateClass(cls, Symbols::_Int32x4(), lib);
    pending_classes.Add(cls);
    object_store->set_int32x4_class(cls);

    cls = Class::New<Instance, RTN::Instance>(kIllegalCid, isolate_group,
                                              /*register_class=*/true,
                                              /*is_abstract=*/true);
    RegisterClass(cls, Symbols::Int32x4(), lib);
    cls.set_num_type_arguments_unsafe(0);
    cls.set_is_prefinalized();
    type = Type::NewNonParameterizedType(cls);
    object_store->set_int32x4_type(type);

    cls = Class::New<Float64x2, RTN::Float64x2>(isolate_group);
    RegisterPrivateClass(cls, Symbols::_Float64x2(), lib);
    pending_classes.Add(cls);
    object_store->set_float64x2_class(cls);

    cls = Class::New<Instance, RTN::Instance>(kIllegalCid, isolate_group,
                                              /*register_class=*/true,
                                              /*is_abstract=*/true);
    RegisterClass(cls, Symbols::Float64x2(), lib);
    cls.set_num_type_arguments_unsafe(0);
    cls.set_is_prefinalized();
    type = Type::NewNonParameterizedType(cls);
    object_store->set_float64x2_type(type);

    // Set the super type of class StackTrace to Object type so that the
    // 'toString' method is implemented.
    type = object_store->object_type();
    stacktrace_cls.set_super_type(type);

    // Abstract class that represents the Dart class Type.
    // Note that this class is implemented by Dart class _AbstractType.
    cls = Class::New<Instance, RTN::Instance>(kIllegalCid, isolate_group,
                                              /*register_class=*/true,
                                              /*is_abstract=*/true);
    cls.set_num_type_arguments_unsafe(0);
    cls.set_is_prefinalized();
    RegisterClass(cls, Symbols::Type(), core_lib);
    pending_classes.Add(cls);
    type = Type::NewNonParameterizedType(cls);
    object_store->set_type_type(type);

    // Abstract class that represents the Dart class Function.
    cls = Class::New<Instance, RTN::Instance>(kIllegalCid, isolate_group,
                                              /*register_class=*/true,
                                              /*is_abstract=*/true);
    cls.set_num_type_arguments_unsafe(0);
    cls.set_is_prefinalized();
    RegisterClass(cls, Symbols::Function(), core_lib);
    pending_classes.Add(cls);
    type = Type::NewNonParameterizedType(cls);
    object_store->set_function_type(type);

    // Abstract class that represents the Dart class Record.
    cls = Class::New<Instance, RTN::Instance>(kIllegalCid, isolate_group,
                                              /*register_class=*/true,
                                              /*is_abstract=*/true);
    RegisterClass(cls, Symbols::Record(), core_lib);
    pending_classes.Add(cls);
    object_store->set_record_class(cls);

    cls = Class::New<Number, RTN::Number>(isolate_group);
    RegisterClass(cls, Symbols::Number(), core_lib);
    pending_classes.Add(cls);
    type = Type::NewNonParameterizedType(cls);
    object_store->set_number_type(type);

    cls = Class::New<Instance, RTN::Instance>(kIllegalCid, isolate_group,
                                              /*register_class=*/true,
                                              /*is_abstract=*/true);
    RegisterClass(cls, Symbols::Int(), core_lib);
    cls.set_num_type_arguments_unsafe(0);
    cls.set_is_prefinalized();
    pending_classes.Add(cls);
    type = Type::NewNonParameterizedType(cls);
    object_store->set_int_type(type);
    type = type.ToNullability(Nullability::kLegacy, Heap::kOld);
    object_store->set_legacy_int_type(type);
    type = type.ToNullability(Nullability::kNonNullable, Heap::kOld);
    object_store->set_non_nullable_int_type(type);
    type = type.ToNullability(Nullability::kNullable, Heap::kOld);
    object_store->set_nullable_int_type(type);

    cls = Class::New<Instance, RTN::Instance>(kIllegalCid, isolate_group,
                                              /*register_class=*/true,
                                              /*is_abstract=*/true);
    RegisterClass(cls, Symbols::Double(), core_lib);
    cls.set_num_type_arguments_unsafe(0);
    cls.set_is_prefinalized();
    pending_classes.Add(cls);
    type = Type::NewNonParameterizedType(cls);
    object_store->set_double_type(type);
    type = type.ToNullability(Nullability::kNullable, Heap::kOld);
    object_store->set_nullable_double_type(type);

    name = Symbols::_String().ptr();
    cls = Class::New<Instance, RTN::Instance>(kIllegalCid, isolate_group,
                                              /*register_class=*/true,
                                              /*is_abstract=*/true);
    RegisterClass(cls, name, core_lib);
    cls.set_num_type_arguments_unsafe(0);
    cls.set_is_prefinalized();
    pending_classes.Add(cls);
    type = Type::NewNonParameterizedType(cls);
    object_store->set_string_type(type);
    type = type.ToNullability(Nullability::kLegacy, Heap::kOld);
    object_store->set_legacy_string_type(type);

    cls = object_store->bool_class();
    type = Type::NewNonParameterizedType(cls);
    object_store->set_bool_type(type);

    cls = object_store->smi_class();
    type = Type::NewNonParameterizedType(cls);
    object_store->set_smi_type(type);
    type = type.ToNullability(Nullability::kLegacy, Heap::kOld);

    cls = object_store->mint_class();
    type = Type::NewNonParameterizedType(cls);
    object_store->set_mint_type(type);

    // The classes 'void' and 'dynamic' are phony classes to make type checking
    // more regular; they live in the VM isolate. The class 'void' is not
    // registered in the class dictionary because its name is a reserved word.
    // The class 'dynamic' is registered in the class dictionary because its
    // name is a built-in identifier (this is wrong). The corresponding types
    // are stored in the object store.
    cls = object_store->null_class();
    type =
        Type::New(cls, Object::null_type_arguments(), Nullability::kNullable);
    type.SetIsFinalized();
    type ^= type.Canonicalize(thread);
    object_store->set_null_type(type);
    cls.set_declaration_type(type);
    ASSERT(type.IsNullable());

    // Consider removing when/if Null becomes an ordinary class.
    type = object_store->object_type();
    cls.set_super_type(type);

    cls = object_store->never_class();
    type = Type::New(cls, Object::null_type_arguments(),
                     Nullability::kNonNullable);
    type.SetIsFinalized();
    type ^= type.Canonicalize(thread);
    object_store->set_never_type(type);
    type_args = TypeArguments::New(1);
    type_args.SetTypeAt(0, type);
    type_args = type_args.Canonicalize(thread);
    object_store->set_type_argument_never(type_args);

    // Create and cache commonly used type arguments <int>, <double>,
    // <String>, <String, dynamic> and <String, String>.
    type_args = TypeArguments::New(1);
    type = object_store->int_type();
    type_args.SetTypeAt(0, type);
    type_args = type_args.Canonicalize(thread);
    object_store->set_type_argument_int(type_args);
    type_args = TypeArguments::New(1);
    type = object_store->legacy_int_type();
    type_args.SetTypeAt(0, type);
    type_args = type_args.Canonicalize(thread);
    object_store->set_type_argument_legacy_int(type_args);

    type_args = TypeArguments::New(1);
    type = object_store->double_type();
    type_args.SetTypeAt(0, type);
    type_args = type_args.Canonicalize(thread);
    object_store->set_type_argument_double(type_args);

    type_args = TypeArguments::New(1);
    type = object_store->string_type();
    type_args.SetTypeAt(0, type);
    type_args = type_args.Canonicalize(thread);
    object_store->set_type_argument_string(type_args);
    type_args = TypeArguments::New(1);
    type = object_store->legacy_string_type();
    type_args.SetTypeAt(0, type);
    type_args = type_args.Canonicalize(thread);
    object_store->set_type_argument_legacy_string(type_args);

    type_args = TypeArguments::New(2);
    type = object_store->string_type();
    type_args.SetTypeAt(0, type);
    type_args.SetTypeAt(1, Object::dynamic_type());
    type_args = type_args.Canonicalize(thread);
    object_store->set_type_argument_string_dynamic(type_args);

    type_args = TypeArguments::New(2);
    type = object_store->string_type();
    type_args.SetTypeAt(0, type);
    type_args.SetTypeAt(1, type);
    type_args = type_args.Canonicalize(thread);
    object_store->set_type_argument_string_string(type_args);

    lib = Library::LookupLibrary(thread, Symbols::DartFfi());
    if (lib.IsNull()) {
      lib = Library::NewLibraryHelper(Symbols::DartFfi(), true);
      lib.SetLoadRequested();
      lib.Register(thread);
    }
    object_store->set_bootstrap_library(ObjectStore::kFfi, lib);

    cls = Class::New<Instance, RTN::Instance>(kFfiNativeTypeCid,
                                              isolate_group);
    cls.set_num_type_arguments_unsafe(0);
    cls.set_is_prefinalized();
    pending_classes.Add(cls);
    object_store->set_ffi_native_type_class(cls);
    RegisterClass(cls, Symbols::FfiNativeType(), lib);

#define REGISTER_FFI_TYPE_MARKER(clazz)                                        \
  cls = Class::New<Instance, RTN::Instance>(kFfi##clazz##Cid, isolate_group);  \
  cls.set_num_type_arguments_unsafe(0);                                        \
  cls.set_is_prefinalized();                                                   \
  pending_classes.Add(cls);                                                    \
  RegisterClass(cls, Symbols::Ffi##clazz(), lib);
    CLASS_LIST_FFI_TYPE_MARKER(REGISTER_FFI_TYPE_MARKER);
#undef REGISTER_FFI_TYPE_MARKER

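    // NativeFunction carries its native signature as a single type argument;
    // since the class declares no Dart fields, the type_arguments slot is
    // placed at the first free field offset.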
    cls = Class::New<Instance, RTN::Instance>(kFfiNativeFunctionCid,
                                              isolate_group);
    cls.set_type_arguments_field_offset(Instance::NextFieldOffset(),
                                        RTN::Instance::NextFieldOffset());
    cls.set_num_type_arguments_unsafe(1);
    cls.set_is_prefinalized();
    pending_classes.Add(cls);
    RegisterClass(cls, Symbols::FfiNativeFunction(), lib);

    cls = Class::NewPointerClass(kPointerCid, isolate_group);
    object_store->set_ffi_pointer_class(cls);
    pending_classes.Add(cls);
    RegisterClass(cls, Symbols::FfiPointer(), lib);

    cls = Class::New<DynamicLibrary, RTN::DynamicLibrary>(kDynamicLibraryCid,
                                                          isolate_group);
    cls.set_instance_size(DynamicLibrary::InstanceSize(),
                          compiler::target::RoundedAllocationSize(
                              RTN::DynamicLibrary::InstanceSize()));
    cls.set_is_prefinalized();
    pending_classes.Add(cls);
    RegisterClass(cls, Symbols::FfiDynamicLibrary(), lib);

    cls = Class::New<NativeFinalizer, RTN::NativeFinalizer>(isolate_group);
    object_store->set_native_finalizer_class(cls);
    RegisterPrivateClass(cls, Symbols::_NativeFinalizer(), lib);

    cls = Class::New<Finalizer, RTN::Finalizer>(isolate_group);
    cls.set_type_arguments_field_offset(
        Finalizer::type_arguments_offset(),
        RTN::Finalizer::type_arguments_offset());
    cls.set_num_type_arguments_unsafe(1);
    object_store->set_finalizer_class(cls);
    pending_classes.Add(cls);
    RegisterPrivateClass(cls, Symbols::_FinalizerImpl(), core_lib);

    // Pre-register the internal library so we can place the vm class
    // FinalizerEntry there rather than the core library.
    lib = Library::LookupLibrary(thread, Symbols::DartInternal());
    if (lib.IsNull()) {
      lib = Library::NewLibraryHelper(Symbols::DartInternal(), true);
      lib.SetLoadRequested();
      lib.Register(thread);
    }
    object_store->set_bootstrap_library(ObjectStore::kInternal, lib);
    ASSERT(!lib.IsNull());
    ASSERT(lib.ptr() == Library::InternalLibrary());

    cls = Class::New<FinalizerEntry, RTN::FinalizerEntry>(isolate_group);
    object_store->set_finalizer_entry_class(cls);
    pending_classes.Add(cls);
    RegisterClass(cls, Symbols::FinalizerEntry(), lib);

    // Finish the initialization by compiling the bootstrap scripts containing
    // the base interfaces and the implementation of the internal classes.
    const Error& error = Error::Handle(
        zone, Bootstrap::DoBootstrapping(kernel_buffer, kernel_buffer_size));
    if (!error.IsNull()) {
      return error.ptr();
    }

    isolate_group->class_table()->CopySizesFromClassObjects();

    ClassFinalizer::VerifyBootstrapClasses();

    // Set up the intrinsic state of all functions (core, math and typed data).
    compiler::Intrinsifier::InitializeState();

    // Add static const fields (class ids) to the class 'ClassID'.
    lib = Library::LookupLibrary(thread, Symbols::DartInternal());
    ASSERT(!lib.IsNull());
    cls = lib.LookupClassAllowPrivate(Symbols::ClassID());
    ASSERT(!cls.IsNull());
    const bool injected = cls.InjectCIDFields();
    ASSERT(injected);

    // Set up recognized state of all functions (core, math and typed data).
    MethodRecognizer::InitializeState();
#endif  // !defined(DART_PRECOMPILED_RUNTIME)
  } else {
    // Object::Init version when we are running in a version of dart that has a
    // full snapshot linked in and an isolate is initialized using the full
    // snapshot.
    ObjectStore* object_store = isolate_group->object_store();
    SafepointWriteRwLocker ml(thread, isolate_group->program_lock());

    Class& cls = Class::Handle(zone);

    // Set up empty classes in the object store; these will get initialized
    // correctly when we read from the snapshot. This is done to allow
    // bootstrapping of reading classes from the snapshot. Some classes are not
    // stored in the object store. Yet we still need to create their Class
    // object so that they get put into the class_table (as a side effect of
    // Class::New()).
    cls = Class::New<Instance, RTN::Instance>(kInstanceCid, isolate_group);
    object_store->set_object_class(cls);

    cls = Class::New<LibraryPrefix, RTN::LibraryPrefix>(isolate_group);
    cls = Class::New<Type, RTN::Type>(isolate_group);
    cls = Class::New<FunctionType, RTN::FunctionType>(isolate_group);
    cls = Class::New<RecordType, RTN::RecordType>(isolate_group);
    cls = Class::New<TypeParameter, RTN::TypeParameter>(isolate_group);

    cls = Class::New<Array, RTN::Array>(isolate_group);
    object_store->set_array_class(cls);

    cls = Class::New<Array, RTN::Array>(kImmutableArrayCid, isolate_group);
    object_store->set_immutable_array_class(cls);

    cls = Class::New<GrowableObjectArray, RTN::GrowableObjectArray>(
        isolate_group);
    object_store->set_growable_object_array_class(cls);

    cls = Class::New<Map, RTN::Map>(isolate_group);
    object_store->set_map_impl_class(cls);

    cls = Class::New<Map, RTN::Map>(kConstMapCid, isolate_group);
    object_store->set_const_map_impl_class(cls);

    cls = Class::New<Set, RTN::Set>(isolate_group);
    object_store->set_set_impl_class(cls);

    cls = Class::New<Set, RTN::Set>(kConstSetCid, isolate_group);
    object_store->set_const_set_impl_class(cls);

    cls = Class::New<Float32x4, RTN::Float32x4>(isolate_group);
    object_store->set_float32x4_class(cls);

    cls = Class::New<Int32x4, RTN::Int32x4>(isolate_group);
    object_store->set_int32x4_class(cls);

    cls = Class::New<Float64x2, RTN::Float64x2>(isolate_group);
    object_store->set_float64x2_class(cls);

#define REGISTER_TYPED_DATA_CLASS(clazz)                                       \
  cls = Class::NewTypedDataClass(kTypedData##clazz##Cid, isolate_group);
    CLASS_LIST_TYPED_DATA(REGISTER_TYPED_DATA_CLASS);
#undef REGISTER_TYPED_DATA_CLASS
#define REGISTER_TYPED_DATA_VIEW_CLASS(clazz)                                  \
  cls =                                                                        \
      Class::NewTypedDataViewClass(kTypedData##clazz##ViewCid, isolate_group); \
  cls = Class::NewUnmodifiableTypedDataViewClass(                              \
      kUnmodifiableTypedData##clazz##ViewCid, isolate_group);
    CLASS_LIST_TYPED_DATA(REGISTER_TYPED_DATA_VIEW_CLASS);
#undef REGISTER_TYPED_DATA_VIEW_CLASS
    cls = Class::NewTypedDataViewClass(kByteDataViewCid, isolate_group);
    cls = Class::NewUnmodifiableTypedDataViewClass(kUnmodifiableByteDataViewCid,
                                                   isolate_group);
#define REGISTER_EXT_TYPED_DATA_CLASS(clazz)                                   \
  cls = Class::NewExternalTypedDataClass(kExternalTypedData##clazz##Cid,       \
                                         isolate_group);
    CLASS_LIST_TYPED_DATA(REGISTER_EXT_TYPED_DATA_CLASS);
#undef REGISTER_EXT_TYPED_DATA_CLASS

    cls = Class::New<Instance, RTN::Instance>(kFfiNativeTypeCid,
                                              isolate_group);
    object_store->set_ffi_native_type_class(cls);

#define REGISTER_FFI_CLASS(clazz)                                              \
  cls = Class::New<Instance, RTN::Instance>(kFfi##clazz##Cid, isolate_group);
    CLASS_LIST_FFI_TYPE_MARKER(REGISTER_FFI_CLASS);
#undef REGISTER_FFI_CLASS

    cls = Class::New<Instance, RTN::Instance>(kFfiNativeFunctionCid,
                                              isolate_group);

    cls = Class::NewPointerClass(kPointerCid, isolate_group);
    object_store->set_ffi_pointer_class(cls);

    cls = Class::New<DynamicLibrary, RTN::DynamicLibrary>(kDynamicLibraryCid,
                                                          isolate_group);

    cls = Class::New<Instance, RTN::Instance>(kByteBufferCid, isolate_group,
                                              /*register_class=*/false);
    cls.set_instance_size_in_words(0, 0);
    isolate_group->class_table()->Register(cls);

2623 cls = Class::New<Integer, RTN::Integer>(isolate_group);
2624 object_store->set_integer_implementation_class(cls);
2625
2626 cls = Class::New<Smi, RTN::Smi>(isolate_group);
2627 object_store->set_smi_class(cls);
2628
2629 cls = Class::New<Mint, RTN::Mint>(isolate_group);
2630 object_store->set_mint_class(cls);
2631
2632 cls = Class::New<Double, RTN::Double>(isolate_group);
2633 object_store->set_double_class(cls);
2634
2635 cls = Class::New<Closure, RTN::Closure>(isolate_group);
2636 object_store->set_closure_class(cls);
2637
2638 cls = Class::New<Record, RTN::Record>(isolate_group);
2639
2640 cls = Class::NewStringClass(class_id: kOneByteStringCid, isolate_group);
2641 object_store->set_one_byte_string_class(cls);
2642
2643 cls = Class::NewStringClass(class_id: kTwoByteStringCid, isolate_group);
2644 object_store->set_two_byte_string_class(cls);
2645
2646 cls = Class::NewStringClass(class_id: kExternalOneByteStringCid, isolate_group);
2647 object_store->set_external_one_byte_string_class(cls);
2648
2649 cls = Class::NewStringClass(class_id: kExternalTwoByteStringCid, isolate_group);
2650 object_store->set_external_two_byte_string_class(cls);
2651
2652 cls = Class::New<Bool, RTN::Bool>(isolate_group);
2653 object_store->set_bool_class(cls);
2654
2655 cls = Class::New<Instance, RTN::Instance>(index: kNullCid, isolate_group);
2656 object_store->set_null_class(cls);
2657
2658 cls = Class::New<Instance, RTN::Instance>(index: kNeverCid, isolate_group);
2659 object_store->set_never_class(cls);
2660
2661 cls = Class::New<Capability, RTN::Capability>(isolate_group);
2662 cls = Class::New<ReceivePort, RTN::ReceivePort>(isolate_group);
2663 cls = Class::New<SendPort, RTN::SendPort>(isolate_group);
2664 cls = Class::New<StackTrace, RTN::StackTrace>(isolate_group);
2665 cls = Class::New<SuspendState, RTN::SuspendState>(isolate_group);
2666 cls = Class::New<RegExp, RTN::RegExp>(isolate_group);
2667 cls = Class::New<Number, RTN::Number>(isolate_group);
2668
2669 cls = Class::New<WeakProperty, RTN::WeakProperty>(isolate_group);
2670 object_store->set_weak_property_class(cls);
2671 cls = Class::New<WeakReference, RTN::WeakReference>(isolate_group);
2672 object_store->set_weak_reference_class(cls);
2673 cls = Class::New<Finalizer, RTN::Finalizer>(isolate_group);
2674 object_store->set_finalizer_class(cls);
2675 cls = Class::New<NativeFinalizer, RTN::NativeFinalizer>(isolate_group);
2676 object_store->set_native_finalizer_class(cls);
2677 cls = Class::New<FinalizerEntry, RTN::FinalizerEntry>(isolate_group);
2678 object_store->set_finalizer_entry_class(cls);
2679
2680 cls = Class::New<MirrorReference, RTN::MirrorReference>(isolate_group);
2681 cls = Class::New<UserTag, RTN::UserTag>(isolate_group);
2682 cls = Class::New<FutureOr, RTN::FutureOr>(isolate_group);
2683 object_store->set_future_or_class(cls);
2684 cls = Class::New<TransferableTypedData, RTN::TransferableTypedData>(
2685 isolate_group);
2686 }
2687 return Error::null();
2688}
2689
#if defined(DEBUG)
bool Object::InVMIsolateHeap() const {
  return ptr()->untag()->InVMIsolateHeap();
}
#endif  // DEBUG

void Object::Print() const {
  THR_Print("%s\n", ToCString());
}

StringPtr Object::DictionaryName() const {
  return String::null();
}

void Object::InitializeObject(uword address,
                              intptr_t class_id,
                              intptr_t size,
                              bool compressed,
                              uword ptr_field_start_offset,
                              uword ptr_field_end_offset) {
  // Note: we skip the header word here to avoid a racy read in the concurrent
  // marker from observing the null object when it reads into a heap page
  // allocated after marking started.
  uword cur = address + sizeof(UntaggedObject);
  uword ptr_field_start = address + ptr_field_start_offset;
  uword ptr_field_end = address + ptr_field_end_offset;
  uword end = address + size;
  // The start of pointer fields should always be past the object header, even
  // if there are no pointer fields (ptr_field_end < ptr_field_start).
  ASSERT(cur <= ptr_field_start);
  // The start of pointer fields can be at the end for empty payload objects.
  ASSERT(ptr_field_start <= end);
  // The end of pointer fields should always be before the end, as the end of
  // pointer fields is inclusive (the address of the last field to initialize).
  ASSERT(ptr_field_end < end);
  bool needs_init = true;
  if (IsTypedDataBaseClassId(class_id) || class_id == kArrayCid) {
    // If the size is greater than both kNewAllocatableSize and
    // kAllocatablePageSize, the object must have been allocated to a new
    // large page, which must already have been zero initialized by the OS.
    // Note that zero is a GC-safe value.
    //
    // For arrays, the caller will then initialize the fields to null with
    // safepoint checks to avoid blocking for the full duration of
    // initializing this array.
    needs_init =
        IsAllocatableInNewSpace(size) || IsAllocatableViaFreeLists(size);
  }
  if (needs_init) {
    // Initialize the memory prior to any pointer fields with 0. (This loop
    // and the next will be a no-op if the object has no pointer fields.)
    uword initial_value = 0;
    while (cur < ptr_field_start) {
      *reinterpret_cast<uword*>(cur) = initial_value;
      cur += kWordSize;
    }
    // Initialize any pointer fields with Object::null().
    initial_value = static_cast<uword>(null_);
#if defined(DART_COMPRESSED_POINTERS)
    if (compressed) {
      initial_value &= 0xFFFFFFFF;
      initial_value |= initial_value << 32;
    }
    const bool has_pointer_fields = ptr_field_start <= ptr_field_end;
    // If there are compressed pointer fields and the first compressed pointer
    // field is not at a word start, then initialize it to Object::null().
    if (compressed && has_pointer_fields &&
        (ptr_field_start % kWordSize != 0)) {
      *reinterpret_cast<compressed_uword*>(ptr_field_start) = initial_value;
    }
#endif
    while (cur <= ptr_field_end) {
      *reinterpret_cast<uword*>(cur) = initial_value;
      cur += kWordSize;
    }
    // Initialize the memory after any pointer fields with 0, unless this is
    // an instructions object in which case we use the break instruction.
    initial_value = class_id == kInstructionsCid ? kBreakInstructionFiller : 0;
#if defined(DART_COMPRESSED_POINTERS)
    // If there are compressed pointer fields and the last compressed pointer
    // field is the start of a word, then initialize the other part of the word
    // to the new initial value.
    //
    // (We're guaranteed there's always space in the object after the last
    // pointer field in this case since objects are allocated in multiples of
    // the word size.)
    if (compressed && has_pointer_fields && (ptr_field_end % kWordSize == 0)) {
      *reinterpret_cast<compressed_uword*>(ptr_field_end +
                                           kCompressedWordSize) = initial_value;
    }
#endif
    while (cur < end) {
      *reinterpret_cast<uword*>(cur) = initial_value;
      cur += kWordSize;
    }
  } else {
    // Check that MemorySanitizer understands this is initialized.
    MSAN_CHECK_INITIALIZED(reinterpret_cast<void*>(address), size);
#if defined(DEBUG)
    const uword initial_value = 0;
    while (cur < end) {
      ASSERT_EQUAL(*reinterpret_cast<uword*>(cur), initial_value);
      cur += kWordSize;
    }
#endif
  }
  uword tags = 0;
  ASSERT(class_id != kIllegalCid);
  tags = UntaggedObject::ClassIdTag::update(class_id, tags);
  tags = UntaggedObject::SizeTag::update(size, tags);
  const bool is_old =
      (address & kNewObjectAlignmentOffset) == kOldObjectAlignmentOffset;
  tags = UntaggedObject::OldBit::update(is_old, tags);
  tags = UntaggedObject::OldAndNotMarkedBit::update(is_old, tags);
  tags = UntaggedObject::OldAndNotRememberedBit::update(is_old, tags);
  tags = UntaggedObject::NewBit::update(!is_old, tags);
  tags = UntaggedObject::ImmutableBit::update(
      ShouldHaveImmutabilityBitSet(class_id), tags);
#if defined(HASH_IN_OBJECT_HEADER)
  tags = UntaggedObject::HashTag::update(0, tags);
#endif
  reinterpret_cast<UntaggedObject*>(address)->tags_ = tags;
}
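
// Illustrative walk-through (not from the original source): for a
// hypothetical compressed-pointer object with an 8-byte header, a pointer
// field range of [16, 24] and numeric data elsewhere, the loops above write
// 0 over [8, 16), Object::null() over the inclusive pointer range, and 0
// (or kBreakInstructionFiller for Instructions objects) over the tail. The
// tags word is written last, so a concurrently scanning thread that sees a
// valid-looking header never sees uninitialized slots behind it.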

void Object::CheckHandle() const {
#if defined(DEBUG)
  if (ptr_ != Object::null()) {
    intptr_t cid = ptr_->GetClassIdMayBeSmi();
    if (cid >= kNumPredefinedCids) {
      cid = kInstanceCid;
    }
    ASSERT(vtable() == builtin_vtables_[cid]);
  }
#endif
}

ObjectPtr Object::Allocate(intptr_t cls_id,
                           intptr_t size,
                           Heap::Space space,
                           bool compressed,
                           uword ptr_field_start_offset,
                           uword ptr_field_end_offset) {
  ASSERT(Utils::IsAligned(size, kObjectAlignment));
  Thread* thread = Thread::Current();
  ASSERT(thread->execution_state() == Thread::kThreadInVM);
  ASSERT(thread->no_safepoint_scope_depth() == 0);
  ASSERT(thread->no_callback_scope_depth() == 0);
  Heap* heap = thread->heap();

  uword address = heap->Allocate(thread, size, space);
  if (UNLIKELY(address == 0)) {
    // SuspendLongJumpScope during Dart entry ensures that if a longjmp base
    // is available, it is the innermost error handler, so check for a longjmp
    // base before checking for an exit frame.
    if (thread->long_jump_base() != nullptr) {
      Report::LongJump(Object::out_of_memory_error());
      UNREACHABLE();
    } else if (thread->top_exit_frame_info() != 0) {
      // Use the preallocated out of memory exception to avoid calling
      // into dart code or allocating any code.
      Exceptions::ThrowOOM();
      UNREACHABLE();
    } else {
      // Nowhere to propagate an exception to.
      OUT_OF_MEMORY();
    }
  }

  ObjectPtr raw_obj;
  NoSafepointScope no_safepoint(thread);
  InitializeObject(address, cls_id, size, compressed, ptr_field_start_offset,
                   ptr_field_end_offset);
  raw_obj = static_cast<ObjectPtr>(address + kHeapObjectTag);
  ASSERT(cls_id == UntaggedObject::ClassIdTag::decode(raw_obj->untag()->tags_));
  if (raw_obj->IsOldObject() && UNLIKELY(thread->is_marking())) {
    // Black allocation. Prevents a data race between the mutator and
    // concurrent marker on ARM and ARM64 (the marker may observe a
    // publishing store of this object before the stores that initialize its
    // slots), and helps the collection to finish sooner.
    // release: Setting the mark bit must not be ordered after a publishing
    // store of this object. Compare Scavenger::ScavengePointer.
    raw_obj->untag()->SetMarkBitRelease();
    heap->old_space()->AllocateBlack(size);
  }

#if !defined(PRODUCT) || defined(FORCE_INCLUDE_SAMPLING_HEAP_PROFILER)
  HeapProfileSampler& heap_sampler = thread->heap_sampler();
  if (heap_sampler.HasOutstandingSample()) {
    thread->IncrementNoCallbackScopeDepth();
    void* data = heap_sampler.InvokeCallbackForLastSample(cls_id);
    heap->SetHeapSamplingData(raw_obj, data);
    thread->DecrementNoCallbackScopeDepth();
  }
#endif  // !defined(PRODUCT) || defined(FORCE_INCLUDE_SAMPLING_HEAP_PROFILER)

#if !defined(PRODUCT)
  auto class_table = thread->isolate_group()->class_table();
  if (class_table->ShouldTraceAllocationFor(cls_id)) {
    uint32_t hash =
        HeapSnapshotWriter::GetHeapSnapshotIdentityHash(thread, raw_obj);
    Profiler::SampleAllocation(thread, cls_id, hash);
  }
#endif  // !defined(PRODUCT)
  return raw_obj;
}
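
// For reference, a sketch of a typical call site (compare Object::Clone
// below): the pointer-field range tells InitializeObject which slots must be
// nulled rather than zeroed:
//
//   ObjectPtr raw = Object::Allocate(cls.id(), size, Heap::kNew,
//                                    cls.HasCompressedPointers(),
//                                    from_offset<Object>(),
//                                    to_offset<Object>());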

class WriteBarrierUpdateVisitor : public ObjectPointerVisitor {
 public:
  explicit WriteBarrierUpdateVisitor(Thread* thread, ObjectPtr obj)
      : ObjectPointerVisitor(thread->isolate_group()),
        thread_(thread),
        old_obj_(obj) {
    ASSERT(old_obj_->IsOldObject());
  }

  void VisitPointers(ObjectPtr* from, ObjectPtr* to) override {
    if (old_obj_->IsArray()) {
      for (ObjectPtr* slot = from; slot <= to; ++slot) {
        ObjectPtr value = *slot;
        if (value->IsHeapObject()) {
          old_obj_->untag()->CheckArrayPointerStore(slot, value, thread_);
        }
      }
    } else {
      for (ObjectPtr* slot = from; slot <= to; ++slot) {
        ObjectPtr value = *slot;
        if (value->IsHeapObject()) {
          old_obj_->untag()->CheckHeapPointerStore(value, thread_);
        }
      }
    }
  }

#if defined(DART_COMPRESSED_POINTERS)
  void VisitCompressedPointers(uword heap_base,
                               CompressedObjectPtr* from,
                               CompressedObjectPtr* to) override {
    if (old_obj_->IsArray()) {
      for (CompressedObjectPtr* slot = from; slot <= to; ++slot) {
        ObjectPtr value = slot->Decompress(heap_base);
        if (value->IsHeapObject()) {
          old_obj_->untag()->CheckArrayPointerStore(slot, value, thread_);
        }
      }
    } else {
      for (CompressedObjectPtr* slot = from; slot <= to; ++slot) {
        ObjectPtr value = slot->Decompress(heap_base);
        if (value->IsHeapObject()) {
          old_obj_->untag()->CheckHeapPointerStore(value, thread_);
        }
      }
    }
  }
#endif

 private:
  Thread* thread_;
  ObjectPtr old_obj_;

  DISALLOW_COPY_AND_ASSIGN(WriteBarrierUpdateVisitor);
};

#if defined(DEBUG)
bool Object::IsZoneHandle() const {
  return VMHandles::IsZoneHandle(reinterpret_cast<uword>(this));
}

bool Object::IsReadOnlyHandle() const {
  return Dart::IsReadOnlyHandle(reinterpret_cast<uword>(this));
}

bool Object::IsNotTemporaryScopedHandle() const {
  return (IsZoneHandle() || IsReadOnlyHandle());
}
#endif

ObjectPtr Object::Clone(const Object& orig,
                        Heap::Space space,
                        bool load_with_relaxed_atomics) {
  // Generic function types should be cloned with FunctionType::Clone.
  ASSERT(!orig.IsFunctionType() || !FunctionType::Cast(orig).IsGeneric());
  const Class& cls = Class::Handle(orig.clazz());
  intptr_t size = orig.ptr()->untag()->HeapSize();
  // All fields (including non-SmiPtr fields) will be initialized with Smi 0,
  // but the contents of the original object are copied over before the thread
  // is allowed to reach a safepoint.
  ObjectPtr raw_clone =
      Object::Allocate(cls.id(), size, space, cls.HasCompressedPointers(),
                       from_offset<Object>(), to_offset<Object>());
  NoSafepointScope no_safepoint;
  // Copy the body of the original into the clone.
  uword orig_addr = UntaggedObject::ToAddr(orig.ptr());
  uword clone_addr = UntaggedObject::ToAddr(raw_clone);
  const intptr_t kHeaderSizeInBytes = sizeof(UntaggedObject);
  if (load_with_relaxed_atomics) {
    auto orig_atomics_ptr = reinterpret_cast<std::atomic<uword>*>(orig_addr);
    auto clone_ptr = reinterpret_cast<uword*>(clone_addr);
    for (intptr_t i = kHeaderSizeInBytes / kWordSize; i < size / kWordSize;
         i++) {
      *(clone_ptr + i) =
          (orig_atomics_ptr + i)->load(std::memory_order_relaxed);
    }
  } else {
    memmove(reinterpret_cast<uint8_t*>(clone_addr + kHeaderSizeInBytes),
            reinterpret_cast<uint8_t*>(orig_addr + kHeaderSizeInBytes),
            size - kHeaderSizeInBytes);
  }

  if (IsTypedDataClassId(raw_clone->GetClassId())) {
    auto raw_typed_data = TypedData::RawCast(raw_clone);
    raw_typed_data.untag()->RecomputeDataField();
  }

  // Add clone to store buffer, if needed.
  if (!raw_clone->IsOldObject()) {
    // No need to remember an object in new space.
    return raw_clone;
  }
  WriteBarrierUpdateVisitor visitor(Thread::Current(), raw_clone);
  raw_clone->untag()->VisitPointers(&visitor);
  return raw_clone;
}
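
// Example of why the visitor pass above is needed: if the original lives in
// new space and the clone was allocated in old space, the raw copy moves
// new-space pointers into an old-space object without triggering any write
// barrier, so the clone's slots must be re-checked (and the clone added to
// the store buffer) before the next safepoint.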

bool Class::HasCompressedPointers() const {
  const intptr_t cid = id();
  switch (cid) {
    case kByteBufferCid:
      return ByteBuffer::ContainsCompressedPointers();
#define HANDLE_CASE(clazz)                                                     \
  case k##clazz##Cid:                                                          \
    return dart::clazz::ContainsCompressedPointers();
    CLASS_LIST(HANDLE_CASE)
#undef HANDLE_CASE
#define HANDLE_CASE(clazz)                                                     \
  case kTypedData##clazz##Cid:                                                 \
    return dart::TypedData::ContainsCompressedPointers();                      \
  case kTypedData##clazz##ViewCid:                                             \
  case kUnmodifiableTypedData##clazz##ViewCid:                                 \
    return dart::TypedDataView::ContainsCompressedPointers();                  \
  case kExternalTypedData##clazz##Cid:                                         \
    return dart::ExternalTypedData::ContainsCompressedPointers();
    CLASS_LIST_TYPED_DATA(HANDLE_CASE)
#undef HANDLE_CASE
    default:
      if (cid >= kNumPredefinedCids) {
        return dart::Instance::ContainsCompressedPointers();
      }
  }
  FATAL("Unsupported class for compressed pointers translation: %s (id=%" Pd
        ", kNumPredefinedCids=%" Pd ")\n",
        ToCString(), cid, kNumPredefinedCids);
  return false;
}

StringPtr Class::Name() const {
  return untag()->name();
}

StringPtr Class::ScrubbedName() const {
  return Symbols::New(Thread::Current(), ScrubbedNameCString());
}

const char* Class::ScrubbedNameCString() const {
  return String::ScrubName(String::Handle(Name()));
}

StringPtr Class::UserVisibleName() const {
#if !defined(PRODUCT)
  ASSERT(untag()->user_name() != String::null());
  return untag()->user_name();
#endif  // !defined(PRODUCT)
  // No caching in PRODUCT, regenerate.
  return Symbols::New(Thread::Current(), GenerateUserVisibleName());
}

const char* Class::UserVisibleNameCString() const {
#if !defined(PRODUCT)
  ASSERT(untag()->user_name() != String::null());
  return String::Handle(untag()->user_name()).ToCString();
#endif  // !defined(PRODUCT)
  return GenerateUserVisibleName();  // No caching in PRODUCT, regenerate.
}

const char* Class::NameCString(NameVisibility name_visibility) const {
  switch (name_visibility) {
    case Object::kInternalName:
      return String::Handle(Name()).ToCString();
    case Object::kScrubbedName:
      return ScrubbedNameCString();
    case Object::kUserVisibleName:
      return UserVisibleNameCString();
    default:
      UNREACHABLE();
      return nullptr;
  }
}

ClassPtr Class::Mixin() const {
  if (is_transformed_mixin_application()) {
    const Array& interfaces = Array::Handle(this->interfaces());
    const Type& mixin_type =
        Type::Handle(Type::RawCast(interfaces.At(interfaces.Length() - 1)));
    return mixin_type.type_class();
  }
  return ptr();
}

NNBDMode Class::nnbd_mode() const {
  return Library::Handle(library()).nnbd_mode();
}

bool Class::IsInFullSnapshot() const {
  NoSafepointScope no_safepoint;
  return UntaggedLibrary::InFullSnapshotBit::decode(
      untag()->library()->untag()->flags_);
}

TypePtr Class::RareType() const {
  if (!IsGeneric()) {
    return DeclarationType();
  }
  ASSERT(is_declaration_loaded());
  Thread* const thread = Thread::Current();
  Zone* const zone = thread->zone();
  const auto& inst_to_bounds =
      TypeArguments::Handle(zone, InstantiateToBounds(thread));
  ASSERT(inst_to_bounds.ptr() != Object::empty_type_arguments().ptr());
  auto& type = Type::Handle(
      zone, Type::New(*this, inst_to_bounds, Nullability::kNonNullable));
  type ^= ClassFinalizer::FinalizeType(type);
  return type.ptr();
}
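
// Illustrative example: for `class C<T extends num>`, the rare type is the
// non-nullable `C<num>`, i.e. the declaration type instantiated to bounds;
// a non-generic class simply reuses its declaration type.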

template <class FakeObject, class TargetFakeObject>
ClassPtr Class::New(IsolateGroup* isolate_group, bool register_class) {
  ASSERT(Object::class_class() != Class::null());
  const auto& result = Class::Handle(Object::Allocate<Class>(Heap::kOld));
  Object::VerifyBuiltinVtable<FakeObject>(FakeObject::kClassId);
  NOT_IN_PRECOMPILED(result.set_token_pos(TokenPosition::kNoSource));
  NOT_IN_PRECOMPILED(result.set_end_token_pos(TokenPosition::kNoSource));
  result.set_instance_size(FakeObject::InstanceSize(),
                           compiler::target::RoundedAllocationSize(
                               TargetFakeObject::InstanceSize()));
  result.set_type_arguments_field_offset_in_words(kNoTypeArguments,
                                                  RTN::Class::kNoTypeArguments);
  const intptr_t host_next_field_offset = FakeObject::NextFieldOffset();
  const intptr_t target_next_field_offset = TargetFakeObject::NextFieldOffset();
  result.set_next_field_offset(host_next_field_offset,
                               target_next_field_offset);
  COMPILE_ASSERT((FakeObject::kClassId != kInstanceCid));
  result.set_id(FakeObject::kClassId);
  NOT_IN_PRECOMPILED(result.set_implementor_cid(kIllegalCid));
  result.set_num_type_arguments_unsafe(0);
  result.set_num_native_fields(0);
  result.set_state_bits(0);
  if (IsInternalOnlyClassId(FakeObject::kClassId) ||
      (FakeObject::kClassId == kTypeArgumentsCid)) {
    // VM internal classes are done. There is no finalization needed or
    // possible in this case.
    result.set_is_declaration_loaded();
    result.set_is_type_finalized();
    result.set_is_allocate_finalized();
  } else if (FakeObject::kClassId != kClosureCid) {
    // VM backed classes are almost ready: run checks and resolve class
    // references, but do not recompute size.
    result.set_is_prefinalized();
  }
  NOT_IN_PRECOMPILED(result.set_kernel_offset(0));
  result.InitEmptyFields();
  if (register_class) {
    isolate_group->class_table()->Register(result);
  }
  return result.ptr();
}

#if !defined(DART_PRECOMPILED_RUNTIME)
static void ReportTooManyTypeArguments(const Class& cls) {
  Report::MessageF(Report::kError, Script::Handle(cls.script()),
                   cls.token_pos(), Report::AtLocation,
                   "too many type parameters declared in class '%s' or in its "
                   "super classes",
                   String::Handle(cls.Name()).ToCString());
  UNREACHABLE();
}
#endif  // !defined(DART_PRECOMPILED_RUNTIME)

void Class::set_num_type_arguments(intptr_t value) const {
#if defined(DART_PRECOMPILED_RUNTIME)
  UNREACHABLE();
#else
  if (!Utils::IsInt(16, value)) {
    ReportTooManyTypeArguments(*this);
  }
  // We allow concurrent calculation of the number of type arguments. If two
  // threads perform this operation it doesn't matter which one wins.
  DEBUG_ONLY(intptr_t old_value = num_type_arguments());
  DEBUG_ASSERT(old_value == kUnknownNumTypeArguments || old_value == value);
  StoreNonPointer<int16_t, int16_t, std::memory_order_relaxed>(
      &untag()->num_type_arguments_, value);
#endif  // defined(DART_PRECOMPILED_RUNTIME)
}

void Class::set_num_type_arguments_unsafe(intptr_t value) const {
  StoreNonPointer(&untag()->num_type_arguments_, value);
}

void Class::set_has_pragma(bool value) const {
  set_state_bits(HasPragmaBit::update(value, state_bits()));
}

void Class::set_is_isolate_unsendable(bool value) const {
  ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
  set_state_bits(IsIsolateUnsendableBit::update(value, state_bits()));
}

void Class::set_is_isolate_unsendable_due_to_pragma(bool value) const {
  ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
  set_state_bits(
      IsIsolateUnsendableDueToPragmaBit::update(value, state_bits()));
}

void Class::set_is_future_subtype(bool value) const {
  ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
  set_state_bits(IsFutureSubtypeBit::update(value, state_bits()));
}

void Class::set_can_be_future(bool value) const {
  ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
  set_state_bits(CanBeFutureBit::update(value, state_bits()));
}

3221// Initialize class fields of type Array with empty array.
3222void Class::InitEmptyFields() const {
3223 if (Object::empty_array().ptr() == Array::null()) {
3224 // The empty array has not been initialized yet.
3225 return;
3226 }
3227 untag()->set_interfaces(Object::empty_array().ptr());
3228 untag()->set_constants(Object::null_array().ptr());
3229 set_functions(Object::empty_array());
3230 set_fields(Object::empty_array());
3231 set_invocation_dispatcher_cache(Object::empty_array());
3232}
3233
3234ArrayPtr Class::OffsetToFieldMap(
3235 ClassTable* class_table /* = nullptr */) const {
3236 ASSERT(is_finalized());
3237 if (untag()->offset_in_words_to_field<std::memory_order_acquire>() ==
3238 Array::null()) {
3239 // Even if multiple threads are calling this concurrently, all of them would
3240 // compute the same array, so we intentionally don't acquire any locks here.
3241 const intptr_t length = untag()->host_instance_size_in_words_;
3242 const Array& array = Array::Handle(ptr: Array::New(len: length, space: Heap::kOld));
3243 Class& cls = Class::Handle(ptr: this->ptr());
3244 Array& fields = Array::Handle();
3245 Field& f = Field::Handle();
3246 while (!cls.IsNull()) {
3247 fields = cls.fields();
3248 for (intptr_t i = 0; i < fields.Length(); ++i) {
3249 f ^= fields.At(index: i);
3250 if (f.is_instance()) {
3251 array.SetAt(index: f.HostOffset() >> kCompressedWordSizeLog2, value: f);
3252 }
3253 }
3254 cls = cls.SuperClass(class_table);
3255 }
3256 untag()->set_offset_in_words_to_field<std::memory_order_release>(
3257 array.ptr());
3258 }
3259 return untag()->offset_in_words_to_field<std::memory_order_acquire>();
3260}
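
// Sketch of intended use (hypothetical caller, not from this file): map a
// field's offset within an instance back to its Field object:
//
//   const Array& map = Array::Handle(cls.OffsetToFieldMap());
//   field ^= map.At(offset_in_bytes >> kCompressedWordSizeLog2);
//
// Slots with no corresponding field (header, type arguments) stay null.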

bool Class::HasInstanceFields() const {
  const Array& field_array = Array::Handle(fields());
  Field& field = Field::Handle();
  for (intptr_t i = 0; i < field_array.Length(); ++i) {
    field ^= field_array.At(i);
    if (!field.is_static()) {
      return true;
    }
  }
  return false;
}

class FunctionName {
 public:
  FunctionName(const String& name, String* tmp_string)
      : name_(name), tmp_string_(tmp_string) {}
  bool Matches(const Function& function) const {
    if (name_.IsSymbol()) {
      return name_.ptr() == function.name();
    } else {
      *tmp_string_ = function.name();
      return name_.Equals(*tmp_string_);
    }
  }
  intptr_t Hash() const { return name_.Hash(); }

 private:
  const String& name_;
  String* tmp_string_;
};

// Traits for looking up Functions by name.
class ClassFunctionsTraits {
 public:
  static const char* Name() { return "ClassFunctionsTraits"; }
  static bool ReportStats() { return false; }

  // Called when growing the table.
  static bool IsMatch(const Object& a, const Object& b) {
    ASSERT(a.IsFunction() && b.IsFunction());
    // Function objects are always canonical.
    return a.ptr() == b.ptr();
  }
  static bool IsMatch(const FunctionName& name, const Object& obj) {
    return name.Matches(Function::Cast(obj));
  }
  static uword Hash(const Object& key) {
    return String::HashRawSymbol(Function::Cast(key).name());
  }
  static uword Hash(const FunctionName& name) { return name.Hash(); }
};
typedef UnorderedHashSet<ClassFunctionsTraits> ClassFunctionsSet;
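
// The set above is keyed two ways: by Function when the table is grown or
// rehashed, and by a lightweight FunctionName probe during lookups, which
// avoids allocating a Symbol for lookup names that are not yet symbols.
// A lookup sketch (assuming a ClassFunctionsSet `set` and a reusable String
// handle `tmp`):
//
//   FunctionName name(lookup_name, &tmp);
//   function ^= set.GetOrNull(name);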

void Class::SetFunctions(const Array& value) const {
  ASSERT(!value.IsNull());
  const intptr_t len = value.Length();
#if defined(DEBUG)
  Thread* thread = Thread::Current();
  ASSERT(thread->isolate_group()->program_lock()->IsCurrentThreadWriter());
  if (is_finalized()) {
    Function& function = Function::Handle();
    FunctionType& signature = FunctionType::Handle();
    for (intptr_t i = 0; i < len; ++i) {
      function ^= value.At(i);
      signature = function.signature();
      ASSERT(signature.IsFinalized());
    }
  }
#endif
  set_functions(value);
  if (len >= kFunctionLookupHashThreshold) {
    ClassFunctionsSet set(HashTables::New<ClassFunctionsSet>(len, Heap::kOld));
    Function& func = Function::Handle();
    for (intptr_t i = 0; i < len; ++i) {
      func ^= value.At(i);
      // Verify that all the functions in the array have this class as owner.
      ASSERT(func.Owner() == ptr());
      set.Insert(func);
    }
    untag()->set_functions_hash_table(set.Release().ptr());
  } else {
    untag()->set_functions_hash_table(Array::null());
  }
}

void Class::AddFunction(const Function& function) const {
#if defined(DEBUG)
  Thread* thread = Thread::Current();
  ASSERT(thread->IsDartMutatorThread());
  ASSERT(thread->isolate_group()->program_lock()->IsCurrentThreadWriter());
  ASSERT(!is_finalized() ||
         FunctionType::Handle(function.signature()).IsFinalized());
#endif
  const Array& arr = Array::Handle(functions());
  const Array& new_array =
      Array::Handle(Array::Grow(arr, arr.Length() + 1, Heap::kOld));
  new_array.SetAt(arr.Length(), function);
  set_functions(new_array);
  // Add to hash table, if any.
  const intptr_t new_len = new_array.Length();
  if (new_len == kFunctionLookupHashThreshold) {
    // Transition to using hash table.
    SetFunctions(new_array);
  } else if (new_len > kFunctionLookupHashThreshold) {
    ClassFunctionsSet set(untag()->functions_hash_table());
    set.Insert(function);
    untag()->set_functions_hash_table(set.Release().ptr());
  }
}

intptr_t Class::FindFunctionIndex(const Function& needle) const {
  Thread* thread = Thread::Current();
  if (EnsureIsFinalized(thread) != Error::null()) {
    return -1;
  }
  REUSABLE_ARRAY_HANDLESCOPE(thread);
  REUSABLE_FUNCTION_HANDLESCOPE(thread);
  Array& funcs = thread->ArrayHandle();
  Function& function = thread->FunctionHandle();
  funcs = current_functions();
  ASSERT(!funcs.IsNull());
  const intptr_t len = funcs.Length();
  for (intptr_t i = 0; i < len; i++) {
    function ^= funcs.At(i);
    if (needle.ptr() == function.ptr()) {
      return i;
    }
  }
  // No function found.
  return -1;
}

FunctionPtr Class::FunctionFromIndex(intptr_t idx) const {
  const Array& funcs = Array::Handle(current_functions());
  if ((idx < 0) || (idx >= funcs.Length())) {
    return Function::null();
  }
  Function& func = Function::Handle();
  func ^= funcs.At(idx);
  ASSERT(!func.IsNull());
  return func.ptr();
}

FunctionPtr Class::ImplicitClosureFunctionFromIndex(intptr_t idx) const {
  Function& func = Function::Handle(FunctionFromIndex(idx));
  if (func.IsNull() || !func.HasImplicitClosureFunction()) {
    return Function::null();
  }
  func = func.ImplicitClosureFunction();
  ASSERT(!func.IsNull());
  return func.ptr();
}

intptr_t Class::FindImplicitClosureFunctionIndex(const Function& needle) const {
  Thread* thread = Thread::Current();
  if (EnsureIsFinalized(thread) != Error::null()) {
    return -1;
  }
  REUSABLE_ARRAY_HANDLESCOPE(thread);
  REUSABLE_FUNCTION_HANDLESCOPE(thread);
  Array& funcs = thread->ArrayHandle();
  Function& function = thread->FunctionHandle();
  funcs = current_functions();
  ASSERT(!funcs.IsNull());
  Function& implicit_closure = Function::Handle(thread->zone());
  const intptr_t len = funcs.Length();
  for (intptr_t i = 0; i < len; i++) {
    function ^= funcs.At(i);
    implicit_closure = function.implicit_closure_function();
    if (implicit_closure.IsNull()) {
      // Skip non-implicit closure functions.
      continue;
    }
    if (needle.ptr() == implicit_closure.ptr()) {
      return i;
    }
  }
  // No function found.
  return -1;
}

intptr_t Class::FindInvocationDispatcherFunctionIndex(
    const Function& needle) const {
  Thread* thread = Thread::Current();
  if (EnsureIsFinalized(thread) != Error::null()) {
    return -1;
  }
  REUSABLE_ARRAY_HANDLESCOPE(thread);
  REUSABLE_OBJECT_HANDLESCOPE(thread);
  Array& funcs = thread->ArrayHandle();
  Object& object = thread->ObjectHandle();
  funcs = invocation_dispatcher_cache();
  ASSERT(!funcs.IsNull());
  const intptr_t len = funcs.Length();
  for (intptr_t i = 0; i < len; i++) {
    object = funcs.At(i);
    // The invocation_dispatcher_cache is a table with some entries that
    // are functions.
    if (object.IsFunction()) {
      if (Function::Cast(object).ptr() == needle.ptr()) {
        return i;
      }
    }
  }
  // No function found.
  return -1;
}

FunctionPtr Class::InvocationDispatcherFunctionFromIndex(intptr_t idx) const {
  Thread* thread = Thread::Current();
  REUSABLE_ARRAY_HANDLESCOPE(thread);
  REUSABLE_OBJECT_HANDLESCOPE(thread);
  Array& dispatcher_cache = thread->ArrayHandle();
  Object& object = thread->ObjectHandle();
  dispatcher_cache = invocation_dispatcher_cache();
  object = dispatcher_cache.At(idx);
  if (!object.IsFunction()) {
    return Function::null();
  }
  return Function::Cast(object).ptr();
}

void Class::set_state_bits(intptr_t bits) const {
  StoreNonPointer<uint32_t, uint32_t, std::memory_order_release>(
      &untag()->state_bits_, static_cast<uint32_t>(bits));
}

void Class::set_library(const Library& value) const {
  untag()->set_library(value.ptr());
}

void Class::set_type_parameters(const TypeParameters& value) const {
  ASSERT((num_type_arguments() == kUnknownNumTypeArguments) ||
         is_prefinalized());
  untag()->set_type_parameters(value.ptr());
}

void Class::set_functions(const Array& value) const {
  // Ensure all writes to the [Function]s are visible by the time the array
  // is visible.
  untag()->set_functions<std::memory_order_release>(value.ptr());
}

void Class::set_fields(const Array& value) const {
  // Ensure all writes to the [Field]s are visible by the time the array
  // is visible.
  untag()->set_fields<std::memory_order_release>(value.ptr());
}

void Class::set_invocation_dispatcher_cache(const Array& cache) const {
  // Ensure all writes to the cache are visible by the time the array
  // is visible.
  untag()->set_invocation_dispatcher_cache<std::memory_order_release>(
      cache.ptr());
}

void Class::set_declaration_instance_type_arguments(
    const TypeArguments& value) const {
  ASSERT(value.IsNull() || (value.IsCanonical() && value.IsOld()));
  ASSERT((declaration_instance_type_arguments() == TypeArguments::null()) ||
         (declaration_instance_type_arguments() == value.ptr()));
  untag()->set_declaration_instance_type_arguments<std::memory_order_release>(
      value.ptr());
}

TypeArgumentsPtr Class::GetDeclarationInstanceTypeArguments() const {
  const intptr_t num_type_arguments = NumTypeArguments();
  if (num_type_arguments == 0) {
    return TypeArguments::null();
  }
  if (declaration_instance_type_arguments() != TypeArguments::null()) {
    return declaration_instance_type_arguments();
  }
  Thread* thread = Thread::Current();
  SafepointWriteRwLocker ml(thread, thread->isolate_group()->program_lock());
  if (declaration_instance_type_arguments() != TypeArguments::null()) {
    return declaration_instance_type_arguments();
  }
  Zone* zone = thread->zone();
  auto& args = TypeArguments::Handle(zone);
  auto& type = AbstractType::Handle(zone);
  const intptr_t num_type_parameters = NumTypeParameters(thread);
  if (num_type_arguments == num_type_parameters) {
    type = DeclarationType();
    args = Type::Cast(type).arguments();
  } else {
    type = super_type();
    const auto& super_args = TypeArguments::Handle(
        zone, Type::Cast(type).GetInstanceTypeArguments(thread));
    if ((num_type_parameters == 0) ||
        (!super_args.IsNull() && (super_args.Length() == num_type_arguments))) {
      args = super_args.ptr();
    } else {
      args = TypeArguments::New(num_type_arguments);
      const intptr_t offset = num_type_arguments - num_type_parameters;
      for (intptr_t i = 0; i < offset; ++i) {
        type = super_args.TypeAtNullSafe(i);
        args.SetTypeAt(i, type);
      }
      type = DeclarationType();
      const auto& decl_args =
          TypeArguments::Handle(zone, Type::Cast(type).arguments());
      for (intptr_t i = 0; i < num_type_parameters; ++i) {
        type = decl_args.TypeAt(i);
        args.SetTypeAt(offset + i, type);
      }
    }
  }
  args = args.Canonicalize(thread);
  set_declaration_instance_type_arguments(args);
  return args.ptr();
}

TypeArgumentsPtr Class::GetInstanceTypeArguments(
    Thread* thread,
    const TypeArguments& type_arguments,
    bool canonicalize) const {
  const intptr_t num_type_arguments = NumTypeArguments();
  if (num_type_arguments == 0) {
    return TypeArguments::null();
  }
  Zone* zone = thread->zone();
  auto& args = TypeArguments::Handle(zone);
  const intptr_t num_type_parameters = NumTypeParameters(thread);
  ASSERT(type_arguments.IsNull() ||
         type_arguments.Length() == num_type_parameters);
  if (num_type_arguments == num_type_parameters) {
    args = type_arguments.ptr();
  } else {
    args = GetDeclarationInstanceTypeArguments();
    if (num_type_parameters == 0) {
      return args.ptr();
    }
    args = args.InstantiateFrom(
        TypeArguments::Handle(
            zone, type_arguments.ToInstantiatorTypeArguments(thread, *this)),
        Object::null_type_arguments(), kAllFree, Heap::kOld);
  }
  if (canonicalize) {
    args = args.Canonicalize(thread);
  }
  return args.ptr();
}
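
// Worked example (illustrative): for `class B<T> extends A<int, T>` where A
// declares two type parameters, B's full vector has length 2. The
// declaration vector is <int, T>, so passing <String> for B's own parameter
// instantiates it to the instance vector <int, String>.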

intptr_t Class::NumTypeParameters(Thread* thread) const {
  if (!is_declaration_loaded()) {
    ASSERT(is_prefinalized());
    const intptr_t cid = id();
    if ((cid == kArrayCid) || (cid == kImmutableArrayCid) ||
        (cid == kGrowableObjectArrayCid)) {
      return 1;  // List's type parameter may not have been parsed yet.
    }
    return 0;
  }
  if (type_parameters() == TypeParameters::null()) {
    return 0;
  }
  REUSABLE_TYPE_ARGUMENTS_HANDLESCOPE(thread);
  TypeParameters& type_params = thread->TypeParametersHandle();
  type_params = type_parameters();
  return type_params.Length();
}

intptr_t Class::ComputeNumTypeArguments() const {
  ASSERT(is_declaration_loaded());
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  auto isolate_group = thread->isolate_group();
  const intptr_t num_type_params = NumTypeParameters();

  if ((super_type() == AbstractType::null()) ||
      (super_type() == isolate_group->object_store()->object_type())) {
    return num_type_params;
  }

  const auto& sup_type = Type::Handle(zone, super_type());
  const auto& sup_class = Class::Handle(zone, sup_type.type_class());
  const intptr_t sup_class_num_type_args = sup_class.NumTypeArguments();
  if (num_type_params == 0) {
    return sup_class_num_type_args;
  }

  const auto& sup_type_args = TypeArguments::Handle(zone, sup_type.arguments());
  if (sup_type_args.IsNull()) {
    // The super type is raw or the super class is non generic.
    // In either case, overlapping is not possible.
    return sup_class_num_type_args + num_type_params;
  }

  const intptr_t sup_type_args_length = sup_type_args.Length();
  // Determine the maximum overlap of a prefix of the vector consisting of the
  // type parameters of this class with a suffix of the vector consisting of
  // the type arguments of the super type of this class.
  // The number of own type arguments of this class is the number of its type
  // parameters minus the number of type arguments in the overlap.
  // Attempt to overlap the whole vector of type parameters; reduce the size
  // of the vector (keeping the first type parameter) until it fits or until
  // its size is zero.
  auto& sup_type_arg = AbstractType::Handle(zone);
  for (intptr_t num_overlapping_type_args =
           (num_type_params < sup_type_args_length) ? num_type_params
                                                    : sup_type_args_length;
       num_overlapping_type_args > 0; num_overlapping_type_args--) {
    intptr_t i = 0;
    for (; i < num_overlapping_type_args; i++) {
      sup_type_arg = sup_type_args.TypeAt(sup_type_args_length -
                                          num_overlapping_type_args + i);
      ASSERT(!sup_type_arg.IsNull());
      if (!sup_type_arg.IsTypeParameter()) break;
      // The only type parameters appearing in the type arguments of the super
      // type are those declared by this class. Their finalized indices depend
      // on the number of type arguments being computed here. Therefore, they
      // cannot possibly be finalized yet.
      ASSERT(!TypeParameter::Cast(sup_type_arg).IsFinalized());
      if (TypeParameter::Cast(sup_type_arg).index() != i ||
          TypeParameter::Cast(sup_type_arg).IsNullable()) {
        break;
      }
    }
    if (i == num_overlapping_type_args) {
      // Overlap found.
      return sup_class_num_type_args + num_type_params -
             num_overlapping_type_args;
    }
  }
  // No overlap found.
  return sup_class_num_type_args + num_type_params;
}
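
// Worked example (illustrative), given `class B<K, V> {}`:
//  * For `class C<K, V> extends B<K, V>`, the super type arguments end with
//    C's own (still unfinalized) parameters at indices 0 and 1, so the
//    overlap is 2 and NumTypeArguments() == 2 + 2 - 2 == 2.
//  * For `class E<T> extends B<T, int>`, the trailing super argument `int`
//    is not a type parameter, so there is no overlap and
//    NumTypeArguments() == 2 + 1 == 3 (vector <T, int, T>).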

intptr_t Class::NumTypeArguments() const {
  // Return cached value if already calculated.
  intptr_t num_type_args = num_type_arguments();
  if (num_type_args != kUnknownNumTypeArguments) {
    return num_type_args;
  }

#if defined(DART_PRECOMPILED_RUNTIME)
  UNREACHABLE();
  return 0;
#else
  num_type_args = ComputeNumTypeArguments();
  ASSERT(num_type_args != kUnknownNumTypeArguments);
  set_num_type_arguments(num_type_args);
  return num_type_args;
#endif  // defined(DART_PRECOMPILED_RUNTIME)
}

TypeArgumentsPtr Class::InstantiateToBounds(Thread* thread) const {
  const auto& type_params =
      TypeParameters::Handle(thread->zone(), type_parameters());
  if (type_params.IsNull()) {
    return Object::empty_type_arguments().ptr();
  }
  return type_params.defaults();
}
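
// Example (illustrative): for `class C<T extends num>`, instantiating to
// bounds yields the defaults vector <num> computed by the front end. A class
// without type parameters returns the canonical empty vector rather than
// null, which callers such as RareType rely on.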

ClassPtr Class::SuperClass(ClassTable* class_table /* = nullptr */) const {
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  if (class_table == nullptr) {
    class_table = thread->isolate_group()->class_table();
  }

  if (super_type() == AbstractType::null()) {
    if (id() == kTypeArgumentsCid) {
      // Pretend TypeArguments objects are Dart instances.
      return class_table->At(kInstanceCid);
    }
    return Class::null();
  }
  const AbstractType& sup_type = AbstractType::Handle(zone, super_type());
  const intptr_t type_class_id = sup_type.type_class_id();
  return class_table->At(type_class_id);
}

void Class::set_super_type(const Type& value) const {
  ASSERT(value.IsNull() || !value.IsDynamicType());
  untag()->set_super_type(value.ptr());
}

TypeParameterPtr Class::TypeParameterAt(intptr_t index,
                                        Nullability nullability) const {
  ASSERT(index >= 0 && index < NumTypeParameters());
  TypeParameter& type_param =
      TypeParameter::Handle(TypeParameter::New(*this, 0, index, nullability));
  // Finalize type parameter only if its declaring class is
  // finalized and available in the current class table.
  if (is_type_finalized() && (type_param.parameterized_class() == ptr())) {
    type_param ^= ClassFinalizer::FinalizeType(type_param);
  }
  return type_param.ptr();
}

intptr_t Class::UnboxedFieldSizeInBytesByCid(intptr_t cid) {
  switch (cid) {
    case kDoubleCid:
      return sizeof(UntaggedDouble::value_);
    case kFloat32x4Cid:
      return sizeof(UntaggedFloat32x4::value_);
    case kFloat64x2Cid:
      return sizeof(UntaggedFloat64x2::value_);
    default:
      return sizeof(UntaggedMint::value_);
  }
}

UnboxedFieldBitmap Class::CalculateFieldOffsets() const {
  Array& flds = Array::Handle(fields());
  const Class& super = Class::Handle(SuperClass());
  intptr_t host_offset = 0;
  UnboxedFieldBitmap host_bitmap{};
  // Target offsets might differ if the word sizes are different.
  intptr_t target_offset = 0;
  intptr_t host_type_args_field_offset = kNoTypeArguments;
  intptr_t target_type_args_field_offset = RTN::Class::kNoTypeArguments;
  if (super.IsNull()) {
    host_offset = Instance::NextFieldOffset();
    target_offset = RTN::Instance::NextFieldOffset();
    ASSERT(host_offset > 0);
    ASSERT(target_offset > 0);
  } else {
    ASSERT(super.is_finalized() || super.is_prefinalized());
    host_type_args_field_offset = super.host_type_arguments_field_offset();
    target_type_args_field_offset = super.target_type_arguments_field_offset();
    host_offset = super.host_next_field_offset();
    ASSERT(host_offset > 0);
    target_offset = super.target_next_field_offset();
    ASSERT(target_offset > 0);
    // We should never call CalculateFieldOffsets for native wrapper
    // classes, assert this.
    ASSERT(num_native_fields() == 0);
    const intptr_t num_native_fields = super.num_native_fields();
    set_num_native_fields(num_native_fields);
    if (num_native_fields > 0 || is_isolate_unsendable_due_to_pragma()) {
      set_is_isolate_unsendable(true);
    }

    host_bitmap = IsolateGroup::Current()->class_table()->GetUnboxedFieldsMapAt(
        super.id());
  }
  // If the super class is parameterized, use the same type_arguments field,
  // otherwise, if this class is the first in the super chain to be
  // parameterized, introduce a new type_arguments field.
  if (host_type_args_field_offset == kNoTypeArguments) {
    ASSERT(target_type_args_field_offset == RTN::Class::kNoTypeArguments);
    if (IsGeneric()) {
      // The instance needs a type_arguments field.
      host_type_args_field_offset = host_offset;
      target_type_args_field_offset = target_offset;
      host_offset += kCompressedWordSize;
      target_offset += compiler::target::kCompressedWordSize;
    }
  } else {
    ASSERT(target_type_args_field_offset != RTN::Class::kNoTypeArguments);
  }

  set_type_arguments_field_offset(host_type_args_field_offset,
                                  target_type_args_field_offset);
  ASSERT(host_offset > 0);
  ASSERT(target_offset > 0);
  Field& field = Field::Handle();
  const intptr_t len = flds.Length();
  for (intptr_t i = 0; i < len; i++) {
    field ^= flds.At(i);
    // Offset is computed only for instance fields.
    if (!field.is_static()) {
      ASSERT(field.HostOffset() == 0);
      ASSERT(field.TargetOffset() == 0);
      field.SetOffset(host_offset, target_offset);

      if (field.is_unboxed()) {
        const intptr_t field_size =
            UnboxedFieldSizeInBytesByCid(field.guarded_cid());

        const intptr_t host_num_words = field_size / kCompressedWordSize;
        const intptr_t host_next_offset = host_offset + field_size;
        const intptr_t host_next_position =
            host_next_offset / kCompressedWordSize;

        const intptr_t target_next_offset = target_offset + field_size;
        const intptr_t target_next_position =
            target_next_offset / compiler::target::kCompressedWordSize;

        // The bitmap has a fixed length. Check whether the next offset
        // position is within that length; if it is not, the field has to be
        // boxed.
        if (host_next_position <= UnboxedFieldBitmap::Length() &&
            target_next_position <= UnboxedFieldBitmap::Length()) {
          for (intptr_t j = 0; j < host_num_words; j++) {
            // Set the respective bit in the bitmap, indicating that the
            // content is not a pointer.
            host_bitmap.Set(host_offset / kCompressedWordSize);
            host_offset += kCompressedWordSize;
          }

          ASSERT(host_offset == host_next_offset);
          target_offset = target_next_offset;
        } else {
          // Make the field boxed.
          field.set_is_unboxed(false);
          host_offset += kCompressedWordSize;
          target_offset += compiler::target::kCompressedWordSize;
        }
      } else {
        host_offset += kCompressedWordSize;
        target_offset += compiler::target::kCompressedWordSize;
      }
    }
  }

  const intptr_t host_instance_size = RoundedAllocationSize(host_offset);
  const intptr_t target_instance_size =
      compiler::target::RoundedAllocationSize(target_offset);
  if (!Utils::IsInt(32, target_instance_size)) {
    // Many parts of the compiler assume offsets can be represented with
    // int32_t.
    FATAL("Too many fields in %s\n", UserVisibleNameCString());
  }
  set_instance_size(host_instance_size, target_instance_size);
  set_next_field_offset(host_offset, target_offset);
  return host_bitmap;
}
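
// Example (illustrative): with compressed pointers on a 64-bit host
// (kCompressedWordSize == 4), an unboxed `double` field occupies
// 8 / 4 == 2 bitmap positions, so two bits are set and the GC treats those
// words as raw data instead of object pointers. A field whose end position
// would fall past UnboxedFieldBitmap::Length() is demoted to a boxed field
// instead.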

void Class::AddInvocationDispatcher(const String& target_name,
                                    const Array& args_desc,
                                    const Function& dispatcher) const {
  auto thread = Thread::Current();
  ASSERT(thread->isolate_group()->program_lock()->IsCurrentThreadWriter());

  ASSERT(target_name.ptr() == dispatcher.name());

  DispatcherSet dispatchers(invocation_dispatcher_cache() ==
                                    Array::empty_array().ptr()
                                ? HashTables::New<DispatcherSet>(4, Heap::kOld)
                                : invocation_dispatcher_cache());
  dispatchers.Insert(dispatcher);
  set_invocation_dispatcher_cache(dispatchers.Release());
}

FunctionPtr Class::GetInvocationDispatcher(const String& target_name,
                                           const Array& args_desc,
                                           UntaggedFunction::Kind kind,
                                           bool create_if_absent) const {
  ASSERT(kind == UntaggedFunction::kNoSuchMethodDispatcher ||
         kind == UntaggedFunction::kInvokeFieldDispatcher ||
         kind == UntaggedFunction::kDynamicInvocationForwarder);
  auto thread = Thread::Current();
  auto Z = thread->zone();
  auto& function = Function::Handle(Z);

  // First we'll try to find it without using locks.
  DispatcherKey key(target_name, args_desc, kind);
  if (invocation_dispatcher_cache() != Array::empty_array().ptr()) {
    DispatcherSet dispatchers(Z, invocation_dispatcher_cache());
    function ^= dispatchers.GetOrNull(key);
    dispatchers.Release();
  }
  if (!function.IsNull() || !create_if_absent) {
    return function.ptr();
  }

  // If we failed to find it and possibly need to create it, use a write lock.
  SafepointWriteRwLocker ml(thread, thread->isolate_group()->program_lock());

  // Try to find it again & return if it was added in the meantime.
  if (invocation_dispatcher_cache() != Array::empty_array().ptr()) {
    DispatcherSet dispatchers(Z, invocation_dispatcher_cache());
    function ^= dispatchers.GetOrNull(key);
    dispatchers.Release();
  }
  if (!function.IsNull()) return function.ptr();

  // Otherwise create it & add it.
  function = CreateInvocationDispatcher(target_name, args_desc, kind);
  AddInvocationDispatcher(target_name, args_desc, function);
  return function.ptr();
}

FunctionPtr Class::CreateInvocationDispatcher(
    const String& target_name,
    const Array& args_desc,
    UntaggedFunction::Kind kind) const {
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  FunctionType& signature = FunctionType::Handle(zone, FunctionType::New());
  Function& invocation = Function::Handle(
      zone, Function::New(
                signature,
                String::Handle(zone, Symbols::New(thread, target_name)), kind,
                false,  // Not static.
                false,  // Not const.
                false,  // Not abstract.
                false,  // Not external.
                false,  // Not native.
                *this, TokenPosition::kMinSource));
  ArgumentsDescriptor desc(args_desc);
  const intptr_t type_args_len = desc.TypeArgsLen();
  if (type_args_len > 0) {
    // Make dispatcher function generic, since type arguments are passed.
    const auto& type_parameters =
        TypeParameters::Handle(zone, TypeParameters::New(type_args_len));
    // Allow any type, as any type checking is compiled into the dispatcher.
    auto& bound = Type::Handle(
        zone, IsolateGroup::Current()->object_store()->nullable_object_type());
    for (intptr_t i = 0; i < type_args_len; i++) {
      // The name of the type parameter does not matter, as a type error using
      // it should never be thrown.
      type_parameters.SetNameAt(i, Symbols::OptimizedOut());
      type_parameters.SetBoundAt(i, bound);
      // Type arguments will always be provided, so the default is not used.
      type_parameters.SetDefaultAt(i, Object::dynamic_type());
    }
    signature.SetTypeParameters(type_parameters);
  }

  signature.set_num_fixed_parameters(desc.PositionalCount());
  signature.SetNumOptionalParameters(desc.NamedCount(),
                                     false);  // Not positional.
  signature.set_parameter_types(
      Array::Handle(zone, Array::New(desc.Count(), Heap::kOld)));
  invocation.CreateNameArray();
  signature.CreateNameArrayIncludingFlags();
  // Receiver.
  signature.SetParameterTypeAt(0, Object::dynamic_type());
  invocation.SetParameterNameAt(0, Symbols::This());
  // Remaining positional parameters.
  for (intptr_t i = 1; i < desc.PositionalCount(); i++) {
    signature.SetParameterTypeAt(i, Object::dynamic_type());
    char name[64];
    Utils::SNPrint(name, 64, ":p%" Pd, i);
    invocation.SetParameterNameAt(
        i, String::Handle(zone, Symbols::New(thread, name)));
  }

  // Named parameters.
  for (intptr_t i = 0; i < desc.NamedCount(); i++) {
    const intptr_t param_index = desc.PositionAt(i);
    const auto& param_name = String::Handle(zone, desc.NameAt(i));
    signature.SetParameterTypeAt(param_index, Object::dynamic_type());
    signature.SetParameterNameAt(param_index, param_name);
  }
  signature.FinalizeNameArray();
  signature.set_result_type(Object::dynamic_type());
  invocation.set_is_debuggable(false);
  invocation.set_is_visible(false);
  invocation.set_is_reflectable(false);
  invocation.set_saved_args_desc(args_desc);

  signature ^= ClassFinalizer::FinalizeType(signature);
  invocation.SetSignature(signature);

  return invocation.ptr();
}
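
// Example (illustrative): for a failing dynamic call `obj.foo<int>(1,
// bar: 2)`, the noSuchMethod dispatcher built above is generic over one type
// parameter and takes the receiver plus one positional parameter (named
// ":p1") and the named parameter "bar", all typed `dynamic`, since the
// actual checking is compiled into the dispatcher itself.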

// Method extractors are used to create implicit closures from methods.
// When an expression obj.M is evaluated for the first time and receiver obj
// does not have a getter called M but has a method called M then an extractor
// is created and injected as a getter (under the name get:M) into the class
// owning method M.
FunctionPtr Function::CreateMethodExtractor(const String& getter_name) const {
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  ASSERT(Field::IsGetterName(getter_name));
  const Function& closure_function =
      Function::Handle(zone, ImplicitClosureFunction());

  const Class& owner = Class::Handle(zone, closure_function.Owner());
  FunctionType& signature = FunctionType::Handle(zone, FunctionType::New());
  const Function& extractor = Function::Handle(
      zone,
      Function::New(signature,
                    String::Handle(zone, Symbols::New(thread, getter_name)),
                    UntaggedFunction::kMethodExtractor,
                    false,  // Not static.
                    false,  // Not const.
                    is_abstract(),
                    false,  // Not external.
                    false,  // Not native.
                    owner, TokenPosition::kMethodExtractor));

  // Initialize signature: receiver is a single fixed parameter.
  const intptr_t kNumParameters = 1;
  signature.set_num_fixed_parameters(kNumParameters);
  signature.SetNumOptionalParameters(0, false);
  signature.set_parameter_types(Object::synthetic_getter_parameter_types());
#if !defined(DART_PRECOMPILED_RUNTIME)
  extractor.set_positional_parameter_names(
      Object::synthetic_getter_parameter_names());
#endif
  signature.set_result_type(Object::dynamic_type());

  extractor.InheritKernelOffsetFrom(*this);

  extractor.set_extracted_method_closure(closure_function);
  extractor.set_is_debuggable(false);
  extractor.set_is_visible(false);

  signature ^= ClassFinalizer::FinalizeType(signature);
  extractor.SetSignature(signature);

  owner.AddFunction(extractor);

  return extractor.ptr();
}
4065
4066FunctionPtr Function::GetMethodExtractor(const String& getter_name) const {
4067 ASSERT(Field::IsGetterName(getter_name));
4068 const Function& closure_function =
4069 Function::Handle(ptr: ImplicitClosureFunction());
4070 const Class& owner = Class::Handle(ptr: closure_function.Owner());
4071 Thread* thread = Thread::Current();
4072 if (owner.EnsureIsFinalized(thread) != Error::null()) {
4073 return Function::null();
4074 }
4075 IsolateGroup* group = thread->isolate_group();
4076 Function& result = Function::Handle(
4077 ptr: Resolver::ResolveDynamicFunction(zone: thread->zone(), receiver_class: owner, function_name: getter_name));
4078 if (result.IsNull()) {
4079 SafepointWriteRwLocker ml(thread, group->program_lock());
4080 result = owner.LookupDynamicFunctionUnsafe(name: getter_name);
4081 if (result.IsNull()) {
4082 result = CreateMethodExtractor(getter_name);
4083 }
4084 }
4085 ASSERT(result.kind() == UntaggedFunction::kMethodExtractor);
4086 return result.ptr();
4087}
4088
4089// Record field getters are used to access fields of arbitrary
4090// record instances dynamically.
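//
// Illustrative sketch (not part of the VM sources): given
//
//   dynamic r = (x: 1, y: 2);
//   print(r.x);  // Dynamic access to a record field.
//
// the first such access injects a synthetic getter for `x` into the record
// class, so subsequent dynamic reads of `x` are plain getter calls.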
FunctionPtr Class::CreateRecordFieldGetter(const String& getter_name) const {
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  ASSERT(IsRecordClass());
  ASSERT(Field::IsGetterName(getter_name));
  FunctionType& signature = FunctionType::Handle(zone, FunctionType::New());
  const Function& getter = Function::Handle(
      zone,
      Function::New(signature,
                    String::Handle(zone, Symbols::New(thread, getter_name)),
                    UntaggedFunction::kRecordFieldGetter,
                    false,  // Not static.
                    false,  // Not const.
                    false,  // Not abstract.
                    false,  // Not external.
                    false,  // Not native.
                    *this, TokenPosition::kMinSource));

  // Initialize signature: receiver is a single fixed parameter.
  const intptr_t kNumParameters = 1;
  signature.set_num_fixed_parameters(kNumParameters);
  signature.SetNumOptionalParameters(0, false);
  signature.set_parameter_types(Object::synthetic_getter_parameter_types());
#if !defined(DART_PRECOMPILED_RUNTIME)
  getter.set_positional_parameter_names(
      Object::synthetic_getter_parameter_names());
#endif
  signature.set_result_type(Object::dynamic_type());

  getter.set_is_debuggable(false);
  getter.set_is_visible(false);

  signature ^= ClassFinalizer::FinalizeType(signature);
  getter.SetSignature(signature);

  AddFunction(getter);

  return getter.ptr();
}

FunctionPtr Class::GetRecordFieldGetter(const String& getter_name) const {
  ASSERT(IsRecordClass());
  ASSERT(Field::IsGetterName(getter_name));
  Thread* thread = Thread::Current();
  SafepointWriteRwLocker ml(thread, thread->isolate_group()->program_lock());
  Function& result = Function::Handle(
      thread->zone(), LookupDynamicFunctionUnsafe(getter_name));
  if (result.IsNull()) {
    result = CreateRecordFieldGetter(getter_name);
  }
  ASSERT(result.kind() == UntaggedFunction::kRecordFieldGetter);
  return result.ptr();
}

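// Searches the evaluated metadata of a declaration for @pragma annotations
// with the given name. Illustrative sketch (not part of the VM sources):
// for a declaration annotated with
//
//   @pragma('vm:entry-point')
//   class C {}
//
// a search for the pragma name "vm:entry-point" reports a match, and
// [options] (if non-null) receives the annotation's `options` field, which
// is null in this example.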
bool FindPragmaInMetadata(Thread* T,
                          const Object& metadata_obj,
                          const String& pragma_name,
                          bool multiple,
                          Object* options) {
  auto IG = T->isolate_group();
  auto Z = T->zone();

  // If there is a compile-time error while evaluating the metadata, we will
  // simply claim there was no @pragma annotation.
  if (metadata_obj.IsNull() || metadata_obj.IsLanguageError()) {
    return false;
  }
  ASSERT(metadata_obj.IsArray());

  auto& metadata = Array::Cast(metadata_obj);
  auto& pragma_class = Class::Handle(Z, IG->object_store()->pragma_class());
  if (pragma_class.IsNull()) {
    // The precompiler may drop the pragma class.
    return false;
  }
  auto& pragma_name_field =
      Field::Handle(Z, pragma_class.LookupField(Symbols::name()));
  auto& pragma_options_field =
      Field::Handle(Z, pragma_class.LookupField(Symbols::options()));

  auto& pragma = Object::Handle(Z);
  bool found = false;
  auto& options_value = Object::Handle(Z);
  auto& results = GrowableObjectArray::Handle(Z);
  if (multiple) {
    ASSERT(options != nullptr);
    results ^= GrowableObjectArray::New(1);
  }
  for (intptr_t i = 0; i < metadata.Length(); ++i) {
    pragma = metadata.At(i);
    if (pragma.clazz() != pragma_class.ptr() ||
        Instance::Cast(pragma).GetField(pragma_name_field) !=
            pragma_name.ptr()) {
      continue;
    }
    options_value = Instance::Cast(pragma).GetField(pragma_options_field);
    found = true;
    if (multiple) {
      results.Add(options_value);
      continue;
    }
    if (options != nullptr) {
      *options = options_value.ptr();
    }
    return true;
  }

  if (found && options != nullptr) {
    *options = results.ptr();
  }
  return found;
}

bool Library::FindPragma(Thread* T,
                         bool only_core,
                         const Object& obj,
                         const String& pragma_name,
                         bool multiple,
                         Object* options) {
  auto Z = T->zone();
  auto& lib = Library::Handle(Z);

  if (obj.IsLibrary()) {
    lib = Library::Cast(obj).ptr();
  } else if (obj.IsClass()) {
    auto& klass = Class::Cast(obj);
    if (!klass.has_pragma()) return false;
    lib = klass.library();
  } else if (obj.IsFunction()) {
    auto& function = Function::Cast(obj);
    if (!function.has_pragma()) return false;
    lib = Class::Handle(Z, function.Owner()).library();
  } else if (obj.IsField()) {
    auto& field = Field::Cast(obj);
    if (!field.has_pragma()) return false;
    lib = Class::Handle(Z, field.Owner()).library();
  } else {
    UNREACHABLE();
  }

  if (only_core && !lib.IsAnyCoreLibrary()) {
    return false;
  }

  Object& metadata_obj = Object::Handle(Z, lib.GetMetadata(obj));
  if (metadata_obj.IsUnwindError()) {
    Report::LongJump(UnwindError::Cast(metadata_obj));
  }

  return FindPragmaInMetadata(T, metadata_obj, pragma_name, multiple, options);
}

bool Function::IsDynamicInvocationForwarderName(const String& name) {
  return IsDynamicInvocationForwarderName(name.ptr());
}

bool Function::IsDynamicInvocationForwarderName(StringPtr name) {
  return String::StartsWith(name, Symbols::DynamicPrefix().ptr());
}

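// Illustrative examples (derived from the "dyn:" prefix used below):
//
//   CreateDynamicInvocationForwarderName("foo")        -> "dyn:foo"
//   DemangleDynamicInvocationForwarderName("dyn:foo")  -> "foo"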
StringPtr Function::DemangleDynamicInvocationForwarderName(const String& name) {
  const intptr_t kDynamicPrefixLength = 4;  // "dyn:"
  ASSERT(Symbols::DynamicPrefix().Length() == kDynamicPrefixLength);
  return Symbols::New(Thread::Current(), name, kDynamicPrefixLength,
                      name.Length() - kDynamicPrefixLength);
}

StringPtr Function::CreateDynamicInvocationForwarderName(const String& name) {
  return Symbols::FromConcat(Thread::Current(), Symbols::DynamicPrefix(), name);
}

#if !defined(DART_PRECOMPILED_RUNTIME)
FunctionPtr Function::CreateDynamicInvocationForwarder(
    const String& mangled_name) const {
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();

  Function& forwarder = Function::Handle(zone);
  forwarder ^= Object::Clone(*this, Heap::kOld);

  forwarder.reset_unboxed_parameters_and_return();

  forwarder.set_name(mangled_name);
  forwarder.set_is_native(false);
  // TODO(dartbug.com/37737): Currently, we intentionally keep the recognized
  // kind when creating the dynamic invocation forwarder.
  forwarder.set_kind(UntaggedFunction::kDynamicInvocationForwarder);
  forwarder.set_modifier(UntaggedFunction::kNoModifier);
  forwarder.set_is_debuggable(false);

  // TODO(vegorov): For error reporting reasons it is better to make this
  // function visible and instead use a TailCall to invoke the target.
  // Our TailCall instruction is not ready for such usage though: it
  // blocks inlining and can take only Code objects, not Functions.
  forwarder.set_is_visible(false);

  forwarder.ClearICDataArray();
  forwarder.ClearCode();
  forwarder.set_usage_counter(0);
  forwarder.set_deoptimization_counter(0);
  forwarder.set_optimized_instruction_count(0);
  forwarder.set_inlining_depth(0);
  forwarder.set_optimized_call_site_count(0);

  forwarder.InheritKernelOffsetFrom(*this);
  forwarder.SetForwardingTarget(*this);

  return forwarder.ptr();
}

FunctionPtr Function::GetDynamicInvocationForwarder(
    const String& mangled_name,
    bool allow_add /*=true*/) const {
  ASSERT(IsDynamicInvocationForwarderName(mangled_name));
  auto thread = Thread::Current();
  auto zone = thread->zone();
  const Class& owner = Class::Handle(zone, Owner());
  Function& result = Function::Handle(zone);

  // First we'll try to find it without using locks.
  result = owner.GetInvocationDispatcher(
      mangled_name, Array::null_array(),
      UntaggedFunction::kDynamicInvocationForwarder,
      /*create_if_absent=*/false);
  if (!result.IsNull()) return result.ptr();

  const bool needs_dyn_forwarder =
      kernel::NeedsDynamicInvocationForwarder(*this);
  if (!needs_dyn_forwarder) {
    return ptr();
  }

  if (!allow_add) {
    return Function::null();
  }

  // If we failed to find it and possibly need to create it, use a write lock.
  SafepointWriteRwLocker ml(thread, thread->isolate_group()->program_lock());

  // Try to find it again and return it if it was added in the meantime.
  result = owner.GetInvocationDispatcher(
      mangled_name, Array::null_array(),
      UntaggedFunction::kDynamicInvocationForwarder,
      /*create_if_absent=*/false);
  if (!result.IsNull()) return result.ptr();

  // Otherwise create it and add it.
  result = CreateDynamicInvocationForwarder(mangled_name);
  owner.AddInvocationDispatcher(mangled_name, Array::null_array(), result);
  return result.ptr();
}

#endif

bool AbstractType::InstantiateAndTestSubtype(
    AbstractType* subtype,
    AbstractType* supertype,
    const TypeArguments& instantiator_type_args,
    const TypeArguments& function_type_args) {
  if (!subtype->IsInstantiated()) {
    *subtype = subtype->InstantiateFrom(
        instantiator_type_args, function_type_args, kAllFree, Heap::kOld);
  }
  if (!supertype->IsInstantiated()) {
    *supertype = supertype->InstantiateFrom(
        instantiator_type_args, function_type_args, kAllFree, Heap::kOld);
  }
  return subtype->IsSubtypeOf(*supertype, Heap::kOld);
}

ArrayPtr Class::invocation_dispatcher_cache() const {
  return untag()->invocation_dispatcher_cache<std::memory_order_acquire>();
}

void Class::Finalize() const {
  auto thread = Thread::Current();
  auto isolate_group = thread->isolate_group();
  ASSERT(!thread->isolate_group()->all_classes_finalized());
  ASSERT(!is_finalized());
  // Prefinalized classes have a VM internal representation and no Dart fields.
  // Their instance size is precomputed and field offsets are known.
  if (!is_prefinalized()) {
    // Compute offsets of instance fields, instance size and bitmap for unboxed
    // fields.
    const auto host_bitmap = CalculateFieldOffsets();
    if (ptr() == isolate_group->class_table()->At(id())) {
      if (!ClassTable::IsTopLevelCid(id())) {
        // Unless the class is top-level (top-level classes don't get
        // instantiated), set the new size in the class table.
        isolate_group->class_table()->UpdateClassSize(id(), ptr());
        isolate_group->class_table()->SetUnboxedFieldsMapAt(id(), host_bitmap);
      }
    }
  }

#if defined(DEBUG)
  if (is_const()) {
    // Double-check that all fields are final (CFE should guarantee that if it
    // marks the class as having a constant constructor).
    auto Z = thread->zone();
    const auto& super_class = Class::Handle(Z, SuperClass());
    ASSERT(super_class.IsNull() || super_class.is_const());
    const auto& fields = Array::Handle(Z, this->fields());
    auto& field = Field::Handle(Z);
    for (intptr_t i = 0; i < fields.Length(); ++i) {
      field ^= fields.At(i);
      ASSERT(field.is_static() || field.is_final());
    }
  }
#endif

  set_is_finalized();
}

#if defined(DEBUG)
static bool IsMutatorOrAtDeoptSafepoint() {
  Thread* thread = Thread::Current();
  return thread->IsDartMutatorThread() || thread->OwnsDeoptSafepoint();
}
#endif

#if !defined(DART_PRECOMPILED_RUNTIME)

class CHACodeArray : public WeakCodeReferences {
 public:
  explicit CHACodeArray(const Class& cls)
      : WeakCodeReferences(WeakArray::Handle(cls.dependent_code())),
        cls_(cls) {}

  virtual void UpdateArrayTo(const WeakArray& value) {
    // TODO(fschneider): Fails for classes in the VM isolate.
    cls_.set_dependent_code(value);
  }

  virtual void ReportDeoptimization(const Code& code) {
    if (FLAG_trace_deoptimization || FLAG_trace_deoptimization_verbose) {
      Function& function = Function::Handle(code.function());
      THR_Print("Deoptimizing %s because CHA optimized (%s).\n",
                function.ToFullyQualifiedCString(), cls_.ToCString());
    }
  }

  virtual void ReportSwitchingCode(const Code& code) {
    if (FLAG_trace_deoptimization || FLAG_trace_deoptimization_verbose) {
      Function& function = Function::Handle(code.function());
      THR_Print(
          "Switching %s to unoptimized code because CHA invalid"
          " (%s)\n",
          function.ToFullyQualifiedCString(), cls_.ToCString());
    }
  }

 private:
  const Class& cls_;
  DISALLOW_COPY_AND_ASSIGN(CHACodeArray);
};

void Class::RegisterCHACode(const Code& code) {
  if (FLAG_trace_cha) {
    THR_Print("RegisterCHACode '%s' depends on class '%s'\n",
              Function::Handle(code.function()).ToQualifiedCString(),
              ToCString());
  }
  DEBUG_ASSERT(IsMutatorOrAtDeoptSafepoint());
  ASSERT(code.is_optimized());
  CHACodeArray a(*this);
  a.Register(code);
}

void Class::DisableCHAOptimizedCode(const Class& subclass) {
  DEBUG_ASSERT(
      IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
  CHACodeArray a(*this);
  if (FLAG_trace_deoptimization && a.HasCodes()) {
    if (subclass.IsNull()) {
      THR_Print("Deopt for CHA (all)\n");
    } else {
      THR_Print("Deopt for CHA (new subclass %s)\n", subclass.ToCString());
    }
  }
  a.DisableCode(/*are_mutators_stopped=*/false);
}

void Class::DisableAllCHAOptimizedCode() {
  DisableCHAOptimizedCode(Class::Handle());
}

WeakArrayPtr Class::dependent_code() const {
  DEBUG_ASSERT(
      IsolateGroup::Current()->program_lock()->IsCurrentThreadReader());
  return untag()->dependent_code();
}

void Class::set_dependent_code(const WeakArray& array) const {
  DEBUG_ASSERT(
      IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
  untag()->set_dependent_code(array.ptr());
}

#endif  // !defined(DART_PRECOMPILED_RUNTIME)

bool Class::TraceAllocation(IsolateGroup* isolate_group) const {
#ifndef PRODUCT
  auto class_table = isolate_group->class_table();
  return class_table->ShouldTraceAllocationFor(id());
#else
  return false;
#endif
}

void Class::SetTraceAllocation(bool trace_allocation) const {
#ifndef PRODUCT
  auto isolate_group = IsolateGroup::Current();
  const bool changed = trace_allocation != this->TraceAllocation(isolate_group);
  if (changed) {
    auto class_table = isolate_group->class_table();
    class_table->SetTraceAllocationFor(id(), trace_allocation);
    DisableAllocationStub();
  }
#else
  UNREACHABLE();
#endif
}

// Conventions:
// * For throwing a NSM in a library or top-level class (i.e., level is
//   kTopLevel), if a method was found but was incompatible, we pass the
//   signature of the found method as a string, otherwise the null instance.
// * Otherwise, for throwing a NSM in a class klass we use its runtime type as
//   receiver, i.e., klass.RareType().
static ObjectPtr ThrowNoSuchMethod(const Instance& receiver,
                                   const String& function_name,
                                   const Array& arguments,
                                   const Array& argument_names,
                                   const InvocationMirror::Level level,
                                   const InvocationMirror::Kind kind) {
  const Smi& invocation_type =
      Smi::Handle(Smi::New(InvocationMirror::EncodeType(level, kind)));

  ASSERT(!receiver.IsNull() || level == InvocationMirror::Level::kTopLevel);
  ASSERT(level != InvocationMirror::Level::kTopLevel || receiver.IsString());
  const Array& args = Array::Handle(Array::New(7));
  args.SetAt(0, receiver);
  args.SetAt(1, function_name);
  args.SetAt(2, invocation_type);
  args.SetAt(3, Object::smi_zero());  // Type arguments length.
  args.SetAt(4, Object::null_type_arguments());
  args.SetAt(5, arguments);
  args.SetAt(6, argument_names);

  const Library& libcore = Library::Handle(Library::CoreLibrary());
  const Class& cls =
      Class::Handle(libcore.LookupClass(Symbols::NoSuchMethodError()));
  ASSERT(!cls.IsNull());
  const auto& error = cls.EnsureIsFinalized(Thread::Current());
  ASSERT(error == Error::null());
  const Function& throwNew =
      Function::Handle(cls.LookupFunctionAllowPrivate(Symbols::ThrowNew()));
  return DartEntry::InvokeFunction(throwNew, args);
}

static ObjectPtr ThrowTypeError(const TokenPosition token_pos,
                                const Instance& src_value,
                                const AbstractType& dst_type,
                                const String& dst_name) {
  const Array& args = Array::Handle(Array::New(4));
  const Smi& pos = Smi::Handle(Smi::New(token_pos.Serialize()));
  args.SetAt(0, pos);
  args.SetAt(1, src_value);
  args.SetAt(2, dst_type);
  args.SetAt(3, dst_name);

  const Library& libcore = Library::Handle(Library::CoreLibrary());
  const Class& cls =
      Class::Handle(libcore.LookupClassAllowPrivate(Symbols::TypeError()));
  const auto& error = cls.EnsureIsFinalized(Thread::Current());
  ASSERT(error == Error::null());
  const Function& throwNew =
      Function::Handle(cls.LookupFunctionAllowPrivate(Symbols::ThrowNew()));
  return DartEntry::InvokeFunction(throwNew, args);
}

ObjectPtr Class::InvokeGetter(const String& getter_name,
                              bool throw_nsm_if_absent,
                              bool respect_reflectable,
                              bool check_is_entrypoint) const {
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();

  CHECK_ERROR(EnsureIsFinalized(thread));

  // Note static fields do not have implicit getters.
  const Field& field = Field::Handle(zone, LookupStaticField(getter_name));

  if (!field.IsNull() && check_is_entrypoint) {
    CHECK_ERROR(field.VerifyEntryPoint(EntryPointPragma::kGetterOnly));
  }

  if (field.IsNull() || field.IsUninitialized()) {
    const String& internal_getter_name =
        String::Handle(zone, Field::GetterName(getter_name));
    Function& getter =
        Function::Handle(zone, LookupStaticFunction(internal_getter_name));

    if (field.IsNull() && !getter.IsNull() && check_is_entrypoint) {
      CHECK_ERROR(getter.VerifyCallEntryPoint());
    }

    if (getter.IsNull() || (respect_reflectable && !getter.is_reflectable())) {
      if (getter.IsNull()) {
        getter = LookupStaticFunction(getter_name);
        if (!getter.IsNull()) {
          if (check_is_entrypoint) {
            CHECK_ERROR(getter.VerifyClosurizedEntryPoint());
          }
          if (getter.SafeToClosurize()) {
            // Looking for a getter but found a regular method: closurize it.
            const Function& closure_function =
                Function::Handle(zone, getter.ImplicitClosureFunction());
            return closure_function.ImplicitStaticClosure();
          }
        }
      }
      if (throw_nsm_if_absent) {
        return ThrowNoSuchMethod(
            AbstractType::Handle(zone, RareType()), getter_name,
            Object::null_array(), Object::null_array(),
            InvocationMirror::kStatic, InvocationMirror::kGetter);
      }
      // Fall through case: Indicate that we didn't find any function or field
      // using a special null instance. This is different from a field being
      // null. Callers make sure that this null does not leak into Dartland.
      return Object::sentinel().ptr();
    }

    // Invoke the getter and return the result.
    return DartEntry::InvokeFunction(getter, Object::empty_array());
  }

  return field.StaticValue();
}

ObjectPtr Class::InvokeSetter(const String& setter_name,
                              const Instance& value,
                              bool respect_reflectable,
                              bool check_is_entrypoint) const {
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();

  CHECK_ERROR(EnsureIsFinalized(thread));

  // Check for real fields and user-defined setters.
  const Field& field = Field::Handle(zone, LookupStaticField(setter_name));
  const String& internal_setter_name =
      String::Handle(zone, Field::SetterName(setter_name));

  if (!field.IsNull() && check_is_entrypoint) {
    CHECK_ERROR(field.VerifyEntryPoint(EntryPointPragma::kSetterOnly));
  }

  AbstractType& parameter_type = AbstractType::Handle(zone);
  if (field.IsNull()) {
    const Function& setter =
        Function::Handle(zone, LookupStaticFunction(internal_setter_name));
    if (!setter.IsNull() && check_is_entrypoint) {
      CHECK_ERROR(setter.VerifyCallEntryPoint());
    }
    const int kNumArgs = 1;
    const Array& args = Array::Handle(zone, Array::New(kNumArgs));
    args.SetAt(0, value);
    if (setter.IsNull() || (respect_reflectable && !setter.is_reflectable())) {
      return ThrowNoSuchMethod(AbstractType::Handle(zone, RareType()),
                               internal_setter_name, args,
                               Object::null_array(),
                               InvocationMirror::kStatic,
                               InvocationMirror::kSetter);
    }
    parameter_type = setter.ParameterTypeAt(0);
    if (!value.RuntimeTypeIsSubtypeOf(parameter_type,
                                      Object::null_type_arguments(),
                                      Object::null_type_arguments())) {
      const String& argument_name =
          String::Handle(zone, setter.ParameterNameAt(0));
      return ThrowTypeError(setter.token_pos(), value, parameter_type,
                            argument_name);
    }
    // Invoke the setter and return the result.
    return DartEntry::InvokeFunction(setter, args);
  }

  if (field.is_final() || (respect_reflectable && !field.is_reflectable())) {
    const int kNumArgs = 1;
    const Array& args = Array::Handle(zone, Array::New(kNumArgs));
    args.SetAt(0, value);
    return ThrowNoSuchMethod(AbstractType::Handle(zone, RareType()),
                             internal_setter_name, args, Object::null_array(),
                             InvocationMirror::kStatic,
                             InvocationMirror::kSetter);
  }

  parameter_type = field.type();
  if (!value.RuntimeTypeIsSubtypeOf(parameter_type,
                                    Object::null_type_arguments(),
                                    Object::null_type_arguments())) {
    const String& argument_name = String::Handle(zone, field.name());
    return ThrowTypeError(field.token_pos(), value, parameter_type,
                          argument_name);
  }
  field.SetStaticValue(value);
  return value.ptr();
}

// Creates a new array of boxed arguments suitable for invoking the callable
// from the original boxed arguments of a static call, with the callable
// itself stored in the receiver slot of the new arguments.
//
// Assumes [arg_names] are consistent with [static_args_descriptor].
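//
// Illustrative layout (receiver R, positional arguments a0 and a1, with a
// leading type argument vector T when present):
//
//   static_args:   [T, a0, a1]
//   callable_args: [T, R, a0, a1]
//
// i.e. every original argument shifts one slot to the right and the callable
// becomes the receiver.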
static ArrayPtr CreateCallableArgumentsFromStatic(
    Zone* zone,
    const Instance& receiver,
    const Array& static_args,
    const Array& arg_names,
    const ArgumentsDescriptor& static_args_descriptor) {
  const intptr_t num_static_type_args = static_args_descriptor.TypeArgsLen();
  const intptr_t num_static_args = static_args_descriptor.Count();
  // Double check that the static args descriptor expects boxed arguments
  // and the static args descriptor is consistent with the static arguments.
  ASSERT_EQUAL(static_args_descriptor.Size(), num_static_args);
  ASSERT_EQUAL(static_args.Length(),
               num_static_args + (num_static_type_args > 0 ? 1 : 0));
  // Add an additional slot to store the callable as the receiver.
  const auto& callable_args =
      Array::Handle(zone, Array::New(static_args.Length() + 1));
  const intptr_t first_arg_index = static_args_descriptor.FirstArgIndex();
  auto& temp = Object::Handle(zone);
  // Copy the static args into the corresponding slots of the callable args.
  if (num_static_type_args > 0) {
    temp = static_args.At(0);
    callable_args.SetAt(0, temp);
  }
  for (intptr_t i = first_arg_index; i < static_args.Length(); i++) {
    temp = static_args.At(i);
    callable_args.SetAt(i + 1, temp);
  }
  // Set the receiver slot in the callable args.
  callable_args.SetAt(first_arg_index, receiver);
  return callable_args.ptr();
}

ObjectPtr Class::Invoke(const String& function_name,
                        const Array& args,
                        const Array& arg_names,
                        bool respect_reflectable,
                        bool check_is_entrypoint) const {
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  CHECK_ERROR(EnsureIsFinalized(thread));

  // We don't pass any explicit type arguments, which will be understood as
  // using dynamic for any function type arguments by lower layers.
  const int kTypeArgsLen = 0;
  const Array& args_descriptor_array = Array::Handle(
      zone, ArgumentsDescriptor::NewBoxed(kTypeArgsLen, args.Length(),
                                          arg_names, Heap::kNew));
  ArgumentsDescriptor args_descriptor(args_descriptor_array);

  Function& function =
      Function::Handle(zone, LookupStaticFunction(function_name));

  if (!function.IsNull() && check_is_entrypoint) {
    CHECK_ERROR(function.VerifyCallEntryPoint());
  }

  if (function.IsNull()) {
    // Didn't find a method: try to find a getter and invoke call on its
    // result.
    const Object& getter_result = Object::Handle(
        zone, InvokeGetter(function_name, false, respect_reflectable,
                           check_is_entrypoint));
    if (getter_result.ptr() != Object::sentinel().ptr()) {
      if (check_is_entrypoint) {
        CHECK_ERROR(EntryPointFieldInvocationError(function_name));
      }
      const auto& call_args_descriptor_array = Array::Handle(
          zone, ArgumentsDescriptor::NewBoxed(args_descriptor.TypeArgsLen(),
                                              args_descriptor.Count() + 1,
                                              arg_names, Heap::kNew));
      const auto& call_args = Array::Handle(
          zone,
          CreateCallableArgumentsFromStatic(zone, Instance::Cast(getter_result),
                                            args, arg_names, args_descriptor));
      return DartEntry::InvokeClosure(thread, call_args,
                                      call_args_descriptor_array);
    }
  }

  if (function.IsNull() ||
      !function.AreValidArguments(args_descriptor, nullptr) ||
      (respect_reflectable && !function.is_reflectable())) {
    return ThrowNoSuchMethod(
        AbstractType::Handle(zone, RareType()), function_name, args, arg_names,
        InvocationMirror::kStatic, InvocationMirror::kMethod);
  }
  // This is a static function, so we pass an empty instantiator tav.
  ASSERT(function.is_static());
  ObjectPtr type_error = function.DoArgumentTypesMatch(
      args, args_descriptor, Object::empty_type_arguments());
  if (type_error != Error::null()) {
    return type_error;
  }
  return DartEntry::InvokeFunction(function, args, args_descriptor_array);
}

#if !defined(DART_PRECOMPILED_RUNTIME)

static ObjectPtr LoadExpressionEvaluationFunction(
    Zone* zone,
    const ExternalTypedData& kernel_buffer,
    const String& library_url,
    const String& klass) {
  std::unique_ptr<kernel::Program> kernel_pgm =
      kernel::Program::ReadFromTypedData(kernel_buffer);

  if (kernel_pgm == nullptr) {
    return ApiError::New(String::Handle(
        zone, String::New("Kernel isolate returned ill-formed kernel.")));
  }

  auto& result = Object::Handle(zone);
  {
    kernel::KernelLoader loader(kernel_pgm.get(),
                                /*uri_to_source_table=*/nullptr);
    result = loader.LoadExpressionEvaluationFunction(library_url, klass);
    kernel_pgm.reset();
  }
  if (result.IsError()) return result.ptr();
  return Function::Cast(result).ptr();
}

static bool EvaluationFunctionNeedsReceiver(Thread* thread,
                                            Zone* zone,
                                            const Function& eval_function) {
  auto parsed_function = new ParsedFunction(
      thread, Function::ZoneHandle(zone, eval_function.ptr()));
  parsed_function->EnsureKernelScopes();
  return parsed_function->is_receiver_used();
}

static ObjectPtr EvaluateCompiledExpressionHelper(
    Zone* zone,
    const Function& eval_function,
    const Array& type_definitions,
    const Array& arguments,
    const TypeArguments& type_arguments) {
  // type_arguments is null if all type arguments are dynamic.
  if (type_definitions.Length() == 0 || type_arguments.IsNull()) {
    return DartEntry::InvokeFunction(eval_function, arguments);
  }

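  // Otherwise prepend the type argument vector to the boxed arguments:
  // illustratively, arguments [a0, a1] become [type_arguments, a0, a1],
  // with a matching arguments descriptor built below.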
  intptr_t num_type_args = type_arguments.Length();
  const auto& real_arguments =
      Array::Handle(zone, Array::New(arguments.Length() + 1));
  real_arguments.SetAt(0, type_arguments);
  Object& arg = Object::Handle(zone);
  for (intptr_t i = 0; i < arguments.Length(); ++i) {
    arg = arguments.At(i);
    real_arguments.SetAt(i + 1, arg);
  }

  const Array& args_desc =
      Array::Handle(zone, ArgumentsDescriptor::NewBoxed(
                              num_type_args, arguments.Length(), Heap::kNew));
  return DartEntry::InvokeFunction(eval_function, real_arguments, args_desc);
}

#endif  // !defined(DART_PRECOMPILED_RUNTIME)

ObjectPtr Library::EvaluateCompiledExpression(
    const ExternalTypedData& kernel_buffer,
    const Array& type_definitions,
    const Array& arguments,
    const TypeArguments& type_arguments) const {
  const auto& klass = Class::Handle(toplevel_class());
  return klass.EvaluateCompiledExpression(kernel_buffer, type_definitions,
                                          arguments, type_arguments);
}

ObjectPtr Class::EvaluateCompiledExpression(
    const ExternalTypedData& kernel_buffer,
    const Array& type_definitions,
    const Array& arguments,
    const TypeArguments& type_arguments) const {
  auto thread = Thread::Current();
  const auto& library = Library::Handle(thread->zone(), this->library());
  return Instance::EvaluateCompiledExpression(
      thread, Instance::null_object(), library, *this, kernel_buffer,
      type_definitions, arguments, type_arguments);
}

ObjectPtr Instance::EvaluateCompiledExpression(
    const Class& klass,
    const ExternalTypedData& kernel_buffer,
    const Array& type_definitions,
    const Array& arguments,
    const TypeArguments& type_arguments) const {
  auto thread = Thread::Current();
  auto zone = thread->zone();
  const auto& library = Library::Handle(zone, klass.library());
  return Instance::EvaluateCompiledExpression(thread, *this, library, klass,
                                              kernel_buffer, type_definitions,
                                              arguments, type_arguments);
}

ObjectPtr Instance::EvaluateCompiledExpression(
    Thread* thread,
    const Object& receiver,
    const Library& library,
    const Class& klass,
    const ExternalTypedData& kernel_buffer,
    const Array& type_definitions,
    const Array& arguments,
    const TypeArguments& type_arguments) {
  auto zone = Thread::Current()->zone();
#if defined(DART_PRECOMPILED_RUNTIME)
  const auto& error_str = String::Handle(
      zone,
      String::New("Expression evaluation not available in precompiled mode."));
  return ApiError::New(error_str);
#else
  if (IsInternalOnlyClassId(klass.id()) || (klass.id() == kTypeArgumentsCid)) {
    const auto& exception = Instance::Handle(
        zone, String::New("Expressions can be evaluated only with regular Dart "
                          "instances/classes."));
    return UnhandledException::New(exception, StackTrace::null_instance());
  }

  const auto& url = String::Handle(zone, library.url());
  const auto& klass_name = klass.IsTopLevel()
                               ? String::null_string()
                               : String::Handle(zone, klass.UserVisibleName());

  const auto& result = Object::Handle(
      zone,
      LoadExpressionEvaluationFunction(zone, kernel_buffer, url, klass_name));
  if (result.IsError()) return result.ptr();

  const auto& eval_function = Function::Cast(result);

  auto& all_arguments = Array::Handle(zone, arguments.ptr());
  if (!eval_function.is_static()) {
    // `this` may be optimized out (e.g. not accessible from a breakpoint due
    // to not being captured by a closure). We allow this as long as the
    // evaluation function doesn't actually need `this`.
    if (receiver.IsNull() || receiver.ptr() == Object::optimized_out().ptr()) {
      if (EvaluationFunctionNeedsReceiver(thread, zone, eval_function)) {
        return Object::optimized_out().ptr();
      }
    }

    all_arguments = Array::New(1 + arguments.Length());
    auto& param = PassiveObject::Handle();
    all_arguments.SetAt(0, receiver);
    for (intptr_t i = 0; i < arguments.Length(); i++) {
      param = arguments.At(i);
      all_arguments.SetAt(i + 1, param);
    }
  }

  return EvaluateCompiledExpressionHelper(zone, eval_function, type_definitions,
                                          all_arguments, type_arguments);
#endif  // !defined(DART_PRECOMPILED_RUNTIME)
}

void Class::EnsureDeclarationLoaded() const {
  if (!is_declaration_loaded()) {
#if defined(DART_PRECOMPILED_RUNTIME)
    UNREACHABLE();
#else
    FATAL("Unable to use class %s which is not loaded yet.", ToCString());
#endif
  }
}

// Ensure that top-level parsing of the class has been done.
ErrorPtr Class::EnsureIsFinalized(Thread* thread) const {
  ASSERT(!IsNull());
  if (is_finalized()) {
    return Error::null();
  }
#if defined(DART_PRECOMPILED_RUNTIME)
  UNREACHABLE();
  return Error::null();
#else
  SafepointWriteRwLocker ml(thread, thread->isolate_group()->program_lock());
  if (is_finalized()) {
    return Error::null();
  }
  LeaveCompilerScope ncs(thread);
  ASSERT(thread != nullptr);
  const Error& error =
      Error::Handle(thread->zone(), ClassFinalizer::LoadClassMembers(*this));
  if (!error.IsNull()) {
    ASSERT(thread == Thread::Current());
    if (thread->long_jump_base() != nullptr) {
      Report::LongJump(error);
      UNREACHABLE();
    }
  }
  return error.ptr();
#endif  // defined(DART_PRECOMPILED_RUNTIME)
}

// Ensure that code outdated by the finalized class is cleaned up and that a
// new instance of this class is ready to be allocated.
ErrorPtr Class::EnsureIsAllocateFinalized(Thread* thread) const {
  ASSERT(!IsNull());
  if (is_allocate_finalized()) {
    return Error::null();
  }
  SafepointWriteRwLocker ml(thread, thread->isolate_group()->program_lock());
  if (is_allocate_finalized()) {
    return Error::null();
  }
  ASSERT(thread != nullptr);
  Error& error = Error::Handle(thread->zone(), EnsureIsFinalized(thread));
  if (!error.IsNull()) {
    ASSERT(thread == Thread::Current());
    if (thread->long_jump_base() != nullptr) {
      Report::LongJump(error);
      UNREACHABLE();
    }
  }
  // May be allocate-finalized recursively during EnsureIsFinalized.
  if (is_allocate_finalized()) {
    return Error::null();
  }
#if defined(DART_PRECOMPILED_RUNTIME)
  UNREACHABLE();
#else
  error ^= ClassFinalizer::AllocateFinalizeClass(*this);
#endif  // defined(DART_PRECOMPILED_RUNTIME)
  return error.ptr();
}

void Class::SetFields(const Array& value) const {
  ASSERT(!value.IsNull());
#if defined(DEBUG)
  Thread* thread = Thread::Current();
  ASSERT(thread->isolate_group()->program_lock()->IsCurrentThreadWriter());
  // Verify that all the fields in the array have this class as owner.
  Field& field = Field::Handle();
  intptr_t len = value.Length();
  for (intptr_t i = 0; i < len; i++) {
    field ^= value.At(i);
    ASSERT(field.IsOriginal());
    ASSERT(field.Owner() == ptr());
  }
#endif
  // The value of static fields is already initialized to null.
  set_fields(value);
}

void Class::AddField(const Field& field) const {
#if defined(DEBUG)
  Thread* thread = Thread::Current();
  ASSERT(thread->isolate_group()->program_lock()->IsCurrentThreadWriter());
#endif
  const Array& arr = Array::Handle(fields());
  const Array& new_arr = Array::Handle(Array::Grow(arr, arr.Length() + 1));
  new_arr.SetAt(arr.Length(), field);
  SetFields(new_arr);
}

void Class::AddFields(const GrowableArray<const Field*>& new_fields) const {
#if defined(DEBUG)
  Thread* thread = Thread::Current();
  ASSERT(thread->isolate_group()->program_lock()->IsCurrentThreadWriter());
#endif
  const intptr_t num_new_fields = new_fields.length();
  if (num_new_fields == 0) return;
  const Array& arr = Array::Handle(fields());
  const intptr_t num_old_fields = arr.Length();
  const Array& new_arr = Array::Handle(
      Array::Grow(arr, num_old_fields + num_new_fields, Heap::kOld));
  for (intptr_t i = 0; i < num_new_fields; i++) {
    new_arr.SetAt(i + num_old_fields, *new_fields.At(i));
  }
  SetFields(new_arr);
}

intptr_t Class::FindFieldIndex(const Field& needle) const {
  Thread* thread = Thread::Current();
  if (EnsureIsFinalized(thread) != Error::null()) {
    return -1;
  }
  REUSABLE_ARRAY_HANDLESCOPE(thread);
  REUSABLE_FIELD_HANDLESCOPE(thread);
  Array& fields = thread->ArrayHandle();
  Field& field = thread->FieldHandle();
  fields = this->fields();
  ASSERT(!fields.IsNull());
  for (intptr_t i = 0, n = fields.Length(); i < n; ++i) {
    field ^= fields.At(i);
    if (needle.ptr() == field.ptr()) {
      return i;
    }
  }
  // Not found.
  return -1;
}

FieldPtr Class::FieldFromIndex(intptr_t idx) const {
  Array& fields = Array::Handle(this->fields());
  if ((idx < 0) || (idx >= fields.Length())) {
    return Field::null();
  }
  return Field::RawCast(fields.At(idx));
}

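// Injects static const fields carrying the VM's class ids into the internal
// ClassID class. Illustrative sketch (field names follow the macros below):
// an entry generated as {"cidArray", kArrayCid} backs a Dart-visible field
// `ClassID.cidArray` whose value is the runtime class id of Array.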
bool Class::InjectCIDFields() const {
  if (library() != Library::InternalLibrary() ||
      Name() != Symbols::ClassID().ptr()) {
    return false;
  }

  auto thread = Thread::Current();
  auto isolate_group = thread->isolate_group();
  auto zone = thread->zone();
  Field& field = Field::Handle(zone);
  Smi& value = Smi::Handle(zone);
  String& field_name = String::Handle(zone);

  static const struct {
    const char* const field_name;
    const intptr_t cid;
  } cid_fields[] = {
#define CLASS_LIST_WITH_NULL(V)                                                \
  V(Null)                                                                      \
  CLASS_LIST_NO_OBJECT(V)
#define ADD_SET_FIELD(clazz) {"cid" #clazz, k##clazz##Cid},
      CLASS_LIST_WITH_NULL(ADD_SET_FIELD)
#undef ADD_SET_FIELD
#undef CLASS_LIST_WITH_NULL
#define ADD_SET_FIELD(clazz)                                                   \
  {"cid" #clazz, kTypedData##clazz##Cid},                                      \
      {"cid" #clazz "View", kTypedData##clazz##ViewCid},                       \
      {"cidExternal" #clazz, kExternalTypedData##clazz##Cid},                  \
      {"cidUnmodifiable" #clazz "View",                                        \
       kUnmodifiableTypedData##clazz##ViewCid},
      CLASS_LIST_TYPED_DATA(ADD_SET_FIELD)
#undef ADD_SET_FIELD
      // Used in const hashing to determine whether we're dealing with a
      // user-defined const. See lib/_internal/vm/lib/compact_hash.dart.
      {"numPredefinedCids", kNumPredefinedCids},
  };

  const AbstractType& field_type = Type::Handle(zone, Type::IntType());
  for (size_t i = 0; i < ARRAY_SIZE(cid_fields); i++) {
    field_name = Symbols::New(thread, cid_fields[i].field_name);
    field = Field::New(field_name, /* is_static = */ true,
                       /* is_final = */ false,
                       /* is_const = */ true,
                       /* is_reflectable = */ false,
                       /* is_late = */ false, *this, field_type,
                       TokenPosition::kMinSource, TokenPosition::kMinSource);
    value = Smi::New(cid_fields[i].cid);
    isolate_group->RegisterStaticField(field, value);
    AddField(field);
  }

  return true;
}

template <class FakeInstance, class TargetFakeInstance>
ClassPtr Class::NewCommon(intptr_t index) {
  ASSERT(Object::class_class() != Class::null());
  const auto& result = Class::Handle(Object::Allocate<Class>(Heap::kOld));
  // Here kIllegalCid means not-yet-assigned.
  Object::VerifyBuiltinVtable<FakeInstance>(index == kIllegalCid ? kInstanceCid
                                                                 : index);
  NOT_IN_PRECOMPILED(result.set_token_pos(TokenPosition::kNoSource));
  NOT_IN_PRECOMPILED(result.set_end_token_pos(TokenPosition::kNoSource));
  const intptr_t host_instance_size = FakeInstance::InstanceSize();
  const intptr_t target_instance_size = compiler::target::RoundedAllocationSize(
      TargetFakeInstance::InstanceSize());
  result.set_instance_size(host_instance_size, target_instance_size);
  result.set_type_arguments_field_offset_in_words(kNoTypeArguments,
                                                  RTN::Class::kNoTypeArguments);
  const intptr_t host_next_field_offset = FakeInstance::NextFieldOffset();
  const intptr_t target_next_field_offset =
      TargetFakeInstance::NextFieldOffset();
  result.set_next_field_offset(host_next_field_offset,
                               target_next_field_offset);
  result.set_id(index);
  NOT_IN_PRECOMPILED(result.set_implementor_cid(kIllegalCid));
  result.set_num_type_arguments_unsafe(kUnknownNumTypeArguments);
  result.set_num_native_fields(0);
  result.set_state_bits(0);
  NOT_IN_PRECOMPILED(result.set_kernel_offset(0));
  result.InitEmptyFields();
  return result.ptr();
}

template <class FakeInstance, class TargetFakeInstance>
ClassPtr Class::New(intptr_t index,
                    IsolateGroup* isolate_group,
                    bool register_class,
                    bool is_abstract) {
  Class& result =
      Class::Handle(NewCommon<FakeInstance, TargetFakeInstance>(index));
  if (is_abstract) {
    result.set_is_abstract();
  }
  if (register_class) {
    isolate_group->class_table()->Register(result);
  }
  return result.ptr();
}

ClassPtr Class::New(const Library& lib,
                    const String& name,
                    const Script& script,
                    TokenPosition token_pos,
                    bool register_class) {
  Class& result =
      Class::Handle(NewCommon<Instance, RTN::Instance>(kIllegalCid));
  result.set_library(lib);
  result.set_name(name);
  result.set_script(script);
  NOT_IN_PRECOMPILED(result.set_token_pos(token_pos));

  // The size gets initialized to 0. Once the class gets finalized the class
  // finalizer will set the correct size.
  ASSERT(!result.is_finalized() && !result.is_prefinalized());
  result.set_instance_size_in_words(0, 0);

  if (register_class) {
    IsolateGroup::Current()->RegisterClass(result);
  }
  return result.ptr();
}

ClassPtr Class::NewInstanceClass() {
  return Class::New<Instance, RTN::Instance>(kIllegalCid,
                                             IsolateGroup::Current());
}

ClassPtr Class::NewNativeWrapper(const Library& library,
                                 const String& name,
                                 int field_count) {
  Class& cls = Class::Handle(library.LookupClass(name));
  if (cls.IsNull()) {
    cls = New(library, name, Script::Handle(), TokenPosition::kNoSource);
    cls.SetFields(Object::empty_array());
    cls.SetFunctions(Object::empty_array());
    // Set super class to Object.
    cls.set_super_type(Type::Handle(Type::ObjectType()));
    // Compute instance size. First word contains a pointer to a properly
    // sized typed array once the first native field has been set.
    const intptr_t host_instance_size =
        sizeof(UntaggedInstance) + kCompressedWordSize;
#if defined(DART_PRECOMPILER)
    const intptr_t target_instance_size =
        compiler::target::Instance::InstanceSize() +
        compiler::target::kCompressedWordSize;
#else
    const intptr_t target_instance_size =
        sizeof(UntaggedInstance) + compiler::target::kCompressedWordSize;
#endif
    cls.set_instance_size(
        RoundedAllocationSize(host_instance_size),
        compiler::target::RoundedAllocationSize(target_instance_size));
    cls.set_next_field_offset(host_instance_size, target_instance_size);
    cls.set_num_native_fields(field_count);
    cls.set_is_allocate_finalized();
    // The signature of the constructor yet to be added to this class will have
    // to be finalized explicitly, since the class is prematurely marked as
    // 'is_allocate_finalized' and finalization of member types will not occur.
    cls.set_is_declaration_loaded();
    cls.set_is_type_finalized();
    cls.set_is_synthesized_class();
    cls.set_is_isolate_unsendable(true);
    NOT_IN_PRECOMPILED(cls.set_implementor_cid(kDynamicCid));
    library.AddClass(cls);
    return cls.ptr();
  } else {
    return Class::null();
  }
}

ClassPtr Class::NewStringClass(intptr_t class_id, IsolateGroup* isolate_group) {
  intptr_t host_instance_size, target_instance_size;
  if (class_id == kOneByteStringCid) {
    host_instance_size = OneByteString::InstanceSize();
    target_instance_size = compiler::target::RoundedAllocationSize(
        RTN::OneByteString::InstanceSize());
  } else if (class_id == kTwoByteStringCid) {
    host_instance_size = TwoByteString::InstanceSize();
    target_instance_size = compiler::target::RoundedAllocationSize(
        RTN::TwoByteString::InstanceSize());
  } else if (class_id == kExternalOneByteStringCid) {
    host_instance_size = ExternalOneByteString::InstanceSize();
    target_instance_size = compiler::target::RoundedAllocationSize(
        RTN::ExternalOneByteString::InstanceSize());
  } else {
    ASSERT(class_id == kExternalTwoByteStringCid);
    host_instance_size = ExternalTwoByteString::InstanceSize();
    target_instance_size = compiler::target::RoundedAllocationSize(
        RTN::ExternalTwoByteString::InstanceSize());
  }
  Class& result = Class::Handle(New<String, RTN::String>(
      class_id, isolate_group, /*register_class=*/false));
  result.set_instance_size(host_instance_size, target_instance_size);

  const intptr_t host_next_field_offset = String::NextFieldOffset();
  const intptr_t target_next_field_offset = RTN::String::NextFieldOffset();
  result.set_next_field_offset(host_next_field_offset,
                               target_next_field_offset);
  result.set_is_prefinalized();
  isolate_group->class_table()->Register(result);
  return result.ptr();
}

ClassPtr Class::NewTypedDataClass(intptr_t class_id,
                                  IsolateGroup* isolate_group) {
  ASSERT(IsTypedDataClassId(class_id));
  const intptr_t host_instance_size = TypedData::InstanceSize();
  const intptr_t target_instance_size =
      compiler::target::RoundedAllocationSize(RTN::TypedData::InstanceSize());
  Class& result = Class::Handle(New<TypedData, RTN::TypedData>(
      class_id, isolate_group, /*register_class=*/false));
  result.set_instance_size(host_instance_size, target_instance_size);

  const intptr_t host_next_field_offset = TypedData::NextFieldOffset();
  const intptr_t target_next_field_offset = RTN::TypedData::NextFieldOffset();
  result.set_next_field_offset(host_next_field_offset,
                               target_next_field_offset);
  result.set_is_prefinalized();
  isolate_group->class_table()->Register(result);
  return result.ptr();
}

ClassPtr Class::NewTypedDataViewClass(intptr_t class_id,
                                      IsolateGroup* isolate_group) {
  ASSERT(IsTypedDataViewClassId(class_id));
  const intptr_t host_instance_size = TypedDataView::InstanceSize();
  const intptr_t target_instance_size = compiler::target::RoundedAllocationSize(
      RTN::TypedDataView::InstanceSize());
  Class& result = Class::Handle(New<TypedDataView, RTN::TypedDataView>(
      class_id, isolate_group, /*register_class=*/false));
  result.set_instance_size(host_instance_size, target_instance_size);

  const intptr_t host_next_field_offset = TypedDataView::NextFieldOffset();
  const intptr_t target_next_field_offset =
      RTN::TypedDataView::NextFieldOffset();
  result.set_next_field_offset(host_next_field_offset,
                               target_next_field_offset);
  result.set_is_prefinalized();
  isolate_group->class_table()->Register(result);
  return result.ptr();
}

ClassPtr Class::NewUnmodifiableTypedDataViewClass(intptr_t class_id,
                                                  IsolateGroup* isolate_group) {
  ASSERT(IsUnmodifiableTypedDataViewClassId(class_id));
  const intptr_t host_instance_size = TypedDataView::InstanceSize();
  const intptr_t target_instance_size = compiler::target::RoundedAllocationSize(
      RTN::TypedDataView::InstanceSize());
  Class& result = Class::Handle(New<TypedDataView, RTN::TypedDataView>(
      class_id, isolate_group, /*register_class=*/false));
  result.set_instance_size(host_instance_size, target_instance_size);

  const intptr_t host_next_field_offset = TypedDataView::NextFieldOffset();
  const intptr_t target_next_field_offset =
      RTN::TypedDataView::NextFieldOffset();
  result.set_next_field_offset(host_next_field_offset,
                               target_next_field_offset);
  result.set_is_prefinalized();
  isolate_group->class_table()->Register(result);
  return result.ptr();
}

ClassPtr Class::NewExternalTypedDataClass(intptr_t class_id,
                                          IsolateGroup* isolate_group) {
  ASSERT(IsExternalTypedDataClassId(class_id));
  const intptr_t host_instance_size = ExternalTypedData::InstanceSize();
  const intptr_t target_instance_size = compiler::target::RoundedAllocationSize(
      RTN::ExternalTypedData::InstanceSize());
  Class& result = Class::Handle(New<ExternalTypedData, RTN::ExternalTypedData>(
      class_id, isolate_group, /*register_class=*/false));

  const intptr_t host_next_field_offset = ExternalTypedData::NextFieldOffset();
  const intptr_t target_next_field_offset =
      RTN::ExternalTypedData::NextFieldOffset();
  result.set_instance_size(host_instance_size, target_instance_size);
  result.set_next_field_offset(host_next_field_offset,
                               target_next_field_offset);
  result.set_is_prefinalized();
  isolate_group->class_table()->Register(result);
  return result.ptr();
}

ClassPtr Class::NewPointerClass(intptr_t class_id,
                                IsolateGroup* isolate_group) {
  ASSERT(IsFfiPointerClassId(class_id));
  intptr_t host_instance_size = Pointer::InstanceSize();
  intptr_t target_instance_size =
      compiler::target::RoundedAllocationSize(RTN::Pointer::InstanceSize());
  Class& result = Class::Handle(New<Pointer, RTN::Pointer>(
      class_id, isolate_group, /*register_class=*/false));
  result.set_instance_size(host_instance_size, target_instance_size);
  result.set_type_arguments_field_offset(Pointer::type_arguments_offset(),
                                         RTN::Pointer::type_arguments_offset());

  const intptr_t host_next_field_offset = Pointer::NextFieldOffset();
  const intptr_t target_next_field_offset = RTN::Pointer::NextFieldOffset();

  result.set_next_field_offset(host_next_field_offset,
                               target_next_field_offset);
  result.set_is_prefinalized();
  isolate_group->class_table()->Register(result);
  return result.ptr();
}

void Class::set_name(const String& value) const {
  ASSERT(untag()->name() == String::null());
  ASSERT(value.IsSymbol());
  untag()->set_name(value.ptr());
#if !defined(PRODUCT)
  if (untag()->user_name() == String::null()) {
    // TODO(johnmccutchan): Eagerly set user name for VM isolate classes,
    // lazily set user name for the other classes.
    // Generate and set user_name.
    const String& user_name = String::Handle(
        Symbols::New(Thread::Current(), GenerateUserVisibleName()));
    set_user_name(user_name);
  }
#endif  // !defined(PRODUCT)
}

#if !defined(PRODUCT)
void Class::set_user_name(const String& value) const {
  untag()->set_user_name(value.ptr());
}
#endif  // !defined(PRODUCT)

#if !defined(PRODUCT) || defined(FORCE_INCLUDE_SAMPLING_HEAP_PROFILER)
void Class::SetUserVisibleNameInClassTable() {
  IsolateGroup* isolate_group = IsolateGroup::Current();
  auto class_table = isolate_group->class_table();
  if (class_table->UserVisibleNameFor(id()) == nullptr) {
    String& name = String::Handle(UserVisibleName());
    class_table->SetUserVisibleNameFor(id(), name.ToMallocCString());
  }
}
#endif  // !defined(PRODUCT) || defined(FORCE_INCLUDE_SAMPLING_HEAP_PROFILER)

const char* Class::GenerateUserVisibleName() const {
  if (FLAG_show_internal_names) {
    return String::Handle(Name()).ToCString();
  }
  switch (id()) {
    case kFloat32x4Cid:
      return Symbols::Float32x4().ToCString();
    case kFloat64x2Cid:
      return Symbols::Float64x2().ToCString();
    case kInt32x4Cid:
      return Symbols::Int32x4().ToCString();
    case kTypedDataInt8ArrayCid:
    case kExternalTypedDataInt8ArrayCid:
      return Symbols::Int8List().ToCString();
    case kTypedDataUint8ArrayCid:
    case kExternalTypedDataUint8ArrayCid:
      return Symbols::Uint8List().ToCString();
    case kTypedDataUint8ClampedArrayCid:
    case kExternalTypedDataUint8ClampedArrayCid:
      return Symbols::Uint8ClampedList().ToCString();
    case kTypedDataInt16ArrayCid:
    case kExternalTypedDataInt16ArrayCid:
      return Symbols::Int16List().ToCString();
    case kTypedDataUint16ArrayCid:
    case kExternalTypedDataUint16ArrayCid:
      return Symbols::Uint16List().ToCString();
    case kTypedDataInt32ArrayCid:
    case kExternalTypedDataInt32ArrayCid:
      return Symbols::Int32List().ToCString();
    case kTypedDataUint32ArrayCid:
    case kExternalTypedDataUint32ArrayCid:
      return Symbols::Uint32List().ToCString();
    case kTypedDataInt64ArrayCid:
    case kExternalTypedDataInt64ArrayCid:
      return Symbols::Int64List().ToCString();
    case kTypedDataUint64ArrayCid:
    case kExternalTypedDataUint64ArrayCid:
      return Symbols::Uint64List().ToCString();
    case kTypedDataInt32x4ArrayCid:
    case kExternalTypedDataInt32x4ArrayCid:
      return Symbols::Int32x4List().ToCString();
    case kTypedDataFloat32x4ArrayCid:
    case kExternalTypedDataFloat32x4ArrayCid:
      return Symbols::Float32x4List().ToCString();
    case kTypedDataFloat64x2ArrayCid:
    case kExternalTypedDataFloat64x2ArrayCid:
      return Symbols::Float64x2List().ToCString();
    case kTypedDataFloat32ArrayCid:
    case kExternalTypedDataFloat32ArrayCid:
      return Symbols::Float32List().ToCString();
5497 case kTypedDataFloat64ArrayCid:
5498 case kExternalTypedDataFloat64ArrayCid:
5499 return Symbols::Float64List().ToCString();
5500 case kPointerCid:
5501 return Symbols::FfiPointer().ToCString();
5502 case kDynamicLibraryCid:
5503 return Symbols::FfiDynamicLibrary().ToCString();
5504 case kNullCid:
5505 return Symbols::Null().ToCString();
5506 case kDynamicCid:
5507 return Symbols::Dynamic().ToCString();
5508 case kVoidCid:
5509 return Symbols::Void().ToCString();
5510 case kNeverCid:
5511 return Symbols::Never().ToCString();
5512 case kClassCid:
5513 return Symbols::Class().ToCString();
5514 case kTypeParametersCid:
5515 return Symbols::TypeParameters().ToCString();
5516 case kTypeArgumentsCid:
5517 return Symbols::TypeArguments().ToCString();
5518 case kPatchClassCid:
5519 return Symbols::PatchClass().ToCString();
5520 case kFunctionCid:
5521 return Symbols::Function().ToCString();
5522 case kClosureDataCid:
5523 return Symbols::ClosureData().ToCString();
5524 case kFfiTrampolineDataCid:
5525 return Symbols::FfiTrampolineData().ToCString();
5526 case kFieldCid:
5527 return Symbols::Field().ToCString();
5528 case kScriptCid:
5529 return Symbols::Script().ToCString();
5530 case kLibraryCid:
5531 return Symbols::Library().ToCString();
5532 case kLibraryPrefixCid:
5533 return Symbols::LibraryPrefix().ToCString();
5534 case kNamespaceCid:
5535 return Symbols::Namespace().ToCString();
5536 case kKernelProgramInfoCid:
5537 return Symbols::KernelProgramInfo().ToCString();
5538 case kWeakSerializationReferenceCid:
5539 return Symbols::WeakSerializationReference().ToCString();
5540 case kWeakArrayCid:
5541 return Symbols::WeakArray().ToCString();
5542 case kCodeCid:
5543 return Symbols::Code().ToCString();
5544 case kInstructionsCid:
5545 return Symbols::Instructions().ToCString();
5546 case kInstructionsSectionCid:
5547 return Symbols::InstructionsSection().ToCString();
5548 case kInstructionsTableCid:
5549 return Symbols::InstructionsTable().ToCString();
5550 case kObjectPoolCid:
5551 return Symbols::ObjectPool().ToCString();
5552 case kCodeSourceMapCid:
5553 return Symbols::CodeSourceMap().ToCString();
5554 case kPcDescriptorsCid:
5555 return Symbols::PcDescriptors().ToCString();
5556 case kCompressedStackMapsCid:
5557 return Symbols::CompressedStackMaps().ToCString();
5558 case kLocalVarDescriptorsCid:
5559 return Symbols::LocalVarDescriptors().ToCString();
5560 case kExceptionHandlersCid:
5561 return Symbols::ExceptionHandlers().ToCString();
5562 case kContextCid:
5563 return Symbols::Context().ToCString();
5564 case kContextScopeCid:
5565 return Symbols::ContextScope().ToCString();
5566 case kSentinelCid:
5567 return Symbols::Sentinel().ToCString();
5568 case kSingleTargetCacheCid:
5569 return Symbols::SingleTargetCache().ToCString();
5570 case kICDataCid:
5571 return Symbols::ICData().ToCString();
5572 case kMegamorphicCacheCid:
5573 return Symbols::MegamorphicCache().ToCString();
5574 case kSubtypeTestCacheCid:
5575 return Symbols::SubtypeTestCache().ToCString();
5576 case kLoadingUnitCid:
5577 return Symbols::LoadingUnit().ToCString();
5578 case kApiErrorCid:
5579 return Symbols::ApiError().ToCString();
5580 case kLanguageErrorCid:
5581 return Symbols::LanguageError().ToCString();
5582 case kUnhandledExceptionCid:
5583 return Symbols::UnhandledException().ToCString();
5584 case kUnwindErrorCid:
5585 return Symbols::UnwindError().ToCString();
5586 case kIntegerCid:
5587 case kSmiCid:
5588 case kMintCid:
5589 return Symbols::Int().ToCString();
5590 case kDoubleCid:
5591 return Symbols::Double().ToCString();
5592 case kOneByteStringCid:
5593 case kTwoByteStringCid:
5594 case kExternalOneByteStringCid:
5595 case kExternalTwoByteStringCid:
5596 return Symbols::_String().ToCString();
5597 case kArrayCid:
5598 case kImmutableArrayCid:
5599 case kGrowableObjectArrayCid:
5600 return Symbols::List().ToCString();
5601 }
5602 String& name = String::Handle(ptr: Name());
5603 name = Symbols::New(thread: Thread::Current(), cstr: String::ScrubName(name));
5604 if (name.ptr() == Symbols::_Future().ptr() &&
5605 library() == Library::AsyncLibrary()) {
5606 return Symbols::Future().ToCString();
5607 }
5608 return name.ToCString();
5609}
5610
5611void Class::set_script(const Script& value) const {
5612 untag()->set_script(value.ptr());
5613}
5614
5615#if !defined(DART_PRECOMPILED_RUNTIME)
5616KernelProgramInfoPtr Class::KernelProgramInfo() const {
5617 const auto& lib = Library::Handle(ptr: library());
5618 return lib.kernel_program_info();
5619}
5620
5621void Class::set_token_pos(TokenPosition token_pos) const {
5622 ASSERT(!token_pos.IsClassifying());
5623 StoreNonPointer(addr: &untag()->token_pos_, value: token_pos);
5624}
5625
5626void Class::set_end_token_pos(TokenPosition token_pos) const {
5627 ASSERT(!token_pos.IsClassifying());
5628 StoreNonPointer(addr: &untag()->end_token_pos_, value: token_pos);
5629}
5630
5631void Class::set_implementor_cid(intptr_t value) const {
5632 ASSERT(value >= 0 && value < std::numeric_limits<classid_t>::max());
5633 StoreNonPointer(addr: &untag()->implementor_cid_, value);
5634}
5635
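// NoteImplementor() maintains implementor_cid_ as a small lattice (a sketch
// of the intended behavior, not additional state): kIllegalCid means no
// concrete implementor has been seen yet, a concrete class id means exactly
// one, and kDynamicCid means many. The boolean result tells the caller
// whether the recorded state changed.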
bool Class::NoteImplementor(const Class& implementor) const {
  ASSERT(!implementor.is_abstract());
  ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
  if (implementor_cid() == kDynamicCid) {
    return false;
  } else if (implementor_cid() == implementor.id()) {
    return false;
  } else if (implementor_cid() == kIllegalCid) {
    set_implementor_cid(implementor.id());
    return true;  // None -> One
  } else {
    set_implementor_cid(kDynamicCid);
    return true;  // One -> Many
  }
}
#endif  // !defined(DART_PRECOMPILED_RUNTIME)

uint32_t Class::Hash() const {
  return Class::Hash(ptr());
}

uint32_t Class::Hash(ClassPtr obj) {
  return String::HashRawSymbol(obj.untag()->name());
}

int32_t Class::SourceFingerprint() const {
#if !defined(DART_PRECOMPILED_RUNTIME)
  return kernel::KernelSourceFingerprintHelper::CalculateClassFingerprint(
      *this);
#else
  return 0;
#endif  // !defined(DART_PRECOMPILED_RUNTIME)
}

void Class::set_is_implemented() const {
  ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
  set_is_implemented_unsafe();
}

void Class::set_is_implemented_unsafe() const {
  set_state_bits(ImplementedBit::update(true, state_bits()));
}

void Class::set_is_abstract() const {
  ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
  set_state_bits(AbstractBit::update(true, state_bits()));
}

void Class::set_is_declaration_loaded() const {
  ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
  set_is_declaration_loaded_unsafe();
}

void Class::set_is_declaration_loaded_unsafe() const {
  ASSERT(!is_declaration_loaded());
  set_state_bits(ClassLoadingBits::update(UntaggedClass::kDeclarationLoaded,
                                          state_bits()));
}

void Class::set_is_type_finalized() const {
  ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
  ASSERT(is_declaration_loaded());
  ASSERT(!is_type_finalized());
  set_state_bits(
      ClassLoadingBits::update(UntaggedClass::kTypeFinalized, state_bits()));
}

void Class::set_is_synthesized_class() const {
  ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
  set_is_synthesized_class_unsafe();
}

void Class::set_is_synthesized_class_unsafe() const {
  set_state_bits(SynthesizedClassBit::update(true, state_bits()));
}

void Class::set_is_enum_class() const {
  ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
  set_state_bits(EnumBit::update(true, state_bits()));
}

void Class::set_is_const() const {
  ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
  set_state_bits(ConstBit::update(true, state_bits()));
}

void Class::set_is_transformed_mixin_application() const {
  ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
  set_state_bits(TransformedMixinApplicationBit::update(true, state_bits()));
}

void Class::set_is_sealed() const {
  ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
  set_state_bits(SealedBit::update(true, state_bits()));
}

void Class::set_is_mixin_class() const {
  ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
  set_state_bits(MixinClassBit::update(true, state_bits()));
}

void Class::set_is_base_class() const {
  ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
  set_state_bits(BaseClassBit::update(true, state_bits()));
}

void Class::set_is_interface_class() const {
  ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
  set_state_bits(InterfaceClassBit::update(true, state_bits()));
}

void Class::set_is_final() const {
  ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
  set_state_bits(FinalBit::update(true, state_bits()));
}

void Class::set_is_fields_marked_nullable() const {
  ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
  set_state_bits(FieldsMarkedNullableBit::update(true, state_bits()));
}

void Class::set_is_allocated(bool value) const {
  ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
  set_is_allocated_unsafe(value);
}

void Class::set_is_allocated_unsafe(bool value) const {
  set_state_bits(IsAllocatedBit::update(value, state_bits()));
}

void Class::set_is_loaded(bool value) const {
  ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
  set_state_bits(IsLoadedBit::update(value, state_bits()));
}

void Class::set_is_finalized() const {
  ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
  ASSERT(!is_finalized());
  set_is_finalized_unsafe();
}

void Class::set_is_finalized_unsafe() const {
  set_state_bits(
      ClassFinalizedBits::update(UntaggedClass::kFinalized, state_bits()));
}

void Class::set_is_allocate_finalized() const {
  ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
  ASSERT(!is_allocate_finalized());
  set_state_bits(ClassFinalizedBits::update(UntaggedClass::kAllocateFinalized,
                                            state_bits()));
}

void Class::set_is_prefinalized() const {
  ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
  ASSERT(!is_finalized());
  set_state_bits(
      ClassFinalizedBits::update(UntaggedClass::kPreFinalized, state_bits()));
}

void Class::set_interfaces(const Array& value) const {
  ASSERT(!value.IsNull());
  untag()->set_interfaces(value.ptr());
}

#if !defined(DART_PRECOMPILED_RUNTIME)

void Class::AddDirectImplementor(const Class& implementor,
                                 bool is_mixin) const {
  ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
  ASSERT(is_implemented());
  ASSERT(!implementor.IsNull());
  GrowableObjectArray& direct_implementors =
      GrowableObjectArray::Handle(untag()->direct_implementors());
  if (direct_implementors.IsNull()) {
    direct_implementors = GrowableObjectArray::New(4, Heap::kOld);
    untag()->set_direct_implementors(direct_implementors.ptr());
  }
#if defined(DEBUG)
  // Verify that the same class is not added twice.
  // The only exception is mixins: when a mixin application is transformed,
  // the mixin is added to the end of the interfaces list and may be
  // duplicated:
  //   class X = A with B implements B;
  // This is rare and harmless.
  if (!is_mixin) {
    for (intptr_t i = 0; i < direct_implementors.Length(); i++) {
      ASSERT(direct_implementors.At(i) != implementor.ptr());
    }
  }
#endif
  direct_implementors.Add(implementor, Heap::kOld);
}

void Class::set_direct_implementors(
    const GrowableObjectArray& implementors) const {
  ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
  untag()->set_direct_implementors(implementors.ptr());
}

void Class::AddDirectSubclass(const Class& subclass) const {
  ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
  ASSERT(!subclass.IsNull());
  ASSERT(subclass.SuperClass() == ptr());
  // Do not keep track of the direct subclasses of class Object.
  ASSERT(!IsObjectClass());
  GrowableObjectArray& direct_subclasses =
      GrowableObjectArray::Handle(untag()->direct_subclasses());
  if (direct_subclasses.IsNull()) {
    direct_subclasses = GrowableObjectArray::New(4, Heap::kOld);
    untag()->set_direct_subclasses(direct_subclasses.ptr());
  }
#if defined(DEBUG)
  // Verify that the same class is not added twice.
  for (intptr_t i = 0; i < direct_subclasses.Length(); i++) {
    ASSERT(direct_subclasses.At(i) != subclass.ptr());
  }
#endif
  direct_subclasses.Add(subclass, Heap::kOld);
}

void Class::set_direct_subclasses(const GrowableObjectArray& subclasses) const {
  ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
  untag()->set_direct_subclasses(subclasses.ptr());
}

#endif  // !defined(DART_PRECOMPILED_RUNTIME)

ArrayPtr Class::constants() const {
  return untag()->constants();
}

void Class::set_constants(const Array& value) const {
  untag()->set_constants(value.ptr());
}

void Class::set_declaration_type(const Type& value) const {
  ASSERT(id() != kDynamicCid && id() != kVoidCid);
  ASSERT(!value.IsNull() && value.IsCanonical() && value.IsOld());
  ASSERT((declaration_type() == Object::null()) ||
         (declaration_type() == value.ptr()));  // Set during own finalization.
  // Since DeclarationType is used as the runtime type of instances of a
  // non-generic class, its nullability must be kNonNullable.
  // The exception is DeclarationType of Null which is kNullable.
  ASSERT(value.type_class_id() != kNullCid || value.IsNullable());
  ASSERT(value.type_class_id() == kNullCid || value.IsNonNullable());
  untag()->set_declaration_type<std::memory_order_release>(value.ptr());
}

TypePtr Class::DeclarationType() const {
  ASSERT(is_declaration_loaded());
  if (IsNullClass()) {
    return Type::NullType();
  }
  if (IsDynamicClass()) {
    return Type::DynamicType();
  }
  if (IsVoidClass()) {
    return Type::VoidType();
  }
  if (declaration_type() != Type::null()) {
    return declaration_type();
  }
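  // Slow path: take the program lock and re-check (double-checked locking).
  // Another thread may have cached the declaration type while we were
  // waiting for the lock; set_declaration_type() publishes the value with a
  // release store, pairing with the unsynchronized read above.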
  {
    auto thread = Thread::Current();
    SafepointWriteRwLocker ml(thread, thread->isolate_group()->program_lock());
    if (declaration_type() != Type::null()) {
      return declaration_type();
    }
    // For efficiency, the runtimeType intrinsic returns the type cached by
    // DeclarationType without checking its nullability. Therefore, we
    // consistently cache the kNonNullable version of the type.
    // The exception is type Null which is stored as kNullable.
    TypeArguments& type_args = TypeArguments::Handle();
    const intptr_t num_type_params = NumTypeParameters();
    if (num_type_params > 0) {
      type_args = TypeArguments::New(num_type_params);
      TypeParameter& type_param = TypeParameter::Handle();
      for (intptr_t i = 0; i < num_type_params; i++) {
        type_param = TypeParameterAt(i);
        type_args.SetTypeAt(i, type_param);
      }
    }
    Type& type =
        Type::Handle(Type::New(*this, type_args, Nullability::kNonNullable));
    type ^= ClassFinalizer::FinalizeType(type);
    set_declaration_type(type);
    return type.ptr();
  }
}

#if !defined(DART_PRECOMPILED_RUNTIME)
void Class::set_allocation_stub(const Code& value) const {
  // Never clear the stub as it may still be a target, but will be GC-d if
  // not referenced.
  ASSERT(!value.IsNull());
  ASSERT(untag()->allocation_stub() == Code::null());
  untag()->set_allocation_stub(value.ptr());
}
#endif  // !defined(DART_PRECOMPILED_RUNTIME)

void Class::DisableAllocationStub() const {
#if defined(DART_PRECOMPILED_RUNTIME)
  UNREACHABLE();
#else
  {
    const Code& existing_stub = Code::Handle(allocation_stub());
    if (existing_stub.IsNull()) {
      return;
    }
  }
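  // Re-check under the program lock: another thread may have disabled and
  // cleared the stub between the unlocked quick check above and our
  // acquisition of the lock.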
  auto thread = Thread::Current();
  SafepointWriteRwLocker ml(thread, thread->isolate_group()->program_lock());
  const Code& existing_stub = Code::Handle(allocation_stub());
  if (existing_stub.IsNull()) {
    return;
  }
  ASSERT(!existing_stub.IsDisabled());
  // Change the stub so that the next caller will regenerate the stub.
  existing_stub.DisableStubCode(NumTypeParameters() > 0);
  // Disassociate the existing stub from class.
  untag()->set_allocation_stub(Code::null());
#endif  // defined(DART_PRECOMPILED_RUNTIME)
}

bool Class::IsDartFunctionClass() const {
  return ptr() == Type::Handle(Type::DartFunctionType()).type_class();
}

bool Class::IsFutureClass() const {
  // Looking up future_class in the object store would not work, because
  // this function is called during class finalization, before the object
  // store field has been initialized by InitKnownObjects().
  return (Name() == Symbols::Future().ptr()) &&
         (library() == Library::AsyncLibrary());
}

// Checks if type T0 is a subtype of type T1.
// Type T0 is specified by class 'cls' parameterized with 'type_arguments' and
// by 'nullability', and type T1 is specified by 'other' and must have a type
// class.
// [type_arguments] should be a flattened instance type arguments vector.
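// A few illustrative queries this routine answers (examples only, not an
// exhaustive description of the rules applied below):
//   List<int> <: Iterable<num>    -- interface walk plus covariant type args
//   int <: FutureOr<int>          -- right FutureOr: T0 <: S1
//   Future<int> <: FutureOr<num>  -- right FutureOr: T0 <: Future<S1>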
bool Class::IsSubtypeOf(const Class& cls,
                        const TypeArguments& type_arguments,
                        Nullability nullability,
                        const AbstractType& other,
                        Heap::Space space,
                        FunctionTypeMapping* function_type_equivalence) {
  TRACE_TYPE_CHECKS_VERBOSE(" Class::IsSubtypeOf(%s %s, %s)\n",
                            cls.ToCString(), type_arguments.ToCString(),
                            other.ToCString());
  // This function does not support Null, Never, dynamic, or void as type T0.
  classid_t this_cid = cls.id();
  ASSERT(this_cid != kNullCid && this_cid != kNeverCid &&
         this_cid != kDynamicCid && this_cid != kVoidCid);
  ASSERT(type_arguments.IsNull() ||
         (type_arguments.Length() >= cls.NumTypeArguments()));
  // Type T1 must have a type class (e.g. not a type param or a function type).
  ASSERT(other.HasTypeClass());
  const classid_t other_cid = other.type_class_id();
  if (other_cid == kDynamicCid || other_cid == kVoidCid) {
    TRACE_TYPE_CHECKS_VERBOSE(" - result: true (right is top)\n");
    return true;
  }
  // Left nullable:
  //   if T0 is S0? then:
  //     T0 <: T1 iff S0 <: T1 and Null <: T1
  if ((nullability == Nullability::kNullable) &&
      !Instance::NullIsAssignableTo(other)) {
    TRACE_TYPE_CHECKS_VERBOSE(" - result: false (nullability)\n");
    return false;
  }

  // Right Object.
  if (other_cid == kObjectCid) {
    TRACE_TYPE_CHECKS_VERBOSE(" - result: true (right is Object)\n");
    return true;
  }

  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  const Class& other_class = Class::Handle(zone, other.type_class());
  const TypeArguments& other_type_arguments =
      TypeArguments::Handle(zone, other.arguments());
  // Use the 'this_class' object as if it were the receiver of this method, but
  // instead of recursing, reset it to the super class and loop.
  Class& this_class = Class::Handle(zone, cls.ptr());
  while (true) {
    // Apply additional subtyping rules if T0 or T1 are 'FutureOr'.

    // Left FutureOr:
    //   if T0 is FutureOr<S0> then:
    //     T0 <: T1 iff Future<S0> <: T1 and S0 <: T1
    if (this_cid == kFutureOrCid) {
      // Check Future<S0> <: T1.
      ObjectStore* object_store = IsolateGroup::Current()->object_store();
      const Class& future_class =
          Class::Handle(zone, object_store->future_class());
      ASSERT(!future_class.IsNull() && future_class.NumTypeParameters() == 1 &&
             this_class.NumTypeParameters() == 1);
      ASSERT(type_arguments.IsNull() || type_arguments.Length() >= 1);
      if (Class::IsSubtypeOf(future_class, type_arguments,
                             Nullability::kNonNullable, other, space,
                             function_type_equivalence)) {
        // Check S0 <: T1.
        const AbstractType& type_arg =
            AbstractType::Handle(zone, type_arguments.TypeAtNullSafe(0));
        if (type_arg.IsSubtypeOf(other, space, function_type_equivalence)) {
          TRACE_TYPE_CHECKS_VERBOSE(" - result: true (left is FutureOr)\n");
          return true;
        }
      }
    }

    // Right FutureOr:
    //   if T1 is FutureOr<S1> then:
    //     T0 <: T1 iff any of the following hold:
    //       either T0 <: Future<S1>
    //       or T0 <: S1
    //       or T0 is X0 and X0 has bound S0 and S0 <: T1 (checked elsewhere)
    if (other_cid == kFutureOrCid) {
      const AbstractType& other_type_arg =
          AbstractType::Handle(zone, other_type_arguments.TypeAtNullSafe(0));
      // Check if S1 is a top type.
      if (other_type_arg.IsTopTypeForSubtyping()) {
        TRACE_TYPE_CHECKS_VERBOSE(
            " - result: true (right is FutureOr top)\n");
        return true;
      }
      // Check T0 <: Future<S1> when T0 is Future<S0>.
      if (this_class.IsFutureClass()) {
        const AbstractType& type_arg =
            AbstractType::Handle(zone, type_arguments.TypeAtNullSafe(0));
        // If T0 is Future<S0>, then T0 <: Future<S1>, iff S0 <: S1.
        if (type_arg.IsSubtypeOf(other_type_arg, space,
                                 function_type_equivalence)) {
          TRACE_TYPE_CHECKS_VERBOSE(
              " - result: true (left is Future, right is FutureOr)\n");
          return true;
        }
      }
      // T0 <: Future<S1> when T0 is FutureOr<S0> was already checked above.
      // Check T0 <: S1.
      if (other_type_arg.HasTypeClass() &&
          Class::IsSubtypeOf(this_class, type_arguments, nullability,
                             other_type_arg, space,
                             function_type_equivalence)) {
        TRACE_TYPE_CHECKS_VERBOSE(
            " - result: true (right is FutureOr, subtype of arg)\n");
        return true;
      }
    }

    // Check for reflexivity.
    if (this_class.ptr() == other_class.ptr()) {
      const intptr_t num_type_params = this_class.NumTypeParameters();
      if (num_type_params == 0) {
        TRACE_TYPE_CHECKS_VERBOSE(
            " - result: true (same non-generic class)\n");
        return true;
      }
      // Check for covariance.
      if (other_type_arguments.IsNull()) {
        TRACE_TYPE_CHECKS_VERBOSE(
            " - result: true (same class, dynamic type args)\n");
        return true;
      }
      const intptr_t num_type_args = this_class.NumTypeArguments();
      const intptr_t from_index = num_type_args - num_type_params;
      ASSERT(other_type_arguments.Length() == num_type_params);
      AbstractType& type = AbstractType::Handle(zone);
      AbstractType& other_type = AbstractType::Handle(zone);
      for (intptr_t i = 0; i < num_type_params; ++i) {
        type = type_arguments.TypeAtNullSafe(from_index + i);
        other_type = other_type_arguments.TypeAt(i);
        ASSERT(!type.IsNull() && !other_type.IsNull());
        if (!type.IsSubtypeOf(other_type, space, function_type_equivalence)) {
          TRACE_TYPE_CHECKS_VERBOSE(
              " - result: false (same class, type args mismatch)\n");
          return false;
        }
      }
      TRACE_TYPE_CHECKS_VERBOSE(
          " - result: true (same class, matching type args)\n");
      return true;
    }

    // _Closure <: Function
    if (this_class.IsClosureClass() && other_class.IsDartFunctionClass()) {
      TRACE_TYPE_CHECKS_VERBOSE(
          " - result: true (left is closure, right is Function)\n");
      return true;
    }

    // Check for 'direct super type' specified in the implements clause
    // and check for transitivity at the same time.
    Array& interfaces = Array::Handle(zone, this_class.interfaces());
    Type& interface = Type::Handle(zone);
    Class& interface_class = Class::Handle(zone);
    TypeArguments& interface_args = TypeArguments::Handle(zone);
    for (intptr_t i = 0; i < interfaces.Length(); i++) {
      interface ^= interfaces.At(i);
      ASSERT(interface.IsFinalized());
      interface_class = interface.type_class();
      interface_args = interface.arguments();
      if (!interface_args.IsNull() && !interface_args.IsInstantiated()) {
        // This type class implements an interface that is parameterized with
        // generic type(s), e.g. it implements List<T>.
        // The uninstantiated type T must be instantiated using the type
        // parameters of this type before performing the type test.
        // The type arguments of this type that are referred to by the type
        // parameters of the interface are at the end of the type vector,
        // after the type arguments of the super type of this type.
        // The index of the type parameters is adjusted upon finalization.
        interface_args = interface_args.InstantiateFrom(
            type_arguments, Object::null_type_arguments(), kNoneFree, space);
      }
      interface_args = interface_class.GetInstanceTypeArguments(
          thread, interface_args, /*canonicalize=*/false);
      // In Dart 2, implementing Function has no meaning.
      // TODO(regis): Can we encounter and skip Object as well?
      if (interface_class.IsDartFunctionClass()) {
        continue;
      }
      if (Class::IsSubtypeOf(interface_class, interface_args,
                             Nullability::kNonNullable, other, space,
                             function_type_equivalence)) {
        TRACE_TYPE_CHECKS_VERBOSE(" - result: true (interface found)\n");
        return true;
      }
    }
    // "Recurse" up the class hierarchy until we have reached the top.
    this_class = this_class.SuperClass();
    if (this_class.IsNull()) {
      TRACE_TYPE_CHECKS_VERBOSE(" - result: false (supertype not found)\n");
      return false;
    }
    this_cid = this_class.id();
  }
  UNREACHABLE();
  return false;
}

bool Class::IsTopLevel() const {
  return Name() == Symbols::TopLevel().ptr();
}

bool Class::IsPrivate() const {
  return Library::IsPrivate(String::Handle(Name()));
}

FunctionPtr Class::LookupDynamicFunctionUnsafe(const String& name) const {
  return LookupFunctionReadLocked(name, kInstance);
}

FunctionPtr Class::LookupDynamicFunctionAllowPrivate(const String& name) const {
  return LookupFunctionAllowPrivate(name, kInstance);
}

FunctionPtr Class::LookupStaticFunction(const String& name) const {
  Thread* thread = Thread::Current();
  SafepointReadRwLocker ml(thread, thread->isolate_group()->program_lock());
  return LookupFunctionReadLocked(name, kStatic);
}

FunctionPtr Class::LookupStaticFunctionAllowPrivate(const String& name) const {
  return LookupFunctionAllowPrivate(name, kStatic);
}

FunctionPtr Class::LookupConstructor(const String& name) const {
  Thread* thread = Thread::Current();
  SafepointReadRwLocker ml(thread, thread->isolate_group()->program_lock());
  return LookupFunctionReadLocked(name, kConstructor);
}

FunctionPtr Class::LookupConstructorAllowPrivate(const String& name) const {
  return LookupFunctionAllowPrivate(name, kConstructor);
}

FunctionPtr Class::LookupFactory(const String& name) const {
  Thread* thread = Thread::Current();
  SafepointReadRwLocker ml(thread, thread->isolate_group()->program_lock());
  return LookupFunctionReadLocked(name, kFactory);
}

FunctionPtr Class::LookupFactoryAllowPrivate(const String& name) const {
  return LookupFunctionAllowPrivate(name, kFactory);
}

FunctionPtr Class::LookupFunctionAllowPrivate(const String& name) const {
  return LookupFunctionAllowPrivate(name, kAny);
}

FunctionPtr Class::LookupFunctionReadLocked(const String& name) const {
  return LookupFunctionReadLocked(name, kAny);
}

// Returns true if 'prefix' and 'accessor_name' match 'name'.
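// For example, with prefix "get:" and prefix_length 4, the name "get:foo"
// matches the accessor_name "foo". Comparing character by character in place
// avoids allocating a concatenated lookup key.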
static bool MatchesAccessorName(const String& name,
                                const char* prefix,
                                intptr_t prefix_length,
                                const String& accessor_name) {
  intptr_t name_len = name.Length();
  intptr_t accessor_name_len = accessor_name.Length();

  if (name_len != (accessor_name_len + prefix_length)) {
    return false;
  }
  for (intptr_t i = 0; i < prefix_length; i++) {
    if (name.CharAt(i) != prefix[i]) {
      return false;
    }
  }
  for (intptr_t i = 0, j = prefix_length; i < accessor_name_len; i++, j++) {
    if (name.CharAt(j) != accessor_name.CharAt(i)) {
      return false;
    }
  }
  return true;
}

FunctionPtr Class::CheckFunctionType(const Function& func, MemberKind kind) {
  if ((kind == kInstance) || (kind == kInstanceAllowAbstract)) {
    if (func.IsDynamicFunction(kind == kInstanceAllowAbstract)) {
      return func.ptr();
    }
  } else if (kind == kStatic) {
    if (func.IsStaticFunction()) {
      return func.ptr();
    }
  } else if (kind == kConstructor) {
    if (func.IsGenerativeConstructor()) {
      ASSERT(!func.is_static());
      return func.ptr();
    }
  } else if (kind == kFactory) {
    if (func.IsFactory()) {
      ASSERT(func.is_static());
      return func.ptr();
    }
  } else if (kind == kAny) {
    return func.ptr();
  }
  return Function::null();
}

FunctionPtr Class::LookupFunctionReadLocked(const String& name,
                                            MemberKind kind) const {
  ASSERT(!IsNull());
  Thread* thread = Thread::Current();
  RELEASE_ASSERT(is_finalized());
  // Caller needs to ensure they grab program_lock because this method
  // can be invoked with either ReadRwLock or WriteRwLock.
#if defined(DEBUG)
  ASSERT(thread->isolate_group()->program_lock()->IsCurrentThreadReader());
#endif
  REUSABLE_ARRAY_HANDLESCOPE(thread);
  REUSABLE_FUNCTION_HANDLESCOPE(thread);
  Array& funcs = thread->ArrayHandle();
  funcs = functions();
  ASSERT(!funcs.IsNull());
  const intptr_t len = funcs.Length();
  Function& function = thread->FunctionHandle();
  if (len >= kFunctionLookupHashThreshold) {
    // TODO(dartbug.com/36097): We currently require a read lock in the
    // resolver to avoid read-write races on this hash table.
    // If we want to increase resolver speed by avoiding the read lock,
    // we could change this hash table to be lock-free for readers.
    const Array& hash_table =
        Array::Handle(thread->zone(), untag()->functions_hash_table());
    if (!hash_table.IsNull()) {
      ClassFunctionsSet set(hash_table.ptr());
      REUSABLE_STRING_HANDLESCOPE(thread);
      function ^= set.GetOrNull(FunctionName(name, &(thread->StringHandle())));
      // No mutations.
      ASSERT(set.Release().ptr() == hash_table.ptr());
      return function.IsNull() ? Function::null()
                               : CheckFunctionType(function, kind);
    }
  }
  if (name.IsSymbol()) {
    // Quick Symbol compare.
    NoSafepointScope no_safepoint;
    for (intptr_t i = 0; i < len; i++) {
      function ^= funcs.At(i);
      if (function.name() == name.ptr()) {
        return CheckFunctionType(function, kind);
      }
    }
  } else {
    REUSABLE_STRING_HANDLESCOPE(thread);
    String& function_name = thread->StringHandle();
    for (intptr_t i = 0; i < len; i++) {
      function ^= funcs.At(i);
      function_name = function.name();
      if (function_name.Equals(name)) {
        return CheckFunctionType(function, kind);
      }
    }
  }
  // No function found.
  return Function::null();
}

FunctionPtr Class::LookupFunctionAllowPrivate(const String& name,
                                              MemberKind kind) const {
  ASSERT(!IsNull());
  Thread* thread = Thread::Current();
  RELEASE_ASSERT(is_finalized());
  SafepointReadRwLocker ml(thread, thread->isolate_group()->program_lock());
  REUSABLE_ARRAY_HANDLESCOPE(thread);
  REUSABLE_FUNCTION_HANDLESCOPE(thread);
  REUSABLE_STRING_HANDLESCOPE(thread);
  Array& funcs = thread->ArrayHandle();
  funcs = current_functions();
  ASSERT(!funcs.IsNull());
  const intptr_t len = funcs.Length();
  Function& function = thread->FunctionHandle();
  String& function_name = thread->StringHandle();
  for (intptr_t i = 0; i < len; i++) {
    function ^= funcs.At(i);
    function_name = function.name();
    if (String::EqualsIgnoringPrivateKey(function_name, name)) {
      return CheckFunctionType(function, kind);
    }
  }
  // No function found.
  return Function::null();
}

FunctionPtr Class::LookupGetterFunction(const String& name) const {
  return LookupAccessorFunction(kGetterPrefix, kGetterPrefixLength, name);
}

FunctionPtr Class::LookupSetterFunction(const String& name) const {
  return LookupAccessorFunction(kSetterPrefix, kSetterPrefixLength, name);
}

FunctionPtr Class::LookupAccessorFunction(const char* prefix,
                                          intptr_t prefix_length,
                                          const String& name) const {
  ASSERT(!IsNull());
  Thread* thread = Thread::Current();
  if (EnsureIsFinalized(thread) != Error::null()) {
    return Function::null();
  }
  REUSABLE_ARRAY_HANDLESCOPE(thread);
  REUSABLE_FUNCTION_HANDLESCOPE(thread);
  REUSABLE_STRING_HANDLESCOPE(thread);
  Array& funcs = thread->ArrayHandle();
  funcs = current_functions();
  intptr_t len = funcs.Length();
  Function& function = thread->FunctionHandle();
  String& function_name = thread->StringHandle();
  for (intptr_t i = 0; i < len; i++) {
    function ^= funcs.At(i);
    function_name = function.name();
    if (MatchesAccessorName(function_name, prefix, prefix_length, name)) {
      return function.ptr();
    }
  }

  // No function found.
  return Function::null();
}

FieldPtr Class::LookupInstanceField(const String& name) const {
  return LookupField(name, kInstance);
}

FieldPtr Class::LookupStaticField(const String& name) const {
  return LookupField(name, kStatic);
}

FieldPtr Class::LookupField(const String& name) const {
  return LookupField(name, kAny);
}

FieldPtr Class::LookupField(const String& name, MemberKind kind) const {
  ASSERT(!IsNull());
  Thread* thread = Thread::Current();
  if (EnsureIsFinalized(thread) != Error::null()) {
    return Field::null();
  }
  REUSABLE_ARRAY_HANDLESCOPE(thread);
  REUSABLE_FIELD_HANDLESCOPE(thread);
  REUSABLE_STRING_HANDLESCOPE(thread);
  Array& flds = thread->ArrayHandle();
  flds = fields();
  ASSERT(!flds.IsNull());
  intptr_t len = flds.Length();
  Field& field = thread->FieldHandle();
  if (name.IsSymbol()) {
    // Use fast raw pointer string compare for symbols.
    for (intptr_t i = 0; i < len; i++) {
      field ^= flds.At(i);
      if (name.ptr() == field.name()) {
        if (kind == kInstance) {
          return field.is_static() ? Field::null() : field.ptr();
        } else if (kind == kStatic) {
          return field.is_static() ? field.ptr() : Field::null();
        }
        ASSERT(kind == kAny);
        return field.ptr();
      }
    }
  } else {
    String& field_name = thread->StringHandle();
    for (intptr_t i = 0; i < len; i++) {
      field ^= flds.At(i);
      field_name = field.name();
      if (name.Equals(field_name)) {
        if (kind == kInstance) {
          return field.is_static() ? Field::null() : field.ptr();
        } else if (kind == kStatic) {
          return field.is_static() ? field.ptr() : Field::null();
        }
        ASSERT(kind == kAny);
        return field.ptr();
      }
    }
  }
  return Field::null();
}

FieldPtr Class::LookupFieldAllowPrivate(const String& name,
                                        bool instance_only) const {
  ASSERT(!IsNull());
  // Use slow string compare, ignoring privacy name mangling.
  Thread* thread = Thread::Current();
  if (EnsureIsFinalized(thread) != Error::null()) {
    return Field::null();
  }
  REUSABLE_ARRAY_HANDLESCOPE(thread);
  REUSABLE_FIELD_HANDLESCOPE(thread);
  REUSABLE_STRING_HANDLESCOPE(thread);
  Array& flds = thread->ArrayHandle();
  flds = fields();
  ASSERT(!flds.IsNull());
  intptr_t len = flds.Length();
  Field& field = thread->FieldHandle();
  String& field_name = thread->StringHandle();
  for (intptr_t i = 0; i < len; i++) {
    field ^= flds.At(i);
    field_name = field.name();
    if (field.is_static() && instance_only) {
      // If we only care about instance fields, skip statics.
      continue;
    }
    if (String::EqualsIgnoringPrivateKey(field_name, name)) {
      return field.ptr();
    }
  }
  return Field::null();
}

FieldPtr Class::LookupInstanceFieldAllowPrivate(const String& name) const {
  Field& field = Field::Handle(LookupFieldAllowPrivate(name, true));
  if (!field.IsNull() && !field.is_static()) {
    return field.ptr();
  }
  return Field::null();
}

FieldPtr Class::LookupStaticFieldAllowPrivate(const String& name) const {
  Field& field = Field::Handle(LookupFieldAllowPrivate(name));
  if (!field.IsNull() && field.is_static()) {
    return field.ptr();
  }
  return Field::null();
}

const char* Class::ToCString() const {
  NoSafepointScope no_safepoint;
  const Library& lib = Library::Handle(library());
  const char* library_name = lib.IsNull() ? "" : lib.ToCString();
  const char* class_name = String::Handle(Name()).ToCString();
  return OS::SCreate(Thread::Current()->zone(), "%s Class: %s", library_name,
                     class_name);
}

// Thomas Wang, Integer Hash Functions.
// https://gist.github.com/badboy/6267743
// "64 bit to 32 bit Hash Functions"
static uword Hash64To32(uint64_t v) {
  v = ~v + (v << 18);
  v = v ^ (v >> 31);
  v = v * 21;
  v = v ^ (v >> 11);
  v = v + (v << 6);
  v = v ^ (v >> 22);
  return static_cast<uint32_t>(v);
}

InstancePtr Class::LookupCanonicalInstance(Zone* zone,
                                           const Instance& value) const {
  ASSERT(this->ptr() == value.clazz());
  ASSERT(is_finalized() || is_prefinalized());
  Instance& canonical_value = Instance::Handle(zone);
  if (this->constants() != Array::null()) {
    CanonicalInstancesSet constants(zone, this->constants());
    canonical_value ^= constants.GetOrNull(CanonicalInstanceKey(value));
    this->set_constants(constants.Release());
  }
  return canonical_value.ptr();
}

InstancePtr Class::InsertCanonicalConstant(Zone* zone,
                                           const Instance& constant) const {
  ASSERT(constant.IsCanonical());
  ASSERT(this->ptr() == constant.clazz());
  Instance& canonical_value = Instance::Handle(zone);
  if (this->constants() == Array::null()) {
    CanonicalInstancesSet constants(
        HashTables::New<CanonicalInstancesSet>(128, Heap::kOld));
    canonical_value ^= constants.InsertNewOrGet(CanonicalInstanceKey(constant));
    this->set_constants(constants.Release());
  } else {
    CanonicalInstancesSet constants(Thread::Current()->zone(),
                                    this->constants());
    canonical_value ^= constants.InsertNewOrGet(CanonicalInstanceKey(constant));
    this->set_constants(constants.Release());
  }
  return canonical_value.ptr();
}

bool Class::RequireCanonicalTypeErasureOfConstants(Zone* zone) const {
  const intptr_t num_type_params = NumTypeParameters();
  const intptr_t num_type_args = NumTypeArguments();
  const intptr_t from_index = num_type_args - num_type_params;
  Instance& constant = Instance::Handle(zone);
  TypeArguments& type_arguments = TypeArguments::Handle(zone);
  CanonicalInstancesSet set(zone, constants());
  CanonicalInstancesSet::Iterator it(&set);
  bool result = false;
  while (it.MoveNext()) {
    constant ^= set.GetKey(it.Current());
    ASSERT(!constant.IsNull());
    ASSERT(!constant.IsTypeArguments());
    ASSERT(!constant.IsType());
    type_arguments = constant.GetTypeArguments();
    if (type_arguments.RequireConstCanonicalTypeErasure(zone, from_index,
                                                        num_type_params)) {
      result = true;
      break;
    }
  }
  set.Release();
  return result;
}

// Scoped mapping FunctionType -> FunctionType.
// Used for tracking and updating nested generic function types
// and their type parameters.
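// For example (an illustrative sketch): when relating two generic function
// types such as T Function<T>(T) and S Function<S>(S), a FunctionTypeMapping
// scope records that the two owners correspond, so that their type
// parameters can be identified with each other; nested generic function
// types push nested scopes onto the same linked list of stack-allocated
// mappings.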
class FunctionTypeMapping : public ValueObject {
 public:
  FunctionTypeMapping(Zone* zone,
                      FunctionTypeMapping** mapping,
                      const FunctionType& from,
                      const FunctionType& to)
      : zone_(zone), parent_(*mapping), from_(from), to_(to) {
    // Add self to the linked list.
    *mapping = this;
  }

  const FunctionType* Find(const Object& from) const {
    if (!from.IsFunctionType()) {
      return nullptr;
    }
    for (const FunctionTypeMapping* scope = this; scope != nullptr;
         scope = scope->parent_) {
      if (scope->from_.ptr() == from.ptr()) {
        return &(scope->to_);
      }
    }
    return nullptr;
  }

  TypeParameterPtr MapTypeParameter(const TypeParameter& type_param) const {
    ASSERT(type_param.IsFunctionTypeParameter());
    const FunctionType* new_owner = Find(
        FunctionType::Handle(zone_, type_param.parameterized_function_type()));
    if (new_owner != nullptr) {
      return new_owner->TypeParameterAt(type_param.index() - type_param.base(),
                                        type_param.nullability());
    }
    return type_param.ptr();
  }

  bool ContainsOwnersOfTypeParameters(const TypeParameter& p1,
                                      const TypeParameter& p2) const {
    auto& from = FunctionType::Handle(zone_, p1.parameterized_function_type());
    const FunctionType* to = Find(from);
    if (to != nullptr) {
      return to->ptr() == p2.parameterized_function_type();
    }
    from = p2.parameterized_function_type();
    to = Find(from);
    if (to != nullptr) {
      return to->ptr() == p1.parameterized_function_type();
    }
    return false;
  }

 private:
  Zone* zone_;
  const FunctionTypeMapping* const parent_;
  const FunctionType& from_;
  const FunctionType& to_;
};

intptr_t TypeParameters::Length() const {
  if (IsNull() || untag()->names() == Array::null()) return 0;
  return Smi::Value(untag()->names()->untag()->length());
}

void TypeParameters::set_names(const Array& value) const {
  ASSERT(!value.IsNull());
  untag()->set_names(value.ptr());
}

StringPtr TypeParameters::NameAt(intptr_t index) const {
  const Array& names_array = Array::Handle(names());
  return String::RawCast(names_array.At(index));
}

void TypeParameters::SetNameAt(intptr_t index, const String& value) const {
  const Array& names_array = Array::Handle(names());
  names_array.SetAt(index, value);
}

void TypeParameters::set_flags(const Array& value) const {
  untag()->set_flags(value.ptr());
}

void TypeParameters::set_bounds(const TypeArguments& value) const {
  // A null value represents a vector of dynamic.
  untag()->set_bounds(value.ptr());
}

AbstractTypePtr TypeParameters::BoundAt(intptr_t index) const {
  const TypeArguments& upper_bounds = TypeArguments::Handle(bounds());
  return upper_bounds.IsNull() ? Type::DynamicType()
                               : upper_bounds.TypeAt(index);
}

void TypeParameters::SetBoundAt(intptr_t index,
                                const AbstractType& value) const {
  const TypeArguments& upper_bounds = TypeArguments::Handle(bounds());
  upper_bounds.SetTypeAt(index, value);
}

bool TypeParameters::AllDynamicBounds() const {
  return bounds() == TypeArguments::null();
}

void TypeParameters::set_defaults(const TypeArguments& value) const {
  // The null value represents a vector of dynamic.
  untag()->set_defaults(value.ptr());
}

AbstractTypePtr TypeParameters::DefaultAt(intptr_t index) const {
  const TypeArguments& default_type_args = TypeArguments::Handle(defaults());
  return default_type_args.IsNull() ? Type::DynamicType()
                                    : default_type_args.TypeAt(index);
}

void TypeParameters::SetDefaultAt(intptr_t index,
                                  const AbstractType& value) const {
  const TypeArguments& default_type_args = TypeArguments::Handle(defaults());
  default_type_args.SetTypeAt(index, value);
}

bool TypeParameters::AllDynamicDefaults() const {
  return defaults() == TypeArguments::null();
}

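// The flag vector is bit-packed into an Array of Smis: flag [i] lives in the
// Smi at array index (i >> kFlagsPerSmiShift), at bit (i & kFlagsPerSmiMask)
// within that Smi. A null flags array is the optimized representation of
// "all flags cleared" (see OptimizeFlags below).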
void TypeParameters::AllocateFlags(Heap::Space space) const {
  const intptr_t len = (Length() + kFlagsPerSmiMask) >> kFlagsPerSmiShift;
  const Array& flags_array = Array::Handle(Array::New(len, space));
  // Initialize flags to 0.
  const Smi& zero = Smi::Handle(Smi::New(0));
  for (intptr_t i = 0; i < len; i++) {
    flags_array.SetAt(i, zero);
  }
  set_flags(flags_array);
}

void TypeParameters::OptimizeFlags() const {
  if (untag()->flags() == Array::null()) return;  // Already optimized.
  const intptr_t len = (Length() + kFlagsPerSmiMask) >> kFlagsPerSmiShift;
  const Array& flags_array = Array::Handle(flags());
  const Smi& zero = Smi::Handle(Smi::New(0));
  for (intptr_t i = 0; i < len; i++) {
    if (flags_array.At(i) != zero.ptr()) return;
  }
  set_flags(Object::null_array());
}

bool TypeParameters::IsGenericCovariantImplAt(intptr_t index) const {
  if (untag()->flags() == Array::null()) return false;
  const intptr_t flag = Smi::Value(
      Smi::RawCast(Array::Handle(flags()).At(index >> kFlagsPerSmiShift)));
  // Test exactly the bit for this type parameter.
  return (flag & (1 << (index & kFlagsPerSmiMask))) != 0;
}

void TypeParameters::SetIsGenericCovariantImplAt(intptr_t index,
                                                 bool value) const {
  const Array& flg = Array::Handle(flags());
  intptr_t flag = Smi::Value(Smi::RawCast(flg.At(index >> kFlagsPerSmiShift)));
  if (value) {
    flag |= 1 << (index & kFlagsPerSmiMask);
  } else {
    flag &= ~(1 << (index & kFlagsPerSmiMask));
  }
  flg.SetAt(index >> kFlagsPerSmiShift, Smi::Handle(Smi::New(flag)));
}

void TypeParameters::Print(Thread* thread,
                           Zone* zone,
                           bool are_class_type_parameters,
                           intptr_t base,
                           NameVisibility name_visibility,
                           BaseTextBuffer* printer) const {
  String& name = String::Handle(zone);
  AbstractType& type = AbstractType::Handle(zone);
  const intptr_t num_type_params = Length();
  for (intptr_t i = 0; i < num_type_params; i++) {
    if (are_class_type_parameters) {
      name = NameAt(i);
      printer->AddString(name.ToCString());
    } else {
      printer->AddString(TypeParameter::CanonicalNameCString(
          are_class_type_parameters, base, base + i));
    }
    if (FLAG_show_internal_names || !AllDynamicBounds()) {
      type = BoundAt(i);
      // Do not print default bound or non-nullable Object bound in weak mode.
      if (!type.IsNull() &&
          (FLAG_show_internal_names || !type.IsObjectType() ||
           (thread->isolate_group()->null_safety() && type.IsNonNullable()))) {
        printer->AddString(" extends ");
        type.PrintName(name_visibility, printer);
        if (FLAG_show_internal_names && !AllDynamicDefaults()) {
          type = DefaultAt(i);
          if (!type.IsNull() &&
              (FLAG_show_internal_names || !type.IsDynamicType())) {
            printer->AddString(" defaults to ");
            type.PrintName(name_visibility, printer);
          }
        }
      }
    }
    if (i != num_type_params - 1) {
      printer->AddString(", ");
    }
  }
}

const char* TypeParameters::ToCString() const {
  if (IsNull()) {
    return "TypeParameters: null";
  }
  auto thread = Thread::Current();
  auto zone = thread->zone();
  ZoneTextBuffer buffer(zone);
  buffer.AddString("TypeParameters: ");
  Print(thread, zone, true, 0, kInternalName, &buffer);
  return buffer.buffer();
}

TypeParametersPtr TypeParameters::New(Heap::Space space) {
  ASSERT(Object::type_parameters_class() != Class::null());
  return Object::Allocate<TypeParameters>(space);
}

TypeParametersPtr TypeParameters::New(intptr_t count, Heap::Space space) {
  const TypeParameters& result =
      TypeParameters::Handle(TypeParameters::New(space));
  // Create an [ Array ] of [ String ] objects to represent the names.
  // Create a [ TypeArguments ] vector representing the bounds.
  // Create a [ TypeArguments ] vector representing the defaults.
  // Create an [ Array ] of [ Smi ] objects to represent the flags.
  const Array& names_array = Array::Handle(Array::New(count, space));
  result.set_names(names_array);
  TypeArguments& type_args = TypeArguments::Handle();
  type_args = TypeArguments::New(count, Heap::kNew);  // Will get canonicalized.
  result.set_bounds(type_args);
  type_args = TypeArguments::New(count, Heap::kNew);  // Will get canonicalized.
  result.set_defaults(type_args);
  result.AllocateFlags(space);  // Will get optimized.
  return result.ptr();
}

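// Nullability of a type argument vector is encoded kNullabilityBitsPerType
// bits per type, least-significant bits first. As an illustrative example,
// a vector <int, String?> yields
//   kNonNullableBits | (kNullableBits << kNullabilityBitsPerType).
// Vectors longer than kNullabilityMaxTypes fall back to an all-zero encoding.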
intptr_t TypeArguments::ComputeNullability() const {
  if (IsNull()) return 0;
  const intptr_t num_types = Length();
  intptr_t result = 0;
  if (num_types <= kNullabilityMaxTypes) {
    AbstractType& type = AbstractType::Handle();
    for (intptr_t i = 0; i < num_types; i++) {
      type = TypeAt(i);
      intptr_t type_bits = 0;
      if (!type.IsNull()) {
        switch (type.nullability()) {
          case Nullability::kNullable:
            type_bits = kNullableBits;
            break;
          case Nullability::kNonNullable:
            type_bits = kNonNullableBits;
            break;
          case Nullability::kLegacy:
            type_bits = kLegacyBits;
            break;
          default:
            UNREACHABLE();
        }
      }
      result |= (type_bits << (i * kNullabilityBitsPerType));
    }
  }
  set_nullability(result);
  return result;
}

void TypeArguments::set_nullability(intptr_t value) const {
  untag()->set_nullability(Smi::New(value));
}

uword TypeArguments::HashForRange(intptr_t from_index, intptr_t len) const {
  if (IsNull()) return kAllDynamicHash;
  if (IsRaw(from_index, len)) return kAllDynamicHash;
  uint32_t result = 0;
  AbstractType& type = AbstractType::Handle();
  for (intptr_t i = 0; i < len; i++) {
    type = TypeAt(from_index + i);
    ASSERT(!type.IsNull());
    result = CombineHashes(result, type.Hash());
  }
  result = FinalizeHash(result, kHashBits);
  return result;
}

uword TypeArguments::ComputeHash() const {
  if (IsNull()) return kAllDynamicHash;
  const uword result = HashForRange(0, Length());
  ASSERT(result != 0);
  SetHash(result);
  return result;
}

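// Returns a vector of [total_length] types: the first [other_length] types
// come from [other] and the rest from this vector, with a null (all-dynamic)
// input vector padded out with dynamic. The result is canonicalized before
// it is returned.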
TypeArgumentsPtr TypeArguments::Prepend(Zone* zone,
                                        const TypeArguments& other,
                                        intptr_t other_length,
                                        intptr_t total_length) const {
  if (other_length == 0) {
    ASSERT(IsCanonical());
    return ptr();
  } else if (other_length == total_length) {
    ASSERT(other.IsCanonical());
    return other.ptr();
  } else if (IsNull() && other.IsNull()) {
    return TypeArguments::null();
  }
  const TypeArguments& result =
      TypeArguments::Handle(zone, TypeArguments::New(total_length, Heap::kNew));
  AbstractType& type = AbstractType::Handle(zone);
  for (intptr_t i = 0; i < other_length; i++) {
    type = other.IsNull() ? Type::DynamicType() : other.TypeAt(i);
    result.SetTypeAt(i, type);
  }
  for (intptr_t i = other_length; i < total_length; i++) {
    type = IsNull() ? Type::DynamicType() : TypeAt(i - other_length);
    result.SetTypeAt(i, type);
  }
  return result.Canonicalize(Thread::Current());
}

TypeArgumentsPtr TypeArguments::ConcatenateTypeParameters(
    Zone* zone,
    const TypeArguments& other) const {
  ASSERT(!IsNull() && !other.IsNull());
  const intptr_t this_len = Length();
  const intptr_t other_len = other.Length();
  const auto& result = TypeArguments::Handle(
      zone, TypeArguments::New(this_len + other_len, Heap::kNew));
  auto& type = AbstractType::Handle(zone);
  for (intptr_t i = 0; i < this_len; ++i) {
    type = TypeAt(i);
    result.SetTypeAt(i, type);
  }
  for (intptr_t i = 0; i < other_len; ++i) {
    type = other.TypeAt(i);
    result.SetTypeAt(this_len + i, type);
  }
  return result.ptr();
}

StringPtr TypeArguments::Name() const {
  Thread* thread = Thread::Current();
  ZoneTextBuffer printer(thread->zone());
  PrintSubvectorName(0, Length(), kInternalName, &printer);
  return Symbols::New(thread, printer.buffer());
}

StringPtr TypeArguments::UserVisibleName() const {
  Thread* thread = Thread::Current();
  ZoneTextBuffer printer(thread->zone());
  PrintSubvectorName(0, Length(), kUserVisibleName, &printer);
  return Symbols::New(thread, printer.buffer());
}

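// Prints a subvector as "<T0, ..., Tn-1>". Illustrative example: a length-2
// request on <int, String> prints "<int, String>"; indices past the end of
// the vector print as "dynamic", and null (unfinalized) entries as "null".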
void TypeArguments::PrintSubvectorName(intptr_t from_index,
                                       intptr_t len,
                                       NameVisibility name_visibility,
                                       BaseTextBuffer* printer) const {
  printer->AddString("<");
  AbstractType& type = AbstractType::Handle();
  for (intptr_t i = 0; i < len; i++) {
    if (from_index + i < Length()) {
      type = TypeAt(from_index + i);
      if (type.IsNull()) {
        printer->AddString("null");  // Unfinalized vector.
      } else {
        type.PrintName(name_visibility, printer);
      }
    } else {
      printer->AddString("dynamic");
    }
    if (i < len - 1) {
      printer->AddString(", ");
    }
  }
  printer->AddString(">");
}

void TypeArguments::PrintTo(BaseTextBuffer* buffer) const {
  buffer->AddString("TypeArguments: ");
  if (IsNull()) {
    return buffer->AddString("null");
  }
  buffer->Printf("(H%" Px ")", Smi::Value(untag()->hash()));
  auto& type_at = AbstractType::Handle();
  for (intptr_t i = 0; i < Length(); i++) {
    type_at = TypeAt(i);
    buffer->Printf(" [%s]", type_at.IsNull() ? "null" : type_at.ToCString());
  }
}

bool TypeArguments::IsSubvectorEquivalent(
    const TypeArguments& other,
    intptr_t from_index,
    intptr_t len,
    TypeEquality kind,
    FunctionTypeMapping* function_type_equivalence) const {
  if (this->ptr() == other.ptr()) {
    return true;
  }
  if (kind == TypeEquality::kCanonical) {
    if (IsNull() || other.IsNull()) {
      return false;
    }
    if (Length() != other.Length()) {
      return false;
    }
  }
  AbstractType& type = AbstractType::Handle();
  AbstractType& other_type = AbstractType::Handle();
  for (intptr_t i = from_index; i < from_index + len; i++) {
    type = IsNull() ? Type::DynamicType() : TypeAt(i);
    ASSERT(!type.IsNull());
    other_type = other.IsNull() ? Type::DynamicType() : other.TypeAt(i);
    ASSERT(!other_type.IsNull());
    if (!type.IsEquivalent(other_type, kind, function_type_equivalence)) {
      return false;
    }
  }
  return true;
}

bool TypeArguments::RequireConstCanonicalTypeErasure(Zone* zone,
                                                     intptr_t from_index,
                                                     intptr_t len) const {
  if (IsNull()) return false;
  ASSERT(Length() >= (from_index + len));
  AbstractType& type = AbstractType::Handle(zone);
  for (intptr_t i = 0; i < len; i++) {
    type = TypeAt(from_index + i);
    if (type.IsNonNullable() ||
        (type.IsNullable() && type.RequireConstCanonicalTypeErasure(zone))) {
      // It is not possible for a legacy type to have non-nullable type
      // arguments or for a legacy function type to have non-nullable types in
      // its signature.
      return true;
    }
  }
  return false;
}

bool TypeArguments::IsDynamicTypes(bool raw_instantiated,
                                   intptr_t from_index,
                                   intptr_t len) const {
  ASSERT(Length() >= (from_index + len));
  AbstractType& type = AbstractType::Handle();
  Class& type_class = Class::Handle();
  for (intptr_t i = 0; i < len; i++) {
    type = TypeAt(from_index + i);
    if (type.IsNull()) {
      return false;
    }
    if (!type.HasTypeClass()) {
      if (raw_instantiated && type.IsTypeParameter()) {
        // An uninstantiated type parameter is equivalent to dynamic.
        continue;
      }
      return false;
    }
    type_class = type.type_class();
    if (!type_class.IsDynamicClass()) {
      return false;
    }
  }
  return true;
}

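// Sketch of the cache storage layout assumed by the accessors below (see the
// Cache declaration for the authoritative constants):
//
//   data_[kMetadataIndex]   Smi packing NumOccupiedBits / EntryCountLog2Bits
//   then, for each entry n starting at kHeaderSize + n * kEntrySize:
//     +kInstantiatorTypeArgsIndex  instantiator TAV (Sentinel() if unoccupied)
//     +kFunctionTypeArgsIndex      function TAV
//     +kInstantiatedTypeArgsIndex  instantiated (result) TAV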
TypeArguments::Cache::Cache(Zone* zone, const TypeArguments& source)
    : zone_(ASSERT_NOTNULL(zone)),
      cache_container_(&source),
      data_(Array::Handle(source.instantiations())),
      smi_handle_(Smi::Handle(zone)) {
  ASSERT(IsolateGroup::Current()
             ->type_arguments_canonicalization_mutex()
             ->IsOwnedByCurrentThread());
}

TypeArguments::Cache::Cache(Zone* zone, const Array& array)
    : zone_(ASSERT_NOTNULL(zone)),
      cache_container_(nullptr),
      data_(Array::Handle(array.ptr())),
      smi_handle_(Smi::Handle(zone)) {
  ASSERT(IsolateGroup::Current()
             ->type_arguments_canonicalization_mutex()
             ->IsOwnedByCurrentThread());
}

bool TypeArguments::Cache::IsHash(const Array& array) {
  return array.Length() > kMaxLinearCacheSize;
}

intptr_t TypeArguments::Cache::NumOccupied(const Array& array) {
  return NumOccupiedBits::decode(
      RawSmiValue(Smi::RawCast(array.AtAcquire(kMetadataIndex))));
}

#if defined(DEBUG)
bool TypeArguments::Cache::IsValidStorageLocked(const Array& array) {
  // We only require the mutex be held so we don't need to use acquire/release
  // semantics to access and set the number of occupied entries in the header.
  ASSERT(IsolateGroup::Current()
             ->type_arguments_canonicalization_mutex()
             ->IsOwnedByCurrentThread());
  // Quick check against the empty linear cache.
  if (array.ptr() == EmptyStorage().ptr()) return true;
  const intptr_t num_occupied = NumOccupied(array);
  // We should be using the same shared value for an empty cache.
  if (num_occupied == 0) return false;
  const intptr_t storage_len = array.Length();
  // All caches have the metadata followed by a series of entries.
  if ((storage_len % kEntrySize) != kHeaderSize) return false;
  const intptr_t num_entries = NumEntries(array);
  // Linear caches contain at least one unoccupied entry, and hash-based caches
  // grow prior to hitting 100% occupancy.
  if (num_occupied >= num_entries) return false;
  // In a linear cache, all entries with indexes smaller than [num_occupied]
  // should be occupied and ones greater than or equal should be unoccupied.
  const bool is_linear_cache = IsLinear(array);
  // The capacity of a hash-based cache must be a power of two (see
  // EnsureCapacityLocked as to why).
  if (!is_linear_cache) {
    if (!Utils::IsPowerOfTwo(num_entries)) return false;
    const intptr_t metadata =
        RawSmiValue(Smi::RawCast(array.AtAcquire(kMetadataIndex)));
    if ((1 << EntryCountLog2Bits::decode(metadata)) != num_entries) {
      return false;
    }
  }
  for (intptr_t i = 0; i < num_entries; i++) {
    const intptr_t index = kHeaderSize + i * kEntrySize;
    if (array.At(index + kSentinelIndex) == Sentinel()) {
      if (is_linear_cache && i < num_occupied) return false;
      continue;
    }
    if (is_linear_cache && i >= num_occupied) return false;
    // The elements of an occupied entry are all TypeArguments values.
    for (intptr_t j = index; j < index + kEntrySize; j++) {
      if (!array.At(j)->IsHeapObject()) return false;
      if (array.At(j) == Object::null()) continue;  // null is a valid TAV.
      if (!array.At(j)->IsTypeArguments()) return false;
    }
  }
  return true;
}
#endif

bool TypeArguments::Cache::IsOccupied(intptr_t entry) const {
  InstantiationsCacheTable table(data_);
  ASSERT(entry >= 0 && entry < table.Length());
  return table.At(entry).Get<kSentinelIndex>() != Sentinel();
}

TypeArgumentsPtr TypeArguments::Cache::Retrieve(intptr_t entry) const {
  ASSERT(IsOccupied(entry));
  InstantiationsCacheTable table(data_);
  return table.At(entry).Get<kInstantiatedTypeArgsIndex>();
}

intptr_t TypeArguments::Cache::NumEntries(const Array& array) {
  InstantiationsCacheTable table(array);
  return table.Length();
}

TypeArguments::Cache::KeyLocation TypeArguments::Cache::FindKeyOrUnused(
    const Array& array,
    const TypeArguments& instantiator_tav,
    const TypeArguments& function_tav) {
  const bool is_hash = IsHash(array);
  InstantiationsCacheTable table(array);
  const intptr_t num_entries = table.Length();
  // For a linear cache, start at the first entry and probe linearly. This can
  // be done because a linear cache always has at least one unoccupied entry
  // after all the occupied ones.
  intptr_t probe = 0;
  intptr_t probe_distance = 1;
  if (is_hash) {
    // For a hash-based cache, instead start at an entry determined by the hash
    // of the keys.
    auto hash = FinalizeHash(
        CombineHashes(instantiator_tav.Hash(), function_tav.Hash()));
    probe = hash & (num_entries - 1);
  }
  while (true) {
    const auto& tuple = table.At(probe);
    if (tuple.Get<kSentinelIndex>() == Sentinel()) break;
    if ((tuple.Get<kInstantiatorTypeArgsIndex>() == instantiator_tav.ptr()) &&
        (tuple.Get<kFunctionTypeArgsIndex>() == function_tav.ptr())) {
      return {probe, true};
    }
    // Advance probe by the current probing distance.
    probe = probe + probe_distance;
    if (is_hash) {
      // Wrap around if the probe goes off the end of the entries array.
      probe = probe & (num_entries - 1);
      // We had a collision, so increase the probe distance. See comment in
      // EnsureCapacityLocked for an explanation of how this hits all slots.
      probe_distance++;
    }
  }
  // We should always get the next slot for a linear cache.
  ASSERT(is_hash || probe == NumOccupied(array));
  return {probe, false};
}
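
// A worked example of the triangular probing above: with 8 entries and an
// initial probe of 5, successive probe distances 1, 2, 3, ... visit slots
// 5, 6, 0, 3, 7, 4, 2, 1, i.e. every slot exactly once. This full coverage is
// only guaranteed because the capacity is a power of two.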

TypeArguments::Cache::KeyLocation TypeArguments::Cache::AddEntry(
    intptr_t entry,
    const TypeArguments& instantiator_tav,
    const TypeArguments& function_tav,
    const TypeArguments& instantiated_tav) const {
  // We don't do mutating operations in tests without a TypeArguments object.
  ASSERT(cache_container_ != nullptr);
#if defined(DEBUG)
  auto loc = FindKeyOrUnused(instantiator_tav, function_tav);
  ASSERT_EQUAL(loc.entry, entry);
  ASSERT(!loc.present);
#endif
  // Double-check we got the expected entry index when adding to a linear
  // array.
  ASSERT(!IsLinear() || entry == NumOccupied());
  const intptr_t new_occupied = NumOccupied() + 1;
  const bool storage_changed = EnsureCapacityLocked(new_occupied);
  // Note that this call to IsLinear() may return a different result than the
  // earlier one, since EnsureCapacityLocked() may have swapped to hash-based
  // storage.
  if (storage_changed && !IsLinear()) {
    // The capacity of the array has changed, and the capacity is used when
    // probing further into the array due to collisions. Thus, we need to redo
    // the entry index calculation.
    auto loc = FindKeyOrUnused(instantiator_tav, function_tav);
    ASSERT(!loc.present);
    entry = loc.entry;
  }

  // Go ahead and increment the number of occupied entries prior to adding the
  // entry. Use a store-release barrier in case of concurrent readers.
  const intptr_t metadata = RawSmiValue(Smi::RawCast(data_.At(kMetadataIndex)));
  smi_handle_ = Smi::New(NumOccupiedBits::update(new_occupied, metadata));
  data_.SetAtRelease(kMetadataIndex, smi_handle_);

  InstantiationsCacheTable table(data_);
  const auto& tuple = table.At(entry);
  // The parts of the tuple that aren't used for sentinel checking are only
  // retrieved if the entry is occupied. Entries in the cache are never
  // deleted, so once the entry is marked as occupied, the contents of that
  // entry never change. Thus, we don't need store-release barriers here.
  tuple.Set<kFunctionTypeArgsIndex>(function_tav);
  tuple.Set<kInstantiatedTypeArgsIndex>(instantiated_tav);
  // For the sentinel position, though, we do.
  static_assert(
      kSentinelIndex == kInstantiatorTypeArgsIndex,
      "the sentinel position is not protected with a store-release barrier");
  tuple.Set<kInstantiatorTypeArgsIndex, std::memory_order_release>(
      instantiator_tav);

  if (storage_changed) {
    // Only check for validity on growth, just to keep the overhead on DEBUG
    // builds down.
    DEBUG_ASSERT(IsValidStorageLocked(data_));
    // Update the container of the original cache to point to the new one.
    cache_container_->set_instantiations(data_);
  }

  return {entry, true};
}

SmiPtr TypeArguments::Cache::Sentinel() {
  return Smi::New(kSentinelValue);
}

bool TypeArguments::Cache::EnsureCapacityLocked(intptr_t new_occupied) const {
  ASSERT(new_occupied > NumOccupied());
  // How many entries are in the current array (including unoccupied entries).
  const intptr_t current_capacity = NumEntries();

  // Early returns for cases where no growth is needed.
  const bool is_linear = IsLinear();
  if (is_linear) {
    // We need at least one unoccupied entry in addition to the occupied ones.
    if (current_capacity > new_occupied) return false;
  } else {
    if (LoadFactor(new_occupied, current_capacity) < kMaxLoadFactor) {
      return false;
    }
  }

  if (new_occupied <= kMaxLinearCacheEntries) {
    ASSERT(is_linear);
    // Not enough room for both the new entry and at least one unoccupied
    // entry, so grow the tuple capacity of the linear cache by about 50%,
    // ensuring that space for at least one new tuple is added, capping the
    // total number of occupied entries to the max allowed.
    const intptr_t new_capacity =
        Utils::Minimum(current_capacity + (current_capacity >> 1),
                       kMaxLinearCacheEntries) +
        1;
    const intptr_t cache_size = kHeaderSize + new_capacity * kEntrySize;
    ASSERT(cache_size <= kMaxLinearCacheSize);
    data_ = Array::Grow(data_, cache_size, Heap::kOld);
    ASSERT(!data_.IsNull());
    // No need to adjust the number of occupied entries or old entries, as they
    // are copied over by Array::Grow. Just mark any new entries as unoccupied.
    smi_handle_ = Sentinel();
    InstantiationsCacheTable table(data_);
    for (intptr_t i = current_capacity; i < new_capacity; i++) {
      const auto& tuple = table.At(i);
      tuple.Set<kSentinelIndex>(smi_handle_);
    }
    return true;
  }

  // Either we're converting a linear cache into a hash-based cache, or the
  // load factor of the hash-based cache has increased to the point where we
  // need to grow it.
  const intptr_t new_capacity =
      is_linear ? kNumInitialHashCacheEntries : 2 * current_capacity;
  // Because we use quadratic (actually triangle number) probing it is
  // important that the size is a power of two (otherwise we could fail to
  // find an empty slot). This is described in Knuth's The Art of Computer
  // Programming Volume 3, Chapter 6.4, exercise 20 (solution in the
  // appendix, 2nd edition).
  ASSERT(Utils::IsPowerOfTwo(new_capacity));
  ASSERT(LoadFactor(new_occupied, new_capacity) < kMaxLoadFactor);
  const intptr_t new_size = kHeaderSize + new_capacity * kEntrySize;
  const auto& new_data =
      Array::Handle(zone_, Array::NewUninitialized(new_size, Heap::kOld));
  ASSERT(!new_data.IsNull());
  // First set up the metadata in new_data.
  const intptr_t metadata = RawSmiValue(Smi::RawCast(data_.At(kMetadataIndex)));
  smi_handle_ = Smi::New(EntryCountLog2Bits::update(
      Utils::ShiftForPowerOfTwo(new_capacity), metadata));
  new_data.SetAt(kMetadataIndex, smi_handle_);
  // Then mark all the entries in new_data as unoccupied.
  smi_handle_ = Sentinel();
  InstantiationsCacheTable to_table(new_data);
  for (const auto& tuple : to_table) {
    tuple.Set<kSentinelIndex>(smi_handle_);
  }
  // Finally, copy over the entries.
  auto& instantiator_tav = TypeArguments::Handle(zone_);
  auto& function_tav = TypeArguments::Handle(zone_);
  auto& result_tav = TypeArguments::Handle(zone_);
  const InstantiationsCacheTable from_table(data_);
  for (const auto& from_tuple : from_table) {
    // Skip unoccupied entries.
    if (from_tuple.Get<kSentinelIndex>() == Sentinel()) continue;
    instantiator_tav ^= from_tuple.Get<kInstantiatorTypeArgsIndex>();
    function_tav = from_tuple.Get<kFunctionTypeArgsIndex>();
    result_tav = from_tuple.Get<kInstantiatedTypeArgsIndex>();
    // Since new_data has a different total capacity, we can't use the old
    // entry indexes, but must recalculate them.
    auto loc = FindKeyOrUnused(new_data, instantiator_tav, function_tav);
    ASSERT(!loc.present);
    const auto& to_tuple = to_table.At(loc.entry);
    to_tuple.Set<kInstantiatorTypeArgsIndex>(instantiator_tav);
    to_tuple.Set<kFunctionTypeArgsIndex>(function_tav);
    to_tuple.Set<kInstantiatedTypeArgsIndex>(result_tav);
  }
  data_ = new_data.ptr();
  return true;
}
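
// An illustrative growth sketch for EnsureCapacityLocked above: a linear
// cache with capacity 4 grows to min(4 + 2, kMaxLinearCacheEntries) + 1 = 7
// entries (assuming kMaxLinearCacheEntries >= 6), while an existing
// hash-based cache doubles, preserving the power-of-two capacity that the
// triangular probing in FindKeyOrUnused relies on.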

bool TypeArguments::HasInstantiations() const {
  return instantiations() != Cache::EmptyStorage().ptr();
}

ArrayPtr TypeArguments::instantiations() const {
  // We rely on the fact that any loads from the array are dependent loads and
  // avoid the load-acquire barrier here.
  return untag()->instantiations();
}

void TypeArguments::set_instantiations(const Array& value) const {
  // We have to ensure that initializing stores to the array are available
  // when releasing the pointer to the array pointer.
  // => We have to use store-release here.
  ASSERT(!value.IsNull());
  untag()->set_instantiations<std::memory_order_release>(value.ptr());
}

bool TypeArguments::HasCount(intptr_t count) const {
  if (IsNull()) {
    return true;
  }
  return Length() == count;
}

intptr_t TypeArguments::Length() const {
  if (IsNull()) {
    return 0;
  }
  return Smi::Value(untag()->length());
}

intptr_t TypeArguments::nullability() const {
  if (IsNull()) {
    return 0;
  }
  return Smi::Value(untag()->nullability());
}

AbstractTypePtr TypeArguments::TypeAt(intptr_t index) const {
  ASSERT(!IsNull());
  ASSERT((index >= 0) && (index < Length()));
  return untag()->element(index);
}

AbstractTypePtr TypeArguments::TypeAtNullSafe(intptr_t index) const {
  if (IsNull()) {
    // A null vector represents an infinite list of dynamics.
    return Type::dynamic_type().ptr();
  }
  ASSERT((index >= 0) && (index < Length()));
  return TypeAt(index);
}

void TypeArguments::SetTypeAt(intptr_t index, const AbstractType& value) const {
  ASSERT(!IsCanonical());
  ASSERT((index >= 0) && (index < Length()));
  return untag()->set_element(index, value.ptr());
}

bool TypeArguments::IsSubvectorInstantiated(
    intptr_t from_index,
    intptr_t len,
    Genericity genericity,
    intptr_t num_free_fun_type_params) const {
  ASSERT(!IsNull());
  AbstractType& type = AbstractType::Handle();
  for (intptr_t i = 0; i < len; i++) {
    type = TypeAt(from_index + i);
    // If this type argument T is null, the type A containing T in its
    // flattened type argument vector V is recursive and is still being
    // finalized. T is the type argument of a super type of A. T is being
    // instantiated during finalization of V, which is also the instantiator.
    // T depends solely on the type parameters of A and will be replaced by a
    // non-null type before A is marked as finalized.
    if (!type.IsNull() &&
        !type.IsInstantiated(genericity, num_free_fun_type_params)) {
      return false;
    }
  }
  return true;
}

bool TypeArguments::IsUninstantiatedIdentity() const {
  AbstractType& type = AbstractType::Handle();
  const intptr_t num_types = Length();
  for (intptr_t i = 0; i < num_types; i++) {
    type = TypeAt(i);
    if (type.IsNull()) {
      return false;  // Still unfinalized, too early to tell.
    }
    if (!type.IsTypeParameter()) {
      return false;
    }
    const TypeParameter& type_param = TypeParameter::Cast(type);
    ASSERT(type_param.IsFinalized());
    if ((type_param.index() != i) || type_param.IsFunctionTypeParameter()) {
      return false;
    }
    // Instantiating nullable and legacy type parameters may change the
    // nullability of a type, so a type arguments vector containing such type
    // parameters cannot be substituted with instantiator type arguments.
    if (type_param.IsNullable() || type_param.IsLegacy()) {
      return false;
    }
  }
  return true;
  // Note that it is not necessary to verify at runtime that the instantiator
  // type vector is long enough, since this uninstantiated vector contains as
  // many different type parameters as it is long.
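  // Illustrative example: for class C<X0, X1>, the vector <X0, X1> (with
  // non-nullable class type parameters at indices 0 and 1) is an
  // uninstantiated identity, while <X1, X0> or <X0?, X1> is not.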
}

// Return true if this uninstantiated type argument vector, once instantiated
// at runtime, is a prefix of the type argument vector of its instantiator.
// A runtime check may be required, as indicated by with_runtime_check.
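// Illustrative example: in class D<T> extends C<T>, an uninstantiated vector
// <T> refers to D's type parameter at index 0, so once instantiated it is a
// prefix of (here, equal to) D's instantiator type argument vector.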
bool TypeArguments::CanShareInstantiatorTypeArguments(
    const Class& instantiator_class,
    bool* with_runtime_check) const {
  ASSERT(!IsInstantiated());
  if (with_runtime_check != nullptr) {
    *with_runtime_check = false;
  }
  const intptr_t num_type_args = Length();
  const intptr_t num_instantiator_type_args =
      instantiator_class.NumTypeArguments();
  if (num_type_args > num_instantiator_type_args) {
    // This vector cannot be a prefix of a shorter vector.
    return false;
  }
  const intptr_t num_instantiator_type_params =
      instantiator_class.NumTypeParameters();
  const intptr_t first_type_param_offset =
      num_instantiator_type_args - num_instantiator_type_params;
  // At compile time, the type argument vector of the instantiator consists of
  // the type argument vector of its super type, which may refer to the type
  // parameters of the instantiator class, followed by (or overlapping
  // partially or fully with) the type parameters of the instantiator class in
  // declaration order.
  // In other words, the only variables are the type parameters of the
  // instantiator class.
  // This uninstantiated type argument vector is also expressed in terms of
  // the type parameters of the instantiator class. Therefore, in order to be
  // a prefix once instantiated at runtime, every one of its type arguments
  // must be equal to the type argument of the instantiator vector at the same
  // index.

  // As a first requirement, the last num_instantiator_type_params type
  // arguments of this type argument vector must refer to the corresponding
  // type parameters of the instantiator class.
  AbstractType& type_arg = AbstractType::Handle();
  for (intptr_t i = first_type_param_offset; i < num_type_args; i++) {
    type_arg = TypeAt(i);
    if (!type_arg.IsTypeParameter()) {
      return false;
    }
    const TypeParameter& type_param = TypeParameter::Cast(type_arg);
    ASSERT(type_param.IsFinalized());
    if ((type_param.index() != i) || type_param.IsFunctionTypeParameter()) {
      return false;
    }
    // Instantiating nullable and legacy type parameters may change the
    // nullability of a type, so a type arguments vector containing such type
    // parameters cannot be substituted with instantiator type arguments,
    // unless we check at runtime the nullability of the first 1 or 2 type
    // arguments of the instantiator.
    // Note that the presence of non-overlapping super type arguments (i.e.
    // first_type_param_offset > 0) will prevent this optimization.
    if (type_param.IsNullable() || type_param.IsLegacy()) {
      if (with_runtime_check == nullptr || i >= kNullabilityMaxTypes) {
        return false;
      }
      *with_runtime_check = true;
    }
  }
  // As a second requirement, the type arguments corresponding to the super
  // type must be identical. Overlapping ones have already been checked
  // starting at first_type_param_offset.
  if (first_type_param_offset == 0) {
    return true;
  }
  Type& super_type = Type::Handle(instantiator_class.super_type());
  const TypeArguments& super_type_args =
      TypeArguments::Handle(super_type.GetInstanceTypeArguments(
          Thread::Current(), /*canonicalize=*/false));
  if (super_type_args.IsNull()) {
    ASSERT(!IsUninstantiatedIdentity());
    return false;
  }
  AbstractType& super_type_arg = AbstractType::Handle();
  for (intptr_t i = 0; (i < first_type_param_offset) && (i < num_type_args);
       i++) {
    type_arg = TypeAt(i);
    super_type_arg = super_type_args.TypeAt(i);
    if (!type_arg.Equals(super_type_arg)) {
      ASSERT(!IsUninstantiatedIdentity());
      return false;
    }
  }
  return true;
}

// Return true if this uninstantiated type argument vector, once instantiated
// at runtime, is a prefix of the enclosing function type arguments.
// A runtime check may be required, as indicated by with_runtime_check.
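// Illustrative example: for a generic closure void f<T>() nested inside
// void g<S>(), the vector <S, T> consists of function type parameters at
// indices 0 and 1, so it can share the enclosing function type arguments
// (modulo the nullability checks below).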
bool TypeArguments::CanShareFunctionTypeArguments(
    const Function& function,
    bool* with_runtime_check) const {
  ASSERT(!IsInstantiated());
  if (with_runtime_check != nullptr) {
    *with_runtime_check = false;
  }
  const intptr_t num_type_args = Length();
  const intptr_t num_parent_type_args = function.NumParentTypeArguments();
  const intptr_t num_function_type_params = function.NumTypeParameters();
  const intptr_t num_function_type_args =
      num_parent_type_args + num_function_type_params;
  if (num_type_args > num_function_type_args) {
    // This vector cannot be a prefix of a shorter vector.
    return false;
  }
  AbstractType& type_arg = AbstractType::Handle();
  for (intptr_t i = 0; i < num_type_args; i++) {
    type_arg = TypeAt(i);
    if (!type_arg.IsTypeParameter()) {
      return false;
    }
    const TypeParameter& type_param = TypeParameter::Cast(type_arg);
    ASSERT(type_param.IsFinalized());
    if ((type_param.index() != i) || !type_param.IsFunctionTypeParameter()) {
      return false;
    }
    // Instantiating nullable and legacy type parameters may change the
    // nullability of a type, so a type arguments vector containing such type
    // parameters cannot be substituted with the enclosing function type
    // arguments, unless we check at runtime the nullability of the first 1 or
    // 2 type arguments of the enclosing function type arguments.
    if (type_param.IsNullable() || type_param.IsLegacy()) {
      if (with_runtime_check == nullptr || i >= kNullabilityMaxTypes) {
        return false;
      }
      *with_runtime_check = true;
    }
  }
  return true;
}

TypeArgumentsPtr TypeArguments::TruncatedTo(intptr_t length) const {
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  const TypeArguments& result =
      TypeArguments::Handle(zone, TypeArguments::New(length));
  AbstractType& type = AbstractType::Handle(zone);
  for (intptr_t i = 0; i < length; i++) {
    type = TypeAt(i);
    result.SetTypeAt(i, type);
  }
  return result.Canonicalize(thread);
}

bool TypeArguments::IsFinalized() const {
  ASSERT(!IsNull());
  AbstractType& type = AbstractType::Handle();
  const intptr_t num_types = Length();
  for (intptr_t i = 0; i < num_types; i++) {
    type = TypeAt(i);
    if (!type.IsFinalized()) {
      return false;
    }
  }
  return true;
}

TypeArgumentsPtr TypeArguments::InstantiateFrom(
    const TypeArguments& instantiator_type_arguments,
    const TypeArguments& function_type_arguments,
    intptr_t num_free_fun_type_params,
    Heap::Space space,
    FunctionTypeMapping* function_type_mapping,
    intptr_t num_parent_type_args_adjustment) const {
  ASSERT(!IsInstantiated());
  if ((instantiator_type_arguments.IsNull() ||
       instantiator_type_arguments.Length() == Length()) &&
      IsUninstantiatedIdentity()) {
    return instantiator_type_arguments.ptr();
  }
  const intptr_t num_types = Length();
  TypeArguments& instantiated_array =
      TypeArguments::Handle(TypeArguments::New(num_types, space));
  AbstractType& type = AbstractType::Handle();
  for (intptr_t i = 0; i < num_types; i++) {
    type = TypeAt(i);
    // If this type argument T is null, the type A containing T in its
    // flattened type argument vector V is recursive and is still being
    // finalized. T is the type argument of a super type of A. T is being
    // instantiated during finalization of V, which is also the instantiator.
    // T depends solely on the type parameters of A and will be replaced by a
    // non-null type before A is marked as finalized.
    if (!type.IsNull() && !type.IsInstantiated()) {
      type = type.InstantiateFrom(
          instantiator_type_arguments, function_type_arguments,
          num_free_fun_type_params, space, function_type_mapping,
          num_parent_type_args_adjustment);
      // A returned null type indicates a failed instantiation in dead code
      // that must be propagated up to the caller, the optimizing compiler.
      if (type.IsNull()) {
        return Object::empty_type_arguments().ptr();
      }
    }
    instantiated_array.SetTypeAt(i, type);
  }
  return instantiated_array.ptr();
}

TypeArgumentsPtr TypeArguments::UpdateFunctionTypes(
    intptr_t num_parent_type_args_adjustment,
    intptr_t num_free_fun_type_params,
    Heap::Space space,
    FunctionTypeMapping* function_type_mapping) const {
  Zone* zone = Thread::Current()->zone();
  TypeArguments* updated_args = nullptr;
  AbstractType& type = AbstractType::Handle(zone);
  AbstractType& updated = AbstractType::Handle(zone);
  for (intptr_t i = 0, n = Length(); i < n; ++i) {
    type = TypeAt(i);
    updated = type.UpdateFunctionTypes(num_parent_type_args_adjustment,
                                       num_free_fun_type_params, space,
                                       function_type_mapping);
    if (type.ptr() != updated.ptr()) {
      if (updated_args == nullptr) {
        updated_args =
            &TypeArguments::Handle(zone, TypeArguments::New(n, space));
        for (intptr_t j = 0; j < i; ++j) {
          type = TypeAt(j);
          updated_args->SetTypeAt(j, type);
        }
      }
    }
    if (updated_args != nullptr) {
      updated_args->SetTypeAt(i, updated);
    }
  }
  return (updated_args != nullptr) ? updated_args->ptr() : ptr();
}

#if !defined(PRODUCT) && !defined(DART_PRECOMPILED_RUNTIME)
// A local flag used only in object_test.cc that, when true, causes a failure
// when a cache entry for the given instantiator and function type arguments
// already exists. Used to check that the InstantiateTypeArguments stub found
// the cache entry instead of calling the runtime.
bool TESTING_runtime_fail_on_existing_cache_entry = false;
#endif

TypeArgumentsPtr TypeArguments::InstantiateAndCanonicalizeFrom(
    const TypeArguments& instantiator_type_arguments,
    const TypeArguments& function_type_arguments) const {
  auto thread = Thread::Current();
  auto zone = thread->zone();
  SafepointMutexLocker ml(
      thread->isolate_group()->type_arguments_canonicalization_mutex());

  ASSERT(!IsInstantiated());
  ASSERT(instantiator_type_arguments.IsNull() ||
         instantiator_type_arguments.IsCanonical());
  ASSERT(function_type_arguments.IsNull() ||
         function_type_arguments.IsCanonical());
  // Lookup instantiators and if found, return instantiated result.
  Cache cache(zone, *this);
  auto const loc = cache.FindKeyOrUnused(instantiator_type_arguments,
                                         function_type_arguments);
  if (loc.present) {
#if !defined(PRODUCT) && !defined(DART_PRECOMPILED_RUNTIME)
    if (TESTING_runtime_fail_on_existing_cache_entry) {
      TextBuffer buffer(1024);
      buffer.Printf("for\n");
      buffer.Printf(" * uninstantiated type arguments %s\n", ToCString());
      buffer.Printf(" * instantiation type arguments: %s (hash: %" Pu ")\n",
                    instantiator_type_arguments.ToCString(),
                    instantiator_type_arguments.Hash());
      buffer.Printf(" * function type arguments: %s (hash: %" Pu ")\n",
                    function_type_arguments.ToCString(),
                    function_type_arguments.Hash());
      buffer.Printf(" * number of occupied entries in cache: %" Pd "\n",
                    cache.NumOccupied());
      buffer.Printf(" * number of total entries in cache: %" Pd "\n",
                    cache.NumEntries());
      buffer.Printf("expected to find entry %" Pd
                    " of cache in stub, but reached runtime",
                    loc.entry);
      FATAL("%s", buffer.buffer());
    }
#endif
    return cache.Retrieve(loc.entry);
  }
  // Cache lookup failed. Instantiate the type arguments.
  TypeArguments& result = TypeArguments::Handle(zone);
  result = InstantiateFrom(instantiator_type_arguments,
                           function_type_arguments, kAllFree, Heap::kOld);
  // Canonicalize type arguments.
  result = result.Canonicalize(thread);
  // InstantiateAndCanonicalizeFrom is not reentrant. It cannot have been
  // called indirectly, so the instantiations cache array cannot have grown.
  ASSERT(cache.data_.ptr() == instantiations());
  cache.AddEntry(loc.entry, instantiator_type_arguments,
                 function_type_arguments, result);
  return result.ptr();
}

TypeArgumentsPtr TypeArguments::New(intptr_t len, Heap::Space space) {
  if (len < 0 || len > kMaxElements) {
    // This should be caught before we reach here.
    FATAL("Fatal error in TypeArguments::New: invalid len %" Pd "\n", len);
  }
  TypeArguments& result = TypeArguments::Handle();
  {
    auto raw = Object::Allocate<TypeArguments>(space, len);
    NoSafepointScope no_safepoint;
    result = raw;
    // Length must be set before we start storing into the array.
    result.SetLength(len);
    result.SetHash(0);
    result.set_nullability(0);
  }
  // The array used as storage for an empty linear cache should be initialized.
  ASSERT(Cache::EmptyStorage().ptr() != Array::null());
  result.set_instantiations(Cache::EmptyStorage());
  return result.ptr();
}

void TypeArguments::SetLength(intptr_t value) const {
  ASSERT(!IsCanonical());
  // This is only safe because we create a new Smi, which does not cause
  // heap allocation.
  untag()->set_length(Smi::New(value));
}

TypeArgumentsPtr TypeArguments::Canonicalize(Thread* thread) const {
  if (IsNull() || IsCanonical()) {
    ASSERT(IsOld());
    return this->ptr();
  }
  const intptr_t num_types = Length();
  if (num_types == 0) {
    return TypeArguments::empty_type_arguments().ptr();
  } else if (IsRaw(0, num_types)) {
    return TypeArguments::null();
  }
  Zone* zone = thread->zone();
  auto isolate_group = thread->isolate_group();
  ObjectStore* object_store = isolate_group->object_store();
  TypeArguments& result = TypeArguments::Handle(zone);
  {
    SafepointMutexLocker ml(isolate_group->type_canonicalization_mutex());
    CanonicalTypeArgumentsSet table(zone,
                                    object_store->canonical_type_arguments());
    result ^= table.GetOrNull(CanonicalTypeArgumentsKey(*this));
    object_store->set_canonical_type_arguments(table.Release());
  }
  if (result.IsNull()) {
    // Canonicalize each type argument.
    AbstractType& type_arg = AbstractType::Handle(zone);
    GrowableHandlePtrArray<const AbstractType> canonicalized_types(zone,
                                                                   num_types);
    for (intptr_t i = 0; i < num_types; i++) {
      type_arg = TypeAt(i);
      type_arg = type_arg.Canonicalize(thread);
      canonicalized_types.Add(type_arg);
    }
    SafepointMutexLocker ml(isolate_group->type_canonicalization_mutex());
    CanonicalTypeArgumentsSet table(zone,
                                    object_store->canonical_type_arguments());
    // Since we canonicalized some type arguments above we need to lookup
    // in the table again to make sure we don't already have an equivalent
    // canonical entry.
    result ^= table.GetOrNull(CanonicalTypeArgumentsKey(*this));
    if (result.IsNull()) {
      for (intptr_t i = 0; i < num_types; i++) {
        SetTypeAt(i, canonicalized_types.At(i));
      }
      // Make sure we have an old space object and add it to the table.
      if (this->IsNew()) {
        result ^= Object::Clone(*this, Heap::kOld);
      } else {
        result = this->ptr();
      }
      ASSERT(result.IsOld());
      result.ComputeNullability();
      result.SetCanonical();  // Mark object as being canonical.
      // Now add this TypeArguments vector to the canonical list of type
      // arguments.
      bool present = table.Insert(result);
      ASSERT(!present);
    }
    object_store->set_canonical_type_arguments(table.Release());
  }
  ASSERT(result.Equals(*this));
  ASSERT(!result.IsNull());
  ASSERT(result.IsTypeArguments());
  ASSERT(result.IsCanonical());
  return result.ptr();
}

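// Extracts the class's own type parameters from a flattened instance type
// argument vector. Illustrative example: if class B<T> extends A<int> has a
// flattened vector of length 2 and a single type parameter, an instance
// vector such as <int, String> yields <String>.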
TypeArgumentsPtr TypeArguments::FromInstanceTypeArguments(
    Thread* thread,
    const Class& cls) const {
  if (IsNull()) {
    return ptr();
  }
  const intptr_t num_type_arguments = cls.NumTypeArguments();
  const intptr_t num_type_parameters = cls.NumTypeParameters(thread);
  ASSERT(Length() >= num_type_arguments);
  if (Length() == num_type_parameters) {
    return ptr();
  }
  if (num_type_parameters == 0) {
    return TypeArguments::null();
  }
  Zone* zone = thread->zone();
  const auto& args =
      TypeArguments::Handle(zone, TypeArguments::New(num_type_parameters));
  const intptr_t offset = num_type_arguments - num_type_parameters;
  auto& type = AbstractType::Handle(zone);
  for (intptr_t i = 0; i < num_type_parameters; ++i) {
    type = TypeAt(offset + i);
    args.SetTypeAt(i, type);
  }
  return args.ptr();
}

TypeArgumentsPtr TypeArguments::ToInstantiatorTypeArguments(
    Thread* thread,
    const Class& cls) const {
  if (IsNull()) {
    return ptr();
  }
  const intptr_t num_type_arguments = cls.NumTypeArguments();
  const intptr_t num_type_parameters = cls.NumTypeParameters(thread);
  ASSERT(Length() == num_type_parameters);
  if (num_type_arguments == num_type_parameters) {
    return ptr();
  }
  Zone* zone = thread->zone();
  const auto& args =
      TypeArguments::Handle(zone, TypeArguments::New(num_type_arguments));
  const intptr_t offset = num_type_arguments - num_type_parameters;
  auto& type = AbstractType::Handle(zone);
  for (intptr_t i = 0; i < num_type_parameters; ++i) {
    type = TypeAt(i);
    args.SetTypeAt(offset + i, type);
  }
  return args.ptr();
}

void TypeArguments::EnumerateURIs(URIs* uris) const {
  if (IsNull()) {
    return;
  }
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  AbstractType& type = AbstractType::Handle(zone);
  const intptr_t num_types = Length();
  for (intptr_t i = 0; i < num_types; i++) {
    type = TypeAt(i);
    type.EnumerateURIs(uris);
  }
}

const char* TypeArguments::ToCString() const {
  if (IsNull()) {
    return "TypeArguments: null";  // Optimizing the frequent case.
  }
  ZoneTextBuffer buffer(Thread::Current()->zone());
  PrintTo(&buffer);
  return buffer.buffer();
}

const char* PatchClass::ToCString() const {
  const Class& cls = Class::Handle(wrapped_class());
  const char* cls_name = cls.ToCString();
  return OS::SCreate(Thread::Current()->zone(), "PatchClass for %s", cls_name);
}

PatchClassPtr PatchClass::New(const Class& wrapped_class,
                              const KernelProgramInfo& info,
                              const Script& script) {
  const PatchClass& result = PatchClass::Handle(PatchClass::New());
  result.set_wrapped_class(wrapped_class);
  NOT_IN_PRECOMPILED_RUNTIME(
      result.untag()->set_kernel_program_info(info.ptr()));
  result.set_script(script);
  result.set_kernel_library_index(-1);
  return result.ptr();
}

PatchClassPtr PatchClass::New() {
  ASSERT(Object::patch_class_class() != Class::null());
  return Object::Allocate<PatchClass>(Heap::kOld);
}

void PatchClass::set_wrapped_class(const Class& value) const {
  untag()->set_wrapped_class(value.ptr());
}

#if !defined(DART_PRECOMPILED_RUNTIME)
void PatchClass::set_kernel_program_info(const KernelProgramInfo& info) const {
  untag()->set_kernel_program_info(info.ptr());
}
#endif

void PatchClass::set_script(const Script& value) const {
  untag()->set_script(value.ptr());
}

uword Function::Hash() const {
  uword hash = String::HashRawSymbol(name());
  if (IsClosureFunction()) {
    hash = hash ^ token_pos().Hash();
  }
  if (Owner()->IsClass()) {
    hash = hash ^ Class::Hash(Class::RawCast(Owner()));
  }
  return hash;
}

bool Function::HasBreakpoint() const {
#if defined(PRODUCT)
  return false;
#else
  auto thread = Thread::Current();
  return thread->isolate_group()->debugger()->HasBreakpoint(thread, *this);
#endif
}

void Function::InstallOptimizedCode(const Code& code) const {
  ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
  // We may not have previous code if FLAG_precompile is set.
  // Hot-reload may have already disabled the current code.
  if (HasCode() && !Code::Handle(CurrentCode()).IsDisabled()) {
    Code::Handle(CurrentCode()).DisableDartCode();
  }
  AttachCode(code);
}

void Function::SetInstructions(const Code& value) const {
  // Ensure that nobody is executing this function when we install it.
  if (untag()->code() != Code::null() && HasCode()) {
    GcSafepointOperationScope safepoint(Thread::Current());
    SetInstructionsSafe(value);
  } else {
    ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
    SetInstructionsSafe(value);
  }
}

void Function::SetInstructionsSafe(const Code& value) const {
  untag()->set_code(value.ptr());
  StoreNonPointer(&untag()->entry_point_, value.EntryPoint());
  StoreNonPointer(&untag()->unchecked_entry_point_,
                  value.UncheckedEntryPoint());
}

void Function::AttachCode(const Code& value) const {
  ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
  // Finish setting up code before activating it.
  value.set_owner(*this);
  SetInstructions(value);
  ASSERT(Function::Handle(value.function()).IsNull() ||
         (value.function() == this->ptr()));
}

bool Function::HasCode() const {
  NoSafepointScope no_safepoint;
  ASSERT(untag()->code() != Code::null());
  return untag()->code() != StubCode::LazyCompile().ptr();
}

bool Function::HasCode(FunctionPtr function) {
  NoSafepointScope no_safepoint;
  ASSERT(function->untag()->code() != Code::null());
  return function->untag()->code() != StubCode::LazyCompile().ptr();
}

void Function::ClearCode() const {
#if defined(DART_PRECOMPILED_RUNTIME)
  UNREACHABLE();
#else
  ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
  untag()->set_unoptimized_code(Code::null());
  SetInstructions(StubCode::LazyCompile());
#endif  // defined(DART_PRECOMPILED_RUNTIME)
}

void Function::ClearCodeSafe() const {
#if defined(DART_PRECOMPILED_RUNTIME)
  UNREACHABLE();
#else
  untag()->set_unoptimized_code(Code::null());

  SetInstructionsSafe(StubCode::LazyCompile());
#endif  // defined(DART_PRECOMPILED_RUNTIME)
}

void Function::EnsureHasCompiledUnoptimizedCode() const {
  ASSERT(!ForceOptimize());
  Thread* thread = Thread::Current();
  ASSERT(thread->IsDartMutatorThread());
  // TODO(35224): DEBUG_ASSERT(thread->TopErrorHandlerIsExitFrame());
  Zone* zone = thread->zone();

  const Error& error =
      Error::Handle(zone, Compiler::EnsureUnoptimizedCode(thread, *this));
  if (!error.IsNull()) {
    Exceptions::PropagateError(error);
  }
}

void Function::SwitchToUnoptimizedCode() const {
  ASSERT(HasOptimizedCode());
  Thread* thread = Thread::Current();
  DEBUG_ASSERT(
      thread->isolate_group()->program_lock()->IsCurrentThreadWriter());
  Zone* zone = thread->zone();
  // TODO(35224): DEBUG_ASSERT(thread->TopErrorHandlerIsExitFrame());
  const Code& current_code = Code::Handle(zone, CurrentCode());

  if (FLAG_trace_deoptimization_verbose) {
    THR_Print("Disabling optimized code: '%s' entry: %#" Px "\n",
              ToFullyQualifiedCString(), current_code.EntryPoint());
  }
  current_code.DisableDartCode();
  const Error& error =
      Error::Handle(zone, Compiler::EnsureUnoptimizedCode(thread, *this));
  if (!error.IsNull()) {
    Exceptions::PropagateError(error);
  }
  const Code& unopt_code = Code::Handle(zone, unoptimized_code());
  unopt_code.Enable();
  AttachCode(unopt_code);
}

void Function::SwitchToLazyCompiledUnoptimizedCode() const {
#if defined(DART_PRECOMPILED_RUNTIME)
  UNREACHABLE();
#else
  if (!HasOptimizedCode()) {
    return;
  }

  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  ASSERT(thread->IsDartMutatorThread());

  const Code& current_code = Code::Handle(zone, CurrentCode());
  TIR_Print("Disabling optimized code for %s\n", ToCString());
  current_code.DisableDartCode();

  const Code& unopt_code = Code::Handle(zone, unoptimized_code());
  if (unopt_code.IsNull()) {
    // Set the lazy compile stub code.
    TIR_Print("Switched to lazy compile stub for %s\n", ToCString());
    SetInstructions(StubCode::LazyCompile());
    return;
  }

  TIR_Print("Switched to unoptimized code for %s\n", ToCString());

  AttachCode(unopt_code);
  unopt_code.Enable();
#endif
}

void Function::set_unoptimized_code(const Code& value) const {
#if defined(DART_PRECOMPILED_RUNTIME)
  UNREACHABLE();
#else
  DEBUG_ASSERT(IsMutatorOrAtDeoptSafepoint());
  ASSERT(value.IsNull() || !value.is_optimized());
  untag()->set_unoptimized_code(value.ptr());
#endif
}

ContextScopePtr Function::context_scope() const {
  if (IsClosureFunction()) {
    const Object& obj = Object::Handle(untag()->data());
    ASSERT(!obj.IsNull());
    return ClosureData::Cast(obj).context_scope();
  }
  return ContextScope::null();
}

void Function::set_context_scope(const ContextScope& value) const {
  if (IsClosureFunction()) {
    const Object& obj = Object::Handle(untag()->data());
    ASSERT(!obj.IsNull());
    ClosureData::Cast(obj).set_context_scope(value);
    return;
  }
  UNREACHABLE();
}

Function::AwaiterLink Function::awaiter_link() const {
  if (IsClosureFunction()) {
    const Object& obj = Object::Handle(untag()->data());
    ASSERT(!obj.IsNull());
    return ClosureData::Cast(obj).awaiter_link();
  }
  UNREACHABLE();
  return {};
}

void Function::set_awaiter_link(Function::AwaiterLink link) const {
  if (IsClosureFunction()) {
    const Object& obj = Object::Handle(untag()->data());
    ASSERT(!obj.IsNull());
    ClosureData::Cast(obj).set_awaiter_link(link);
    return;
  }
  UNREACHABLE();
}

ClosurePtr Function::implicit_static_closure() const {
  if (IsImplicitStaticClosureFunction()) {
    const Object& obj = Object::Handle(untag()->data());
    ASSERT(!obj.IsNull());
    return ClosureData::Cast(obj).implicit_static_closure();
  }
  return Closure::null();
}

void Function::set_implicit_static_closure(const Closure& closure) const {
  if (IsImplicitStaticClosureFunction()) {
    const Object& obj = Object::Handle(untag()->data());
    ASSERT(!obj.IsNull());
    ClosureData::Cast(obj).set_implicit_static_closure(closure);
    return;
  }
  UNREACHABLE();
}

ScriptPtr Function::eval_script() const {
  const Object& obj = Object::Handle(untag()->data());
  if (obj.IsScript()) {
    return Script::Cast(obj).ptr();
  }
  return Script::null();
}

void Function::set_eval_script(const Script& script) const {
  ASSERT(token_pos() == TokenPosition::kMinSource);
  ASSERT(untag()->data() == Object::null());
  set_data(script);
}

FunctionPtr Function::extracted_method_closure() const {
  ASSERT(kind() == UntaggedFunction::kMethodExtractor);
  const Object& obj = Object::Handle(untag()->data());
  ASSERT(obj.IsFunction());
  return Function::Cast(obj).ptr();
}

void Function::set_extracted_method_closure(const Function& value) const {
  ASSERT(kind() == UntaggedFunction::kMethodExtractor);
  ASSERT(untag()->data() == Object::null());
  set_data(value);
}

ArrayPtr Function::saved_args_desc() const {
  if (kind() == UntaggedFunction::kDynamicInvocationForwarder) {
    return Array::null();
  }
  ASSERT(kind() == UntaggedFunction::kNoSuchMethodDispatcher ||
         kind() == UntaggedFunction::kInvokeFieldDispatcher);
  return Array::RawCast(untag()->data());
}

void Function::set_saved_args_desc(const Array& value) const {
  ASSERT(kind() == UntaggedFunction::kNoSuchMethodDispatcher ||
         kind() == UntaggedFunction::kInvokeFieldDispatcher);
  ASSERT(untag()->data() == Object::null());
  set_data(value);
}

FieldPtr Function::accessor_field() const {
  ASSERT(kind() == UntaggedFunction::kImplicitGetter ||
         kind() == UntaggedFunction::kImplicitSetter ||
         kind() == UntaggedFunction::kImplicitStaticGetter ||
         kind() == UntaggedFunction::kFieldInitializer);
  return Field::RawCast(untag()->data());
}

void Function::set_accessor_field(const Field& value) const {
  ASSERT(kind() == UntaggedFunction::kImplicitGetter ||
         kind() == UntaggedFunction::kImplicitSetter ||
         kind() == UntaggedFunction::kImplicitStaticGetter ||
         kind() == UntaggedFunction::kFieldInitializer);
  // Top level classes may be finalized multiple times.
  ASSERT(untag()->data() == Object::null() || untag()->data() == value.ptr());
  set_data(value);
}

FunctionPtr Function::parent_function() const {
  if (!IsClosureFunction()) return Function::null();
  Object& obj = Object::Handle(untag()->data());
  ASSERT(!obj.IsNull());
  return ClosureData::Cast(obj).parent_function();
}

void Function::set_parent_function(const Function& value) const {
  ASSERT(IsClosureFunction());
  const Object& obj = Object::Handle(untag()->data());
  ASSERT(!obj.IsNull());
  ClosureData::Cast(obj).set_parent_function(value);
}

TypeArgumentsPtr Function::InstantiateToBounds(
    Thread* thread,
    DefaultTypeArgumentsKind* kind_out) const {
  if (type_parameters() == TypeParameters::null()) {
    if (kind_out != nullptr) {
      *kind_out = DefaultTypeArgumentsKind::kIsInstantiated;
    }
    return Object::empty_type_arguments().ptr();
  }
  auto& type_params = TypeParameters::Handle(thread->zone(), type_parameters());
  auto& result = TypeArguments::Handle(thread->zone(), type_params.defaults());
  if (kind_out != nullptr) {
    if (IsClosureFunction()) {
      *kind_out = default_type_arguments_kind();
    } else {
      // We just return is/is not instantiated if the value isn't cached, as
      // the other checks may be more overhead at runtime than just doing the
      // instantiation.
      *kind_out = result.IsNull() || result.IsInstantiated()
                      ? DefaultTypeArgumentsKind::kIsInstantiated
                      : DefaultTypeArgumentsKind::kNeedsInstantiation;
    }
  }
  return result.ptr();
}

Function::DefaultTypeArgumentsKind Function::default_type_arguments_kind()
    const {
  if (!IsClosureFunction()) {
    UNREACHABLE();
  }
  const auto& closure_data = ClosureData::Handle(ClosureData::RawCast(data()));
  ASSERT(!closure_data.IsNull());
  return closure_data.default_type_arguments_kind();
}

void Function::set_default_type_arguments_kind(
    Function::DefaultTypeArgumentsKind value) const {
  if (!IsClosureFunction()) {
    UNREACHABLE();
  }
  const auto& closure_data = ClosureData::Handle(ClosureData::RawCast(data()));
  ASSERT(!closure_data.IsNull());
  closure_data.set_default_type_arguments_kind(value);
}

Function::DefaultTypeArgumentsKind Function::DefaultTypeArgumentsKindFor(
    const TypeArguments& value) const {
  if (value.IsNull() || value.IsInstantiated()) {
    return DefaultTypeArgumentsKind::kIsInstantiated;
  }
  if (value.CanShareFunctionTypeArguments(*this)) {
    return DefaultTypeArgumentsKind::kSharesFunctionTypeArguments;
  }
  const auto& cls = Class::Handle(Owner());
  if (value.CanShareInstantiatorTypeArguments(cls)) {
    return DefaultTypeArgumentsKind::kSharesInstantiatorTypeArguments;
  }
  return DefaultTypeArgumentsKind::kNeedsInstantiation;
}

// Enclosing outermost function of this local function.
FunctionPtr Function::GetOutermostFunction() const {
  FunctionPtr parent = parent_function();
  if (parent == Object::null()) {
    return ptr();
  }
  Function& function = Function::Handle();
  do {
    function = parent;
    parent = function.parent_function();
  } while (parent != Object::null());
  return function.ptr();
}

FunctionPtr Function::implicit_closure_function() const {
  if (IsClosureFunction() || IsDispatcherOrImplicitAccessor() ||
      IsFieldInitializer() || IsFfiTrampoline() || IsMethodExtractor()) {
    return Function::null();
  }
  const Object& obj = Object::Handle(data());
  ASSERT(obj.IsNull() || obj.IsScript() || obj.IsFunction() || obj.IsArray());
  if (obj.IsNull() || obj.IsScript()) {
    return Function::null();
  }
  if (obj.IsFunction()) {
    return Function::Cast(obj).ptr();
  }
  ASSERT(is_native());
  ASSERT(obj.IsArray());
  const Object& res = Object::Handle(Array::Cast(obj).AtAcquire(1));
  return res.IsNull() ? Function::null() : Function::Cast(res).ptr();
}

void Function::set_implicit_closure_function(const Function& value) const {
  DEBUG_ASSERT(
      IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
  ASSERT(!IsClosureFunction());
  const Object& old_data = Object::Handle(data());
  if (is_native()) {
    ASSERT(old_data.IsArray());
    const auto& pair = Array::Cast(old_data);
    ASSERT(pair.AtAcquire(NativeFunctionData::kTearOff) == Object::null() ||
           value.IsNull());
    pair.SetAtRelease(NativeFunctionData::kTearOff, value);
  } else {
    ASSERT(old_data.IsNull() || value.IsNull());
    set_data(value);
  }
}

void Function::SetFfiCSignature(const FunctionType& sig) const {
  ASSERT(IsFfiTrampoline());
  const Object& obj = Object::Handle(data());
  ASSERT(!obj.IsNull());
  FfiTrampolineData::Cast(obj).set_c_signature(sig);
}

FunctionTypePtr Function::FfiCSignature() const {
  ASSERT(IsFfiTrampoline());
  const Object& obj = Object::Handle(data());
  ASSERT(!obj.IsNull());
  return FfiTrampolineData::Cast(obj).c_signature();
}

8387bool Function::FfiCSignatureContainsHandles() const {
8388 ASSERT(IsFfiTrampoline());
8389 const FunctionType& c_signature = FunctionType::Handle(ptr: FfiCSignature());
8390 const intptr_t num_params = c_signature.num_fixed_parameters();
8391 for (intptr_t i = 0; i < num_params; i++) {
8392 const bool is_handle =
8393 AbstractType::Handle(ptr: c_signature.ParameterTypeAt(index: i)).type_class_id() ==
8394 kFfiHandleCid;
8395 if (is_handle) {
8396 return true;
8397 }
8398 }
8399 return AbstractType::Handle(ptr: c_signature.result_type()).type_class_id() ==
8400 kFfiHandleCid;
8401}
8402
8403// Keep consistent with BaseMarshaller::IsCompound.
8404bool Function::FfiCSignatureReturnsStruct() const {
8405 ASSERT(IsFfiTrampoline());
8406 Zone* zone = Thread::Current()->zone();
8407 const auto& c_signature = FunctionType::Handle(zone, ptr: FfiCSignature());
8408 const auto& type = AbstractType::Handle(zone, ptr: c_signature.result_type());
8409 if (IsFfiTypeClassId(index: type.type_class_id())) {
8410 return false;
8411 }
8412 const auto& cls = Class::Handle(zone, ptr: type.type_class());
8413 const auto& superClass = Class::Handle(zone, ptr: cls.SuperClass());
8414 const bool is_abi_specific_int =
8415 String::Handle(zone, ptr: superClass.UserVisibleName())
8416 .Equals(str: Symbols::AbiSpecificInteger());
8417 if (is_abi_specific_int) {
8418 return false;
8419 }
8420#ifdef DEBUG
8421 const bool is_struct = String::Handle(zone, superClass.UserVisibleName())
8422 .Equals(Symbols::Struct());
8423 const bool is_union = String::Handle(zone, superClass.UserVisibleName())
8424 .Equals(Symbols::Union());
8425 ASSERT(is_struct || is_union);
8426#endif
8427 return true;
8428}
8429
8430int32_t Function::FfiCallbackId() const {
8431 ASSERT(IsFfiTrampoline());
8432 ASSERT(GetFfiTrampolineKind() != FfiTrampolineKind::kCall);
8433
8434 const auto& obj = Object::Handle(ptr: data());
8435 ASSERT(!obj.IsNull());
8436 const auto& trampoline_data = FfiTrampolineData::Cast(obj);
8437
8438 ASSERT(trampoline_data.callback_id() != -1);
8439
8440 return trampoline_data.callback_id();
8441}
8442
8443void Function::AssignFfiCallbackId(int32_t callback_id) const {
8444 ASSERT(IsFfiTrampoline());
8445 ASSERT(GetFfiTrampolineKind() != FfiTrampolineKind::kCall);
8446
8447 const auto& obj = Object::Handle(ptr: data());
8448 ASSERT(!obj.IsNull());
8449 const auto& trampoline_data = FfiTrampolineData::Cast(obj);
8450
8451 ASSERT(trampoline_data.callback_id() == -1);
8452 trampoline_data.set_callback_id(callback_id);
8453}
8454
8455bool Function::FfiIsLeaf() const {
8456 ASSERT(IsFfiTrampoline());
8457 const Object& obj = Object::Handle(ptr: untag()->data());
8458 ASSERT(!obj.IsNull());
8459 return FfiTrampolineData::Cast(obj).is_leaf();
8460}
8461
8462void Function::SetFfiIsLeaf(bool is_leaf) const {
8463 ASSERT(IsFfiTrampoline());
8464 const Object& obj = Object::Handle(ptr: untag()->data());
8465 ASSERT(!obj.IsNull());
8466 FfiTrampolineData::Cast(obj).set_is_leaf(is_leaf);
8467}
8468
8469FunctionPtr Function::FfiCallbackTarget() const {
8470 ASSERT(IsFfiTrampoline());
8471 const Object& obj = Object::Handle(ptr: data());
8472 ASSERT(!obj.IsNull());
8473 return FfiTrampolineData::Cast(obj).callback_target();
8474}
8475
8476void Function::SetFfiCallbackTarget(const Function& target) const {
8477 ASSERT(IsFfiTrampoline());
8478 const Object& obj = Object::Handle(ptr: data());
8479 ASSERT(!obj.IsNull());
8480 FfiTrampolineData::Cast(obj).set_callback_target(target);
8481}
8482
8483InstancePtr Function::FfiCallbackExceptionalReturn() const {
8484 ASSERT(IsFfiTrampoline());
8485 const Object& obj = Object::Handle(ptr: data());
8486 ASSERT(!obj.IsNull());
8487 return FfiTrampolineData::Cast(obj).callback_exceptional_return();
8488}
8489
8490void Function::SetFfiCallbackExceptionalReturn(const Instance& value) const {
8491 ASSERT(IsFfiTrampoline());
8492 const Object& obj = Object::Handle(ptr: data());
8493 ASSERT(!obj.IsNull());
8494 FfiTrampolineData::Cast(obj).set_callback_exceptional_return(value);
8495}
8496
8497FfiTrampolineKind Function::GetFfiTrampolineKind() const {
8498 ASSERT(IsFfiTrampoline());
8499 const Object& obj = Object::Handle(ptr: data());
8500 ASSERT(!obj.IsNull());
8501 return FfiTrampolineData::Cast(obj).trampoline_kind();
8502}
8503
8504void Function::SetFfiTrampolineKind(FfiTrampolineKind value) const {
8505 ASSERT(IsFfiTrampoline());
8506 const Object& obj = Object::Handle(ptr: data());
8507 ASSERT(!obj.IsNull());
8508 FfiTrampolineData::Cast(obj).set_trampoline_kind(value);
8509}
8510
8511const char* Function::KindToCString(UntaggedFunction::Kind kind) {
8512 return UntaggedFunction::KindToCString(k: kind);
8513}
8514
8515FunctionPtr Function::ForwardingTarget() const {
8516 ASSERT(kind() == UntaggedFunction::kDynamicInvocationForwarder);
8517 return Function::RawCast(raw: WeakSerializationReference::Unwrap(obj: data()));
8518}
8519
8520void Function::SetForwardingTarget(const Function& target) const {
8521 ASSERT(kind() == UntaggedFunction::kDynamicInvocationForwarder);
8522 set_data(target);
8523}
8524
8525// This field is heavily overloaded:
8526// kernel eval function: Array[0] = Script
8527// Array[1] = KernelProgramInfo
8528// Array[2] = Kernel index of enclosing library
8529// method extractor: Function extracted closure function
8530// implicit getter: Field
8531// implicit setter: Field
8532// impl. static final gttr: Field
8533// field initializer: Field
8534// noSuchMethod dispatcher: Array arguments descriptor
8535// invoke-field dispatcher: Array arguments descriptor
8536// closure function: ClosureData
8537// irregexp function: Array[0] = RegExp
8538// Array[1] = Smi string specialization cid
8539// native function: Array[0] = String native name
8540// Array[1] = Function implicit closure function
8541// regular function: Function for implicit closure function
8542// constructor, factory: Function for implicit closure function
8543// ffi trampoline function: FfiTrampolineData (Dart->C)
8544// dyn inv forwarder: Forwarding target, a WSR pointing to it or null
8545// (null can only occur if forwarding target was
8546// dropped)
8547void Function::set_data(const Object& value) const {
8548 untag()->set_data<std::memory_order_release>(value.ptr());
8549}
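
// Note on the release store above: it presumably pairs with acquire loads on
// reader threads (e.g. the AtAcquire read of the native-function pair in
// implicit_closure_function()), so that a fully initialized data object is
// published before the pointer becomes visible to other threads.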
8550
8551void Function::set_name(const String& value) const {
8552 ASSERT(value.IsSymbol());
8553 untag()->set_name(value.ptr());
8554}
8555
8556void Function::set_owner(const Object& value) const {
8557 ASSERT(!value.IsNull());
8558 untag()->set_owner(value.ptr());
8559}
8560
8561RegExpPtr Function::regexp() const {
8562 ASSERT(kind() == UntaggedFunction::kIrregexpFunction);
8563 const Array& pair = Array::Cast(obj: Object::Handle(ptr: data()));
8564 return RegExp::RawCast(raw: pair.At(index: 0));
8565}
8566
8567class StickySpecialization : public BitField<intptr_t, bool, 0, 1> {};
8568class StringSpecializationCid
8569 : public BitField<intptr_t, intptr_t, 1, UntaggedObject::kClassIdTagSize> {
8570};
8571
8572intptr_t Function::string_specialization_cid() const {
8573 ASSERT(kind() == UntaggedFunction::kIrregexpFunction);
8574 const Array& pair = Array::Cast(obj: Object::Handle(ptr: data()));
8575 return StringSpecializationCid::decode(value: Smi::Value(raw_smi: Smi::RawCast(raw: pair.At(index: 1))));
8576}
8577
8578bool Function::is_sticky_specialization() const {
8579 ASSERT(kind() == UntaggedFunction::kIrregexpFunction);
8580 const Array& pair = Array::Cast(obj: Object::Handle(ptr: data()));
8581 return StickySpecialization::decode(value: Smi::Value(raw_smi: Smi::RawCast(raw: pair.At(index: 1))));
8582}
8583
8584void Function::SetRegExpData(const RegExp& regexp,
8585 intptr_t string_specialization_cid,
8586 bool sticky) const {
8587 ASSERT(kind() == UntaggedFunction::kIrregexpFunction);
8588 ASSERT(IsStringClassId(string_specialization_cid));
8589 ASSERT(data() == Object::null());
8590 const Array& pair = Array::Handle(ptr: Array::New(len: 2, space: Heap::kOld));
8591 pair.SetAt(index: 0, value: regexp);
8592 pair.SetAt(index: 1, value: Smi::Handle(ptr: Smi::New(value: StickySpecialization::encode(value: sticky) |
8593 StringSpecializationCid::encode(
8594 value: string_specialization_cid))));
8595 set_data(pair);
8596}
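
// Illustrative encoding example: the Smi in pair slot 1 packs the sticky bit
// into bit 0 and the string specialization cid into the next kClassIdTagSize
// bits, so for sticky == true and cid == 7 the encoded value is
// (7 << 1) | 1 == 15, and decoding either field is a single mask-and-shift.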

StringPtr Function::native_name() const {
  ASSERT(is_native());
  const Object& obj = Object::Handle(data());
  ASSERT(obj.IsArray());
  return String::RawCast(Array::Cast(obj).At(0));
}

void Function::set_native_name(const String& value) const {
  ASSERT(is_native());
  const auto& pair = Array::Cast(Object::Handle(data()));
  ASSERT(pair.At(0) == Object::null());
  pair.SetAt(NativeFunctionData::kNativeName, value);
}

void Function::SetSignature(const FunctionType& value) const {
  set_signature(value);
  ASSERT(NumImplicitParameters() == value.num_implicit_parameters());
  if (IsClosureFunction() && value.IsGeneric()) {
    const TypeParameters& type_params =
        TypeParameters::Handle(value.type_parameters());
    const TypeArguments& defaults =
        TypeArguments::Handle(type_params.defaults());
    auto kind = DefaultTypeArgumentsKindFor(defaults);
    ASSERT(kind != DefaultTypeArgumentsKind::kInvalid);
    set_default_type_arguments_kind(kind);
  }
}

TypeParameterPtr FunctionType::TypeParameterAt(intptr_t index,
                                               Nullability nullability) const {
  ASSERT(index >= 0 && index < NumTypeParameters());
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  TypeParameter& type_param = TypeParameter::Handle(
      zone, TypeParameter::New(*this, NumParentTypeArguments(),
                               NumParentTypeArguments() + index, nullability));
  type_param.SetIsFinalized();
  if (IsFinalized()) {
    type_param ^= type_param.Canonicalize(thread);
  }
  return type_param.ptr();
}

void FunctionType::set_result_type(const AbstractType& value) const {
  ASSERT(!value.IsNull());
  untag()->set_result_type(value.ptr());
}

AbstractTypePtr Function::ParameterTypeAt(intptr_t index) const {
  const Array& types = Array::Handle(parameter_types());
  return AbstractType::RawCast(types.At(index));
}

AbstractTypePtr FunctionType::ParameterTypeAt(intptr_t index) const {
  const Array& parameter_types = Array::Handle(untag()->parameter_types());
  return AbstractType::RawCast(parameter_types.At(index));
}

void FunctionType::SetParameterTypeAt(intptr_t index,
                                      const AbstractType& value) const {
  ASSERT(!value.IsNull());
  const Array& parameter_types = Array::Handle(untag()->parameter_types());
  parameter_types.SetAt(index, value);
}

void FunctionType::set_parameter_types(const Array& value) const {
  ASSERT(value.IsNull() || value.Length() > 0);
  untag()->set_parameter_types(value.ptr());
}

StringPtr Function::ParameterNameAt(intptr_t index) const {
#if defined(DART_PRECOMPILED_RUNTIME)
  if (signature() == FunctionType::null()) {
    // Without the signature, we're guaranteed not to have any name
    // information.
    return Symbols::OptimizedOut().ptr();
  }
#endif
  const intptr_t num_fixed = num_fixed_parameters();
  if (HasOptionalNamedParameters() && index >= num_fixed) {
    const Array& parameter_names =
        Array::Handle(signature()->untag()->named_parameter_names());
    return String::RawCast(parameter_names.At(index - num_fixed));
  }
#if defined(DART_PRECOMPILED_RUNTIME)
  return Symbols::OptimizedOut().ptr();
#else
  const Array& names = Array::Handle(untag()->positional_parameter_names());
  return String::RawCast(names.At(index));
#endif
}

void Function::SetParameterNameAt(intptr_t index, const String& value) const {
#if defined(DART_PRECOMPILED_RUNTIME)
  UNREACHABLE();
#else
  ASSERT(!value.IsNull() && value.IsSymbol());
  if (HasOptionalNamedParameters() && index >= num_fixed_parameters()) {
    // These should be set on the signature, not the function.
    UNREACHABLE();
  }
  const Array& parameter_names =
      Array::Handle(untag()->positional_parameter_names());
  parameter_names.SetAt(index, value);
#endif
}

#if !defined(DART_PRECOMPILED_RUNTIME)
void Function::set_positional_parameter_names(const Array& value) const {
  ASSERT(value.ptr() == Object::empty_array().ptr() || value.Length() > 0);
  untag()->set_positional_parameter_names(value.ptr());
}
#endif

StringPtr FunctionType::ParameterNameAt(intptr_t index) const {
  const intptr_t num_fixed = num_fixed_parameters();
  if (!HasOptionalNamedParameters() || index < num_fixed) {
    // The positional parameter names are stored on the function, not here.
    UNREACHABLE();
  }
  const Array& parameter_names =
      Array::Handle(untag()->named_parameter_names());
  return String::RawCast(parameter_names.At(index - num_fixed));
}

void FunctionType::SetParameterNameAt(intptr_t index,
                                      const String& value) const {
#if defined(DART_PRECOMPILED_RUNTIME)
  UNREACHABLE();
#else
  ASSERT(!value.IsNull() && value.IsSymbol());
  const intptr_t num_fixed = num_fixed_parameters();
  if (!HasOptionalNamedParameters() || index < num_fixed) {
    UNREACHABLE();
  }
  const Array& parameter_names =
      Array::Handle(untag()->named_parameter_names());
  parameter_names.SetAt(index - num_fixed, value);
#endif
}

void FunctionType::set_named_parameter_names(const Array& value) const {
  ASSERT(value.ptr() == Object::empty_array().ptr() || value.Length() > 0);
  untag()->set_named_parameter_names(value.ptr());
}

void Function::CreateNameArray(Heap::Space space) const {
#if defined(DART_PRECOMPILED_RUNTIME)
  UNREACHABLE();
#else
  const intptr_t num_positional_params =
      num_fixed_parameters() + NumOptionalPositionalParameters();
  if (num_positional_params == 0) {
    set_positional_parameter_names(Object::empty_array());
  } else {
    set_positional_parameter_names(
        Array::Handle(Array::New(num_positional_params, space)));
  }
#endif
}

void FunctionType::CreateNameArrayIncludingFlags(Heap::Space space) const {
#if defined(DART_PRECOMPILED_RUNTIME)
  UNREACHABLE();
#else
  const intptr_t num_named_parameters = NumOptionalNamedParameters();
  if (num_named_parameters == 0) {
    return set_named_parameter_names(Object::empty_array());
  }
  // Currently, we only store flags for named parameters.
  const intptr_t last_index = (num_named_parameters - 1) /
                              compiler::target::kNumParameterFlagsPerElement;
  const intptr_t num_flag_slots = last_index + 1;
  intptr_t num_total_slots = num_named_parameters + num_flag_slots;
  auto& array = Array::Handle(Array::New(num_total_slots, space));
  // Set flag slots to Smi 0 before handing off.
  auto& empty_flags_smi = Smi::Handle(Smi::New(0));
  for (intptr_t i = num_named_parameters; i < num_total_slots; i++) {
    array.SetAt(i, empty_flags_smi);
  }
  set_named_parameter_names(array);
#endif
}
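
// Worked example for CreateNameArrayIncludingFlags above (the constant is
// target-defined; a value is assumed here for illustration): with
// kNumParameterFlagsPerElement == 16 and 17 named parameters,
// last_index == (17 - 1) / 16 == 1, so two flag slots are appended and the
// array holds 17 + 2 == 19 entries, the last two being Smi 0 flag words.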

intptr_t FunctionType::GetRequiredFlagIndex(intptr_t index,
                                            intptr_t* flag_mask) const {
  // If these calculations change, also change
  // FlowGraphBuilder::BuildClosureCallHasRequiredNamedArgumentsCheck.
  ASSERT(HasOptionalNamedParameters());
  ASSERT(flag_mask != nullptr);
  ASSERT(index >= num_fixed_parameters());
  index -= num_fixed_parameters();
  *flag_mask = (1 << compiler::target::kRequiredNamedParameterFlag)
               << ((static_cast<uintptr_t>(index) %
                    compiler::target::kNumParameterFlagsPerElement) *
                   compiler::target::kNumParameterFlags);
  return NumOptionalNamedParameters() +
         index / compiler::target::kNumParameterFlagsPerElement;
}
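
// Worked example for GetRequiredFlagIndex above (flag constants are
// target-defined; values are assumed here for illustration): with
// kNumParameterFlags == 2, kRequiredNamedParameterFlag == 0 and
// kNumParameterFlagsPerElement == 16, the named parameter at adjusted index 4
// yields *flag_mask == 1 << (4 * 2) == 0x100, stored in the flag word at
// array index NumOptionalNamedParameters() + 4 / 16 == +0.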

bool Function::HasRequiredNamedParameters() const {
#if defined(DART_PRECOMPILED_RUNTIME)
  if (signature() == FunctionType::null()) {
    // Signatures for functions with required named parameters are not dropped.
    return false;
  }
#endif
  return FunctionType::Handle(signature()).HasRequiredNamedParameters();
}

bool Function::IsRequiredAt(intptr_t index) const {
#if defined(DART_PRECOMPILED_RUNTIME)
  if (signature() == FunctionType::null()) {
    // The signature is not dropped in AOT when any named parameter is
    // required.
    return false;
  }
#endif
  if (!HasOptionalNamedParameters() || index < num_fixed_parameters()) {
    return false;
  }
  const FunctionType& sig = FunctionType::Handle(signature());
  return sig.IsRequiredAt(index);
}

bool FunctionType::IsRequiredAt(intptr_t index) const {
  if (!HasOptionalNamedParameters() || index < num_fixed_parameters()) {
    return false;
  }
  intptr_t flag_mask;
  const intptr_t flag_index = GetRequiredFlagIndex(index, &flag_mask);
  const Array& parameter_names =
      Array::Handle(untag()->named_parameter_names());
  if (flag_index >= parameter_names.Length()) {
    return false;
  }
  const intptr_t flags =
      Smi::Value(Smi::RawCast(parameter_names.At(flag_index)));
  return (flags & flag_mask) != 0;
}

void FunctionType::SetIsRequiredAt(intptr_t index) const {
#if defined(DART_PRECOMPILED_RUNTIME)
  UNREACHABLE();
#else
  intptr_t flag_mask;
  const intptr_t flag_index = GetRequiredFlagIndex(index, &flag_mask);
  const Array& parameter_names =
      Array::Handle(untag()->named_parameter_names());
  ASSERT(flag_index < parameter_names.Length());
  const intptr_t flags =
      Smi::Value(Smi::RawCast(parameter_names.At(flag_index)));
  parameter_names.SetAt(flag_index, Smi::Handle(Smi::New(flags | flag_mask)));
#endif
}

void FunctionType::FinalizeNameArray() const {
#if defined(DART_PRECOMPILED_RUNTIME)
  UNREACHABLE();
#else
  const intptr_t num_named_parameters = NumOptionalNamedParameters();
  if (num_named_parameters == 0) {
    ASSERT(untag()->named_parameter_names() == Object::empty_array().ptr());
    return;
  }
  const Array& parameter_names =
      Array::Handle(untag()->named_parameter_names());
  // Truncate the parameter names array to remove unused flags from the end.
  intptr_t last_used = parameter_names.Length() - 1;
  for (; last_used >= num_named_parameters; --last_used) {
    if (Smi::Value(Smi::RawCast(parameter_names.At(last_used))) != 0) {
      break;
    }
  }
  parameter_names.Truncate(last_used + 1);
#endif
}

bool FunctionType::HasRequiredNamedParameters() const {
  const intptr_t num_named_params = NumOptionalNamedParameters();
  if (num_named_params == 0) return false;
  // Check for flag slots in the named parameter names array.
  const auto& parameter_names = Array::Handle(named_parameter_names());
  ASSERT(!parameter_names.IsNull());
  return parameter_names.Length() > num_named_params;
}
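
// Note for HasRequiredNamedParameters above: FinalizeNameArray() truncates
// all-zero flag words from the end of the names array, so any surviving flag
// slot past the names implies that at least one parameter flag (currently
// only the `required` flag) is set.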

static void ReportTooManyTypeParameters(const FunctionType& sig) {
  Report::MessageF(Report::kError, Script::Handle(), TokenPosition::kNoSource,
                   Report::AtLocation,
                   "too many type parameters declared in signature '%s' or in "
                   "its enclosing signatures",
                   sig.ToUserVisibleCString());
  UNREACHABLE();
}

void FunctionType::SetTypeParameters(const TypeParameters& value) const {
  untag()->set_type_parameters(value.ptr());
  const intptr_t count = value.Length();
  if (!UntaggedFunctionType::PackedNumTypeParameters::is_valid(count)) {
    ReportTooManyTypeParameters(*this);
  }
  untag()->packed_type_parameter_counts_.Update<PackedNumTypeParameters>(count);
}

void FunctionType::SetNumParentTypeArguments(intptr_t value) const {
  ASSERT(value >= 0);
  if (!PackedNumParentTypeArguments::is_valid(value)) {
    ReportTooManyTypeParameters(*this);
  }
  untag()->packed_type_parameter_counts_.Update<PackedNumParentTypeArguments>(
      value);
}

bool Function::IsGeneric() const {
  return FunctionType::IsGeneric(signature());
}
intptr_t Function::NumTypeParameters() const {
  return FunctionType::NumTypeParametersOf(signature());
}
intptr_t Function::NumParentTypeArguments() const {
  return FunctionType::NumParentTypeArgumentsOf(signature());
}
intptr_t Function::NumTypeArguments() const {
  return FunctionType::NumTypeArgumentsOf(signature());
}
intptr_t Function::num_fixed_parameters() const {
  return FunctionType::NumFixedParametersOf(signature());
}
bool Function::HasOptionalParameters() const {
  return FunctionType::HasOptionalParameters(signature());
}
bool Function::HasOptionalNamedParameters() const {
  return FunctionType::HasOptionalNamedParameters(signature());
}
bool Function::HasOptionalPositionalParameters() const {
  return FunctionType::HasOptionalPositionalParameters(signature());
}
intptr_t Function::NumOptionalParameters() const {
  return FunctionType::NumOptionalParametersOf(signature());
}
intptr_t Function::NumOptionalPositionalParameters() const {
  return FunctionType::NumOptionalPositionalParametersOf(signature());
}
intptr_t Function::NumOptionalNamedParameters() const {
  return FunctionType::NumOptionalNamedParametersOf(signature());
}
intptr_t Function::NumParameters() const {
  return FunctionType::NumParametersOf(signature());
}

TypeParameterPtr Function::TypeParameterAt(intptr_t index,
                                           Nullability nullability) const {
  const FunctionType& sig = FunctionType::Handle(signature());
  return sig.TypeParameterAt(index, nullability);
}

void Function::set_kind(UntaggedFunction::Kind value) const {
  untag()->kind_tag_.Update<KindBits>(value);
}

void Function::set_modifier(UntaggedFunction::AsyncModifier value) const {
  untag()->kind_tag_.Update<ModifierBits>(value);
}

void Function::set_recognized_kind(MethodRecognizer::Kind value) const {
  // Prevent multiple settings of kind.
  ASSERT((value == MethodRecognizer::kUnknown) || !IsRecognized());
  untag()->kind_tag_.Update<RecognizedBits>(value);
}

void Function::set_token_pos(TokenPosition token_pos) const {
#if defined(DART_PRECOMPILED_RUNTIME)
  UNREACHABLE();
#else
  ASSERT(!token_pos.IsClassifying() || IsMethodExtractor());
  StoreNonPointer(&untag()->token_pos_, token_pos);
#endif
}

void Function::set_kind_tag(uint32_t value) const {
  untag()->kind_tag_ = value;
}

bool Function::is_eval_function() const {
  if (data()->IsArray()) {
    const intptr_t len = Array::LengthOf(Array::RawCast(data()));
    return len == static_cast<intptr_t>(EvalFunctionData::kLength);
  }
  return false;
}

void Function::set_packed_fields(uint32_t packed_fields) const {
#if defined(DART_PRECOMPILED_RUNTIME)
  UNREACHABLE();
#else
  StoreNonPointer(&untag()->packed_fields_, packed_fields);
#endif
}

bool Function::IsOptimizable() const {
  if (FLAG_precompiled_mode) {
    return true;
  }
  if (ForceOptimize()) return true;
  if (is_native()) {
    // Native methods don't need to be optimized.
    return false;
  }
  if (is_optimizable() && (script() != Script::null())) {
    // Additional check needed for implicit getters.
    return (unoptimized_code() == Object::null()) ||
           (Code::Handle(unoptimized_code()).Size() <
            FLAG_huge_method_cutoff_in_code_size);
  }
  return false;
}

void Function::SetIsOptimizable(bool value) const {
  ASSERT(!is_native());
  set_is_optimizable(value);
  if (!value) {
    set_is_inlinable(false);
    set_usage_counter(INT32_MIN);
  }
}

bool Function::ForceOptimize() const {
  return RecognizedKindForceOptimize() || IsFfiTrampoline() ||
         IsTypedDataViewFactory() || IsUnmodifiableTypedDataViewFactory();
}

bool Function::RecognizedKindForceOptimize() const {
  switch (recognized_kind()) {
    // Uses unboxed/untagged data not supported in unoptimized code.
    case MethodRecognizer::kFinalizerBase_getIsolateFinalizers:
    case MethodRecognizer::kFinalizerBase_setIsolate:
    case MethodRecognizer::kFinalizerBase_setIsolateFinalizers:
    case MethodRecognizer::kFinalizerEntry_getExternalSize:
    case MethodRecognizer::kExtensionStreamHasListener:
    case MethodRecognizer::kFfiLoadInt8:
    case MethodRecognizer::kFfiLoadInt16:
    case MethodRecognizer::kFfiLoadInt32:
    case MethodRecognizer::kFfiLoadInt64:
    case MethodRecognizer::kFfiLoadUint8:
    case MethodRecognizer::kFfiLoadUint16:
    case MethodRecognizer::kFfiLoadUint32:
    case MethodRecognizer::kFfiLoadUint64:
    case MethodRecognizer::kFfiLoadFloat:
    case MethodRecognizer::kFfiLoadFloatUnaligned:
    case MethodRecognizer::kFfiLoadDouble:
    case MethodRecognizer::kFfiLoadDoubleUnaligned:
    case MethodRecognizer::kFfiLoadPointer:
    case MethodRecognizer::kFfiStoreInt8:
    case MethodRecognizer::kFfiStoreInt16:
    case MethodRecognizer::kFfiStoreInt32:
    case MethodRecognizer::kFfiStoreInt64:
    case MethodRecognizer::kFfiStoreUint8:
    case MethodRecognizer::kFfiStoreUint16:
    case MethodRecognizer::kFfiStoreUint32:
    case MethodRecognizer::kFfiStoreUint64:
    case MethodRecognizer::kFfiStoreFloat:
    case MethodRecognizer::kFfiStoreFloatUnaligned:
    case MethodRecognizer::kFfiStoreDouble:
    case MethodRecognizer::kFfiStoreDoubleUnaligned:
    case MethodRecognizer::kFfiStorePointer:
    case MethodRecognizer::kFfiFromAddress:
    case MethodRecognizer::kFfiGetAddress:
    case MethodRecognizer::kFfiAsExternalTypedDataInt8:
    case MethodRecognizer::kFfiAsExternalTypedDataInt16:
    case MethodRecognizer::kFfiAsExternalTypedDataInt32:
    case MethodRecognizer::kFfiAsExternalTypedDataInt64:
    case MethodRecognizer::kFfiAsExternalTypedDataUint8:
    case MethodRecognizer::kFfiAsExternalTypedDataUint16:
    case MethodRecognizer::kFfiAsExternalTypedDataUint32:
    case MethodRecognizer::kFfiAsExternalTypedDataUint64:
    case MethodRecognizer::kFfiAsExternalTypedDataFloat:
    case MethodRecognizer::kFfiAsExternalTypedDataDouble:
    case MethodRecognizer::kGetNativeField:
    case MethodRecognizer::kRecord_fieldNames:
    case MethodRecognizer::kRecord_numFields:
    case MethodRecognizer::kUtf8DecoderScan:
    case MethodRecognizer::kDouble_hashCode:
    // Prevent the GC from running so that the operation is atomic from
    // a GC point of view. Always double check implementation in
    // kernel_to_il.cc that no GC can happen in between the relevant IL
    // instructions.
    // TODO(https://dartbug.com/48527): Support inlining.
    case MethodRecognizer::kFinalizerBase_exchangeEntriesCollectedWithNull:
    // Both unboxed/untagged data and atomic-to-GC operation.
    case MethodRecognizer::kFinalizerEntry_allocate:
      return true;
    default:
      return false;
  }
}

#if !defined(DART_PRECOMPILED_RUNTIME)
bool Function::CanBeInlined() const {
  // Our force-optimized functions cannot deoptimize to an unoptimized frame.
  // If the instructions of the force-optimized function body get moved via
  // code motion, we might attempt to deoptimize a frame where the force-
  // optimized function has only partially finished. Since force-optimized
  // functions cannot deoptimize to unoptimized frames, we prevent them from
  // being inlined (for now).
  if (ForceOptimize()) {
    if (IsFfiTrampoline()) {
      // We currently don't support inlining FFI trampolines. Some of them
      // are naturally non-inlinable because they contain a try/catch block,
      // but this condition is broader than strictly necessary.
      // The work necessary for inlining FFI trampolines is tracked by
      // http://dartbug.com/45055.
      return false;
    }
    return CompilerState::Current().is_aot();
  }

  if (HasBreakpoint()) {
    return false;
  }

  return is_inlinable();
}
#endif  // !defined(DART_PRECOMPILED_RUNTIME)

intptr_t Function::NumImplicitParameters() const {
  const UntaggedFunction::Kind k = kind();
  if (k == UntaggedFunction::kConstructor) {
    // Type arguments for factory; instance for generative constructor.
    return 1;
  }
  if ((k == UntaggedFunction::kClosureFunction) ||
      (k == UntaggedFunction::kImplicitClosureFunction) ||
      (k == UntaggedFunction::kFfiTrampoline)) {
    return 1;  // Closure object.
  }
  if (!is_static()) {
    // Closure functions defined inside instance (i.e. non-static) functions
    // are marked as non-static, but they do not have a receiver.
    // Closures are handled above.
    ASSERT((k != UntaggedFunction::kClosureFunction) &&
           (k != UntaggedFunction::kImplicitClosureFunction));
    return 1;  // Receiver.
  }
  return 0;  // No implicit parameters.
}
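
// Illustrative examples for NumImplicitParameters above: a generative
// constructor receives the constructed instance (and a factory its type
// argument vector) as one implicit parameter, an instance method receives its
// receiver, a closure function its closure object, and a static or top-level
// function receives none.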

bool Function::AreValidArgumentCounts(intptr_t num_type_arguments,
                                      intptr_t num_arguments,
                                      intptr_t num_named_arguments,
                                      String* error_message) const {
  if ((num_type_arguments != 0) &&
      (num_type_arguments != NumTypeParameters())) {
    if (error_message != nullptr) {
      const intptr_t kMessageBufferSize = 64;
      char message_buffer[kMessageBufferSize];
      Utils::SNPrint(message_buffer, kMessageBufferSize,
                     "%" Pd " type arguments passed, but %" Pd " expected",
                     num_type_arguments, NumTypeParameters());
      // Allocate in old space because it can be invoked in background
      // optimizing compilation.
      *error_message = String::New(message_buffer, Heap::kOld);
    }
    return false;  // Too many type arguments.
  }
  if (num_named_arguments > NumOptionalNamedParameters()) {
    if (error_message != nullptr) {
      const intptr_t kMessageBufferSize = 64;
      char message_buffer[kMessageBufferSize];
      Utils::SNPrint(message_buffer, kMessageBufferSize,
                     "%" Pd " named passed, at most %" Pd " expected",
                     num_named_arguments, NumOptionalNamedParameters());
      // Allocate in old space because it can be invoked in background
      // optimizing compilation.
      *error_message = String::New(message_buffer, Heap::kOld);
    }
    return false;  // Too many named arguments.
  }
  const intptr_t num_pos_args = num_arguments - num_named_arguments;
  const intptr_t num_opt_pos_params = NumOptionalPositionalParameters();
  const intptr_t num_pos_params = num_fixed_parameters() + num_opt_pos_params;
  if (num_pos_args > num_pos_params) {
    if (error_message != nullptr) {
      const intptr_t kMessageBufferSize = 64;
      char message_buffer[kMessageBufferSize];
      // Hide implicit parameters from the user.
      const intptr_t num_hidden_params = NumImplicitParameters();
      Utils::SNPrint(message_buffer, kMessageBufferSize,
                     "%" Pd "%s passed, %s%" Pd " expected",
                     num_pos_args - num_hidden_params,
                     num_opt_pos_params > 0 ? " positional" : "",
                     num_opt_pos_params > 0 ? "at most " : "",
                     num_pos_params - num_hidden_params);
      // Allocate in old space because it can be invoked in background
      // optimizing compilation.
      *error_message = String::New(message_buffer, Heap::kOld);
    }
    return false;  // Too many fixed and/or positional arguments.
  }
  if (num_pos_args < num_fixed_parameters()) {
    if (error_message != nullptr) {
      const intptr_t kMessageBufferSize = 64;
      char message_buffer[kMessageBufferSize];
      // Hide implicit parameters from the user.
      const intptr_t num_hidden_params = NumImplicitParameters();
      Utils::SNPrint(message_buffer, kMessageBufferSize,
                     "%" Pd "%s passed, %s%" Pd " expected",
                     num_pos_args - num_hidden_params,
                     num_opt_pos_params > 0 ? " positional" : "",
                     num_opt_pos_params > 0 ? "at least " : "",
                     num_fixed_parameters() - num_hidden_params);
      // Allocate in old space because it can be invoked in background
      // optimizing compilation.
      *error_message = String::New(message_buffer, Heap::kOld);
    }
    return false;  // Too few fixed and/or positional arguments.
  }
  return true;
}
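
// Worked example for AreValidArgumentCounts above (ignoring implicit
// parameters): for `void f(int a, {int b})`, num_fixed_parameters() == 1 and
// NumOptionalNamedParameters() == 1, so `f(1, 2)` fails the positional check
// (2 > 1), `f()` fails the fixed-parameter check (0 < 1), and `f(1, b: 2)`
// passes all four checks.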

bool Function::AreValidArguments(intptr_t num_type_arguments,
                                 intptr_t num_arguments,
                                 const Array& argument_names,
                                 String* error_message) const {
  const Array& args_desc_array = Array::Handle(ArgumentsDescriptor::NewBoxed(
      num_type_arguments, num_arguments, argument_names, Heap::kNew));
  ArgumentsDescriptor args_desc(args_desc_array);
  return AreValidArguments(args_desc, error_message);
}

bool Function::AreValidArguments(const ArgumentsDescriptor& args_desc,
                                 String* error_message) const {
  const intptr_t num_type_arguments = args_desc.TypeArgsLen();
  const intptr_t num_arguments = args_desc.Count();
  const intptr_t num_named_arguments = args_desc.NamedCount();

  if (!AreValidArgumentCounts(num_type_arguments, num_arguments,
                              num_named_arguments, error_message)) {
    return false;
  }
  // Verify that all argument names are valid parameter names.
  Thread* thread = Thread::Current();
  auto isolate_group = thread->isolate_group();
  Zone* zone = thread->zone();
  String& argument_name = String::Handle(zone);
  String& parameter_name = String::Handle(zone);
  const intptr_t num_positional_args = num_arguments - num_named_arguments;
  const intptr_t num_parameters = NumParameters();
  for (intptr_t i = 0; i < num_named_arguments; i++) {
    argument_name = args_desc.NameAt(i);
    ASSERT(argument_name.IsSymbol());
    bool found = false;
    for (intptr_t j = num_positional_args; j < num_parameters; j++) {
      parameter_name = ParameterNameAt(j);
      ASSERT(parameter_name.IsSymbol());
      if (argument_name.Equals(parameter_name)) {
        found = true;
        break;
      }
    }
    if (!found) {
      if (error_message != nullptr) {
        const intptr_t kMessageBufferSize = 64;
        char message_buffer[kMessageBufferSize];
        Utils::SNPrint(message_buffer, kMessageBufferSize,
                       "no optional formal parameter named '%s'",
                       argument_name.ToCString());
        *error_message = String::New(message_buffer);
      }
      return false;
    }
  }
  if (isolate_group->use_strict_null_safety_checks()) {
    // Verify that all required named parameters are filled.
    for (intptr_t j = num_parameters - NumOptionalNamedParameters();
         j < num_parameters; j++) {
      if (IsRequiredAt(j)) {
        parameter_name = ParameterNameAt(j);
        ASSERT(parameter_name.IsSymbol());
        bool found = false;
        for (intptr_t i = 0; i < num_named_arguments; i++) {
          argument_name = args_desc.NameAt(i);
          ASSERT(argument_name.IsSymbol());
          if (argument_name.Equals(parameter_name)) {
            found = true;
            break;
          }
        }
        if (!found) {
          if (error_message != nullptr) {
            const intptr_t kMessageBufferSize = 64;
            char message_buffer[kMessageBufferSize];
            Utils::SNPrint(message_buffer, kMessageBufferSize,
                           "missing required named parameter '%s'",
                           parameter_name.ToCString());
            *error_message = String::New(message_buffer);
          }
          return false;
        }
      }
    }
  }
  return true;
}

// Retrieves the function type arguments, if any. This could be an explicitly
// passed type argument vector from the arguments array, delayed type
// arguments in closures, or instantiated bounds for the type parameters if no
// other source for function type arguments is found.
static TypeArgumentsPtr RetrieveFunctionTypeArguments(
    Thread* thread,
    Zone* zone,
    const Function& function,
    const Instance& receiver,
    const TypeArguments& instantiator_type_args,
    const Array& args,
    const ArgumentsDescriptor& args_desc) {
  ASSERT(!function.IsNull());

  const intptr_t kNumCurrentTypeArgs = function.NumTypeParameters();
  const intptr_t kNumParentTypeArgs = function.NumParentTypeArguments();
  const intptr_t kNumTypeArgs = kNumCurrentTypeArgs + kNumParentTypeArgs;
  // Non-generic functions don't receive type arguments.
  if (kNumTypeArgs == 0) return Object::empty_type_arguments().ptr();
  // Closure functions require that the receiver be provided (and that it be
  // a closure).
  ASSERT(!function.IsClosureFunction() || receiver.IsClosure());

  // Only closure functions should have possibly generic parents.
  ASSERT(function.IsClosureFunction() || kNumParentTypeArgs == 0);
  const auto& parent_type_args =
      function.IsClosureFunction()
          ? TypeArguments::Handle(
                zone, Closure::Cast(receiver).function_type_arguments())
          : Object::empty_type_arguments();
  // We don't try to instantiate the parent type parameters to their bounds
  // if not provided or check any closed-over type arguments against the
  // parent type parameter bounds (since they have been type checked already).
  if (kNumCurrentTypeArgs == 0) return parent_type_args.ptr();

  auto& function_type_args = TypeArguments::Handle(zone);
  // First check for delayed type arguments before using either provided or
  // default type arguments.
  bool has_delayed_type_args = false;
  if (function.IsClosureFunction()) {
    const auto& closure = Closure::Cast(receiver);
    function_type_args = closure.delayed_type_arguments();
    has_delayed_type_args =
        function_type_args.ptr() != Object::empty_type_arguments().ptr();
  }

  if (args_desc.TypeArgsLen() > 0) {
    // We should never end up here when the receiver is a closure with delayed
    // type arguments unless this dynamically called closure function was
    // retrieved directly from the closure instead of going through
    // DartEntry::ResolveCallable, which appropriately checks for this case.
    ASSERT(!has_delayed_type_args);
    function_type_args ^= args.At(0);
  } else if (!has_delayed_type_args) {
    // We have no explicitly provided function type arguments, so instantiate
    // the type parameters to bounds or replace as appropriate.
    Function::DefaultTypeArgumentsKind kind;
    function_type_args = function.InstantiateToBounds(thread, &kind);
    switch (kind) {
      case Function::DefaultTypeArgumentsKind::kInvalid:
        // We shouldn't hit the invalid case.
        UNREACHABLE();
        break;
      case Function::DefaultTypeArgumentsKind::kIsInstantiated:
        // Nothing left to do.
        break;
      case Function::DefaultTypeArgumentsKind::kNeedsInstantiation:
        function_type_args = function_type_args.InstantiateAndCanonicalizeFrom(
            instantiator_type_args, parent_type_args);
        break;
      case Function::DefaultTypeArgumentsKind::kSharesInstantiatorTypeArguments:
        function_type_args = instantiator_type_args.ptr();
        break;
      case Function::DefaultTypeArgumentsKind::kSharesFunctionTypeArguments:
        function_type_args = parent_type_args.ptr();
        break;
    }
  }

  return function_type_args.Prepend(zone, parent_type_args, kNumParentTypeArgs,
                                    kNumTypeArgs);
}
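
// Illustrative example for RetrieveFunctionTypeArguments above: for a closure
// `g<S>()` nested inside `f<T>()` that is invoked dynamically with no
// explicit and no delayed type arguments, S is instantiated to its default
// and the result is prepended with the closed-over vector for T, yielding the
// full [T, S] vector of length NumTypeArguments().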

// Retrieves the instantiator type arguments, if any, from the receiver.
static TypeArgumentsPtr RetrieveInstantiatorTypeArguments(
    Zone* zone,
    const Function& function,
    const Instance& receiver) {
  if (function.IsClosureFunction()) {
    ASSERT(receiver.IsClosure());
    const auto& closure = Closure::Cast(receiver);
    return closure.instantiator_type_arguments();
  }
  if (!receiver.IsNull()) {
    const auto& cls = Class::Handle(zone, receiver.clazz());
    if (cls.NumTypeArguments() > 0) {
      return receiver.GetTypeArguments();
    }
  }
  return Object::empty_type_arguments().ptr();
}

ObjectPtr Function::DoArgumentTypesMatch(
    const Array& args,
    const ArgumentsDescriptor& args_desc) const {
#if defined(DART_PRECOMPILED_RUNTIME)
  if (signature() == FunctionType::null()) {
    // The precompiler deleted the signature because of a missing entry point
    // pragma.
    return EntryPointMemberInvocationError(*this);
  }
#endif
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();

  auto& receiver = Instance::Handle(zone);
  if (IsClosureFunction() || HasThisParameter()) {
    receiver ^= args.At(args_desc.FirstArgIndex());
  }
  const auto& instantiator_type_arguments = TypeArguments::Handle(
      zone, RetrieveInstantiatorTypeArguments(zone, *this, receiver));
  return Function::DoArgumentTypesMatch(args, args_desc,
                                        instantiator_type_arguments);
}

ObjectPtr Function::DoArgumentTypesMatch(
    const Array& args,
    const ArgumentsDescriptor& args_desc,
    const TypeArguments& instantiator_type_arguments) const {
#if defined(DART_PRECOMPILED_RUNTIME)
  if (signature() == FunctionType::null()) {
    // The precompiler deleted the signature because of a missing entry point
    // pragma.
    return EntryPointMemberInvocationError(*this);
  }
#endif
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();

  auto& receiver = Instance::Handle(zone);
  if (IsClosureFunction() || HasThisParameter()) {
    receiver ^= args.At(args_desc.FirstArgIndex());
  }

  const auto& function_type_arguments = TypeArguments::Handle(
      zone, RetrieveFunctionTypeArguments(thread, zone, *this, receiver,
                                          instantiator_type_arguments, args,
                                          args_desc));
  return Function::DoArgumentTypesMatch(
      args, args_desc, instantiator_type_arguments, function_type_arguments);
}

ObjectPtr Function::DoArgumentTypesMatch(
    const Array& args,
    const ArgumentsDescriptor& args_desc,
    const TypeArguments& instantiator_type_arguments,
    const TypeArguments& function_type_arguments) const {
#if defined(DART_PRECOMPILED_RUNTIME)
  if (signature() == FunctionType::null()) {
    // The precompiler deleted the signature because of a missing entry point
    // pragma.
    return EntryPointMemberInvocationError(*this);
  }
#endif
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();

  // Perform any non-covariant bounds checks on the provided function type
  // arguments to make sure they are appropriate subtypes of the bounds.
  const intptr_t kNumLocalTypeArgs = NumTypeParameters();
  if (kNumLocalTypeArgs > 0) {
    const intptr_t kNumParentTypeArgs = NumParentTypeArguments();
    ASSERT(function_type_arguments.HasCount(kNumParentTypeArgs +
                                            kNumLocalTypeArgs));
    const auto& params = TypeParameters::Handle(zone, type_parameters());
    // No checks are needed if all bounds are dynamic.
    if (!params.AllDynamicBounds()) {
      auto& param = AbstractType::Handle(zone);
      auto& bound = AbstractType::Handle(zone);
      for (intptr_t i = 0; i < kNumLocalTypeArgs; i++) {
        bound = params.BoundAt(i);
        // Only perform non-covariant checks where the bound is not
        // the top type.
        if (params.IsGenericCovariantImplAt(i) ||
            bound.IsTopTypeForSubtyping()) {
          continue;
        }
        param = TypeParameterAt(i);
        if (!AbstractType::InstantiateAndTestSubtype(
                &param, &bound, instantiator_type_arguments,
                function_type_arguments)) {
          const auto& names = Array::Handle(zone, params.names());
          auto& name = String::Handle(zone);
          name ^= names.At(i);
          return Error::RawCast(
              ThrowTypeError(token_pos(), param, bound, name));
        }
      }
    }
  } else {
    ASSERT(function_type_arguments.HasCount(NumParentTypeArguments()));
  }

  AbstractType& type = AbstractType::Handle(zone);
  Instance& argument = Instance::Handle(zone);

  auto check_argument = [](const Instance& argument, const AbstractType& type,
                           const TypeArguments& instantiator_type_args,
                           const TypeArguments& function_type_args) -> bool {
    // If the argument type is the top type, no need to check.
    if (type.IsTopTypeForSubtyping()) return true;
    if (argument.IsNull()) {
      return Instance::NullIsAssignableTo(type, instantiator_type_args,
                                          function_type_args);
    }
    return argument.IsAssignableTo(type, instantiator_type_args,
                                   function_type_args);
  };

  // Check the types of the provided arguments against the expected parameter
  // types.
  const intptr_t arg_offset = args_desc.FirstArgIndex();
  // Only check explicit arguments.
  const intptr_t arg_start = arg_offset + NumImplicitParameters();
  const intptr_t end_positional_args = arg_offset + args_desc.PositionalCount();
  for (intptr_t arg_index = arg_start; arg_index < end_positional_args;
       ++arg_index) {
    argument ^= args.At(arg_index);
    // Adjust for type arguments when they're present.
    const intptr_t param_index = arg_index - arg_offset;
    type = ParameterTypeAt(param_index);
    if (!check_argument(argument, type, instantiator_type_arguments,
                        function_type_arguments)) {
      auto& name = String::Handle(zone, ParameterNameAt(param_index));
      if (!type.IsInstantiated()) {
        type = type.InstantiateFrom(instantiator_type_arguments,
                                    function_type_arguments, kAllFree,
                                    Heap::kNew);
      }
      return ThrowTypeError(token_pos(), argument, type, name);
    }
  }

  const intptr_t num_named_arguments = args_desc.NamedCount();
  if (num_named_arguments == 0) {
    return Error::null();
  }

  const int num_parameters = NumParameters();
  const int num_fixed_params = num_fixed_parameters();

  String& argument_name = String::Handle(zone);
  String& parameter_name = String::Handle(zone);

  // Check the types of named arguments against the expected parameter types.
  for (intptr_t named_index = 0; named_index < num_named_arguments;
       named_index++) {
    argument_name = args_desc.NameAt(named_index);
    ASSERT(argument_name.IsSymbol());
    argument ^= args.At(arg_offset + args_desc.PositionAt(named_index));

    // Try to find the named parameter that matches the provided argument.
    // Even when annotated with @required, named parameters are still stored
    // as if they were optional and so come after the fixed parameters.
    // Currently O(n^2) as there's no guarantee from either the CFE or the
    // VM that named parameters and named arguments are sorted in the same
    // way.
    intptr_t param_index = num_fixed_params;
    for (; param_index < num_parameters; param_index++) {
      parameter_name = ParameterNameAt(param_index);
      ASSERT(parameter_name.IsSymbol());

      if (!parameter_name.Equals(argument_name)) continue;

      type = ParameterTypeAt(param_index);
      if (!check_argument(argument, type, instantiator_type_arguments,
                          function_type_arguments)) {
        auto& name = String::Handle(zone, ParameterNameAt(param_index));
        if (!type.IsInstantiated()) {
          type = type.InstantiateFrom(instantiator_type_arguments,
                                      function_type_arguments, kAllFree,
                                      Heap::kNew);
        }
        return ThrowTypeError(token_pos(), argument, type, name);
      }
      break;
    }
    // This should only fail if AreValidArguments returned a false positive.
    ASSERT(param_index < num_parameters);
  }
  return Error::null();
}

// Helper allocating a C string buffer in the zone, printing the fully
// qualified name of a function in it, and replacing ':' by '_' to make sure
// the constructed name is a valid C++ identifier for debugging purposes.
// Sets 'chars' to the allocated buffer and returns the number of written
// characters.

enum QualifiedFunctionLibKind {
  kQualifiedFunctionLibKindLibUrl,
  kQualifiedFunctionLibKindLibName
};

static intptr_t ConstructFunctionFullyQualifiedCString(
    const Function& function,
    char** chars,
    intptr_t reserve_len,
    bool with_lib,
    QualifiedFunctionLibKind lib_kind) {
  Zone* zone = Thread::Current()->zone();
  const char* name = String::Handle(zone, function.name()).ToCString();
  const char* function_format = (reserve_len == 0) ? "%s" : "%s_";
  reserve_len += Utils::SNPrint(nullptr, 0, function_format, name);
  const Function& parent = Function::Handle(zone, function.parent_function());
  intptr_t written = 0;
  if (parent.IsNull()) {
    const Class& function_class = Class::Handle(zone, function.Owner());
    ASSERT(!function_class.IsNull());
    const char* class_name =
        String::Handle(zone, function_class.Name()).ToCString();
    ASSERT(class_name != nullptr);
    const char* library_name = nullptr;
    const char* lib_class_format = nullptr;
    if (with_lib) {
      const Library& library = Library::Handle(zone, function_class.library());
      ASSERT(!library.IsNull());
      switch (lib_kind) {
        case kQualifiedFunctionLibKindLibUrl:
          library_name = String::Handle(zone, library.url()).ToCString();
          break;
        case kQualifiedFunctionLibKindLibName:
          library_name = String::Handle(zone, library.name()).ToCString();
          break;
        default:
          UNREACHABLE();
      }
      ASSERT(library_name != nullptr);
      lib_class_format = (library_name[0] == '\0') ? "%s%s_" : "%s_%s_";
    } else {
      library_name = "";
      lib_class_format = "%s%s.";
    }
    reserve_len +=
        Utils::SNPrint(nullptr, 0, lib_class_format, library_name, class_name);
    ASSERT(chars != nullptr);
    *chars = zone->Alloc<char>(reserve_len + 1);
    written = Utils::SNPrint(*chars, reserve_len + 1, lib_class_format,
                             library_name, class_name);
  } else {
    written = ConstructFunctionFullyQualifiedCString(parent, chars,
                                                     reserve_len, with_lib,
                                                     lib_kind);
  }
  ASSERT(*chars != nullptr);
  char* next = *chars + written;
  written += Utils::SNPrint(next, reserve_len + 1, function_format, name);
  // Replace ":" with "_".
  while (true) {
    next = strchr(next, ':');
    if (next == nullptr) break;
    *next = '_';
  }
  return written;
}
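
// Illustrative output sketch (format only, not an actual symbol): with
// with_lib == true and kQualifiedFunctionLibKindLibUrl, a method `baz` in
// class `Bar` of library `package:foo/foo.dart` would be rendered roughly as
// "package_foo/foo.dart_Bar_baz" once every ':' has been replaced by '_'.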
9658
9659const char* Function::ToFullyQualifiedCString() const {
9660 char* chars = nullptr;
9661 ConstructFunctionFullyQualifiedCString(function: *this, chars: &chars, reserve_len: 0, with_lib: true,
9662 lib_kind: kQualifiedFunctionLibKindLibUrl);
9663 return chars;
9664}
9665
9666const char* Function::ToLibNamePrefixedQualifiedCString() const {
9667 char* chars = nullptr;
9668 ConstructFunctionFullyQualifiedCString(function: *this, chars: &chars, reserve_len: 0, with_lib: true,
9669 lib_kind: kQualifiedFunctionLibKindLibName);
9670 return chars;
9671}
9672
9673const char* Function::ToQualifiedCString() const {
9674 char* chars = nullptr;
9675 ConstructFunctionFullyQualifiedCString(function: *this, chars: &chars, reserve_len: 0, with_lib: false,
9676 lib_kind: kQualifiedFunctionLibKindLibUrl);
9677 return chars;
9678}
9679
9680AbstractTypePtr FunctionType::InstantiateFrom(
9681 const TypeArguments& instantiator_type_arguments,
9682 const TypeArguments& function_type_arguments,
9683 intptr_t num_free_fun_type_params,
9684 Heap::Space space,
9685 FunctionTypeMapping* function_type_mapping,
9686 intptr_t num_parent_type_args_adjustment) const {
9687 ASSERT(IsFinalized());
9688 Zone* zone = Thread::Current()->zone();
9689 const intptr_t num_parent_type_args = NumParentTypeArguments();
9690 bool delete_type_parameters = false;
9691 if (num_free_fun_type_params == kCurrentAndEnclosingFree) {
9692 // See the comment on kCurrentAndEnclosingFree to understand why we don't
9693 // adjust 'num_free_fun_type_params' downward in this case.
9694 num_free_fun_type_params = kAllFree;
9695 delete_type_parameters = true;
9696 } else {
9697 ASSERT(!IsInstantiated(kAny, num_free_fun_type_params));
9698 // We only consider the function type parameters declared by the parents
9699 // of this signature function as free.
9700 if (num_parent_type_args < num_free_fun_type_params) {
9701 num_free_fun_type_params = num_parent_type_args;
9702 }
9703 }
9704
9705 // The number of parent type parameters that remain uninstantiated.
9706 const intptr_t remaining_parent_type_params =
9707 num_free_fun_type_params < num_parent_type_args
9708 ? num_parent_type_args - num_free_fun_type_params
9709 : 0;
9710
9711 // Adjust number of parent type arguments for all nested substituted types.
9712 num_parent_type_args_adjustment =
9713 remaining_parent_type_params +
9714 (delete_type_parameters ? 0 : NumTypeParameters());
9715
9716 FunctionType& sig = FunctionType::Handle(
9717 ptr: FunctionType::New(num_parent_type_arguments: remaining_parent_type_params, nullability: nullability(), space));
9718 AbstractType& type = AbstractType::Handle(zone);
9719
9720 FunctionTypeMapping scope(zone, &function_type_mapping, *this, sig);
9721
9722 // Copy the type parameters and instantiate their bounds and defaults.
9723 if (!delete_type_parameters) {
9724 const TypeParameters& type_params =
9725 TypeParameters::Handle(zone, ptr: type_parameters());
9726 if (!type_params.IsNull()) {
9727 const TypeParameters& sig_type_params =
9728 TypeParameters::Handle(zone, ptr: TypeParameters::New());
9729 // No need to set names that are ignored in a signature, however, the
9730 // length of the names array defines the number of type parameters.
      sig_type_params.set_names(Array::Handle(zone, type_params.names()));
      sig_type_params.set_flags(Array::Handle(zone, type_params.flags()));
      sig.SetTypeParameters(sig_type_params);
      TypeArguments& type_args = TypeArguments::Handle(zone);
      type_args = type_params.bounds();
      if (!type_args.IsNull() && !type_args.IsInstantiated()) {
        type_args = type_args.InstantiateFrom(
            instantiator_type_arguments, function_type_arguments,
            num_free_fun_type_params, space, function_type_mapping,
            num_parent_type_args_adjustment);
      }
      sig_type_params.set_bounds(type_args);
      type_args = type_params.defaults();
      if (!type_args.IsNull() && !type_args.IsInstantiated()) {
        type_args = type_args.InstantiateFrom(
            instantiator_type_arguments, function_type_arguments,
            num_free_fun_type_params, space, function_type_mapping,
            num_parent_type_args_adjustment);
      }
      sig_type_params.set_defaults(type_args);
    }
  }

  type = result_type();
  if (!type.IsInstantiated()) {
    type = type.InstantiateFrom(
        instantiator_type_arguments, function_type_arguments,
        num_free_fun_type_params, space, function_type_mapping,
        num_parent_type_args_adjustment);
    // A returned null type indicates a failed instantiation in dead code that
    // must be propagated up to the caller, the optimizing compiler.
    if (type.IsNull()) {
      return FunctionType::null();
    }
  }
  sig.set_result_type(type);
  const intptr_t num_params = NumParameters();
  sig.set_num_implicit_parameters(num_implicit_parameters());
  sig.set_num_fixed_parameters(num_fixed_parameters());
  sig.SetNumOptionalParameters(NumOptionalParameters(),
                               HasOptionalPositionalParameters());
  sig.set_parameter_types(Array::Handle(Array::New(num_params, space)));
  for (intptr_t i = 0; i < num_params; i++) {
    type = ParameterTypeAt(i);
    if (!type.IsInstantiated()) {
      type = type.InstantiateFrom(
          instantiator_type_arguments, function_type_arguments,
          num_free_fun_type_params, space, function_type_mapping,
          num_parent_type_args_adjustment);
      // A returned null type indicates a failed instantiation in dead code
      // that must be propagated up to the caller, the optimizing compiler.
      if (type.IsNull()) {
        return FunctionType::null();
      }
    }
    sig.SetParameterTypeAt(i, type);
  }
  sig.set_named_parameter_names(Array::Handle(zone, named_parameter_names()));

  if (delete_type_parameters) {
    ASSERT(sig.IsInstantiated(kFunctions));
  }

  sig.SetIsFinalized();

  // Canonicalization is not part of instantiation.
  return sig.ptr();
}

AbstractTypePtr FunctionType::UpdateFunctionTypes(
    intptr_t num_parent_type_args_adjustment,
    intptr_t num_free_fun_type_params,
    Heap::Space space,
    FunctionTypeMapping* function_type_mapping) const {
  ASSERT(num_parent_type_args_adjustment >= 0);
  ASSERT(IsFinalized());
  Zone* zone = Thread::Current()->zone();

  const intptr_t old_num_parent_type_args = NumParentTypeArguments();
  // From now on, adjust all type parameter types
  // which belong to this or nested function types.
  if (num_free_fun_type_params > old_num_parent_type_args) {
    num_free_fun_type_params = old_num_parent_type_args;
  }

  FunctionType& new_type = FunctionType::Handle(
      zone, FunctionType::New(
                NumParentTypeArguments() + num_parent_type_args_adjustment,
                nullability(), space));
  AbstractType& type = AbstractType::Handle(zone);

  FunctionTypeMapping scope(zone, &function_type_mapping, *this, new_type);

  const TypeParameters& type_params =
      TypeParameters::Handle(zone, type_parameters());
  if (!type_params.IsNull()) {
    const TypeParameters& new_type_params =
        TypeParameters::Handle(zone, TypeParameters::New());
    // There is no need to set the names, which are ignored in a signature;
    // however, the length of the names array defines the number of type
    // parameters.
    new_type_params.set_names(Array::Handle(zone, type_params.names()));
    new_type_params.set_flags(Array::Handle(zone, type_params.flags()));
    TypeArguments& type_args = TypeArguments::Handle(zone);
    type_args = type_params.bounds();
    if (!type_args.IsNull()) {
      type_args = type_args.UpdateFunctionTypes(num_parent_type_args_adjustment,
                                                num_free_fun_type_params, space,
                                                function_type_mapping);
    }
    new_type_params.set_bounds(type_args);
    type_args = type_params.defaults();
    if (!type_args.IsNull()) {
      type_args = type_args.UpdateFunctionTypes(num_parent_type_args_adjustment,
                                                num_free_fun_type_params, space,
                                                function_type_mapping);
    }
    new_type_params.set_defaults(type_args);
    new_type.SetTypeParameters(new_type_params);
  }

  type = result_type();
  type = type.UpdateFunctionTypes(num_parent_type_args_adjustment,
                                  num_free_fun_type_params, space,
                                  function_type_mapping);
  new_type.set_result_type(type);

  const intptr_t num_params = NumParameters();
  new_type.set_num_implicit_parameters(num_implicit_parameters());
  new_type.set_num_fixed_parameters(num_fixed_parameters());
  new_type.SetNumOptionalParameters(NumOptionalParameters(),
                                    HasOptionalPositionalParameters());
  new_type.set_parameter_types(Array::Handle(Array::New(num_params, space)));
  for (intptr_t i = 0; i < num_params; i++) {
    type = ParameterTypeAt(i);
    type = type.UpdateFunctionTypes(num_parent_type_args_adjustment,
                                    num_free_fun_type_params, space,
                                    function_type_mapping);
    new_type.SetParameterTypeAt(i, type);
  }
  new_type.set_named_parameter_names(
      Array::Handle(zone, named_parameter_names()));
  new_type.SetIsFinalized();

  return new_type.ptr();
}

// Checks if the type of the specified parameter of this signature is a
// supertype of the type of the specified parameter of the other signature
// (i.e. check parameter contravariance).
// Note that types marked as covariant are already dealt with in the front-end.
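// For example (illustrative Dart, assuming the usual subtype rules):
//
//   void takeObject(Object o) {}
//   void Function(int) g = takeObject;  // OK: parameters are contravariant
//
// 'void Function(Object)' is a subtype of 'void Function(int)' because the
// parameter check runs in the reverse (contravariant) direction.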
bool FunctionType::IsContravariantParameter(
    intptr_t parameter_position,
    const FunctionType& other,
    intptr_t other_parameter_position,
    Heap::Space space,
    FunctionTypeMapping* function_type_equivalence) const {
  const AbstractType& param_type =
      AbstractType::Handle(ParameterTypeAt(parameter_position));
  if (param_type.IsTopTypeForSubtyping()) {
    return true;
  }
  const AbstractType& other_param_type =
      AbstractType::Handle(other.ParameterTypeAt(other_parameter_position));
  return other_param_type.IsSubtypeOf(param_type, space,
                                      function_type_equivalence);
}

bool FunctionType::HasSameTypeParametersAndBounds(
    const FunctionType& other,
    TypeEquality kind,
    FunctionTypeMapping* function_type_equivalence) const {
  Zone* const zone = Thread::Current()->zone();
  TRACE_TYPE_CHECKS_VERBOSE(
      " FunctionType::HasSameTypeParametersAndBounds(%s, %s)\n", ToCString(),
      other.ToCString());

  const intptr_t num_type_params = NumTypeParameters();
  if (num_type_params != other.NumTypeParameters()) {
    TRACE_TYPE_CHECKS_VERBOSE(
        " - result: false (number of type parameters)\n");
    return false;
  }
  if (num_type_params > 0) {
    const TypeParameters& type_params =
        TypeParameters::Handle(zone, type_parameters());
    ASSERT(!type_params.IsNull());
    const TypeParameters& other_type_params =
        TypeParameters::Handle(zone, other.type_parameters());
    ASSERT(!other_type_params.IsNull());
    if (kind == TypeEquality::kInSubtypeTest) {
      if (!type_params.AllDynamicBounds() ||
          !other_type_params.AllDynamicBounds()) {
        AbstractType& bound = AbstractType::Handle(zone);
        AbstractType& other_bound = AbstractType::Handle(zone);
        for (intptr_t i = 0; i < num_type_params; i++) {
          bound = type_params.BoundAt(i);
          other_bound = other_type_params.BoundAt(i);
          // Bounds that are mutual subtypes are considered equal.
          if (!bound.IsSubtypeOf(other_bound, Heap::kOld,
                                 function_type_equivalence) ||
              !other_bound.IsSubtypeOf(bound, Heap::kOld,
                                       function_type_equivalence)) {
            TRACE_TYPE_CHECKS_VERBOSE(
                " - result: false (bounds are not mutual subtypes)\n");
            return false;
          }
        }
      }
    } else {
      if (NumParentTypeArguments() != other.NumParentTypeArguments()) {
        TRACE_TYPE_CHECKS_VERBOSE(
            " - result: false (mismatch in number of type arguments)\n");
        return false;
      }
      const TypeArguments& bounds =
          TypeArguments::Handle(zone, type_params.bounds());
      const TypeArguments& other_bounds =
          TypeArguments::Handle(zone, other_type_params.bounds());
      if (!bounds.IsEquivalent(other_bounds, kind,
                               function_type_equivalence)) {
        TRACE_TYPE_CHECKS_VERBOSE(
            " - result: false (bounds are not equivalent)\n");
        return false;
      }
      if (kind == TypeEquality::kCanonical) {
        // Compare default arguments.
        const TypeArguments& defaults =
            TypeArguments::Handle(zone, type_params.defaults());
        const TypeArguments& other_defaults =
            TypeArguments::Handle(zone, other_type_params.defaults());
        if (defaults.IsNull()) {
          if (!other_defaults.IsNull()) {
            TRACE_TYPE_CHECKS_VERBOSE(
                " - result: false (mismatch in defaults)\n");
            return false;
          }
        } else if (!defaults.IsEquivalent(other_defaults, kind,
                                          function_type_equivalence)) {
          TRACE_TYPE_CHECKS_VERBOSE(
              " - result: false (default types are not equivalent)\n");
          return false;
        }
      }
    }
    if (kind != TypeEquality::kInSubtypeTest) {
      // Compare flags (IsGenericCovariantImpl).
      if (!Array::Equals(type_params.flags(), other_type_params.flags())) {
        TRACE_TYPE_CHECKS_VERBOSE(" - result: false (flags are not equal)\n");
        return false;
      }
    }
  }
  TRACE_TYPE_CHECKS_VERBOSE(" - result: true\n");
  return true;
}

bool FunctionType::IsSubtypeOf(
    const FunctionType& other,
    Heap::Space space,
    FunctionTypeMapping* function_type_equivalence) const {
  TRACE_TYPE_CHECKS_VERBOSE(" FunctionType::IsSubtypeOf(%s, %s)\n",
                            ToCString(), other.ToCString());
  const intptr_t num_fixed_params = num_fixed_parameters();
  const intptr_t num_opt_pos_params = NumOptionalPositionalParameters();
  const intptr_t num_opt_named_params = NumOptionalNamedParameters();
  const intptr_t other_num_fixed_params = other.num_fixed_parameters();
  const intptr_t other_num_opt_pos_params =
      other.NumOptionalPositionalParameters();
  const intptr_t other_num_opt_named_params =
      other.NumOptionalNamedParameters();
  // This signature requires the same arguments or fewer and accepts the same
  // arguments or more. We can ignore implicit parameters.
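  // For instance (illustrative Dart): 'void Function(int, [int])' is a
  // subtype of both 'void Function(int)' and 'void Function(int, int)',
  // while 'void Function(int, int)' is not a subtype of 'void Function(int)'
  // because it requires a second positional argument.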
  const intptr_t num_ignored_params = num_implicit_parameters();
  const intptr_t other_num_ignored_params = other.num_implicit_parameters();
  if (((num_fixed_params - num_ignored_params) >
       (other_num_fixed_params - other_num_ignored_params)) ||
      ((num_fixed_params - num_ignored_params + num_opt_pos_params) <
       (other_num_fixed_params - other_num_ignored_params +
        other_num_opt_pos_params)) ||
      (num_opt_named_params < other_num_opt_named_params)) {
    TRACE_TYPE_CHECKS_VERBOSE(
        " - result: false (mismatch in number of parameters)\n");
    return false;
  }
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  auto isolate_group = thread->isolate_group();
  FunctionTypeMapping scope(zone, &function_type_equivalence, *this, other);

  // Check the type parameters and bounds of generic functions.
  if (!HasSameTypeParametersAndBounds(other, TypeEquality::kInSubtypeTest,
                                      function_type_equivalence)) {
    TRACE_TYPE_CHECKS_VERBOSE(
        " - result: false (mismatch in type parameters)\n");
    return false;
  }
  // Check the result type.
  const AbstractType& other_res_type =
      AbstractType::Handle(zone, other.result_type());
  // 'void Function()' is a subtype of 'Object Function()'.
  if (!other_res_type.IsTopTypeForSubtyping()) {
    const AbstractType& res_type = AbstractType::Handle(zone, result_type());
    if (!res_type.IsSubtypeOf(other_res_type, space,
                              function_type_equivalence)) {
      TRACE_TYPE_CHECKS_VERBOSE(" - result: false (result type)\n");
      return false;
    }
  }
  // Check the types of fixed and optional positional parameters.
  for (intptr_t i = 0; i < (other_num_fixed_params - other_num_ignored_params +
                            other_num_opt_pos_params);
       i++) {
    if (!IsContravariantParameter(i + num_ignored_params, other,
                                  i + other_num_ignored_params, space,
                                  function_type_equivalence)) {
      TRACE_TYPE_CHECKS_VERBOSE(" - result: false (parameter type)\n");
      return false;
    }
  }
  // Check that for each optional named parameter of type T of the other
  // function type, there exists an optional named parameter of this function
  // type with an identical name and with a type S that is a supertype of T.
  // Note that SetParameterNameAt() guarantees that names are symbols, so we
  // can compare their raw pointers.
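  // For instance (illustrative Dart): 'void Function({Object? x})' is a
  // subtype of 'void Function({int x})', since the name 'x' matches and
  // 'Object?' is a supertype of 'int'; 'void Function({int y})' is not,
  // since it has no named parameter 'x' at all.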
  const int num_params = num_fixed_params + num_opt_named_params;
  const int other_num_params =
      other_num_fixed_params + other_num_opt_named_params;
  bool found_param_name;
  String& other_param_name = String::Handle(zone);
  for (intptr_t i = other_num_fixed_params; i < other_num_params; i++) {
    other_param_name = other.ParameterNameAt(i);
    ASSERT(other_param_name.IsSymbol());
    found_param_name = false;
    for (intptr_t j = num_fixed_params; j < num_params; j++) {
      ASSERT(String::Handle(zone, ParameterNameAt(j)).IsSymbol());
      if (ParameterNameAt(j) == other_param_name.ptr()) {
        found_param_name = true;
        if (!IsContravariantParameter(j, other, i, space,
                                      function_type_equivalence)) {
          TRACE_TYPE_CHECKS_VERBOSE(
              " - result: false (optional parameter type)\n");
          return false;
        }
        break;
      }
    }
    if (!found_param_name) {
      TRACE_TYPE_CHECKS_VERBOSE(
          " - result: false (named parameter not found)\n");
      return false;
    }
  }
  if (isolate_group->use_strict_null_safety_checks()) {
    // Check that for each required named parameter in this function, there's a
    // corresponding required named parameter in the other function.
    String& param_name = other_param_name;
    for (intptr_t j = num_params - num_opt_named_params; j < num_params; j++) {
      if (IsRequiredAt(j)) {
        param_name = ParameterNameAt(j);
        ASSERT(param_name.IsSymbol());
        bool found = false;
        for (intptr_t i = other_num_fixed_params; i < other_num_params; i++) {
          ASSERT(String::Handle(zone, other.ParameterNameAt(i)).IsSymbol());
          if (other.ParameterNameAt(i) == param_name.ptr()) {
            found = true;
            if (!other.IsRequiredAt(i)) {
              TRACE_TYPE_CHECKS_VERBOSE(
                  " - result: false (mismatch in required named "
                  "parameters)\n");
              return false;
            }
          }
        }
        if (!found) {
          TRACE_TYPE_CHECKS_VERBOSE(
              " - result: false (required named parameter not found)\n");
          return false;
        }
      }
    }
  }
  TRACE_TYPE_CHECKS_VERBOSE(" - result: true\n");
  return true;
}

// The compiler generates an implicit constructor if a class definition
// does not contain an explicit constructor or factory. The implicit
// constructor has the same token position as the owner class.
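// For example (illustrative Dart):
//
//   class Point {}             // gets an implicit 'Point()' constructor
//   class Line { Line(); }     // has an explicit constructor instead
//
// The synthesized constructor has a zero-width source range, i.e. its start
// and end token positions coincide, which is what the check below detects.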
bool Function::IsImplicitConstructor() const {
  return IsGenerativeConstructor() && (token_pos() == end_token_pos());
}

bool Function::IsImplicitStaticClosureFunction(FunctionPtr func) {
  NoSafepointScope no_safepoint;
  uint32_t kind_tag = func->untag()->kind_tag_.load(std::memory_order_relaxed);
  return (KindBits::decode(kind_tag) ==
          UntaggedFunction::kImplicitClosureFunction) &&
         StaticBit::decode(kind_tag);
}

FunctionPtr Function::New(Heap::Space space) {
  ASSERT(Object::function_class() != Class::null());
  return Object::Allocate<Function>(space);
}

FunctionPtr Function::New(const FunctionType& signature,
                          const String& name,
                          UntaggedFunction::Kind kind,
                          bool is_static,
                          bool is_const,
                          bool is_abstract,
                          bool is_external,
                          bool is_native,
                          const Object& owner,
                          TokenPosition token_pos,
                          Heap::Space space) {
  ASSERT(!owner.IsNull());
  ASSERT(!signature.IsNull());
  const Function& result = Function::Handle(Function::New(space));
  result.set_kind_tag(0);
  result.set_packed_fields(0);
  result.set_name(name);
  result.set_kind_tag(0);  // Ensure determinism of uninitialized bits.
  result.set_kind(kind);
  result.set_recognized_kind(MethodRecognizer::kUnknown);
  result.set_modifier(UntaggedFunction::kNoModifier);
  result.set_is_static(is_static);
  result.set_is_const(is_const);
  result.set_is_abstract(is_abstract);
  result.set_is_external(is_external);
  result.set_is_native(is_native);
  result.set_is_reflectable(true);  // Will be computed later.
  result.set_is_visible(true);      // Will be computed later.
  result.set_is_debuggable(true);   // Will be computed later.
  result.set_is_intrinsic(false);
  result.set_has_pragma(false);
  result.set_is_polymorphic_target(false);
  result.set_is_synthetic(false);
  NOT_IN_PRECOMPILED(result.set_state_bits(0));
  result.set_owner(owner);
  NOT_IN_PRECOMPILED(result.set_token_pos(token_pos));
  NOT_IN_PRECOMPILED(result.set_end_token_pos(token_pos));
  NOT_IN_PRECOMPILED(result.set_usage_counter(0));
  NOT_IN_PRECOMPILED(result.set_deoptimization_counter(0));
  NOT_IN_PRECOMPILED(result.set_optimized_instruction_count(0));
  NOT_IN_PRECOMPILED(result.set_optimized_call_site_count(0));
  NOT_IN_PRECOMPILED(result.set_inlining_depth(0));
  NOT_IN_PRECOMPILED(result.set_kernel_offset(0));
  result.set_is_optimizable(is_native ? false : true);
  result.set_is_inlinable(true);
  result.reset_unboxed_parameters_and_return();
  result.SetInstructionsSafe(StubCode::LazyCompile());

  // See Function::set_data() for more information.
  if (kind == UntaggedFunction::kClosureFunction ||
      kind == UntaggedFunction::kImplicitClosureFunction) {
    ASSERT(space == Heap::kOld);
    const ClosureData& data = ClosureData::Handle(ClosureData::New());
    data.set_awaiter_link({});
    result.set_data(data);
  } else if (kind == UntaggedFunction::kFfiTrampoline) {
    const FfiTrampolineData& data =
        FfiTrampolineData::Handle(FfiTrampolineData::New());
    result.set_data(data);
  } else if (is_native) {
    const auto& data =
        Array::Handle(Array::New(NativeFunctionData::kLength, Heap::kOld));
    result.set_data(data);
  } else {
    // Functions other than signature functions have no reason to be allocated
    // in new space.
    ASSERT(space == Heap::kOld);
  }

  // Force-optimized functions are not debuggable because they cannot
  // deoptimize.
  if (result.ForceOptimize()) {
    result.set_is_debuggable(false);
  }
  signature.set_num_implicit_parameters(result.NumImplicitParameters());
  result.SetSignature(signature);
  NOT_IN_PRECOMPILED(
      result.set_positional_parameter_names(Object::empty_array()));
  return result.ptr();
}

FunctionPtr Function::NewClosureFunctionWithKind(UntaggedFunction::Kind kind,
                                                 const String& name,
                                                 const Function& parent,
                                                 bool is_static,
                                                 TokenPosition token_pos,
                                                 const Object& owner) {
  ASSERT((kind == UntaggedFunction::kClosureFunction) ||
         (kind == UntaggedFunction::kImplicitClosureFunction));
  ASSERT(!parent.IsNull());
  ASSERT(!owner.IsNull());
  const FunctionType& signature = FunctionType::Handle(FunctionType::New(
      kind == UntaggedFunction::kClosureFunction ? parent.NumTypeArguments()
                                                 : 0));
  const Function& result = Function::Handle(
      Function::New(signature, name, kind,
                    /* is_static = */ is_static,
                    /* is_const = */ false,
                    /* is_abstract = */ false,
                    /* is_external = */ false,
                    /* is_native = */ false, owner, token_pos));
  result.set_parent_function(parent);
  return result.ptr();
}

FunctionPtr Function::NewClosureFunction(const String& name,
                                         const Function& parent,
                                         TokenPosition token_pos) {
  // Use the owner defining the parent function and not the class containing
  // it.
  const Object& parent_owner = Object::Handle(parent.RawOwner());
  return NewClosureFunctionWithKind(UntaggedFunction::kClosureFunction, name,
                                    parent, parent.is_static(), token_pos,
                                    parent_owner);
}

FunctionPtr Function::NewImplicitClosureFunction(const String& name,
                                                 const Function& parent,
                                                 TokenPosition token_pos) {
  // Use the owner defining the parent function and not the class containing
  // it.
  const Object& parent_owner = Object::Handle(parent.RawOwner());
  return NewClosureFunctionWithKind(
      UntaggedFunction::kImplicitClosureFunction, name, parent,
      parent.is_static() || parent.IsConstructor(), token_pos, parent_owner);
}

bool Function::SafeToClosurize() const {
#if defined(DART_PRECOMPILED_RUNTIME)
  return HasImplicitClosureFunction();
#else
  return true;
#endif
}

bool Function::IsDynamicClosureCallDispatcher(Thread* thread) const {
  if (!IsInvokeFieldDispatcher()) return false;
  if (thread->isolate_group()->object_store()->closure_class() != Owner()) {
    return false;
  }
  const auto& handle = String::Handle(thread->zone(), name());
  return handle.Equals(Symbols::DynamicCall());
}
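
// A Dart-level sketch of the implicit ("tear-off") closure built below
// (illustrative only; the class is hypothetical):
//
//   class A {
//     int add(int x) => x + 1;
//   }
//   var f = A().add;  // tear-off: a closure of type 'int Function(int)'
//
// ImplicitClosureFunction() lazily creates the function backing such
// tear-offs: its signature mirrors the original method, with the receiver
// replaced by the implicit closure parameter.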
FunctionPtr Function::ImplicitClosureFunction() const {
  // Return the existing implicit closure function if any.
  if (implicit_closure_function() != Function::null()) {
    return implicit_closure_function();
  }

#if defined(DART_PRECOMPILED_RUNTIME)
  // In AOT mode all implicit closures are pre-created.
  FATAL("Cannot create implicit closure in AOT!");
  return Function::null();
#else
  ASSERT(!IsClosureFunction());
  Thread* thread = Thread::Current();
  SafepointWriteRwLocker ml(thread, thread->isolate_group()->program_lock());

  if (implicit_closure_function() != Function::null()) {
    return implicit_closure_function();
  }

  // Create closure function.
  Zone* zone = thread->zone();
  const String& closure_name = String::Handle(zone, name());
  const Function& closure_function = Function::Handle(
      zone, NewImplicitClosureFunction(closure_name, *this, token_pos()));

  // Set closure function's context scope.
  if (is_static() || IsConstructor()) {
    closure_function.set_context_scope(Object::empty_context_scope());
  } else {
    const ContextScope& context_scope = ContextScope::Handle(
        zone, LocalScope::CreateImplicitClosureScope(*this));
    closure_function.set_context_scope(context_scope);
  }

  FunctionType& closure_signature =
      FunctionType::Handle(zone, closure_function.signature());

  const auto& cls = Class::Handle(zone, Owner());
  const intptr_t num_type_params =
      IsConstructor() ? cls.NumTypeParameters() : NumTypeParameters();

  TypeArguments& instantiator_type_arguments = TypeArguments::Handle(zone);
  TypeArguments& function_type_arguments = TypeArguments::Handle(zone);

  FunctionTypeMapping* function_type_mapping = nullptr;
  FunctionTypeMapping scope(zone, &function_type_mapping,
                            FunctionType::Handle(zone, signature()),
                            closure_signature);

  auto transform_type = [&](AbstractType& type) {
    if (num_type_params > 0) {
      if (IsConstructor()) {
        type = type.UpdateFunctionTypes(num_type_params, kAllFree, Heap::kOld,
                                        nullptr);
        if (!type.IsInstantiated(kCurrentClass)) {
          type = type.InstantiateFrom(
              instantiator_type_arguments, function_type_arguments,
              kNoneFree /* avoid truncating parent type args */, Heap::kOld);
        }
      } else {
        type = type.UpdateFunctionTypes(0, kNoneFree, Heap::kOld,
                                        function_type_mapping);
      }
    }
  };

  auto transform_type_args = [&](TypeArguments& type_args) {
    ASSERT(num_type_params > 0);
    if (!type_args.IsNull()) {
      if (IsConstructor()) {
        type_args = type_args.UpdateFunctionTypes(num_type_params, kAllFree,
                                                  Heap::kOld, nullptr);
        if (!type_args.IsInstantiated(kCurrentClass)) {
          type_args = type_args.InstantiateFrom(
              instantiator_type_arguments, function_type_arguments,
              kNoneFree /* avoid truncating parent type args */, Heap::kOld);
        }
      } else {
        type_args = type_args.UpdateFunctionTypes(0, kNoneFree, Heap::kOld,
                                                  function_type_mapping);
      }
    }
  };

  // Set closure function's type parameters.
  if (num_type_params > 0) {
    const TypeParameters& old_type_params = TypeParameters::Handle(
        zone, IsConstructor() ? cls.type_parameters() : type_parameters());
    const TypeParameters& new_type_params =
        TypeParameters::Handle(zone, TypeParameters::New());
    // There is no need to set the names, which are ignored in a signature;
    // however, the length of the names array defines the number of type
    // parameters.
    new_type_params.set_names(Array::Handle(zone, old_type_params.names()));
    new_type_params.set_flags(Array::Handle(zone, old_type_params.flags()));

    closure_signature.SetTypeParameters(new_type_params);
    ASSERT(closure_signature.NumTypeParameters() == num_type_params);

    TypeArguments& type_args = TypeArguments::Handle(zone);
    type_args = TypeArguments::New(num_type_params);
    TypeParameter& type_param = TypeParameter::Handle(zone);
    for (intptr_t i = 0; i < num_type_params; i++) {
      type_param = closure_signature.TypeParameterAt(i);
      type_args.SetTypeAt(i, type_param);
    }

    if (IsConstructor()) {
      instantiator_type_arguments =
          type_args.ToInstantiatorTypeArguments(thread, cls);
    } else {
      ASSERT(NumTypeArguments() == type_args.Length());
      function_type_arguments = type_args.ptr();
    }

    type_args = old_type_params.bounds();
    transform_type_args(type_args);
    new_type_params.set_bounds(type_args);

    type_args = old_type_params.defaults();
    transform_type_args(type_args);
    new_type_params.set_defaults(type_args);
  }

  // Set closure function's result type.
  AbstractType& result_type = AbstractType::Handle(zone);
  if (IsConstructor()) {
    const Nullability result_nullability =
        (nnbd_mode() == NNBDMode::kOptedInLib) ? Nullability::kNonNullable
                                               : Nullability::kLegacy;
    result_type = cls.DeclarationType();
    result_type =
        Type::Cast(result_type).ToNullability(result_nullability, Heap::kOld);
  } else {
    result_type = this->result_type();
  }
  transform_type(result_type);
  closure_signature.set_result_type(result_type);

  // Set the closure function's end token to this function's end token.
  closure_function.set_end_token_pos(end_token_pos());

  // The closurized method stub just calls into the original method and should
  // therefore be skipped by the debugger and in stack traces.
  closure_function.set_is_debuggable(false);
  closure_function.set_is_visible(false);

  // Set the closure function's formal parameters to those of this function,
  // removing the receiver if this is an instance method and adding the
  // closure object as the first parameter.
  const int kClosure = 1;
  const int num_implicit_params = NumImplicitParameters();
  const int num_fixed_params =
      kClosure - num_implicit_params + num_fixed_parameters();
  const int num_opt_params = NumOptionalParameters();
  const bool has_opt_pos_params = HasOptionalPositionalParameters();
  const int num_params = num_fixed_params + num_opt_params;
  const int num_pos_params =
      has_opt_pos_params ? num_params : num_fixed_params;
  closure_signature.set_num_fixed_parameters(num_fixed_params);
  closure_signature.SetNumOptionalParameters(num_opt_params,
                                             has_opt_pos_params);
  closure_signature.set_parameter_types(
      Array::Handle(zone, Array::New(num_params, Heap::kOld)));
  closure_function.CreateNameArray();
  closure_signature.CreateNameArrayIncludingFlags();
  AbstractType& param_type = AbstractType::Handle(zone);
  String& param_name = String::Handle(zone);
  // Add implicit closure object parameter.
  param_type = Type::DynamicType();
  closure_signature.SetParameterTypeAt(0, param_type);
  closure_function.SetParameterNameAt(0, Symbols::ClosureParameter());
  for (int i = kClosure; i < num_pos_params; i++) {
    param_type = ParameterTypeAt(num_implicit_params - kClosure + i);
    transform_type(param_type);
    closure_signature.SetParameterTypeAt(i, param_type);
    param_name = ParameterNameAt(num_implicit_params - kClosure + i);
    // Set the name in the function for positional parameters.
    closure_function.SetParameterNameAt(i, param_name);
  }
  for (int i = num_pos_params; i < num_params; i++) {
    param_type = ParameterTypeAt(num_implicit_params - kClosure + i);
    transform_type(param_type);
    closure_signature.SetParameterTypeAt(i, param_type);
    param_name = ParameterNameAt(num_implicit_params - kClosure + i);
    // Set the name in the signature for named parameters.
    closure_signature.SetParameterNameAt(i, param_name);
    if (IsRequiredAt(num_implicit_params - kClosure + i)) {
      closure_signature.SetIsRequiredAt(i);
    }
  }
  closure_signature.FinalizeNameArray();
  closure_function.InheritKernelOffsetFrom(*this);

  if (!is_static() && !IsConstructor()) {
    // Change covariant parameter types to either Object? for an opted-in
    // implicit closure or to Object* for a legacy implicit closure.
    BitVector is_covariant(zone, NumParameters());
    BitVector is_generic_covariant_impl(zone, NumParameters());
    kernel::ReadParameterCovariance(*this, &is_covariant,
                                    &is_generic_covariant_impl);

    Type& object_type = Type::Handle(zone, Type::ObjectType());
    ObjectStore* object_store = IsolateGroup::Current()->object_store();
    object_type = nnbd_mode() == NNBDMode::kOptedInLib
                      ? object_store->nullable_object_type()
                      : object_store->legacy_object_type();
    ASSERT(object_type.IsCanonical());
    for (intptr_t i = kClosure; i < num_params; ++i) {
      const intptr_t original_param_index = num_implicit_params - kClosure + i;
      if (is_covariant.Contains(original_param_index) ||
          is_generic_covariant_impl.Contains(original_param_index)) {
        closure_signature.SetParameterTypeAt(i, object_type);
      }
    }
  }
  ASSERT(!closure_signature.IsFinalized());
  closure_signature ^= ClassFinalizer::FinalizeType(closure_signature);
  closure_function.SetSignature(closure_signature);
  set_implicit_closure_function(closure_function);
  ASSERT(closure_function.IsImplicitClosureFunction());
  ASSERT(HasImplicitClosureFunction());
  return closure_function.ptr();
#endif  // defined(DART_PRECOMPILED_RUNTIME)
}

void Function::DropUncompiledImplicitClosureFunction() const {
  if (implicit_closure_function() != Function::null()) {
    const Function& func = Function::Handle(implicit_closure_function());
    if (!func.HasCode()) {
      set_implicit_closure_function(Function::Handle());
    }
  }
}

StringPtr Function::InternalSignature() const {
#if defined(DART_PRECOMPILED_RUNTIME)
  if (signature() == FunctionType::null()) {
    return String::null();
  }
#endif
  Thread* thread = Thread::Current();
  ZoneTextBuffer printer(thread->zone());
  const FunctionType& sig = FunctionType::Handle(signature());
  sig.Print(kInternalName, &printer);
  return Symbols::New(thread, printer.buffer());
}

StringPtr Function::UserVisibleSignature() const {
#if defined(DART_PRECOMPILED_RUNTIME)
  if (signature() == FunctionType::null()) {
    return String::null();
  }
#endif
  Thread* thread = Thread::Current();
  ZoneTextBuffer printer(thread->zone());
  const FunctionType& sig = FunctionType::Handle(signature());
  sig.Print(kUserVisibleName, &printer);
  return Symbols::New(thread, printer.buffer());
}
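
// Prints the parameter list of this signature. Illustrative examples of the
// resulting output:
//
//   int, double          (fixed parameters only)
//   int, [double]        (with optional positional parameters)
//   int, {required T x}  (with named parameters)
//
// With kUserVisibleName, implicit parameters such as the receiver are hidden.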
void FunctionType::PrintParameters(Thread* thread,
                                   Zone* zone,
                                   NameVisibility name_visibility,
                                   BaseTextBuffer* printer) const {
  AbstractType& param_type = AbstractType::Handle(zone);
  const intptr_t num_params = NumParameters();
  const intptr_t num_fixed_params = num_fixed_parameters();
  const intptr_t num_opt_pos_params = NumOptionalPositionalParameters();
  const intptr_t num_opt_named_params = NumOptionalNamedParameters();
  const intptr_t num_opt_params = num_opt_pos_params + num_opt_named_params;
  ASSERT((num_fixed_params + num_opt_params) == num_params);
  intptr_t i = 0;
  if (name_visibility == kUserVisibleName) {
    // Hide implicit parameters.
    i = num_implicit_parameters();
  }
  String& name = String::Handle(zone);
  while (i < num_fixed_params) {
    param_type = ParameterTypeAt(i);
    ASSERT(!param_type.IsNull());
    param_type.PrintName(name_visibility, printer);
    if (i != (num_params - 1)) {
      printer->AddString(", ");
    }
    i++;
  }
  if (num_opt_params > 0) {
    if (num_opt_pos_params > 0) {
      printer->AddString("[");
    } else {
      printer->AddString("{");
    }
    for (intptr_t i = num_fixed_params; i < num_params; i++) {
      if (num_opt_named_params > 0 && IsRequiredAt(i)) {
        printer->AddString("required ");
      }
      param_type = ParameterTypeAt(i);
      ASSERT(!param_type.IsNull());
      param_type.PrintName(name_visibility, printer);
      // The parameter name of an optional positional parameter does not need
      // to be part of the signature, since it is not used.
      if (num_opt_named_params > 0) {
        name = ParameterNameAt(i);
        printer->AddString(" ");
        printer->AddString(name.ToCString());
      }
      if (i != (num_params - 1)) {
        printer->AddString(", ");
      }
    }
    if (num_opt_pos_params > 0) {
      printer->AddString("]");
    } else {
      printer->AddString("}");
    }
  }
}

ClosurePtr Function::ImplicitStaticClosure() const {
  ASSERT(IsImplicitStaticClosureFunction());
  if (implicit_static_closure() != Closure::null()) {
    return implicit_static_closure();
  }
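
  // Slow path: take the program lock and re-check so that racing threads
  // agree on a single canonical closure instance (double-checked locking).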
  auto thread = Thread::Current();
  SafepointWriteRwLocker ml(thread, thread->isolate_group()->program_lock());

  if (implicit_static_closure() != Closure::null()) {
    return implicit_static_closure();
  }

  Zone* zone = thread->zone();
  const auto& null_context = Context::Handle(zone);
  const auto& closure =
      Closure::Handle(zone, Closure::New(Object::null_type_arguments(),
                                         Object::null_type_arguments(), *this,
                                         null_context, Heap::kOld));
  set_implicit_static_closure(closure);
  return implicit_static_closure();
}

ClosurePtr Function::ImplicitInstanceClosure(const Instance& receiver) const {
  ASSERT(IsImplicitClosureFunction());
  Zone* zone = Thread::Current()->zone();
  const Context& context = Context::Handle(zone, Context::New(1));
  context.SetAt(0, receiver);
  TypeArguments& instantiator_type_arguments = TypeArguments::Handle(zone);
  if (!HasInstantiatedSignature(kCurrentClass)) {
    instantiator_type_arguments = receiver.GetTypeArguments();
  }
  ASSERT(!HasGenericParent());  // No generic parent function.
  return Closure::New(instantiator_type_arguments,
                      Object::null_type_arguments(), *this, context);
}

FunctionPtr Function::ImplicitClosureTarget(Zone* zone) const {
  const auto& parent = Function::Handle(zone, parent_function());
  const auto& func_name = String::Handle(zone, parent.name());
  const auto& owner = Class::Handle(zone, parent.Owner());
  Thread* thread = Thread::Current();
  const auto& error = owner.EnsureIsFinalized(thread);
  ASSERT(error == Error::null());
  auto& target =
      Function::Handle(zone, Resolver::ResolveFunction(zone, owner, func_name));

  if (!target.IsNull() && (target.ptr() != parent.ptr())) {
    DEBUG_ASSERT(IsolateGroup::Current()->HasAttemptedReload());
    if ((target.is_static() != parent.is_static()) ||
        (target.kind() != parent.kind())) {
      target = Function::null();
    }
  }

  return target.ptr();
}
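
// Prints a complete function type, e.g. (illustrative):
// '<T>(T, {int n}) => List<T>' for a generic signature with one named
// parameter.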
void FunctionType::Print(NameVisibility name_visibility,
                         BaseTextBuffer* printer) const {
  if (IsNull()) {
    printer->AddString("null");  // Signature optimized out in precompiler.
    return;
  }
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  const TypeParameters& type_params =
      TypeParameters::Handle(zone, type_parameters());
  if (!type_params.IsNull()) {
    printer->AddString("<");
    const intptr_t base = NumParentTypeArguments();
    const bool kIsClassTypeParameter = false;
    // Type parameter names are meaningless after canonicalization.
    type_params.Print(thread, zone, kIsClassTypeParameter, base,
                      name_visibility, printer);
    printer->AddString(">");
  }
  printer->AddString("(");
  PrintParameters(thread, zone, name_visibility, printer);
  printer->AddString(") => ");
  const AbstractType& res_type = AbstractType::Handle(zone, result_type());
  if (!res_type.IsNull()) {
    res_type.PrintName(name_visibility, printer);
  } else {
    printer->AddString("null");
  }
}

bool Function::HasInstantiatedSignature(
    Genericity genericity,
    intptr_t num_free_fun_type_params) const {
  return FunctionType::Handle(signature())
      .IsInstantiated(genericity, num_free_fun_type_params);
}

bool FunctionType::IsInstantiated(Genericity genericity,
                                  intptr_t num_free_fun_type_params) const {
  if (num_free_fun_type_params == kCurrentAndEnclosingFree) {
    num_free_fun_type_params = kAllFree;
  } else if (genericity != kCurrentClass) {
    const intptr_t num_parent_type_args = NumParentTypeArguments();
    if (num_parent_type_args > 0 && num_free_fun_type_params > 0) {
      // The number of parent type arguments is cached in the FunctionType, so
      // we can't consider any FunctionType with free parent type arguments as
      // fully instantiated. Instead, the FunctionType must be instantiated to
      // reduce the number of parent type arguments, even if they're unused in
      // its component types.
      return false;
    }
    // Don't consider local function type parameters as free.
    if (num_free_fun_type_params > num_parent_type_args) {
      num_free_fun_type_params = num_parent_type_args;
    }
  }
  AbstractType& type = AbstractType::Handle(result_type());
  if (!type.IsInstantiated(genericity, num_free_fun_type_params)) {
    return false;
  }
  const intptr_t num_parameters = NumParameters();
  for (intptr_t i = 0; i < num_parameters; i++) {
    type = ParameterTypeAt(i);
    if (!type.IsInstantiated(genericity, num_free_fun_type_params)) {
      return false;
    }
  }
  const intptr_t num_type_params = NumTypeParameters();
  if (num_type_params > 0) {
    TypeParameters& type_params = TypeParameters::Handle(type_parameters());
    if (!type_params.AllDynamicBounds()) {
      for (intptr_t i = 0; i < type_params.Length(); ++i) {
        type = type_params.BoundAt(i);
        if (!type.IsInstantiated(genericity, num_free_fun_type_params)) {
          return false;
        }
      }
    }
  }
  return true;
}

bool Function::IsPrivate() const {
  return Library::IsPrivate(String::Handle(name()));
}

ClassPtr Function::Owner() const {
  ASSERT(untag()->owner() != Object::null());
  if (untag()->owner()->IsClass()) {
    return Class::RawCast(untag()->owner());
  }
  const Object& obj = Object::Handle(untag()->owner());
  ASSERT(obj.IsPatchClass());
  return PatchClass::Cast(obj).wrapped_class();
}

void Function::InheritKernelOffsetFrom(const Function& src) const {
#if defined(DART_PRECOMPILED_RUNTIME)
  UNREACHABLE();
#else
  StoreNonPointer(&untag()->kernel_offset_, src.untag()->kernel_offset_);
#endif
}

void Function::InheritKernelOffsetFrom(const Field& src) const {
#if defined(DART_PRECOMPILED_RUNTIME)
  UNREACHABLE();
#else
  set_kernel_offset(src.kernel_offset());
#endif
}

void Function::SetKernelLibraryAndEvalScript(
    const Script& script,
    const class KernelProgramInfo& kernel_program_info,
    intptr_t index) const {
  Array& data_field = Array::Handle(
      Array::New(static_cast<intptr_t>(EvalFunctionData::kLength)));
  data_field.SetAt(static_cast<intptr_t>(EvalFunctionData::kScript), script);
  data_field.SetAt(static_cast<intptr_t>(EvalFunctionData::kKernelProgramInfo),
                   kernel_program_info);
  data_field.SetAt(
      static_cast<intptr_t>(EvalFunctionData::kKernelLibraryIndex),
      Smi::Handle(Smi::New(index)));
  set_data(data_field);
}

ScriptPtr Function::script() const {
  // NOTE(turnidge): If you update this function, you probably want to
  // update Class::PatchFieldsAndFunctions() at the same time.
  if (IsDynamicInvocationForwarder()) {
    const Function& target = Function::Handle(ForwardingTarget());
    return target.IsNull() ? Script::null() : target.script();
  }
  if (IsImplicitGetterOrSetter()) {
    const auto& field = Field::Handle(accessor_field());
    return field.IsNull() ? Script::null() : field.Script();
  }
  if (is_eval_function()) {
    const auto& fdata = Array::Handle(Array::RawCast(data()));
    return Script::RawCast(
        fdata.At(static_cast<intptr_t>(EvalFunctionData::kScript)));
  }
  if (token_pos() == TokenPosition::kMinSource) {
    // Testing for position 0 is an optimization that relies on temporary
    // eval functions having token position 0.
    const Script& script = Script::Handle(eval_script());
    if (!script.IsNull()) {
      return script.ptr();
    }
  }
  const Object& obj = Object::Handle(untag()->owner());
  if (obj.IsPatchClass()) {
    return PatchClass::Cast(obj).script();
  }
  if (IsClosureFunction()) {
    const Function& function = Function::Handle(parent_function());
    if (function.IsNull()) return Script::null();
    return function.script();
  }
  ASSERT(obj.IsClass());
  return Class::Cast(obj).script();
}

#if !defined(DART_PRECOMPILED_RUNTIME)
KernelProgramInfoPtr Function::KernelProgramInfo() const {
  if (is_eval_function()) {
    const auto& fdata = Array::Handle(Array::RawCast(data()));
    return KernelProgramInfo::RawCast(
        fdata.At(static_cast<intptr_t>(EvalFunctionData::kKernelProgramInfo)));
  }
  if (IsClosureFunction()) {
    const auto& parent = Function::Handle(parent_function());
    return parent.KernelProgramInfo();
  }
  const auto& owner = Object::Handle(RawOwner());
  if (owner.IsClass()) {
    return Class::Cast(owner).KernelProgramInfo();
  }
  return PatchClass::Cast(owner).kernel_program_info();
}

TypedDataViewPtr Function::KernelLibrary() const {
  const auto& info = KernelProgramInfo::Handle(KernelProgramInfo());
  return info.KernelLibrary(KernelLibraryIndex());
}

intptr_t Function::KernelLibraryOffset() const {
  const intptr_t kernel_library_index = KernelLibraryIndex();
  if (kernel_library_index == -1) return 0;
  const auto& info = KernelProgramInfo::Handle(KernelProgramInfo());
  return info.KernelLibraryStartOffset(kernel_library_index);
}

intptr_t Function::KernelLibraryIndex() const {
  if (IsNoSuchMethodDispatcher() || IsInvokeFieldDispatcher() ||
      IsFfiTrampoline()) {
    return -1;
  }
  if (is_eval_function()) {
    const auto& fdata = Array::Handle(Array::RawCast(data()));
    return Smi::Value(static_cast<SmiPtr>(fdata.At(
        static_cast<intptr_t>(EvalFunctionData::kKernelLibraryIndex))));
  }
  if (IsClosureFunction()) {
    const auto& parent = Function::Handle(parent_function());
    ASSERT(!parent.IsNull());
    return parent.KernelLibraryIndex();
  }

  const auto& obj = Object::Handle(untag()->owner());
  if (obj.IsClass()) {
    const auto& lib = Library::Handle(Class::Cast(obj).library());
    return lib.kernel_library_index();
  }
  ASSERT(obj.IsPatchClass());
  return PatchClass::Cast(obj).kernel_library_index();
}
#endif

bool Function::HasOptimizedCode() const {
  return HasCode() && Code::Handle(CurrentCode()).is_optimized();
}

const char* Function::NameCString(NameVisibility name_visibility) const {
  switch (name_visibility) {
    case kInternalName:
      return String::Handle(name()).ToCString();
    case kScrubbedName:
    case kUserVisibleName:
      return UserVisibleNameCString();
  }
  UNREACHABLE();
  return nullptr;
}

const char* Function::UserVisibleNameCString() const {
  if (FLAG_show_internal_names) {
    return String::Handle(name()).ToCString();
  }
  return String::ScrubName(String::Handle(name()), is_extension_member());
}

StringPtr Function::UserVisibleName() const {
  if (FLAG_show_internal_names) {
    return name();
  }
  return Symbols::New(
      Thread::Current(),
      String::ScrubName(String::Handle(name()), is_extension_member()));
}

StringPtr Function::QualifiedScrubbedName() const {
  Thread* thread = Thread::Current();
  ZoneTextBuffer printer(thread->zone());
  PrintName(NameFormattingParams(kScrubbedName), &printer);
  return Symbols::New(thread, printer.buffer());
}

const char* Function::QualifiedScrubbedNameCString() const {
  Thread* thread = Thread::Current();
  ZoneTextBuffer printer(thread->zone());
  PrintName(NameFormattingParams(kScrubbedName), &printer);
  return printer.buffer();
}

StringPtr Function::QualifiedUserVisibleName() const {
  Thread* thread = Thread::Current();
  ZoneTextBuffer printer(thread->zone());
  PrintName(NameFormattingParams(kUserVisibleName), &printer);
  return Symbols::New(thread, printer.buffer());
}

const char* Function::QualifiedUserVisibleNameCString() const {
  Thread* thread = Thread::Current();
  ZoneTextBuffer printer(thread->zone());
  PrintName(NameFormattingParams(kUserVisibleName), &printer);
  return printer.buffer();
}

static void FunctionPrintNameHelper(const Function& fun,
                                    const NameFormattingParams& params,
                                    BaseTextBuffer* printer) {
  if (fun.IsNonImplicitClosureFunction()) {
    if (params.include_parent_name) {
      const auto& parent = Function::Handle(fun.parent_function());
      if (parent.IsNull()) {
        printer->AddString(Symbols::OptimizedOut().ToCString());
      } else {
        parent.PrintName(params, printer);
      }
      // A function's scrubbed name and its user visible name are identical.
      printer->AddString(".");
    }
    if (params.disambiguate_names &&
        fun.name() == Symbols::AnonymousClosure().ptr()) {
      printer->Printf("<anonymous closure @%" Pd ">", fun.token_pos().Pos());
    } else {
      printer->AddString(fun.NameCString(params.name_visibility));
      if (params.disambiguate_names) {
        printer->Printf("@<%" Pd ">", fun.token_pos().Pos());
      }
    }
    return;
  }
  if (params.disambiguate_names) {
    if (fun.IsInvokeFieldDispatcher()) {
      printer->AddString("[invoke-field] ");
    }
    if (fun.IsNoSuchMethodDispatcher()) {
      printer->AddString("[no-such-method] ");
    }
    if (fun.IsImplicitClosureFunction()) {
      printer->AddString("[tear-off] ");
    }
    if (fun.IsMethodExtractor()) {
      printer->AddString("[tear-off-extractor] ");
    }
  }

  if (fun.kind() == UntaggedFunction::kConstructor) {
    printer->AddString("new ");
  } else if (params.include_class_name) {
    const Class& cls = Class::Handle(fun.Owner());
    if (!cls.IsTopLevel()) {
      const Class& mixin = Class::Handle(cls.Mixin());
      printer->AddString(params.name_visibility == Object::kUserVisibleName
                             ? mixin.UserVisibleNameCString()
                             : cls.NameCString(params.name_visibility));
      printer->AddString(".");
    }
  }

  printer->AddString(fun.NameCString(params.name_visibility));

  // Dispatchers that are created with an arguments descriptor need both the
  // name and the saved arguments descriptor to disambiguate.
  if (params.disambiguate_names && fun.HasSavedArgumentsDescriptor()) {
    const auto& args_desc_array = Array::Handle(fun.saved_args_desc());
    const ArgumentsDescriptor args_desc(args_desc_array);
    args_desc.PrintTo(printer);
  }
}

void Function::PrintName(const NameFormattingParams& params,
                         BaseTextBuffer* printer) const {
  if (!IsLocalFunction()) {
    FunctionPrintNameHelper(*this, params, printer);
    return;
  }
  auto& fun = Function::Handle(ptr());
  FunctionPrintNameHelper(fun, params, printer);
}

StringPtr Function::GetSource() const {
  if (IsImplicitConstructor() || is_synthetic()) {
    // We may need to handle more cases when the restrictions on mixins are
    // relaxed. In particular we might start associating some source with the
    // forwarding constructors when it becomes possible to specify a particular
    // constructor from the mixin to use.
    return String::null();
  }
  Zone* zone = Thread::Current()->zone();
  const Script& func_script = Script::Handle(zone, script());

  intptr_t from_line, from_col;
  if (!func_script.GetTokenLocation(token_pos(), &from_line, &from_col)) {
    return String::null();
  }
  intptr_t to_line, to_col;
  if (!func_script.GetTokenLocation(end_token_pos(), &to_line, &to_col)) {
    return String::null();
  }
  intptr_t to_length = func_script.GetTokenLength(end_token_pos());
  if (to_length < 0) {
    return String::null();
  }

  if (to_length == 1) {
    // Handle special cases for end tokens of closures (where we exclude the
    // last token):
    // (1) "foo(() => null, bar);": End token is `,', but we don't print it.
    // (2) "foo(() => null);": End token is ')`, but we don't print it.
    // (3) "var foo = () => null;": End token is `;', but in this case the
    //     token semicolon belongs to the assignment so we skip it.
    const String& src = String::Handle(func_script.Source());
    if (src.IsNull() || src.Length() == 0) {
      return Symbols::OptimizedOut().ptr();
    }
    uint16_t end_char = src.CharAt(end_token_pos().Pos());
    if ((end_char == ',') ||  // Case 1.
        (end_char == ')') ||  // Case 2.
        (end_char == ';' && String::Handle(zone, name())
                                .Equals("<anonymous closure>"))) {  // Case 3.
      to_length = 0;
    }
  }

  return func_script.GetSnippet(from_line, from_col, to_line,
                                to_col + to_length);
}

// Construct the fingerprint from the token stream. The token stream also
// contains the arguments.
int32_t Function::SourceFingerprint() const {
#if !defined(DART_PRECOMPILED_RUNTIME)
  return kernel::KernelSourceFingerprintHelper::CalculateFunctionFingerprint(
      *this);
#else
  return 0;
#endif  // !defined(DART_PRECOMPILED_RUNTIME)
}

void Function::SaveICDataMap(
    const ZoneGrowableArray<const ICData*>& deopt_id_to_ic_data,
    const Array& edge_counters_array,
    const Array& coverage_array) const {
#if !defined(DART_PRECOMPILED_RUNTIME)
  // Already installed, nothing to do.
  if (ic_data_array() != Array::null()) {
    ASSERT(coverage_array.ptr() == GetCoverageArray());
    return;
  }

  // Compute number of ICData objects to save.
  intptr_t count = 0;
  for (intptr_t i = 0; i < deopt_id_to_ic_data.length(); i++) {
    if (deopt_id_to_ic_data[i] != nullptr) {
      count++;
    }
  }

  // Compress sparse deopt_id_to_ic_data mapping into a linear sequence of
  // ICData objects.
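  // The resulting layout (see ICDataArrayIndices) is, schematically:
  //   [edge counters, coverage data, ICData#0, ICData#1, ...]
  // with the ICData entries stored in ascending deopt id order.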
11085 const Array& array = Array::Handle(
11086 ptr: Array::New(len: ICDataArrayIndices::kFirstICData + count, space: Heap::kOld));
11087 for (intptr_t i = 0, pos = ICDataArrayIndices::kFirstICData;
11088 i < deopt_id_to_ic_data.length(); i++) {
11089 if (deopt_id_to_ic_data[i] != nullptr) {
11090 ASSERT(i == deopt_id_to_ic_data[i]->deopt_id());
11091 array.SetAt(index: pos++, value: *deopt_id_to_ic_data[i]);
11092 }
11093 }
11094 array.SetAt(index: ICDataArrayIndices::kEdgeCounters, value: edge_counters_array);
11095 // Preserve coverage_array which is stored early after graph construction.
11096 array.SetAt(index: ICDataArrayIndices::kCoverageData, value: coverage_array);
11097 set_ic_data_array(array);
11098#else // DART_PRECOMPILED_RUNTIME
11099 UNREACHABLE();
11100#endif // DART_PRECOMPILED_RUNTIME
11101}
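
// A sketch of the resulting ic_data_array layout, as implied by the code
// above (index names from ICDataArrayIndices; the numeric values are not
// assumed here):
//
//   [kEdgeCounters]        edge counters array
//   [kCoverageData]        coverage array
//   [kFirstICData .. end]  ICData objects in ascending deopt-id order
//
// RestoreICDataMap below relies on this ordering when it reads the deopt id
// of the last element to size the deopt_id_to_ic_data mapping.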

void Function::RestoreICDataMap(
    ZoneGrowableArray<const ICData*>* deopt_id_to_ic_data,
    bool clone_ic_data) const {
#if !defined(DART_PRECOMPILED_RUNTIME)
  if (FLAG_force_clone_compiler_objects) {
    clone_ic_data = true;
  }
  ASSERT(deopt_id_to_ic_data->is_empty());
  Zone* zone = Thread::Current()->zone();
  const Array& saved_ic_data = Array::Handle(zone, ic_data_array());
  if (saved_ic_data.IsNull()) {
    // This can happen with not-yet-compiled unoptimized code or
    // force-optimized functions.
    return;
  }
  const intptr_t saved_length = saved_ic_data.Length();
  ASSERT(saved_length > 0);
  if (saved_length > ICDataArrayIndices::kFirstICData) {
    const intptr_t restored_length =
        ICData::Cast(Object::Handle(zone, saved_ic_data.At(saved_length - 1)))
            .deopt_id() +
        1;
    deopt_id_to_ic_data->SetLength(restored_length);
    for (intptr_t i = 0; i < restored_length; i++) {
      (*deopt_id_to_ic_data)[i] = nullptr;
    }
    for (intptr_t i = ICDataArrayIndices::kFirstICData; i < saved_length; i++) {
      ICData& ic_data = ICData::ZoneHandle(zone);
      ic_data ^= saved_ic_data.At(i);
      if (clone_ic_data) {
        const ICData& original_ic_data = ICData::Handle(zone, ic_data.ptr());
        ic_data = ICData::Clone(ic_data);
        ic_data.SetOriginal(original_ic_data);
      }
      ASSERT(deopt_id_to_ic_data->At(ic_data.deopt_id()) == nullptr);
      (*deopt_id_to_ic_data)[ic_data.deopt_id()] = &ic_data;
    }
  }
#else   // DART_PRECOMPILED_RUNTIME
  UNREACHABLE();
#endif  // DART_PRECOMPILED_RUNTIME
}

ArrayPtr Function::GetCoverageArray() const {
  const Array& arr = Array::Handle(ic_data_array());
  if (arr.IsNull()) {
    return Array::null();
  }
  return Array::RawCast(arr.At(ICDataArrayIndices::kCoverageData));
}

void Function::set_ic_data_array(const Array& value) const {
  untag()->set_ic_data_array<std::memory_order_release>(value.ptr());
}

ArrayPtr Function::ic_data_array() const {
  return untag()->ic_data_array<std::memory_order_acquire>();
}

void Function::ClearICDataArray() const {
  set_ic_data_array(Array::null_array());
}

ICDataPtr Function::FindICData(intptr_t deopt_id) const {
  const Array& array = Array::Handle(ic_data_array());
  ICData& ic_data = ICData::Handle();
  for (intptr_t i = ICDataArrayIndices::kFirstICData; i < array.Length(); i++) {
    ic_data ^= array.At(i);
    if (ic_data.deopt_id() == deopt_id) {
      return ic_data.ptr();
    }
  }
  return ICData::null();
}

void Function::SetDeoptReasonForAll(intptr_t deopt_id,
                                    ICData::DeoptReasonId reason) {
  const Array& array = Array::Handle(ic_data_array());
  ICData& ic_data = ICData::Handle();
  for (intptr_t i = ICDataArrayIndices::kFirstICData; i < array.Length(); i++) {
    ic_data ^= array.At(i);
    if (ic_data.deopt_id() == deopt_id) {
      ic_data.AddDeoptReason(reason);
    }
  }
}

bool Function::CheckSourceFingerprint(int32_t fp, const char* kind) const {
#if !defined(DEBUG)
  return true;  // Only check in debug mode.
#endif

#if !defined(DART_PRECOMPILED_RUNTIME)
  // Check that the function is marked as recognized via the vm:recognized
  // pragma. This is so that optimizations that change the signature will know
  // not to touch it.
  if (kind != nullptr && !MethodRecognizer::IsMarkedAsRecognized(*this, kind)) {
    OS::PrintErr(
        "Recognized method %s should be marked with: "
        "@pragma(\"vm:recognized\", \"%s\")\n",
        ToQualifiedCString(), kind);
    return false;
  }
#endif

  if (IsolateGroup::Current()->obfuscate() || FLAG_precompiled_mode ||
      (Dart::vm_snapshot_kind() != Snapshot::kNone)) {
    return true;  // The kernel structure has been altered, skip checking.
  }

  if (SourceFingerprint() != fp) {
    // This output can be copied into a file and then used with sed
    // to replace the old values:
    //   sed -i.bak -f /tmp/newkeys \
    //       runtime/vm/compiler/recognized_methods_list.h
    THR_Print("s/0x%08x/0x%08x/\n", fp, SourceFingerprint());
    return false;
  }
  return true;
}
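
// For reference, a recognized method in the core libraries is annotated
// roughly like this (illustrative Dart; the actual kind strings and the
// fingerprint table live in recognized_methods_list.h):
//
//   @pragma("vm:recognized", "asm-intrinsic")
//   external int get length;
//
// When the kernel representation of such a method changes, the sed lines
// printed above are used to update the fingerprint table.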

CodePtr Function::EnsureHasCode() const {
  if (HasCode()) return CurrentCode();
  Thread* thread = Thread::Current();
  ASSERT(thread->IsDartMutatorThread());
  DEBUG_ASSERT(thread->TopErrorHandlerIsExitFrame());
  Zone* zone = thread->zone();
  const Object& result =
      Object::Handle(zone, Compiler::CompileFunction(thread, *this));
  if (result.IsError()) {
    if (result.ptr() == Object::out_of_memory_error().ptr()) {
      Exceptions::ThrowOOM();
      UNREACHABLE();
    }
    if (result.IsLanguageError()) {
      Exceptions::ThrowCompileTimeError(LanguageError::Cast(result));
      UNREACHABLE();
    }
    Exceptions::PropagateError(Error::Cast(result));
    UNREACHABLE();
  }
  // Compiling in unoptimized mode should never fail if there are no errors.
  RELEASE_ASSERT(HasCode());
  ASSERT(ForceOptimize() || unoptimized_code() == result.ptr());
  return CurrentCode();
}

bool Function::NeedsMonomorphicCheckedEntry(Zone* zone) const {
#if !defined(DART_PRECOMPILED_RUNTIME)
  if (!IsDynamicFunction()) {
    return false;
  }

  // For functions which need an args descriptor the switchable call sites will
  // transition directly to calling via a stub (and therefore never call the
  // monomorphic entry).
  //
  // See runtime_entry.cc:DEFINE_RUNTIME_ENTRY(UnlinkedCall)
  if (PrologueNeedsArgumentsDescriptor()) {
    return false;
  }

  // All dyn:* forwarders are called via switchable calls, and all except the
  // ones with `PrologueNeedsArgumentsDescriptor()` transition into the
  // monomorphic state.
  if (Function::IsDynamicInvocationForwarderName(name())) {
    return true;
  }

  // AOT mode uses table dispatch.
  // In JIT mode all instance calls use switchable calls.
  if (!FLAG_precompiled_mode) {
    return true;
  }

  // We need the monomorphic checked entry only if there are dynamic callers
  // and we did not create a dyn:* forwarder for the function.
  return HasDynamicCallers(zone) &&
         !kernel::NeedsDynamicInvocationForwarder(*this);
#else
  UNREACHABLE();
  return true;
#endif
}
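
// A compact summary of the decision above (a reading aid, not new logic):
//
//   selector not dynamic                    -> false
//   prologue needs an arguments descriptor  -> false
//   dyn:* forwarder                         -> true
//   JIT (all instance calls switchable)     -> true
//   AOT                                     -> true only with dynamic callers
//                                              and no dyn:* forwarder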

bool Function::HasDynamicCallers(Zone* zone) const {
#if !defined(DART_PRECOMPILED_RUNTIME)
  // Issue(dartbug.com/42719):
  // Right now the metadata of _Closure.call says there are no dynamic
  // callers, even though there can be. To be conservative we return true.
  if ((name() == Symbols::GetCall().ptr() || name() == Symbols::call().ptr()) &&
      Class::IsClosureClass(Owner())) {
    return true;
  }

  // Use the results of TFA to determine whether this function is ever
  // called dynamically, i.e. using switchable calls.
  kernel::ProcedureAttributesMetadata metadata;
  metadata = kernel::ProcedureAttributesOf(*this, zone);
  if (IsGetterFunction() || IsImplicitGetterFunction() || IsMethodExtractor()) {
    // A dynamic method call through a field/getter involves a dynamic call of
    // the field/getter.
    return metadata.getter_called_dynamically ||
           metadata.method_or_setter_called_dynamically;
  } else {
    return metadata.method_or_setter_called_dynamically;
  }
#else
  UNREACHABLE();
  return true;
#endif
}

bool Function::PrologueNeedsArgumentsDescriptor() const {
  // These functions have a saved compile-time arguments descriptor that is
  // used in lieu of the runtime arguments descriptor in generated IL.
  if (HasSavedArgumentsDescriptor()) {
    return false;
  }
  // The prologues of these functions need to examine the arguments descriptor
  // for various purposes.
  return IsGeneric() || HasOptionalParameters();
}
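
// For example (illustrative Dart): `void f<T>(int a, [int b = 0])` is both
// generic and has an optional parameter, so its prologue must consult the
// arguments descriptor; `void g(int a)` is neither, so it does not.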

bool Function::MayHaveUncheckedEntryPoint() const {
  return FLAG_enable_multiple_entrypoints &&
         (NeedsTypeArgumentTypeChecks() || NeedsArgumentTypeChecks());
}

intptr_t Function::SourceSize() const {
  const TokenPosition& start = token_pos();
  const TokenPosition& end = end_token_pos();
  if (!end.IsReal() || start.IsNoSource() || start.IsClassifying()) {
    // No source information, so just return 0.
    return 0;
  }
  if (start.IsSynthetic()) {
    // Try to approximate the source size using the parent's source size.
    const auto& parent = Function::Handle(parent_function());
    ASSERT(!parent.IsNull());
    const intptr_t parent_size = parent.SourceSize();
    if (parent_size == 0) {
      return parent_size;
    }
    // The parent must have a real ending position.
    return parent_size - (parent.end_token_pos().Pos() - end.Pos());
  }
  return end.Pos() - start.Pos();
}
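
// A worked example of the synthetic-start approximation above (assuming
// token positions are simple offsets): if the parent spans [10, 110] and
// this function ends at 60, the estimate is 100 - (110 - 60) = 50.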

const char* Function::ToCString() const {
  if (IsNull()) {
    return "Function: null";
  }
  Zone* zone = Thread::Current()->zone();
  ZoneTextBuffer buffer(zone);
  buffer.Printf("Function '%s':", String::Handle(zone, name()).ToCString());
  if (is_static()) {
    buffer.AddString(" static");
  }
  if (is_abstract()) {
    buffer.AddString(" abstract");
  }
  switch (kind()) {
    case UntaggedFunction::kRegularFunction:
    case UntaggedFunction::kClosureFunction:
    case UntaggedFunction::kImplicitClosureFunction:
    case UntaggedFunction::kGetterFunction:
    case UntaggedFunction::kSetterFunction:
      break;
    case UntaggedFunction::kConstructor:
      buffer.AddString(is_static() ? " factory" : " constructor");
      break;
    case UntaggedFunction::kImplicitGetter:
      buffer.AddString(" getter");
      break;
    case UntaggedFunction::kImplicitSetter:
      buffer.AddString(" setter");
      break;
    case UntaggedFunction::kImplicitStaticGetter:
      buffer.AddString(" static-getter");
      break;
    case UntaggedFunction::kFieldInitializer:
      buffer.AddString(" field-initializer");
      break;
    case UntaggedFunction::kMethodExtractor:
      buffer.AddString(" method-extractor");
      break;
    case UntaggedFunction::kNoSuchMethodDispatcher:
      buffer.AddString(" no-such-method-dispatcher");
      break;
    case UntaggedFunction::kDynamicInvocationForwarder:
      buffer.AddString(" dynamic-invocation-forwarder");
      break;
    case UntaggedFunction::kInvokeFieldDispatcher:
      buffer.AddString(" invoke-field-dispatcher");
      break;
    case UntaggedFunction::kIrregexpFunction:
      buffer.AddString(" irregexp-function");
      break;
    case UntaggedFunction::kFfiTrampoline:
      buffer.AddString(" ffi-trampoline-function");
      break;
    case UntaggedFunction::kRecordFieldGetter:
      buffer.AddString(" record-field-getter");
      break;
    default:
      UNREACHABLE();
  }
  if (HasSavedArgumentsDescriptor()) {
    const auto& args_desc_array = Array::Handle(zone, saved_args_desc());
    const ArgumentsDescriptor args_desc(args_desc_array);
    buffer.AddChar('[');
    args_desc.PrintTo(&buffer);
    buffer.AddChar(']');
  }
  if (is_const()) {
    buffer.AddString(" const");
  }
  buffer.AddChar('.');
  return buffer.buffer();
}

void FunctionType::set_packed_parameter_counts(
    uint32_t packed_parameter_counts) const {
  untag()->packed_parameter_counts_ = packed_parameter_counts;
}

void FunctionType::set_packed_type_parameter_counts(
    uint16_t packed_type_parameter_counts) const {
  untag()->packed_type_parameter_counts_ = packed_type_parameter_counts;
}

void FunctionType::set_num_implicit_parameters(intptr_t value) const {
  ASSERT(value >= 0);
  untag()->packed_parameter_counts_.Update<PackedNumImplicitParameters>(value);
}

ClosureData::DefaultTypeArgumentsKind ClosureData::default_type_arguments_kind()
    const {
  return untag()
      ->packed_fields_
      .Read<UntaggedClosureData::PackedDefaultTypeArgumentsKind>();
}

void ClosureData::set_default_type_arguments_kind(
    DefaultTypeArgumentsKind value) const {
  untag()
      ->packed_fields_
      .Update<UntaggedClosureData::PackedDefaultTypeArgumentsKind>(value);
}

Function::AwaiterLink ClosureData::awaiter_link() const {
  const uint8_t depth =
      untag()
          ->packed_fields_.Read<UntaggedClosureData::PackedAwaiterLinkDepth>();
  const uint8_t index =
      untag()
          ->packed_fields_.Read<UntaggedClosureData::PackedAwaiterLinkIndex>();
  return {depth, index};
}

void ClosureData::set_awaiter_link(Function::AwaiterLink link) const {
  untag()->packed_fields_.Update<UntaggedClosureData::PackedAwaiterLinkDepth>(
      link.depth);
  untag()->packed_fields_.Update<UntaggedClosureData::PackedAwaiterLinkIndex>(
      link.index);
}

ClosureDataPtr ClosureData::New() {
  ASSERT(Object::closure_data_class() != Class::null());
  return Object::Allocate<ClosureData>(Heap::kOld);
}

const char* ClosureData::ToCString() const {
  if (IsNull()) {
    return "ClosureData: null";
  }
  auto const zone = Thread::Current()->zone();
  ZoneTextBuffer buffer(zone);
  buffer.Printf("ClosureData: context_scope: 0x%" Px "",
                static_cast<uword>(context_scope()));
  buffer.AddString(" parent_function: ");
  if (parent_function() == Object::null()) {
    buffer.AddString("null");
  } else {
    buffer.AddString(Object::Handle(parent_function()).ToCString());
  }
  buffer.Printf(" implicit_static_closure: 0x%" Px "",
                static_cast<uword>(implicit_static_closure()));
  return buffer.buffer();
}

void FunctionType::set_num_fixed_parameters(intptr_t value) const {
  ASSERT(value >= 0);
  untag()->packed_parameter_counts_.Update<PackedNumFixedParameters>(value);
}

void FfiTrampolineData::set_callback_target(const Function& value) const {
  untag()->set_callback_target(value.ptr());
}

void FunctionType::SetNumOptionalParameters(
    intptr_t value,
    bool are_optional_positional) const {
  // HasOptionalNamedParameters only checks this bit, so only set it if there
  // are actual named parameters.
  untag()->packed_parameter_counts_.Update<PackedHasNamedOptionalParameters>(
      (value > 0) && !are_optional_positional);
  untag()->packed_parameter_counts_.Update<PackedNumOptionalParameters>(value);
}
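
// For example (illustrative): a signature `f([int a, int b])` would be
// recorded as SetNumOptionalParameters(2, /*are_optional_positional=*/true),
// while `f({int a, int b})` would pass are_optional_positional=false, which
// also sets the named-parameters bit.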

FunctionTypePtr FunctionType::New(Heap::Space space) {
  return Object::Allocate<FunctionType>(space);
}

FunctionTypePtr FunctionType::New(intptr_t num_parent_type_arguments,
                                  Nullability nullability,
                                  Heap::Space space) {
  Zone* Z = Thread::Current()->zone();
  const FunctionType& result =
      FunctionType::Handle(Z, FunctionType::New(space));
  result.set_packed_parameter_counts(0);
  result.set_packed_type_parameter_counts(0);
  result.set_named_parameter_names(Object::empty_array());
  result.SetNumParentTypeArguments(num_parent_type_arguments);
  result.SetHash(0);
  result.set_flags(0);
  result.set_nullability(nullability);
  result.set_type_state(UntaggedAbstractType::kAllocated);
  result.InitializeTypeTestingStubNonAtomic(
      Code::Handle(Z, TypeTestingStubGenerator::DefaultCodeForType(result)));
  return result.ptr();
}

FunctionTypePtr FunctionType::Clone(const FunctionType& orig,
                                    Heap::Space space) {
  if (orig.IsGeneric()) {
    // Need a deep clone in order to update owners of type parameters.
    return FunctionType::RawCast(
        orig.UpdateFunctionTypes(0, kAllFree, space, nullptr));
  } else {
    return FunctionType::RawCast(Object::Clone(orig, space));
  }
}

const char* FunctionType::ToUserVisibleCString() const {
  Zone* zone = Thread::Current()->zone();
  ZoneTextBuffer printer(zone);
  Print(kUserVisibleName, &printer);
  return printer.buffer();
}

StringPtr FunctionType::ToUserVisibleString() const {
  Thread* thread = Thread::Current();
  ZoneTextBuffer printer(thread->zone());
  Print(kUserVisibleName, &printer);
  return Symbols::New(thread, printer.buffer());
}

const char* FunctionType::ToCString() const {
  if (IsNull()) {
    return "FunctionType: null";
  }
  Zone* zone = Thread::Current()->zone();
  ZoneTextBuffer printer(zone);
  const char* suffix = NullabilitySuffix(kInternalName);
  if (suffix[0] != '\0') {
    printer.AddString("(");
  }
  Print(kInternalName, &printer);
  if (suffix[0] != '\0') {
    printer.AddString(")");
    printer.AddString(suffix);
  }
  return printer.buffer();
}

void ClosureData::set_context_scope(const ContextScope& value) const {
  untag()->set_context_scope(value.ptr());
}

void ClosureData::set_implicit_static_closure(const Closure& closure) const {
  ASSERT(!closure.IsNull());
  ASSERT(untag()->closure() == Closure::null());
  untag()->set_closure<std::memory_order_release>(closure.ptr());
}

void FfiTrampolineData::set_c_signature(const FunctionType& value) const {
  untag()->set_c_signature(value.ptr());
}

void FfiTrampolineData::set_callback_id(int32_t callback_id) const {
  StoreNonPointer(&untag()->callback_id_, callback_id);
}

void FfiTrampolineData::set_is_leaf(bool is_leaf) const {
  StoreNonPointer(&untag()->is_leaf_, is_leaf);
}

void FfiTrampolineData::set_callback_exceptional_return(
    const Instance& value) const {
  untag()->set_callback_exceptional_return(value.ptr());
}

void FfiTrampolineData::set_trampoline_kind(FfiTrampolineKind kind) const {
  StoreNonPointer(&untag()->trampoline_kind_, static_cast<uint8_t>(kind));
}

FfiTrampolineDataPtr FfiTrampolineData::New() {
  ASSERT(Object::ffi_trampoline_data_class() != Class::null());
  const auto& data = FfiTrampolineData::Handle(
      Object::Allocate<FfiTrampolineData>(Heap::kOld));
  data.set_callback_id(-1);
  return data.ptr();
}

const char* FfiTrampolineData::ToCString() const {
  const FunctionType& c_sig = FunctionType::Handle(c_signature());
  return OS::SCreate(Thread::Current()->zone(),
                     "TrampolineData: c_signature=%s",
                     c_sig.ToUserVisibleCString());
}

FieldPtr Field::CloneFromOriginal() const {
  return this->Clone(*this);
}

FieldPtr Field::Original() const {
  if (IsNull()) {
    return Field::null();
  }
  if (untag()->owner()->IsField()) {
    return static_cast<FieldPtr>(untag()->owner());
  }
  return this->ptr();
}

intptr_t Field::guarded_cid() const {
#if defined(DEBUG)
  // This assertion ensures that the cid seen by the background compiler is
  // consistent. So the assertion passes if the field is a clone. It also
  // passes if the field is static, because we don't use field guards on
  // static fields. It also passes if we're compiling unoptimized
  // code (in which case the caller might get different answers if it obtains
  // the guarded cid multiple times).
  Thread* thread = Thread::Current();
#if defined(DART_PRECOMPILED_RUNTIME)
  ASSERT(!thread->IsInsideCompiler() || is_static());
#else
  ASSERT(!thread->IsInsideCompiler() ||
         ((CompilerState::Current().should_clone_fields() == !IsOriginal())) ||
         is_static());
#endif
#endif
  return LoadNonPointer<ClassIdTagType, std::memory_order_relaxed>(
      &untag()->guarded_cid_);
}

bool Field::is_nullable() const {
#if defined(DEBUG)
  // Same assert as guarded_cid(), because is_nullable() also needs to be
  // consistent for the background compiler.
  Thread* thread = Thread::Current();
#if defined(DART_PRECOMPILED_RUNTIME)
  ASSERT(!thread->IsInsideCompiler() || is_static());
#else
  ASSERT(!thread->IsInsideCompiler() ||
         ((CompilerState::Current().should_clone_fields() == !IsOriginal())) ||
         is_static());
#endif
#endif
  return is_nullable_unsafe();
}

void Field::SetOriginal(const Field& value) const {
  ASSERT(value.IsOriginal());
  ASSERT(!value.IsNull());
  untag()->set_owner(static_cast<ObjectPtr>(value.ptr()));
}

StringPtr Field::GetterName(const String& field_name) {
  return String::Concat(Symbols::GetterPrefix(), field_name);
}

StringPtr Field::GetterSymbol(const String& field_name) {
  return Symbols::FromGet(Thread::Current(), field_name);
}

StringPtr Field::LookupGetterSymbol(const String& field_name) {
  return Symbols::LookupFromGet(Thread::Current(), field_name);
}

StringPtr Field::SetterName(const String& field_name) {
  return String::Concat(Symbols::SetterPrefix(), field_name);
}

StringPtr Field::SetterSymbol(const String& field_name) {
  return Symbols::FromSet(Thread::Current(), field_name);
}

StringPtr Field::LookupSetterSymbol(const String& field_name) {
  return Symbols::LookupFromSet(Thread::Current(), field_name);
}

StringPtr Field::NameFromGetter(const String& getter_name) {
  return Symbols::New(Thread::Current(), getter_name, kGetterPrefixLength,
                      getter_name.Length() - kGetterPrefixLength);
}

StringPtr Field::NameFromSetter(const String& setter_name) {
  return Symbols::New(Thread::Current(), setter_name, kSetterPrefixLength,
                      setter_name.Length() - kSetterPrefixLength);
}

StringPtr Field::NameFromInit(const String& init_name) {
  return Symbols::New(Thread::Current(), init_name, kInitPrefixLength,
                      init_name.Length() - kInitPrefixLength);
}

bool Field::IsGetterName(const String& function_name) {
  return function_name.StartsWith(Symbols::GetterPrefix());
}

bool Field::IsSetterName(const String& function_name) {
  return function_name.StartsWith(Symbols::SetterPrefix());
}

bool Field::IsInitName(const String& function_name) {
  return function_name.StartsWith(Symbols::InitPrefix());
}
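
// For reference (assuming the usual VM accessor name mangling, where the
// getter prefix is "get:" and the setter prefix is "set:"):
//
//   Field::GetterName("foo")          -> "get:foo"
//   Field::SetterName("foo")          -> "set:foo"
//   Field::NameFromGetter("get:foo")  -> "foo"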

void Field::set_name(const String& value) const {
  ASSERT(value.IsSymbol());
  ASSERT(IsOriginal());
  untag()->set_name(value.ptr());
}

ObjectPtr Field::RawOwner() const {
  if (IsOriginal()) {
    return untag()->owner();
  } else {
    const Field& field = Field::Handle(Original());
    ASSERT(field.IsOriginal());
    ASSERT(!Object::Handle(field.untag()->owner()).IsField());
    return field.untag()->owner();
  }
}

ClassPtr Field::Owner() const {
  const Field& field = Field::Handle(Original());
  ASSERT(field.IsOriginal());
  const Object& obj = Object::Handle(field.untag()->owner());
  if (obj.IsClass()) {
    return Class::Cast(obj).ptr();
  }
  ASSERT(obj.IsPatchClass());
  return PatchClass::Cast(obj).wrapped_class();
}

ScriptPtr Field::Script() const {
  // NOTE(turnidge): If you update this function, you probably want to
  // update Class::PatchFieldsAndFunctions() at the same time.
  const Field& field = Field::Handle(Original());
  ASSERT(field.IsOriginal());
  const Object& obj = Object::Handle(field.untag()->owner());
  if (obj.IsClass()) {
    return Class::Cast(obj).script();
  }
  ASSERT(obj.IsPatchClass());
  return PatchClass::Cast(obj).script();
}

#if !defined(DART_PRECOMPILED_RUNTIME)
KernelProgramInfoPtr Field::KernelProgramInfo() const {
  const auto& owner = Object::Handle(RawOwner());
  if (owner.IsClass()) {
    return Class::Cast(owner).KernelProgramInfo();
  }
  return PatchClass::Cast(owner).kernel_program_info();
}
#endif

uint32_t Field::Hash() const {
  return String::HashRawSymbol(name());
}

void Field::InheritKernelOffsetFrom(const Field& src) const {
#if defined(DART_PRECOMPILED_RUNTIME)
  UNREACHABLE();
#else
  StoreNonPointer(&untag()->kernel_offset_, src.untag()->kernel_offset_);
#endif
}

#if !defined(DART_PRECOMPILED_RUNTIME)
TypedDataViewPtr Field::KernelLibrary() const {
  const auto& info = KernelProgramInfo::Handle(KernelProgramInfo());
  return info.KernelLibrary(KernelLibraryIndex());
}

intptr_t Field::KernelLibraryOffset() const {
  const intptr_t kernel_library_index = KernelLibraryIndex();
  if (kernel_library_index == -1) return 0;
  const auto& info = KernelProgramInfo::Handle(KernelProgramInfo());
  return info.KernelLibraryStartOffset(kernel_library_index);
}

intptr_t Field::KernelLibraryIndex() const {
  const Object& obj = Object::Handle(untag()->owner());
  // During background JIT compilation field objects are copied, and the copy
  // points to the original field via the owner field.
  if (obj.IsField()) {
    return Field::Cast(obj).KernelLibraryIndex();
  } else if (obj.IsClass()) {
    const auto& lib = Library::Handle(Class::Cast(obj).library());
    return lib.kernel_library_index();
  }
  ASSERT(obj.IsPatchClass());
  return PatchClass::Cast(obj).kernel_library_index();
}
#endif  // !defined(DART_PRECOMPILED_RUNTIME)

void Field::SetFieldTypeSafe(const AbstractType& value) const {
  ASSERT(IsOriginal());
  ASSERT(!value.IsNull());
  if (value.ptr() != type()) {
    untag()->set_type(value.ptr());
  }
}

// Called at finalization time.
void Field::SetFieldType(const AbstractType& value) const {
  DEBUG_ASSERT(
      IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
  SetFieldTypeSafe(value);
}

FieldPtr Field::New() {
  ASSERT(Object::field_class() != Class::null());
  return Object::Allocate<Field>(Heap::kOld);
}

void Field::InitializeNew(const Field& result,
                          const String& name,
                          bool is_static,
                          bool is_final,
                          bool is_const,
                          bool is_reflectable,
                          bool is_late,
                          const Object& owner,
                          TokenPosition token_pos,
                          TokenPosition end_token_pos) {
  result.set_kind_bits(0);
  result.set_name(name);
  result.set_is_static(is_static);
  if (is_static) {
    result.set_field_id_unsafe(-1);
  } else {
    result.SetOffset(0, 0);
  }
  result.set_is_final(is_final);
  result.set_is_const(is_const);
  result.set_is_reflectable(is_reflectable);
  result.set_is_late(is_late);
  result.set_owner(owner);
  result.set_token_pos(token_pos);
  result.set_end_token_pos(end_token_pos);
  result.set_has_nontrivial_initializer_unsafe(false);
  result.set_has_initializer_unsafe(false);
  // We will make the unboxing decision once we read the static type, or in
  // KernelLoader::ReadInferredType.
  result.set_is_unboxed_unsafe(false);
  result.set_initializer_changed_after_initialization(false);
  NOT_IN_PRECOMPILED(result.set_kernel_offset(0));
  result.set_has_pragma(false);
  result.set_static_type_exactness_state_unsafe(
      StaticTypeExactnessState::NotTracking());
  auto isolate_group = IsolateGroup::Current();

// Use field guards if they are enabled and the isolate group has never
// reloaded.
// TODO(johnmccutchan): The reload case assumes the worst case (everything is
// dynamic and possibly null). Attempt to relax this later.
#if defined(PRODUCT)
  const bool use_guarded_cid =
      FLAG_precompiled_mode || isolate_group->use_field_guards();
#else
  const bool use_guarded_cid =
      FLAG_precompiled_mode || (isolate_group->use_field_guards() &&
                                !isolate_group->HasAttemptedReload());
#endif  // defined(PRODUCT)
  result.set_guarded_cid_unsafe(use_guarded_cid ? kIllegalCid : kDynamicCid);
  result.set_is_nullable_unsafe(!use_guarded_cid);
  result.set_guarded_list_length_in_object_offset_unsafe(
      Field::kUnknownLengthOffset);
  // Presently, we only attempt to remember the list length for final fields.
  if (is_final && use_guarded_cid) {
    result.set_guarded_list_length_unsafe(Field::kUnknownFixedLength);
  } else {
    result.set_guarded_list_length_unsafe(Field::kNoFixedLength);
  }
}
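
// A note on the initial guard state chosen above: kIllegalCid acts as
// "no value seen yet" (the first store narrows it to a concrete cid), while
// kDynamicCid means "tracking disabled". See FieldGuardUpdater::ReviewGuards
// further down for the transitions between these states.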

FieldPtr Field::New(const String& name,
                    bool is_static,
                    bool is_final,
                    bool is_const,
                    bool is_reflectable,
                    bool is_late,
                    const Object& owner,
                    const AbstractType& type,
                    TokenPosition token_pos,
                    TokenPosition end_token_pos) {
  ASSERT(!owner.IsNull());
  const Field& result = Field::Handle(Field::New());
  InitializeNew(result, name, is_static, is_final, is_const, is_reflectable,
                is_late, owner, token_pos, end_token_pos);
  result.SetFieldTypeSafe(type);
#if !defined(DART_PRECOMPILED_RUNTIME)
  compiler::target::UnboxFieldIfSupported(result, type);
#endif
  return result.ptr();
}

FieldPtr Field::NewTopLevel(const String& name,
                            bool is_final,
                            bool is_const,
                            bool is_late,
                            const Object& owner,
                            TokenPosition token_pos,
                            TokenPosition end_token_pos) {
  ASSERT(!owner.IsNull());
  const Field& result = Field::Handle(Field::New());
  InitializeNew(result, name, true, /* is_static */
                is_final, is_const, true, /* is_reflectable */
                is_late, owner, token_pos, end_token_pos);
  return result.ptr();
}

FieldPtr Field::Clone(const Field& original) const {
  if (original.IsNull()) {
    return Field::null();
  }
  ASSERT(original.IsOriginal());
  Field& clone = Field::Handle();
  // Using relaxed loading is fine because concurrent field changes are all
  // guarded and will be reconciled during optimized code installation.
  clone ^= Object::Clone(*this, Heap::kOld, /*load_with_relaxed_atomics=*/true);
  clone.SetOriginal(original);
  clone.InheritKernelOffsetFrom(original);
  return clone.ptr();
}

int32_t Field::SourceFingerprint() const {
#if !defined(DART_PRECOMPILED_RUNTIME)
  return kernel::KernelSourceFingerprintHelper::CalculateFieldFingerprint(
      *this);
#else
  return 0;
#endif  // !defined(DART_PRECOMPILED_RUNTIME)
}

StringPtr Field::InitializingExpression() const {
  UNREACHABLE();
  return String::null();
}

const char* Field::UserVisibleNameCString() const {
  NoSafepointScope no_safepoint;
  if (FLAG_show_internal_names) {
    return String::Handle(name()).ToCString();
  }
  return String::ScrubName(String::Handle(name()), is_extension_member());
}

StringPtr Field::UserVisibleName() const {
  if (FLAG_show_internal_names) {
    return name();
  }
  return Symbols::New(
      Thread::Current(),
      String::ScrubName(String::Handle(name()), is_extension_member()));
}

intptr_t Field::guarded_list_length() const {
  return Smi::Value(untag()->guarded_list_length());
}

void Field::set_guarded_list_length_unsafe(intptr_t list_length) const {
  ASSERT(IsOriginal());
  untag()->set_guarded_list_length(Smi::New(list_length));
}

intptr_t Field::guarded_list_length_in_object_offset() const {
  return untag()->guarded_list_length_in_object_offset_ + kHeapObjectTag;
}

void Field::set_guarded_list_length_in_object_offset_unsafe(
    intptr_t list_length_offset) const {
  ASSERT(IsOriginal());
  StoreNonPointer<int8_t, int8_t, std::memory_order_relaxed>(
      &untag()->guarded_list_length_in_object_offset_,
      static_cast<int8_t>(list_length_offset - kHeapObjectTag));
  ASSERT(guarded_list_length_in_object_offset() == list_length_offset);
}

bool Field::NeedsSetter() const {
  // According to the Dart language specification, final fields don't have
  // a setter, except late final fields without an initializer.
  if (is_final()) {
    // Late final fields without an initializer always need a setter to check
    // whether they have already been initialized.
    if (is_late() && !has_initializer()) {
      return true;
    }
    return false;
  }

  // Non-final instance fields always need a setter.
  if (!is_static()) {
    return true;
  }

  // A setter is needed to perform null assertions.
  if (FLAG_null_assertions) {
    Thread* thread = Thread::Current();
    IsolateGroup* isolate_group = thread->isolate_group();
    if (!isolate_group->null_safety() && isolate_group->asserts()) {
      if (AbstractType::Handle(thread->zone(), type()).NeedsNullAssertion()) {
        return true;
      }
    }
  }

  // Otherwise, setters for static fields can be omitted
  // and the fields can be accessed directly.
  return false;
}
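
// For example (illustrative Dart):
//   int x = 0;         // instance, non-final: setter needed.
//   final int y = 0;   // final with initializer: no setter.
//   late final int z;  // late final, no initializer: checking setter needed.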

bool Field::NeedsGetter() const {
  // All instance fields need a getter.
  if (!is_static()) return true;

  // Static fields also need a getter if they have a non-trivial initializer,
  // because it needs to be initialized lazily.
  if (has_nontrivial_initializer()) return true;

  // Static late fields with no initializer also need a getter, to check
  // whether they have been initialized.
  return is_late() && !has_initializer();
}
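
// For example (illustrative Dart, static cases only):
//   static int a = 0;            // trivial initializer: no getter needed.
//   static final b = compute();  // non-trivial: lazy-initializing getter.
//   static late int c;           // late, no initializer: checking getter.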

const char* Field::ToCString() const {
  NoSafepointScope no_safepoint;
  if (IsNull()) {
    return "Field: null";
  }
  const char* kF0 = is_static() ? " static" : "";
  const char* kF1 = is_late() ? " late" : "";
  const char* kF2 = is_final() ? " final" : "";
  const char* kF3 = is_const() ? " const" : "";
  const char* field_name = String::Handle(name()).ToCString();
  const Class& cls = Class::Handle(Owner());
  const char* cls_name = String::Handle(cls.Name()).ToCString();
  return OS::SCreate(Thread::Current()->zone(), "Field <%s.%s>:%s%s%s%s",
                     cls_name, field_name, kF0, kF1, kF2, kF3);
}

// Build a closure object that gets (or sets) the contents of a static
// field f and cache the closure in a newly created static field
// named #f (or #f= in case of a setter).
InstancePtr Field::AccessorClosure(bool make_setter) const {
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  ASSERT(is_static());
  const Class& field_owner = Class::Handle(zone, Owner());

  String& closure_name = String::Handle(zone, this->name());
  closure_name = Symbols::FromConcat(thread, Symbols::HashMark(), closure_name);
  if (make_setter) {
    closure_name =
        Symbols::FromConcat(thread, Symbols::HashMark(), closure_name);
  }

  Field& closure_field = Field::Handle(zone);
  closure_field = field_owner.LookupStaticField(closure_name);
  if (!closure_field.IsNull()) {
    ASSERT(closure_field.is_static());
    const Instance& closure =
        Instance::Handle(zone, Instance::RawCast(closure_field.StaticValue()));
    ASSERT(!closure.IsNull());
    ASSERT(closure.IsClosure());
    return closure.ptr();
  }

  UNREACHABLE();
  return Instance::null();
}

InstancePtr Field::GetterClosure() const {
  return AccessorClosure(false);
}

InstancePtr Field::SetterClosure() const {
  return AccessorClosure(true);
}

WeakArrayPtr Field::dependent_code() const {
  DEBUG_ASSERT(
      IsolateGroup::Current()->program_lock()->IsCurrentThreadReader());
  return untag()->dependent_code();
}

void Field::set_dependent_code(const WeakArray& array) const {
  ASSERT(IsOriginal());
  DEBUG_ASSERT(
      IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
  untag()->set_dependent_code(array.ptr());
}

class FieldDependentArray : public WeakCodeReferences {
 public:
  explicit FieldDependentArray(const Field& field)
      : WeakCodeReferences(WeakArray::Handle(field.dependent_code())),
        field_(field) {}

  virtual void UpdateArrayTo(const WeakArray& value) {
    field_.set_dependent_code(value);
  }

  virtual void ReportDeoptimization(const Code& code) {
    if (FLAG_trace_deoptimization || FLAG_trace_deoptimization_verbose) {
      Function& function = Function::Handle(code.function());
      THR_Print("Deoptimizing %s because guard on field %s failed.\n",
                function.ToFullyQualifiedCString(), field_.ToCString());
    }
  }

  virtual void ReportSwitchingCode(const Code& code) {
    if (FLAG_trace_deoptimization || FLAG_trace_deoptimization_verbose) {
      Function& function = Function::Handle(code.function());
      THR_Print(
          "Switching '%s' to unoptimized code because guard"
          " on field '%s' was violated.\n",
          function.ToFullyQualifiedCString(), field_.ToCString());
    }
  }

 private:
  const Field& field_;
  DISALLOW_COPY_AND_ASSIGN(FieldDependentArray);
};
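
// FieldDependentArray ties field guards to optimized code: code compiled
// under a guard assumption registers itself via RegisterDependentCode()
// below, and DeoptimizeDependentCode() disables all registered code
// (reporting through the hooks above) when the guard is violated.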

void Field::RegisterDependentCode(const Code& code) const {
  ASSERT(IsOriginal());
  DEBUG_ASSERT(IsMutatorOrAtDeoptSafepoint());
  ASSERT(code.is_optimized());
  FieldDependentArray a(*this);
  a.Register(code);
}

void Field::DeoptimizeDependentCode(bool are_mutators_stopped) const {
  DEBUG_ASSERT(
      IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
  ASSERT(IsOriginal());
  FieldDependentArray a(*this);
  if (FLAG_trace_deoptimization && a.HasCodes()) {
    THR_Print("Deopt for field guard (field %s)\n", ToCString());
  }
  a.DisableCode(are_mutators_stopped);
}

bool Field::IsConsistentWith(const Field& other) const {
  return (untag()->guarded_cid_ == other.untag()->guarded_cid_) &&
         (untag()->is_nullable_ == other.untag()->is_nullable_) &&
         (untag()->guarded_list_length() ==
          other.untag()->guarded_list_length()) &&
         (is_unboxed() == other.is_unboxed()) &&
         (static_type_exactness_state().Encode() ==
          other.static_type_exactness_state().Encode());
}

bool Field::IsUninitialized() const {
  Thread* thread = Thread::Current();
  const FieldTable* field_table = thread->isolate()->field_table();
  const ObjectPtr raw_value = field_table->At(field_id());
  ASSERT(raw_value != Object::transition_sentinel().ptr());
  return raw_value == Object::sentinel().ptr();
}

FunctionPtr Field::EnsureInitializerFunction() const {
  ASSERT(has_nontrivial_initializer());
  ASSERT(IsOriginal());
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  Function& initializer = Function::Handle(zone, InitializerFunction());
  if (initializer.IsNull()) {
#if defined(DART_PRECOMPILED_RUNTIME)
    UNREACHABLE();
#else
    SafepointMutexLocker ml(
        thread->isolate_group()->initializer_functions_mutex());
    // Double-check after grabbing the lock.
    initializer = InitializerFunction();
    if (initializer.IsNull()) {
      initializer = kernel::CreateFieldInitializerFunction(thread, zone, *this);
    }
#endif
  }
  return initializer.ptr();
}

void Field::SetInitializerFunction(const Function& initializer) const {
#if defined(DART_PRECOMPILED_RUNTIME)
  UNREACHABLE();
#else
  ASSERT(IsOriginal());
  ASSERT(IsolateGroup::Current()
             ->initializer_functions_mutex()
             ->IsOwnedByCurrentThread());
  // We have to ensure that all stores into the initializer function object
  // happen before releasing the pointer to the initializer, as it may be
  // accessed without grabbing the lock.
  untag()->set_initializer_function<std::memory_order_release>(
      initializer.ptr());
#endif
}

bool Field::HasInitializerFunction() const {
  return untag()->initializer_function() != Function::null();
}

ErrorPtr Field::InitializeInstance(const Instance& instance) const {
  ASSERT(IsOriginal());
  ASSERT(is_instance());
  ASSERT(instance.GetField(*this) == Object::sentinel().ptr());
  Object& value = Object::Handle();

  if (has_nontrivial_initializer()) {
    const Function& initializer = Function::Handle(EnsureInitializerFunction());
    const Array& args = Array::Handle(Array::New(1));
    args.SetAt(0, instance);
    value = DartEntry::InvokeFunction(initializer, args);
    if (!value.IsNull() && value.IsError()) {
      return Error::Cast(value).ptr();
    }
  } else {
    if (is_late() && !has_initializer()) {
      Exceptions::ThrowLateFieldNotInitialized(String::Handle(name()));
      UNREACHABLE();
    }
#if defined(DART_PRECOMPILED_RUNTIME)
    UNREACHABLE();
#else
    // Our trivial initializer is `null`. Any non-`null` initializer is
    // non-trivial (see `KernelLoader::CheckForInitializer()`).
    value = Object::null();
#endif
  }
  ASSERT(value.IsNull() || value.IsInstance());
  if (is_late() && is_final() &&
      (instance.GetField(*this) != Object::sentinel().ptr())) {
    Exceptions::ThrowLateFieldAssignedDuringInitialization(
        String::Handle(name()));
    UNREACHABLE();
  }
  instance.SetField(*this, value);
  return Error::null();
}

ErrorPtr Field::InitializeStatic() const {
  ASSERT(IsOriginal());
  ASSERT(is_static());
  if (StaticValue() == Object::sentinel().ptr()) {
    auto& value = Object::Handle();
    if (is_late()) {
      if (!has_initializer()) {
        Exceptions::ThrowLateFieldNotInitialized(String::Handle(name()));
        UNREACHABLE();
      }
      value = EvaluateInitializer();
      if (value.IsError()) {
        return Error::Cast(value).ptr();
      }
      if (is_final() && (StaticValue() != Object::sentinel().ptr())) {
        Exceptions::ThrowLateFieldAssignedDuringInitialization(
            String::Handle(name()));
        UNREACHABLE();
      }
    } else {
      SetStaticValue(Object::transition_sentinel());
      value = EvaluateInitializer();
      if (value.IsError()) {
        SetStaticValue(Object::null_instance());
        return Error::Cast(value).ptr();
      }
    }
    ASSERT(value.IsNull() || value.IsInstance());
    SetStaticValue(value.IsNull() ? Instance::null_instance()
                                  : Instance::Cast(value));
    return Error::null();
  } else if (StaticValue() == Object::transition_sentinel().ptr()) {
    ASSERT(!is_late());
    const Array& ctor_args = Array::Handle(Array::New(1));
    const String& field_name = String::Handle(name());
    ctor_args.SetAt(0, field_name);
    Exceptions::ThrowByType(Exceptions::kCyclicInitializationError, ctor_args);
    UNREACHABLE();
  }
  return Error::null();
}
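
// For example (illustrative Dart): with a self-referential initializer such
// as `static int x = x + 1;` (possible in pre-null-safety code), the
// non-late path above first stores the transition sentinel, so re-entering
// InitializeStatic() from the initializer finds the sentinel and throws a
// cyclic-initialization error instead of recursing forever.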

ObjectPtr Field::StaticConstFieldValue() const {
  ASSERT(is_static() &&
         (is_const() || (is_final() && has_trivial_initializer())));

  auto thread = Thread::Current();
  auto zone = thread->zone();
  auto initial_field_table = thread->isolate_group()->initial_field_table();

  // We can safely cache the value of the static const field in the initial
  // field table.
  auto& value = Object::Handle(
      zone, initial_field_table->At(field_id(), /*concurrent_use=*/true));
  if (value.ptr() == Object::sentinel().ptr()) {
    // Fields with trivial initializers get their initial value
    // eagerly when they are registered.
    ASSERT(is_const());
    ASSERT(has_initializer());
    ASSERT(has_nontrivial_initializer());
    value = EvaluateInitializer();
    if (!value.IsError()) {
      ASSERT(value.IsNull() || value.IsInstance());
      SetStaticConstFieldValue(value.IsNull() ? Instance::null_instance()
                                              : Instance::Cast(value));
    }
  }
  return value.ptr();
}

void Field::SetStaticConstFieldValue(const Instance& value,
                                     bool assert_initializing_store) const {
  ASSERT(is_static());
  auto thread = Thread::Current();
  auto initial_field_table = thread->isolate_group()->initial_field_table();

  SafepointWriteRwLocker ml(thread, thread->isolate_group()->program_lock());
  ASSERT(initial_field_table->At(field_id()) == Object::sentinel().ptr() ||
         initial_field_table->At(field_id()) == value.ptr() ||
         !assert_initializing_store);
  initial_field_table->SetAt(field_id(),
                             value.IsNull() ? Instance::null_instance().ptr()
                                            : Instance::Cast(value).ptr(),
                             /*concurrent_use=*/true);
}

ObjectPtr Field::EvaluateInitializer() const {
  ASSERT(Thread::Current()->IsDartMutatorThread());

#if !defined(DART_PRECOMPILED_RUNTIME)
  if (is_static() && is_const()) {
    return kernel::EvaluateStaticConstFieldInitializer(*this);
  }
#endif  // !defined(DART_PRECOMPILED_RUNTIME)

  const Function& initializer = Function::Handle(EnsureInitializerFunction());
  return DartEntry::InvokeFunction(initializer, Object::empty_array());
}

static intptr_t GetListLength(const Object& value) {
  if (value.IsTypedDataBase()) {
    return TypedDataBase::Cast(value).Length();
  } else if (value.IsArray()) {
    return Array::Cast(value).Length();
  } else if (value.IsGrowableObjectArray()) {
    // List length is variable.
    return Field::kNoFixedLength;
  }
  return Field::kNoFixedLength;
}

static intptr_t GetListLengthOffset(intptr_t cid) {
  if (IsTypedDataClassId(cid) || IsTypedDataViewClassId(cid) ||
      IsUnmodifiableTypedDataViewClassId(cid) ||
      IsExternalTypedDataClassId(cid)) {
    return TypedData::length_offset();
  } else if (cid == kArrayCid || cid == kImmutableArrayCid) {
    return Array::length_offset();
  } else if (cid == kGrowableObjectArrayCid) {
    // List length is variable.
    return Field::kUnknownLengthOffset;
  }
  return Field::kUnknownLengthOffset;
}

const char* Field::GuardedPropertiesAsCString() const {
  if (guarded_cid() == kIllegalCid) {
    return "<?>";
  } else if (guarded_cid() == kDynamicCid) {
    ASSERT(!static_type_exactness_state().IsExactOrUninitialized());
    return "<*>";
  }

  Zone* zone = Thread::Current()->zone();

  const char* exactness = "";
  if (static_type_exactness_state().IsTracking()) {
    exactness =
        zone->PrintToString(" {%s}", static_type_exactness_state().ToCString());
  }

  const Class& cls =
      Class::Handle(IsolateGroup::Current()->class_table()->At(guarded_cid()));
  const char* class_name = String::Handle(cls.Name()).ToCString();

  if (IsBuiltinListClassId(guarded_cid()) && !is_nullable() && is_final()) {
    ASSERT(guarded_list_length() != kUnknownFixedLength);
    if (guarded_list_length() == kNoFixedLength) {
      return zone->PrintToString("<%s [*]%s>", class_name, exactness);
    } else {
      return zone->PrintToString(
          "<%s [%" Pd " @%" Pd "]%s>", class_name, guarded_list_length(),
          guarded_list_length_in_object_offset(), exactness);
    }
  }

  return zone->PrintToString("<%s %s%s>",
                             is_nullable() ? "nullable" : "not-nullable",
                             class_name, exactness);
}
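
// Example outputs (illustrative): "<?>" before any store has been observed,
// "<*>" once tracking has been abandoned, "<_List [3 @8]>" for a final
// non-nullable list field with a known fixed length, and
// "<not-nullable _Double>" for an ordinary guarded field.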

void Field::InitializeGuardedListLengthInObjectOffset(bool unsafe) const {
  auto setter = unsafe ? &Field::set_guarded_list_length_in_object_offset_unsafe
                       : &Field::set_guarded_list_length_in_object_offset;
  ASSERT(IsOriginal());
  if (needs_length_check() &&
      (guarded_list_length() != Field::kUnknownFixedLength)) {
    const intptr_t offset = GetListLengthOffset(guarded_cid());
    (this->*setter)(offset);
    ASSERT(offset != Field::kUnknownLengthOffset);
  } else {
    (this->*setter)(Field::kUnknownLengthOffset);
  }
}

class FieldGuardUpdater {
 public:
  FieldGuardUpdater(const Field* field, const Object& value);

  bool IsUpdateNeeded() {
    return does_guarded_cid_need_update_ || does_is_nullable_need_update_ ||
           does_list_length_and_offset_need_update_ ||
           does_static_type_exactness_state_need_update_;
  }
  void DoUpdate();

 private:
  void ReviewExactnessState();
  void ReviewGuards();

  intptr_t guarded_cid() { return guarded_cid_; }
  void set_guarded_cid(intptr_t guarded_cid) {
    guarded_cid_ = guarded_cid;
    does_guarded_cid_need_update_ = true;
  }

  bool is_nullable() { return is_nullable_; }
  void set_is_nullable(bool is_nullable) {
    is_nullable_ = is_nullable;
    does_is_nullable_need_update_ = true;
  }

  intptr_t guarded_list_length() { return list_length_; }
  void set_guarded_list_length_and_offset(
      intptr_t list_length,
      intptr_t list_length_in_object_offset) {
    list_length_ = list_length;
    list_length_in_object_offset_ = list_length_in_object_offset;
    does_list_length_and_offset_need_update_ = true;
  }

  StaticTypeExactnessState static_type_exactness_state() {
    return static_type_exactness_state_;
  }
  void set_static_type_exactness_state(StaticTypeExactnessState state) {
    static_type_exactness_state_ = state;
    does_static_type_exactness_state_need_update_ = true;
  }

  const Field* field_;
  const Object& value_;

  intptr_t guarded_cid_;
  bool is_nullable_;
  intptr_t list_length_;
  intptr_t list_length_in_object_offset_;
  StaticTypeExactnessState static_type_exactness_state_;

  bool does_guarded_cid_need_update_ = false;
  bool does_is_nullable_need_update_ = false;
  bool does_list_length_and_offset_need_update_ = false;
  bool does_static_type_exactness_state_need_update_ = false;
};

void FieldGuardUpdater::ReviewGuards() {
  ASSERT(field_->IsOriginal());
  const intptr_t cid = value_.GetClassId();

  if (guarded_cid() == kIllegalCid) {
    set_guarded_cid(cid);
    set_is_nullable(cid == kNullCid);

    // Start tracking the length if needed.
    ASSERT((guarded_list_length() == Field::kUnknownFixedLength) ||
           (guarded_list_length() == Field::kNoFixedLength));
    if (field_->needs_length_check()) {
      ASSERT(guarded_list_length() == Field::kUnknownFixedLength);
      set_guarded_list_length_and_offset(GetListLength(value_),
                                         GetListLengthOffset(cid));
    }

    if (FLAG_trace_field_guards) {
      THR_Print(" => %s\n", field_->GuardedPropertiesAsCString());
    }
    return;
  }

  if ((cid == guarded_cid()) || ((cid == kNullCid) && is_nullable())) {
    // The class id of the assigned value matches the expected class id and
    // nullability.

    // If we are tracking the length, check whether it still matches.
    if (field_->needs_length_check() &&
        (guarded_list_length() != GetListLength(value_))) {
      ASSERT(guarded_list_length() != Field::kUnknownFixedLength);
      set_guarded_list_length_and_offset(Field::kNoFixedLength,
                                         Field::kUnknownLengthOffset);
      return;
    }

    // Everything matches.
    return;
  }

  if ((cid == kNullCid) && !is_nullable()) {
    // Assigning a null value to a non-nullable field makes it nullable.
    set_is_nullable(true);
  } else if ((cid != kNullCid) && (guarded_cid() == kNullCid)) {
    // Assigning a non-null value to a field that previously contained only
    // null turns it into a nullable field with the given class id.
    ASSERT(is_nullable());
    set_guarded_cid(cid);
  } else {
    // Give up on tracking the class id of values contained in this field.
    ASSERT(guarded_cid() != cid);
    set_guarded_cid(kDynamicCid);
    set_is_nullable(true);
  }

  // If we were tracking the length, drop the collected feedback.
  if (field_->needs_length_check()) {
    ASSERT(guarded_list_length() != Field::kUnknownFixedLength);
    set_guarded_list_length_and_offset(Field::kNoFixedLength,
                                       Field::kUnknownLengthOffset);
  }
}
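
// A sketch of the guard transitions implemented above:
//
//   kIllegalCid  --first store----------------> (cid, nullability, length)
//   (cid)        --store of null--------------> (cid, nullable)
//   (kNullCid)   --store of non-null cid------> (cid, nullable)
//   (cid A)      --store of different cid B---> kDynamicCid (give up)
//
// The later transitions also drop any collected length feedback, as does a
// length mismatch in the matching-cid case.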
12568
bool Class::FindInstantiationOf(Zone* zone,
                                const Class& cls,
                                GrowableArray<const Type*>* path,
                                bool consider_only_super_classes) const {
  ASSERT(cls.is_type_finalized());
  if (cls.ptr() == ptr()) {
    return true;  // Found instantiation.
  }

  Class& cls2 = Class::Handle(zone);
  Type& super = Type::Handle(zone, super_type());
  if (!super.IsNull() && !super.IsObjectType()) {
    cls2 = super.type_class();
    if (path != nullptr) {
      path->Add(&super);
    }
    if (cls2.FindInstantiationOf(zone, cls, path,
                                 consider_only_super_classes)) {
      return true;  // Found instantiation.
    }
    if (path != nullptr) {
      path->RemoveLast();
    }
  }

  if (!consider_only_super_classes) {
    Array& super_interfaces = Array::Handle(zone, interfaces());
    for (intptr_t i = 0; i < super_interfaces.Length(); i++) {
      super ^= super_interfaces.At(i);
      cls2 = super.type_class();
      if (path != nullptr) {
        path->Add(&super);
      }
      if (cls2.FindInstantiationOf(zone, cls, path)) {
        return true;  // Found instantiation.
      }
      if (path != nullptr) {
        path->RemoveLast();
      }
    }
  }

  return false;  // Not found.
}

bool Class::FindInstantiationOf(Zone* zone,
                                const Type& type,
                                GrowableArray<const Type*>* path,
                                bool consider_only_super_classes) const {
  return FindInstantiationOf(zone, Class::Handle(zone, type.type_class()),
                             path, consider_only_super_classes);
}

TypePtr Class::GetInstantiationOf(Zone* zone, const Class& cls) const {
  if (ptr() == cls.ptr()) {
    return DeclarationType();
  }
  if (FindInstantiationOf(zone, cls, /*consider_only_super_classes=*/true)) {
    // Since [cls] is a superclass of [this], use [cls]'s declaration type.
    return cls.DeclarationType();
  }
  const auto& decl_type = Type::Handle(zone, DeclarationType());
  GrowableArray<const Type*> path(zone, 0);
  if (!FindInstantiationOf(zone, cls, &path)) {
    return Type::null();
  }
  Thread* thread = Thread::Current();
  ASSERT(!path.is_empty());
  auto& calculated_type = Type::Handle(zone, decl_type.ptr());
  auto& calculated_type_class =
      Class::Handle(zone, calculated_type.type_class());
  auto& calculated_type_args =
      TypeArguments::Handle(zone, calculated_type.arguments());
  calculated_type_args = calculated_type_args.ToInstantiatorTypeArguments(
      thread, calculated_type_class);
  for (auto* const type : path) {
    calculated_type ^= type->ptr();
    if (!calculated_type.IsInstantiated()) {
      calculated_type ^= calculated_type.InstantiateFrom(
          calculated_type_args, Object::null_type_arguments(), kAllFree,
          Heap::kNew);
    }
    calculated_type_class = calculated_type.type_class();
    calculated_type_args = calculated_type.arguments();
    calculated_type_args = calculated_type_args.ToInstantiatorTypeArguments(
        thread, calculated_type_class);
  }
  ASSERT_EQUAL(calculated_type.type_class_id(), cls.id());
  return calculated_type.ptr();
}

TypePtr Class::GetInstantiationOf(Zone* zone, const Type& type) const {
  return GetInstantiationOf(zone, Class::Handle(zone, type.type_class()));
}

void Field::SetStaticValue(const Object& value) const {
  auto thread = Thread::Current();
  ASSERT(thread->IsDartMutatorThread());
  ASSERT(value.IsNull() || value.IsSentinel() || value.IsInstance());

  ASSERT(is_static());  // Valid only for static dart fields.
  const intptr_t id = field_id();
  ASSERT(id >= 0);

  SafepointWriteRwLocker ml(thread, thread->isolate_group()->program_lock());
  thread->isolate()->field_table()->SetAt(id, value.ptr());
}

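// A field is "trivially exact" when the value's type arguments sit at a
// fixed offset inside the instance, so the exactness guard can compare them
// directly against the field's static type arguments.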
static StaticTypeExactnessState TrivialTypeExactnessFor(const Class& cls) {
  const intptr_t type_arguments_offset = cls.host_type_arguments_field_offset();
  ASSERT(type_arguments_offset != Class::kNoTypeArguments);
  if (StaticTypeExactnessState::CanRepresentAsTriviallyExact(
          type_arguments_offset / kCompressedWordSize)) {
    return StaticTypeExactnessState::TriviallyExact(type_arguments_offset /
                                                    kCompressedWordSize);
  } else {
    return StaticTypeExactnessState::NotExact();
  }
}

static const char* SafeTypeArgumentsToCString(const TypeArguments& args) {
  return (args.ptr() == TypeArguments::null()) ? "<null>" : args.ToCString();
}

StaticTypeExactnessState StaticTypeExactnessState::Compute(
    const Type& static_type,
    const Instance& value,
    bool print_trace /* = false */) {
  ASSERT(!value.IsNull());  // Should be handled by the caller.
  ASSERT(value.ptr() != Object::sentinel().ptr());
  ASSERT(value.ptr() != Object::transition_sentinel().ptr());

  Thread* thread = Thread::Current();
  Zone* const zone = thread->zone();
  const TypeArguments& static_type_args =
      TypeArguments::Handle(zone, static_type.GetInstanceTypeArguments(thread));

  TypeArguments& args = TypeArguments::Handle(zone);

  ASSERT(static_type.IsFinalized());
  const Class& cls = Class::Handle(zone, value.clazz());
  GrowableArray<const Type*> path(10);

  bool is_super_class = true;
  if (!cls.FindInstantiationOf(zone, static_type, &path,
                               /*consider_only_super_classes=*/true)) {
    is_super_class = false;
    bool found_super_interface =
        cls.FindInstantiationOf(zone, static_type, &path);
    ASSERT(found_super_interface);
  }

  // Trivial case: field has type G<T0, ..., Tn> and value has type
  // G<U0, ..., Un>. Check if type arguments match.
  if (path.is_empty()) {
    ASSERT(cls.ptr() == static_type.type_class());
    args = value.GetTypeArguments();
    // TODO(dartbug.com/34170) Evaluate if comparing relevant subvectors (that
    // disregard the superclass's own arguments) improves precision of the
    // tracking.
    if (args.ptr() == static_type_args.ptr()) {
      return TrivialTypeExactnessFor(cls);
    }

    if (print_trace) {
      THR_Print(" expected %s got %s type arguments\n",
                SafeTypeArgumentsToCString(static_type_args),
                SafeTypeArgumentsToCString(args));
    }
    return StaticTypeExactnessState::NotExact();
  }

  // Value has type C<U0, ..., Un> and field has type G<T0, ..., Tn> and
  // G != C. Compute C<X0, ..., Xn> at G (Xi are free type arguments).
  // Path array contains a chain of immediate supertypes S0 <: S1 <: ... Sn,
  // such that S0 is an immediate supertype of C and Sn is G<...>.
  // Each Si might depend on type parameters of the previous supertype S{i-1}.
  // To compute C<X0, ..., Xn> at G we walk the chain backwards and
  // instantiate Si using type parameters of S{i-1} which gives us a type
  // depending on type parameters of S{i-2}.
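  // For example (informally), given `class C<T> extends S<List<T>> {}` and
  // `class S<U> extends G<Map<U, int>> {}`, the path is
  // [S<List<T>>, G<Map<U, int>>]; instantiating the last entry with the type
  // arguments of S<List<T>> yields G<Map<List<X0>, int>>, i.e. C<X0> at G.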
  Type& type = Type::Handle(zone, path.Last()->ptr());
  for (intptr_t i = path.length() - 2; (i >= 0) && !type.IsInstantiated();
       i--) {
    args = path[i]->GetInstanceTypeArguments(thread, /*canonicalize=*/false);
    type ^= type.InstantiateFrom(args, TypeArguments::null_type_arguments(),
                                 kAllFree, Heap::kNew);
  }

  if (type.IsInstantiated()) {
    // C<X0, ..., Xn> at G is fully instantiated and does not depend on
    // Xi. In this case just check if type arguments match.
    args = type.GetInstanceTypeArguments(thread, /*canonicalize=*/false);
    if (args.Equals(static_type_args)) {
      return is_super_class ? StaticTypeExactnessState::HasExactSuperClass()
                            : StaticTypeExactnessState::HasExactSuperType();
    }

    if (print_trace) {
      THR_Print(" expected %s got %s type arguments\n",
                SafeTypeArgumentsToCString(static_type_args),
                SafeTypeArgumentsToCString(args));
    }

    return StaticTypeExactnessState::NotExact();
  }

  // The most complicated case: C<X0, ..., Xn> at G depends on
  // Xi values. To compare type arguments we would need to instantiate
  // it fully from the value's type arguments and compare with <U0, ..., Un>.
  // However this would complicate the fast path in the native code. To avoid
  // this complication we optimize for the trivial case: we check if
  // C<X0, ..., Xn> at G is exactly G<X0, ..., Xn>, which means we can simply
  // compare the value's type arguments (<U0, ..., Un>) to the field's type
  // arguments (<T0, ..., Tn>) to establish whether the field type is exact.
  ASSERT(cls.IsGeneric());
  const intptr_t num_type_params = cls.NumTypeParameters();
  bool trivial_case =
      (num_type_params ==
       Class::Handle(zone, static_type.type_class()).NumTypeParameters()) &&
      (value.GetTypeArguments() == static_type_args.ptr());
  if (!trivial_case && FLAG_trace_field_guards) {
    THR_Print("Not a simple case: %" Pd " vs %" Pd
              " type parameters, %s vs %s type arguments\n",
              num_type_params,
              Class::Handle(zone, static_type.type_class()).NumTypeParameters(),
              SafeTypeArgumentsToCString(
                  TypeArguments::Handle(zone, value.GetTypeArguments())),
              SafeTypeArgumentsToCString(static_type_args));
  }

  AbstractType& type_arg = AbstractType::Handle(zone);
  args = type.GetInstanceTypeArguments(thread, /*canonicalize=*/false);
  for (intptr_t i = 0; (i < num_type_params) && trivial_case; i++) {
    type_arg = args.TypeAt(i);
    if (!type_arg.IsTypeParameter() ||
        (TypeParameter::Cast(type_arg).index() != i)) {
      if (FLAG_trace_field_guards) {
        THR_Print(" => encountered %s at index % " Pd "\n",
                  type_arg.ToCString(), i);
      }
      trivial_case = false;
    }
  }

  return trivial_case ? TrivialTypeExactnessFor(cls)
                      : StaticTypeExactnessState::NotExact();
}

const char* StaticTypeExactnessState::ToCString() const {
  if (!IsTracking()) {
    return "not-tracking";
  } else if (!IsExactOrUninitialized()) {
    return "not-exact";
  } else if (IsTriviallyExact()) {
    return Thread::Current()->zone()->PrintToString(
        "trivially-exact(%hhu)", GetTypeArgumentsOffsetInWords());
  } else if (IsHasExactSuperType()) {
    return "has-exact-super-type";
  } else if (IsHasExactSuperClass()) {
    return "has-exact-super-class";
  } else {
    ASSERT(IsUninitialized());
    return "uninitialized-exactness";
  }
}

void FieldGuardUpdater::ReviewExactnessState() {
  if (!static_type_exactness_state().IsExactOrUninitialized()) {
    // Nothing to update.
    return;
  }

  if (guarded_cid() == kDynamicCid) {
    if (FLAG_trace_field_guards) {
      THR_Print(
          " => switching off exactness tracking because guarded cid is "
          "dynamic\n");
    }
    set_static_type_exactness_state(StaticTypeExactnessState::NotExact());
    return;
  }

  // If we are storing null into a field or we have an exact super type
  // then there is nothing to do.
  if (value_.IsNull() || static_type_exactness_state().IsHasExactSuperType() ||
      static_type_exactness_state().IsHasExactSuperClass()) {
    return;
  }

  // If we are storing a non-null value into a field that is considered
  // to be trivially exact then we need to check whether the value has an
  // appropriate type.
  ASSERT(guarded_cid() != kNullCid);

  const Type& field_type = Type::Cast(AbstractType::Handle(field_->type()));
  const Instance& instance = Instance::Cast(value_);

  if (static_type_exactness_state().IsTriviallyExact()) {
    const TypeArguments& args =
        TypeArguments::Handle(instance.GetTypeArguments());
    const TypeArguments& field_type_args = TypeArguments::Handle(
        field_type.GetInstanceTypeArguments(Thread::Current()));
    if (args.ptr() == field_type_args.ptr()) {
      return;
    }

    if (FLAG_trace_field_guards) {
      THR_Print(" expected %s got %s type arguments\n",
                field_type_args.ToCString(), args.ToCString());
    }

    set_static_type_exactness_state(StaticTypeExactnessState::NotExact());
    return;
  }

  ASSERT(static_type_exactness_state().IsUninitialized());
  set_static_type_exactness_state(StaticTypeExactnessState::Compute(
      field_type, instance, FLAG_trace_field_guards));
  return;
}

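// Captures the field's current guard state up front; ReviewGuards() and
// ReviewExactnessState() then record any needed changes, which DoUpdate()
// later commits while mutators are stopped.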
FieldGuardUpdater::FieldGuardUpdater(const Field* field, const Object& value)
    : field_(field),
      value_(value),
      guarded_cid_(field->guarded_cid()),
      is_nullable_(field->is_nullable()),
      list_length_(field->guarded_list_length()),
      list_length_in_object_offset_(
          field->guarded_list_length_in_object_offset()),
      static_type_exactness_state_(field->static_type_exactness_state()) {
  ReviewGuards();
  ReviewExactnessState();
}

void FieldGuardUpdater::DoUpdate() {
  if (does_guarded_cid_need_update_) {
    field_->set_guarded_cid(guarded_cid_);
  }
  if (does_is_nullable_need_update_) {
    field_->set_is_nullable(is_nullable_);
  }
  if (does_list_length_and_offset_need_update_) {
    field_->set_guarded_list_length(list_length_);
    field_->set_guarded_list_length_in_object_offset(
        list_length_in_object_offset_);
  }
  if (does_static_type_exactness_state_need_update_) {
    field_->set_static_type_exactness_state(static_type_exactness_state_);
  }
}

void Field::RecordStore(const Object& value) const {
  ASSERT(IsOriginal());
  Thread* const thread = Thread::Current();
  if (!thread->isolate_group()->use_field_guards()) {
    return;
  }

  // We should never try to record a sentinel.
  ASSERT(value.ptr() != Object::sentinel().ptr());

  SafepointWriteRwLocker ml(thread, thread->isolate_group()->program_lock());
  if ((guarded_cid() == kDynamicCid) ||
      (is_nullable() && value.ptr() == Object::null())) {
    // Nothing to do: the field is not guarded or we are storing null into
    // a nullable field.
    return;
  }

  if (FLAG_trace_field_guards) {
    THR_Print("Store %s %s <- %s\n", ToCString(), GuardedPropertiesAsCString(),
              value.ToCString());
  }

  FieldGuardUpdater updater(this, value);
  if (updater.IsUpdateNeeded()) {
    if (FLAG_trace_field_guards) {
      THR_Print(" => %s\n", GuardedPropertiesAsCString());
    }
    // Nobody else could have updated the guard state since we are holding the
    // program lock for writing. But we still need to stop mutators while
    // updating the guard state, because optimized code must not keep running
    // against the updated field guards.
    auto isolate_group = IsolateGroup::Current();
    isolate_group->RunWithStoppedMutators([&]() {
      updater.DoUpdate();
      DeoptimizeDependentCode(/*are_mutators_stopped=*/true);
    });
  }
}

void Field::ForceDynamicGuardedCidAndLength() const {
  if (!is_unboxed()) {
    set_guarded_cid(kDynamicCid);
    set_is_nullable(true);
  }
  set_guarded_list_length(Field::kNoFixedLength);
  set_guarded_list_length_in_object_offset(Field::kUnknownLengthOffset);
  if (static_type_exactness_state().IsTracking()) {
    set_static_type_exactness_state(StaticTypeExactnessState::NotExact());
  }
  // Drop any code that relied on the above assumptions.
  DeoptimizeDependentCode();
}

StringPtr Script::resolved_url() const {
#if defined(DART_PRECOMPILER)
  return String::RawCast(
      WeakSerializationReference::Unwrap(untag()->resolved_url()));
#else
  return untag()->resolved_url();
#endif
}

bool Script::HasSource() const {
  return untag()->source() != String::null();
}

StringPtr Script::Source() const {
  return untag()->source();
}

bool Script::IsPartOfDartColonLibrary() const {
  const String& script_url = String::Handle(url());
  return (script_url.StartsWith(Symbols::DartScheme()) ||
          script_url.StartsWith(Symbols::DartSchemePrivate()));
}

#if !defined(DART_PRECOMPILED_RUNTIME)
void Script::LoadSourceFromKernel(const uint8_t* kernel_buffer,
                                  intptr_t kernel_buffer_len) const {
  String& uri = String::Handle(resolved_url());
  String& source = String::Handle(kernel::KernelLoader::FindSourceForScript(
      kernel_buffer, kernel_buffer_len, uri));
  set_source(source);
}

void Script::InitializeFromKernel(
    const KernelProgramInfo& info,
    intptr_t script_index,
    const TypedData& line_starts,
    const TypedDataView& constant_coverage) const {
  StoreNonPointer(&untag()->kernel_script_index_, script_index);
  untag()->set_kernel_program_info(info.ptr());
  untag()->set_line_starts(line_starts.ptr());
  untag()->set_debug_positions(Array::null_array().ptr());
  NOT_IN_PRODUCT(untag()->set_constant_coverage(constant_coverage.ptr()));
}
#endif

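// The returned array encodes, for each line that has debug positions, a null
// line separator followed by the 1-based line number and then a
// (token position, column) pair for every debug position on that line.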
GrowableObjectArrayPtr Script::GenerateLineNumberArray() const {
  Zone* zone = Thread::Current()->zone();
  const GrowableObjectArray& info =
      GrowableObjectArray::Handle(zone, GrowableObjectArray::New());
  const Object& line_separator = Object::Handle(zone);
  if (line_starts() == TypedData::null()) {
    // Scripts in the AOT snapshot do not have a line starts array.
    // A well-formed line number array has a leading null.
    info.Add(line_separator);  // New line.
    return info.ptr();
  }
#if !defined(DART_PRECOMPILED_RUNTIME)
  Smi& value = Smi::Handle(zone);
  const TypedData& line_starts_data = TypedData::Handle(zone, line_starts());
  intptr_t line_count = line_starts_data.Length();
  const Array& debug_positions_array = Array::Handle(debug_positions());
  intptr_t token_count = debug_positions_array.Length();
  int token_index = 0;

  kernel::KernelLineStartsReader line_starts_reader(line_starts_data, zone);
  for (int line_index = 0; line_index < line_count; ++line_index) {
    intptr_t start = line_starts_reader.At(line_index);
    // Output the rest of the tokens if we have no next line.
    intptr_t end = TokenPosition::kMaxSourcePos;
    if (line_index + 1 < line_count) {
      end = line_starts_reader.At(line_index + 1);
    }
    bool first = true;
    while (token_index < token_count) {
      value ^= debug_positions_array.At(token_index);
      intptr_t debug_position = value.Value();
      if (debug_position >= end) break;

      if (first) {
        info.Add(line_separator);          // New line.
        value = Smi::New(line_index + 1);  // Line number.
        info.Add(value);
        first = false;
      }

      value ^= debug_positions_array.At(token_index);
      info.Add(value);                               // Token position.
      value = Smi::New(debug_position - start + 1);  // Column.
      info.Add(value);
      ++token_index;
    }
  }
#endif  // !defined(DART_PRECOMPILED_RUNTIME)
  return info.ptr();
}


TokenPosition Script::MaxPosition() const {
#if !defined(DART_PRECOMPILED_RUNTIME)
  if (HasCachedMaxPosition()) {
    return TokenPosition::Deserialize(
        UntaggedScript::CachedMaxPositionBitField::decode(
            untag()->flags_and_max_position_));
  }
  auto const zone = Thread::Current()->zone();
  if (!HasCachedMaxPosition() && line_starts() != TypedData::null()) {
    const auto& starts = TypedData::Handle(zone, line_starts());
    kernel::KernelLineStartsReader reader(starts, zone);
    const intptr_t max_position = reader.MaxPosition();
    SetCachedMaxPosition(max_position);
    SetHasCachedMaxPosition(true);
    return TokenPosition::Deserialize(max_position);
  }
#endif
  return TokenPosition::kNoSource;
}

void Script::set_url(const String& value) const {
  untag()->set_url(value.ptr());
}

void Script::set_resolved_url(const String& value) const {
  untag()->set_resolved_url(value.ptr());
}

void Script::set_source(const String& value) const {
  untag()->set_source(value.ptr());
}

#if !defined(PRODUCT) && !defined(DART_PRECOMPILED_RUNTIME)
TypedDataViewPtr Script::constant_coverage() const {
  return untag()->constant_coverage();
}
#endif  // !defined(PRODUCT) && !defined(DART_PRECOMPILED_RUNTIME)

void Script::set_debug_positions(const Array& value) const {
  untag()->set_debug_positions(value.ptr());
}

TypedDataPtr Script::line_starts() const {
  return untag()->line_starts();
}

ArrayPtr Script::debug_positions() const {
#if !defined(DART_PRECOMPILED_RUNTIME)
  Array& debug_positions_array = Array::Handle(untag()->debug_positions());
  if (debug_positions_array.IsNull()) {
    // This is created lazily. Now we need it.
    CollectTokenPositionsFor();
  }
#endif  // !defined(DART_PRECOMPILED_RUNTIME)
  return untag()->debug_positions();
}

#if !defined(DART_PRECOMPILED_RUNTIME)
bool Script::HasCachedMaxPosition() const {
  return UntaggedScript::HasCachedMaxPositionBit::decode(
      untag()->flags_and_max_position_);
}

void Script::SetHasCachedMaxPosition(bool value) const {
  StoreNonPointer(&untag()->flags_and_max_position_,
                  UntaggedScript::HasCachedMaxPositionBit::update(
                      value, untag()->flags_and_max_position_));
}

void Script::SetCachedMaxPosition(intptr_t value) const {
  StoreNonPointer(&untag()->flags_and_max_position_,
                  UntaggedScript::CachedMaxPositionBitField::update(
                      value, untag()->flags_and_max_position_));
}
#endif

void Script::set_load_timestamp(int64_t value) const {
  StoreNonPointer(&untag()->load_timestamp_, value);
}

bool Script::IsValidTokenPosition(TokenPosition token_pos) const {
  const TokenPosition& max_position = MaxPosition();
  // We may end up with scripts that have the empty string as a source file
  // in testing and the like, so allow any token position when the max
  // position is 0 as well as when it is kNoSource.
  return !max_position.IsReal() || !token_pos.IsReal() ||
         max_position.Pos() == 0 || token_pos <= max_position;
}

#if !defined(DART_PRECOMPILED_RUNTIME)
static bool IsLetter(int32_t c) {
  return (('A' <= c) && (c <= 'Z')) || (('a' <= c) && (c <= 'z'));
}

static bool IsDecimalDigit(int32_t c) {
  return '0' <= c && c <= '9';
}

static bool IsIdentStartChar(int32_t c) {
  return IsLetter(c) || (c == '_') || (c == '$');
}

static bool IsIdentChar(int32_t c) {
  return IsLetter(c) || IsDecimalDigit(c) || (c == '_') || (c == '$');
}
#endif  // !defined(DART_PRECOMPILED_RUNTIME)

bool Script::GetTokenLocation(const TokenPosition& token_pos,
                              intptr_t* line,
                              intptr_t* column) const {
  ASSERT(line != nullptr);
#if defined(DART_PRECOMPILED_RUNTIME)
  // Scripts in the AOT snapshot do not have a line starts array.
  return false;
#else
  if (!token_pos.IsReal()) return false;

  auto const zone = Thread::Current()->zone();
  const TypedData& line_starts_data = TypedData::Handle(zone, line_starts());
  if (line_starts_data.IsNull()) return false;
  kernel::KernelLineStartsReader line_starts_reader(line_starts_data, zone);
  return line_starts_reader.LocationForPosition(token_pos.Pos(), line, column);
#endif  // defined(DART_PRECOMPILED_RUNTIME)
}

intptr_t Script::GetTokenLength(const TokenPosition& token_pos) const {
#if defined(DART_PRECOMPILED_RUNTIME)
  // Scripts in the AOT snapshot do not have their source.
  return -1;
#else
  if (!HasSource() || !token_pos.IsReal()) return -1;
  auto const zone = Thread::Current()->zone();
  // We don't explicitly save this data: load the source and compute the
  // length from there.
  const String& source = String::Handle(zone, Source());
  const intptr_t start = token_pos.Pos();
  if (start >= source.Length()) return -1;  // Can't determine token_len.
  intptr_t end = start;
  if (IsIdentStartChar(source.CharAt(end++))) {
    for (; end < source.Length(); ++end) {
      if (!IsIdentChar(source.CharAt(end))) break;
    }
  }
  return end - start;
#endif
}

bool Script::TokenRangeAtLine(intptr_t line_number,
                              TokenPosition* first_token_index,
                              TokenPosition* last_token_index) const {
  ASSERT(first_token_index != nullptr && last_token_index != nullptr);
#if defined(DART_PRECOMPILED_RUNTIME)
  // Scripts in the AOT snapshot do not have a line starts array.
  return false;
#else
  // Line numbers are 1-indexed.
  if (line_number <= 0) return false;
  Zone* zone = Thread::Current()->zone();
  const TypedData& line_starts_data = TypedData::Handle(zone, line_starts());
  kernel::KernelLineStartsReader line_starts_reader(line_starts_data, zone);
  if (!line_starts_reader.TokenRangeAtLine(line_number, first_token_index,
                                           last_token_index)) {
    return false;
  }
#if defined(DEBUG)
  intptr_t source_length;
  if (!HasSource()) {
    Smi& value = Smi::Handle(zone);
    const Array& debug_positions_array = Array::Handle(zone, debug_positions());
    value ^= debug_positions_array.At(debug_positions_array.Length() - 1);
    source_length = value.Value();
  } else {
    const String& source = String::Handle(zone, Source());
    source_length = source.Length();
  }
  ASSERT(last_token_index->Serialize() <= source_length);
#endif
  return true;
#endif  // !defined(DART_PRECOMPILED_RUNTIME)
}

// Returns the index in the given source string for the given (1-based)
// absolute line and column numbers. The line and column offsets are used to
// calculate the absolute line and column number for the starting index in
// the source.
//
// If the given line number is outside the range of lines represented by the
// source, the given column number is invalid for the given line, or a
// negative starting index is given, a negative value is returned to indicate
// failure.
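//
// For example, for src = "a\nbc", GetRelativeSourceIndex(src, 2, 0, 2)
// returns 3, the index of 'c' (line 2, column 2).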
static intptr_t GetRelativeSourceIndex(const String& src,
                                       intptr_t line,
                                       intptr_t line_offset = 0,
                                       intptr_t column = 1,
                                       intptr_t column_offset = 0,
                                       intptr_t starting_index = 0) {
  if (starting_index < 0 || line < 1 || column < 1 || line <= line_offset ||
      (line == line_offset + 1 && column <= column_offset)) {
    return -1;
  }
  intptr_t len = src.Length();
  intptr_t current_line = line_offset + 1;
  intptr_t current_index = starting_index;
  for (; current_index < len; current_index++) {
    if (current_line == line) {
      break;
    }
    const uint16_t c = src.CharAt(current_index);
    if (c == '\n' || c == '\r') {
      current_line++;
    }
    if (c == '\r' && current_index + 1 < len &&
        src.CharAt(current_index + 1) == '\n') {
      // \r\n is treated as a single line terminator.
      current_index++;
    }
  }
  if (current_line != line) {
    return -1;
  }
  // Only adjust with the column offset when still on the first line.
  intptr_t current_column = 1 + (line == line_offset + 1 ? column_offset : 0);
  for (; current_index < len; current_index++, current_column++) {
    if (current_column == column) {
      return current_index;
    }
    const uint16_t c = src.CharAt(current_index);
    if (c == '\n' || c == '\r') {
      break;
    }
  }
  // Check for a column value representing the source's end.
  if (current_column == column) {
    return current_index;
  }
  return -1;
}

StringPtr Script::GetLine(intptr_t line_number, Heap::Space space) const {
  if (!HasSource()) {
    return Symbols::OptimizedOut().ptr();
  }
  const String& src = String::Handle(Source());
  const intptr_t start =
      GetRelativeSourceIndex(src, line_number, line_offset());
  if (start < 0) {
    return Symbols::Empty().ptr();
  }
  intptr_t end = start;
  for (; end < src.Length(); end++) {
    const uint16_t c = src.CharAt(end);
    if (c == '\n' || c == '\r') {
      break;
    }
  }
  return String::SubString(src, start, end - start, space);
}

StringPtr Script::GetSnippet(intptr_t from_line,
                             intptr_t from_column,
                             intptr_t to_line,
                             intptr_t to_column) const {
  if (!HasSource()) {
    return Symbols::OptimizedOut().ptr();
  }
  const String& src = String::Handle(Source());
  const intptr_t start = GetRelativeSourceIndex(src, from_line, line_offset(),
                                                from_column, col_offset());
  // Lines and columns are 1-based, so we need to subtract one to get offsets.
  const intptr_t end = GetRelativeSourceIndex(
      src, to_line, from_line - 1, to_column, from_column - 1, start);
  // Only need to check end, because a negative start results in a negative
  // end.
  if (end < 0) {
    return String::null();
  }
  return String::SubString(src, start, end - start);
}

ScriptPtr Script::New(const String& url, const String& source) {
  return Script::New(url, url, source);
}

ScriptPtr Script::New(const String& url,
                      const String& resolved_url,
                      const String& source) {
  ASSERT(Object::script_class() != Class::null());
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  const Script& result =
      Script::Handle(zone, Object::Allocate<Script>(Heap::kOld));
  result.set_url(String::Handle(zone, Symbols::New(thread, url)));
  result.set_resolved_url(
      String::Handle(zone, Symbols::New(thread, resolved_url)));
  result.set_source(source);
  NOT_IN_PRECOMPILED(ASSERT_EQUAL(result.HasCachedMaxPosition(), false));
  ASSERT_EQUAL(result.kernel_script_index(), 0);
  if (FLAG_remove_script_timestamps_for_test) {
    ASSERT_EQUAL(result.load_timestamp(), 0);
  } else {
    result.set_load_timestamp(OS::GetCurrentTimeMillis());
  }
  return result.ptr();
}

const char* Script::ToCString() const {
  const String& name = String::Handle(url());
  return OS::SCreate(Thread::Current()->zone(), "Script(%s)",
                     name.ToCString());
}

LibraryPtr Script::FindLibrary() const {
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  auto isolate_group = thread->isolate_group();
  const GrowableObjectArray& libs = GrowableObjectArray::Handle(
      zone, isolate_group->object_store()->libraries());
  Library& lib = Library::Handle(zone);
  Array& scripts = Array::Handle(zone);
  for (intptr_t i = 0; i < libs.Length(); i++) {
    lib ^= libs.At(i);
    scripts = lib.LoadedScripts();
    for (intptr_t j = 0; j < scripts.Length(); j++) {
      if (scripts.At(j) == ptr()) {
        return lib.ptr();
      }
    }
  }
  return Library::null();
}

DictionaryIterator::DictionaryIterator(const Library& library)
    : array_(Array::Handle(library.dictionary())),
      // Last element in array is a Smi indicating the number of entries used.
      size_(Array::Handle(library.dictionary()).Length() - 1),
      next_ix_(0) {
  MoveToNextObject();
}

ObjectPtr DictionaryIterator::GetNext() {
  ASSERT(HasNext());
  int ix = next_ix_++;
  MoveToNextObject();
  ASSERT(array_.At(ix) != Object::null());
  return array_.At(ix);
}

void DictionaryIterator::MoveToNextObject() {
  Object& obj = Object::Handle(array_.At(next_ix_));
  while (obj.IsNull() && HasNext()) {
    next_ix_++;
    obj = array_.At(next_ix_);
  }
}

ClassDictionaryIterator::ClassDictionaryIterator(const Library& library,
                                                 IterationKind kind)
    : DictionaryIterator(library),
      toplevel_class_(Class::Handle((kind == kIteratePrivate)
                                        ? library.toplevel_class()
                                        : Class::null())) {
  MoveToNextClass();
}

ClassPtr ClassDictionaryIterator::GetNextClass() {
  ASSERT(HasNext());
  Class& cls = Class::Handle();
  if (next_ix_ < size_) {
    int ix = next_ix_++;
    cls ^= array_.At(ix);
    MoveToNextClass();
    return cls.ptr();
  }
  ASSERT(!toplevel_class_.IsNull());
  cls = toplevel_class_.ptr();
  toplevel_class_ = Class::null();
  return cls.ptr();
}

void ClassDictionaryIterator::MoveToNextClass() {
  Object& obj = Object::Handle();
  while (next_ix_ < size_) {
    obj = array_.At(next_ix_);
    if (obj.IsClass()) {
      return;
    }
    next_ix_++;
  }
}

static void ReportTooManyImports(const Library& lib) {
  const String& url = String::Handle(lib.url());
  Report::MessageF(Report::kError, Script::Handle(lib.LookupScript(url)),
                   TokenPosition::kNoSource, Report::AtLocation,
                   "too many imports in library '%s'", url.ToCString());
  UNREACHABLE();
}

bool Library::IsAnyCoreLibrary() const {
  String& url_str = Thread::Current()->StringHandle();
  url_str = url();
  return url_str.StartsWith(Symbols::DartScheme()) ||
         url_str.StartsWith(Symbols::DartSchemePrivate());
}

void Library::set_num_imports(intptr_t value) const {
  if (!Utils::IsUint(16, value)) {
    ReportTooManyImports(*this);
  }
  StoreNonPointer(&untag()->num_imports_, value);
}

void Library::set_name(const String& name) const {
  ASSERT(name.IsSymbol());
  untag()->set_name(name.ptr());
}

void Library::set_url(const String& url) const {
  untag()->set_url(url.ptr());
}

void Library::set_private_key(const String& key) const {
  untag()->set_private_key(key.ptr());
}

#if !defined(DART_PRECOMPILED_RUNTIME)
void Library::set_kernel_program_info(const KernelProgramInfo& info) const {
  untag()->set_kernel_program_info(info.ptr());
}

TypedDataViewPtr Library::KernelLibrary() const {
  const auto& info = KernelProgramInfo::Handle(kernel_program_info());
  return info.KernelLibrary(kernel_library_index());
}

intptr_t Library::KernelLibraryOffset() const {
  const auto& info = KernelProgramInfo::Handle(kernel_program_info());
  return info.KernelLibraryStartOffset(kernel_library_index());
}
#endif

void Library::set_loading_unit(const LoadingUnit& value) const {
  untag()->set_loading_unit(value.ptr());
}

void Library::SetName(const String& name) const {
  // Only set name once.
  ASSERT(!Loaded());
  set_name(name);
}

void Library::SetLoadInProgress() const {
  // Must not already be in the process of being loaded.
  ASSERT(untag()->load_state_ <= UntaggedLibrary::kLoadRequested);
  StoreNonPointer(&untag()->load_state_, UntaggedLibrary::kLoadInProgress);
}

void Library::SetLoadRequested() const {
  // Must not be already loaded.
  ASSERT(untag()->load_state_ == UntaggedLibrary::kAllocated);
  StoreNonPointer(&untag()->load_state_, UntaggedLibrary::kLoadRequested);
}

void Library::SetLoaded() const {
  // Should not be already loaded or just allocated.
  ASSERT(LoadInProgress() || LoadRequested());
  StoreNonPointer(&untag()->load_state_, UntaggedLibrary::kLoaded);
}

void Library::AddMetadata(const Object& declaration,
                          intptr_t kernel_offset) const {
#if defined(DART_PRECOMPILED_RUNTIME)
  UNREACHABLE();
#else
  Thread* thread = Thread::Current();
  ASSERT(thread->isolate_group()->program_lock()->IsCurrentThreadWriter());

  MetadataMap map(metadata());
  map.UpdateOrInsert(declaration, Smi::Handle(Smi::New(kernel_offset)));
  set_metadata(map.Release());
#endif  // defined(DART_PRECOMPILED_RUNTIME)
}

ObjectPtr Library::GetMetadata(const Object& declaration) const {
#if defined(DART_PRECOMPILED_RUNTIME)
  return Object::empty_array().ptr();
#else
  RELEASE_ASSERT(declaration.IsClass() || declaration.IsField() ||
                 declaration.IsFunction() || declaration.IsLibrary() ||
                 declaration.IsTypeParameter() || declaration.IsNamespace());

  auto thread = Thread::Current();
  auto zone = thread->zone();

  if (declaration.IsLibrary()) {
    // Ensure the top-level class is loaded, as it may contain the library's
    // annotations.
    const auto& cls = Class::Handle(zone, toplevel_class());
    if (!cls.IsNull()) {
      cls.EnsureDeclarationLoaded();
    }
  }
  Object& value = Object::Handle(zone);
  {
    SafepointReadRwLocker ml(thread, thread->isolate_group()->program_lock());
    MetadataMap map(metadata());
    value = map.GetOrNull(declaration);
    set_metadata(map.Release());
  }
  if (value.IsNull()) {
    // There is no metadata for this object.
    return Object::empty_array().ptr();
  }
  if (!value.IsSmi()) {
    // Metadata is already evaluated.
    ASSERT(value.IsArray());
    return value.ptr();
  }
  const auto& smi_value = Smi::Cast(value);
  intptr_t kernel_offset = smi_value.Value();
  ASSERT(kernel_offset > 0);
  const auto& evaluated_value = Object::Handle(
      zone, kernel::EvaluateMetadata(
                *this, kernel_offset,
                /* is_annotations_offset = */ declaration.IsLibrary() ||
                    declaration.IsNamespace()));
  if (evaluated_value.IsArray() || evaluated_value.IsNull()) {
    ASSERT(evaluated_value.ptr() != Object::empty_array().ptr());
    SafepointWriteRwLocker ml(thread, thread->isolate_group()->program_lock());
    MetadataMap map(metadata());
    if (map.GetOrNull(declaration) == smi_value.ptr()) {
      map.UpdateOrInsert(declaration, evaluated_value);
    } else {
      ASSERT(map.GetOrNull(declaration) == evaluated_value.ptr());
    }
    set_metadata(map.Release());
  }
  return evaluated_value.ptr();
#endif  // defined(DART_PRECOMPILED_RUNTIME)
}

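// Returns true for names that are private to this library: identifiers that
// start with '_' and getter/setter names of the form "get:_..." / "set:_...".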
static bool ShouldBePrivate(const String& name) {
  return (name.Length() >= 1 && name.CharAt(0) == '_') ||
         (name.Length() >= 5 &&
          (name.CharAt(4) == '_' &&
           (name.CharAt(0) == 'g' || name.CharAt(0) == 's') &&
           name.CharAt(1) == 'e' && name.CharAt(2) == 't' &&
           name.CharAt(3) == ':'));
}

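// Resolves [name] in this library's scope: first the resolved-names cache,
// then the local dictionary (also trying the getter and setter manglings),
// and finally, for non-private names, the imported libraries.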
ObjectPtr Library::ResolveName(const String& name) const {
  Object& obj = Object::Handle();
  if (FLAG_use_lib_cache && LookupResolvedNamesCache(name, &obj)) {
    return obj.ptr();
  }
  EnsureTopLevelClassIsFinalized();
  obj = LookupLocalObject(name);
  if (!obj.IsNull()) {
    // Names that are in this library's dictionary and are unmangled
    // are not cached. This reduces the size of the cache.
    return obj.ptr();
  }
  String& accessor_name = String::Handle(Field::LookupGetterSymbol(name));
  if (!accessor_name.IsNull()) {
    obj = LookupLocalObject(accessor_name);
  }
  if (obj.IsNull()) {
    accessor_name = Field::LookupSetterSymbol(name);
    if (!accessor_name.IsNull()) {
      obj = LookupLocalObject(accessor_name);
    }
    if (obj.IsNull() && !ShouldBePrivate(name)) {
      obj = LookupImportedObject(name);
    }
  }
  AddToResolvedNamesCache(name, obj);
  return obj.ptr();
}

class StringEqualsTraits {
 public:
  static const char* Name() { return "StringEqualsTraits"; }
  static bool ReportStats() { return false; }

  static bool IsMatch(const Object& a, const Object& b) {
    return String::Cast(a).Equals(String::Cast(b));
  }
  static uword Hash(const Object& obj) { return String::Cast(obj).Hash(); }
};
typedef UnorderedHashMap<StringEqualsTraits> ResolvedNamesMap;

// Returns true if the name is found in the cache, false if there is no cache
// hit. obj is set to the cached entry. It may be null, indicating that the
// name does not resolve to anything in this library.
bool Library::LookupResolvedNamesCache(const String& name, Object* obj) const {
  if (resolved_names() == Array::null()) {
    return false;
  }
  ResolvedNamesMap cache(resolved_names());
  bool present = false;
  *obj = cache.GetOrNull(name, &present);
  // The mutator thread may add entries and therefore change
  // 'resolved_names()' while a background compilation is running; only
  // ASSERT that 'resolved_names()' has not changed when on the mutator
  // thread.
#if defined(DEBUG)
  if (Thread::Current()->IsDartMutatorThread()) {
    ASSERT(cache.Release().ptr() == resolved_names());
  } else {
    // Release must be called in debug mode.
    cache.Release();
  }
#endif
  return present;
}

// Add a name to the resolved name cache. This name resolves to the
// given object in this library scope. obj may be null, which means
// the name does not resolve to anything in this library scope.
void Library::AddToResolvedNamesCache(const String& name,
                                      const Object& obj) const {
  if (!FLAG_use_lib_cache || Compiler::IsBackgroundCompilation()) {
    return;
  }
  if (resolved_names() == Array::null()) {
    InitResolvedNamesCache();
  }
  ResolvedNamesMap cache(resolved_names());
  cache.UpdateOrInsert(name, obj);
  untag()->set_resolved_names(cache.Release().ptr());
}

bool Library::LookupExportedNamesCache(const String& name, Object* obj) const {
  ASSERT(FLAG_use_exp_cache);
  if (exported_names() == Array::null()) {
    return false;
  }
  ResolvedNamesMap cache(exported_names());
  bool present = false;
  *obj = cache.GetOrNull(name, &present);
  // The mutator thread may add entries and therefore change
  // 'exported_names()' while a background compilation is running; only
  // ASSERT that 'exported_names()' has not changed when on the mutator
  // thread.
#if defined(DEBUG)
  if (Thread::Current()->IsDartMutatorThread()) {
    ASSERT(cache.Release().ptr() == exported_names());
  } else {
    // Release must be called in debug mode.
    cache.Release();
  }
#endif
  return present;
}

void Library::AddToExportedNamesCache(const String& name,
                                      const Object& obj) const {
  if (!FLAG_use_exp_cache || Compiler::IsBackgroundCompilation()) {
    return;
  }
  if (exported_names() == Array::null()) {
    InitExportedNamesCache();
  }
  ResolvedNamesMap cache(exported_names());
  cache.UpdateOrInsert(name, obj);
  untag()->set_exported_names(cache.Release().ptr());
}

void Library::InvalidateResolvedName(const String& name) const {
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  Object& entry = Object::Handle(zone);
  if (FLAG_use_lib_cache && LookupResolvedNamesCache(name, &entry)) {
    // TODO(koda): Support deleted sentinel in snapshots and remove only
    // 'name'.
    ClearResolvedNamesCache();
  }
  if (!FLAG_use_exp_cache) {
    return;
  }
  // When a new name is added to a library, we need to invalidate all
  // caches that contain an entry for this name. If the name was previously
  // looked up but could not be resolved, the cache contains a null entry.
  GrowableObjectArray& libs = GrowableObjectArray::Handle(
      zone, thread->isolate_group()->object_store()->libraries());
  Library& lib = Library::Handle(zone);
  intptr_t num_libs = libs.Length();
  for (intptr_t i = 0; i < num_libs; i++) {
    lib ^= libs.At(i);
    if (lib.LookupExportedNamesCache(name, &entry)) {
      lib.ClearExportedNamesCache();
    }
  }
}

// Invalidate all exported names caches in the isolate group.
void Library::InvalidateExportedNamesCaches() {
  GrowableObjectArray& libs = GrowableObjectArray::Handle(
      IsolateGroup::Current()->object_store()->libraries());
  Library& lib = Library::Handle();
  intptr_t num_libs = libs.Length();
  for (intptr_t i = 0; i < num_libs; i++) {
    lib ^= libs.At(i);
    lib.ClearExportedNamesCache();
  }
}

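// The library dictionary is an open-addressing hash table with linear
// probing; the last slot holds a Smi with the number of entries in use.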
void Library::RehashDictionary(const Array& old_dict,
                               intptr_t new_dict_size) const {
  intptr_t old_dict_size = old_dict.Length() - 1;
  const Array& new_dict =
      Array::Handle(Array::New(new_dict_size + 1, Heap::kOld));
  // Rehash all elements from the original dictionary
  // to the newly allocated array.
  Object& entry = Class::Handle();
  String& entry_name = String::Handle();
  Object& new_entry = Object::Handle();
  intptr_t used = 0;
  for (intptr_t i = 0; i < old_dict_size; i++) {
    entry = old_dict.At(i);
    if (!entry.IsNull()) {
      entry_name = entry.DictionaryName();
      ASSERT(!entry_name.IsNull());
      const intptr_t hash = entry_name.Hash();
      intptr_t index = hash % new_dict_size;
      new_entry = new_dict.At(index);
      while (!new_entry.IsNull()) {
        index = (index + 1) % new_dict_size;  // Move to next element.
        new_entry = new_dict.At(index);
      }
      new_dict.SetAt(index, entry);
      used++;
    }
  }
  // Set used count.
  ASSERT(used < new_dict_size);  // Need at least one empty slot.
  new_entry = Smi::New(used);
  new_dict.SetAt(new_dict_size, new_entry);
  // Remember the new dictionary now.
  untag()->set_dictionary(new_dict.ptr());
}

void Library::AddObject(const Object& obj, const String& name) const {
  ASSERT(Thread::Current()->IsDartMutatorThread());
  ASSERT(obj.IsClass() || obj.IsFunction() || obj.IsField() ||
         obj.IsLibraryPrefix());
  ASSERT(name.Equals(String::Handle(obj.DictionaryName())));
  ASSERT(LookupLocalObject(name) == Object::null());
  const Array& dict = Array::Handle(dictionary());
  intptr_t dict_size = dict.Length() - 1;
  intptr_t index = name.Hash() % dict_size;

  Object& entry = Object::Handle();
  entry = dict.At(index);
  // An empty spot will be found because we keep the hash set at most 75% full.
  while (!entry.IsNull()) {
    index = (index + 1) % dict_size;
    entry = dict.At(index);
  }

  // Insert the object at the empty slot.
  dict.SetAt(index, obj);
  // One more element added.
  intptr_t used_elements = Smi::Value(Smi::RawCast(dict.At(dict_size))) + 1;
  const Smi& used = Smi::Handle(Smi::New(used_elements));
  dict.SetAt(dict_size, used);  // Update used count.

  // Rehash if the dictionary is more than 75% full.
  if (used_elements > ((dict_size / 4) * 3)) {
    // TODO(iposva): Avoid exponential growth.
    RehashDictionary(dict, 2 * dict_size);
  }

  // Invalidate the cache of loaded scripts.
  if (loaded_scripts() != Array::null()) {
    untag()->set_loaded_scripts(Array::null());
  }
}

// Lookup a name in the library's re-export namespace.
// This lookup can occur from two different threads: background compiler and
// mutator thread.
ObjectPtr Library::LookupReExport(const String& name,
                                  ZoneGrowableArray<intptr_t>* trail) const {
  if (!HasExports()) {
    return Object::null();
  }

  if (trail == nullptr) {
    trail = new ZoneGrowableArray<intptr_t>();
  }
  Object& obj = Object::Handle();
  if (FLAG_use_exp_cache && LookupExportedNamesCache(name, &obj)) {
    return obj.ptr();
  }

  const intptr_t lib_id = this->index();
  ASSERT(lib_id >= 0);  // We use -1 to indicate that a cycle was found.
  trail->Add(lib_id);
  const Array& exports = Array::Handle(this->exports());
  Namespace& ns = Namespace::Handle();
  for (int i = 0; i < exports.Length(); i++) {
    ns ^= exports.At(i);
    obj = ns.Lookup(name, trail);
    if (!obj.IsNull()) {
      // The Lookup call above may return a setter x= when we are looking
      // for the name x. Make sure we only return when a matching name
      // is found.
      String& obj_name = String::Handle(obj.DictionaryName());
      if (Field::IsSetterName(obj_name) == Field::IsSetterName(name)) {
        break;
      }
    }
  }
  bool in_cycle = (trail->RemoveLast() < 0);
  if (FLAG_use_exp_cache && !in_cycle && !Compiler::IsBackgroundCompilation()) {
    AddToExportedNamesCache(name, obj);
  }
  return obj.ptr();
}

ObjectPtr Library::LookupEntry(const String& name, intptr_t* index) const {
  ASSERT(!IsNull());
  Thread* thread = Thread::Current();
  REUSABLE_ARRAY_HANDLESCOPE(thread);
  REUSABLE_OBJECT_HANDLESCOPE(thread);
  REUSABLE_STRING_HANDLESCOPE(thread);
  Array& dict = thread->ArrayHandle();
  dict = dictionary();
  intptr_t dict_size = dict.Length() - 1;
  *index = name.Hash() % dict_size;
  Object& entry = thread->ObjectHandle();
  String& entry_name = thread->StringHandle();
  entry = dict.At(*index);
  // Search the entry in the hash set.
  while (!entry.IsNull()) {
    entry_name = entry.DictionaryName();
    ASSERT(!entry_name.IsNull());
    if (entry_name.Equals(name)) {
      return entry.ptr();
    }
    *index = (*index + 1) % dict_size;
    entry = dict.At(*index);
  }
  return Object::null();
}

void Library::AddClass(const Class& cls) const {
  ASSERT(!Compiler::IsBackgroundCompilation());
  const String& class_name = String::Handle(cls.Name());
  AddObject(cls, class_name);
  // Link class to this library.
  cls.set_library(*this);
  InvalidateResolvedName(class_name);
}

static void AddScriptIfUnique(const GrowableObjectArray& scripts,
                              const Script& candidate) {
  if (candidate.IsNull()) {
    return;
  }
  Script& script_obj = Script::Handle();

  for (int i = 0; i < scripts.Length(); i++) {
    script_obj ^= scripts.At(i);
    if (script_obj.ptr() == candidate.ptr()) {
      // We already have a reference to this script.
      return;
    }
  }
  // Add script to the list of scripts.
  scripts.Add(candidate);
}

ArrayPtr Library::LoadedScripts() const {
  // We compute the list of loaded scripts lazily. The result is
  // cached in loaded_scripts_.
  if (loaded_scripts() == Array::null()) {
    // TODO(jensj): This can be cleaned up.
    // It really should just return the content of `used_scripts`, and there
    // should be no need to do the O(n) call to `AddScriptIfUnique` per
    // script.

    // Iterate over the library dictionary and collect all scripts.
    const GrowableObjectArray& scripts =
        GrowableObjectArray::Handle(GrowableObjectArray::New(8));
    Object& entry = Object::Handle();
    Class& cls = Class::Handle();
    Script& owner_script = Script::Handle();
    DictionaryIterator it(*this);
    while (it.HasNext()) {
      entry = it.GetNext();
      if (entry.IsClass()) {
        owner_script = Class::Cast(entry).script();
      } else if (entry.IsFunction()) {
        owner_script = Function::Cast(entry).script();
      } else if (entry.IsField()) {
        owner_script = Field::Cast(entry).Script();
      } else {
        continue;
      }
      AddScriptIfUnique(scripts, owner_script);
    }

    // Add all scripts from patch classes.
    GrowableObjectArray& patches = GrowableObjectArray::Handle(used_scripts());
    for (intptr_t i = 0; i < patches.Length(); i++) {
      entry = patches.At(i);
      if (entry.IsClass()) {
        owner_script = Class::Cast(entry).script();
      } else {
        ASSERT(entry.IsScript());
        owner_script = Script::Cast(entry).ptr();
      }
      AddScriptIfUnique(scripts, owner_script);
    }

    cls = toplevel_class();
    if (!cls.IsNull()) {
      owner_script = cls.script();
      AddScriptIfUnique(scripts, owner_script);
      // Special case: Scripts that only contain external top-level functions
      // are not included above, but can be referenced through a library's
      // anonymous classes. Example: dart-core:identical.dart.
      Function& func = Function::Handle();
      Array& functions = Array::Handle(cls.current_functions());
      for (intptr_t j = 0; j < functions.Length(); j++) {
        func ^= functions.At(j);
        if (func.is_external()) {
          owner_script = func.script();
          AddScriptIfUnique(scripts, owner_script);
        }
      }
    }

    // Create the array of scripts and cache it in loaded_scripts_.
    const Array& scripts_array = Array::Handle(Array::MakeFixedLength(scripts));
    untag()->set_loaded_scripts(scripts_array.ptr());
  }
  return loaded_scripts();
}

// TODO(hausner): we might want to add a script dictionary to the
// library class to make this lookup faster.
ScriptPtr Library::LookupScript(const String& url,
                                bool useResolvedUri /* = false */) const {
  const intptr_t url_length = url.Length();
  if (url_length == 0) {
    return Script::null();
  }
  const Array& scripts = Array::Handle(LoadedScripts());
  Script& script = Script::Handle();
  String& script_url = String::Handle();
  const intptr_t num_scripts = scripts.Length();
  for (int i = 0; i < num_scripts; i++) {
    script ^= scripts.At(i);
    if (useResolvedUri) {
      // Used for urls with 'org-dartlang-sdk:' or 'file:' schemes.
      script_url = script.resolved_url();
    } else {
      // Used for urls with 'dart:', 'package:', or 'file:' schemes.
      script_url = script.url();
    }
    const intptr_t start_idx = script_url.Length() - url_length;
    if ((start_idx == 0) && url.Equals(script_url)) {
      return script.ptr();
    } else if (start_idx > 0) {
      // If we do a suffix match, only match if the partial path
      // starts at or immediately after the path separator.
      if (((url.CharAt(0) == '/') ||
           (script_url.CharAt(start_idx - 1) == '/')) &&
          url.Equals(script_url, start_idx, url_length)) {
        return script.ptr();
      }
    }
  }
  return Script::null();
}

14039void Library::EnsureTopLevelClassIsFinalized() const {
14040 if (toplevel_class() == Object::null()) {
14041 return;
14042 }
14043 Thread* thread = Thread::Current();
14044 const Class& cls = Class::Handle(zone: thread->zone(), ptr: toplevel_class());
14045 if (cls.is_finalized()) {
14046 return;
14047 }
14048 const Error& error =
14049 Error::Handle(zone: thread->zone(), ptr: cls.EnsureIsFinalized(thread));
14050 if (!error.IsNull()) {
14051 Exceptions::PropagateError(error);
14052 }
14053}
14054
14055ObjectPtr Library::LookupLocalObject(const String& name) const {
14056 intptr_t index;
14057 return LookupEntry(name, index: &index);
14058}
14059
14060ObjectPtr Library::LookupLocalOrReExportObject(const String& name) const {
14061 intptr_t index;
14062 EnsureTopLevelClassIsFinalized();
14063 const Object& result = Object::Handle(ptr: LookupEntry(name, index: &index));
14064 if (!result.IsNull() && !result.IsLibraryPrefix()) {
14065 return result.ptr();
14066 }
14067 return LookupReExport(name);
14068}
14069
14070FieldPtr Library::LookupFieldAllowPrivate(const String& name) const {
14071 EnsureTopLevelClassIsFinalized();
14072 Object& obj = Object::Handle(ptr: LookupObjectAllowPrivate(name));
14073 if (obj.IsField()) {
14074 return Field::Cast(obj).ptr();
14075 }
14076 return Field::null();
14077}
14078
14079FieldPtr Library::LookupLocalField(const String& name) const {
14080 EnsureTopLevelClassIsFinalized();
14081 Object& obj = Object::Handle(ptr: LookupLocalObjectAllowPrivate(name));
14082 if (obj.IsField()) {
14083 return Field::Cast(obj).ptr();
14084 }
14085 return Field::null();
14086}
14087
14088FunctionPtr Library::LookupFunctionAllowPrivate(const String& name) const {
14089 EnsureTopLevelClassIsFinalized();
14090 Object& obj = Object::Handle(ptr: LookupObjectAllowPrivate(name));
14091 if (obj.IsFunction()) {
14092 return Function::Cast(obj).ptr();
14093 }
14094 return Function::null();
14095}
14096
14097FunctionPtr Library::LookupLocalFunction(const String& name) const {
14098 EnsureTopLevelClassIsFinalized();
14099 Object& obj = Object::Handle(ptr: LookupLocalObjectAllowPrivate(name));
14100 if (obj.IsFunction()) {
14101 return Function::Cast(obj).ptr();
14102 }
14103 return Function::null();
14104}
14105
14106ObjectPtr Library::LookupLocalObjectAllowPrivate(const String& name) const {
14107 Thread* thread = Thread::Current();
14108 Zone* zone = thread->zone();
14109 Object& obj = Object::Handle(zone, ptr: Object::null());
14110 obj = LookupLocalObject(name);
14111 if (obj.IsNull() && ShouldBePrivate(name)) {
14112 String& private_name = String::Handle(zone, ptr: PrivateName(name));
14113 obj = LookupLocalObject(name: private_name);
14114 }
14115 return obj.ptr();
14116}
14117
14118ObjectPtr Library::LookupObjectAllowPrivate(const String& name) const {
14119 // First check if name is found in the local scope of the library.
14120 Object& obj = Object::Handle(ptr: LookupLocalObjectAllowPrivate(name));
14121 if (!obj.IsNull()) {
14122 return obj.ptr();
14123 }
14124
14125 // Do not look up private names in imported libraries.
14126 if (ShouldBePrivate(name)) {
14127 return Object::null();
14128 }
14129
14130 // Now check if name is found in any imported libs.
14131 return LookupImportedObject(name);
14132}
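
// Illustrative sketch (not part of the VM build) of the lookup order above,
// using a hypothetical library handle |lib| and a made-up identifier:
//
//   const String& name = String::Handle(String::New("_helper"));
//   // 1. Try the name as written in the library's own dictionary.
//   // 2. Since it is private, retry with this library's private key
//   //    appended, e.g. "_helper@1506342" (key value made up).
//   // 3. Never consult imported libraries for a private name.
//   const Object& obj = Object::Handle(lib.LookupObjectAllowPrivate(name));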

ObjectPtr Library::LookupImportedObject(const String& name) const {
  Object& obj = Object::Handle();
  Namespace& import = Namespace::Handle();
  Library& import_lib = Library::Handle();
  String& import_lib_url = String::Handle();
  String& first_import_lib_url = String::Handle();
  Object& found_obj = Object::Handle();
  String& found_obj_name = String::Handle();
  ASSERT(!ShouldBePrivate(name));
  for (intptr_t i = 0; i < num_imports(); i++) {
    import = ImportAt(i);
    obj = import.Lookup(name);
    if (!obj.IsNull()) {
      import_lib = import.target();
      import_lib_url = import_lib.url();
      if (found_obj.ptr() != obj.ptr()) {
        if (first_import_lib_url.IsNull() ||
            first_import_lib_url.StartsWith(Symbols::DartScheme())) {
          // This is the first object we found, or the
          // previously found object is exported from a Dart
          // system library. The newly found object hides the one
          // from the Dart library.
          first_import_lib_url = import_lib.url();
          found_obj = obj.ptr();
          found_obj_name = obj.DictionaryName();
        } else if (import_lib_url.StartsWith(Symbols::DartScheme())) {
          // The newly found object is exported from a Dart system
          // library. It is hidden by the previously found object.
          // We continue to search.
        } else if (Field::IsSetterName(found_obj_name) &&
                   !Field::IsSetterName(name)) {
          // We are looking for an unmangled name or a getter, but
          // the first object we found is a setter. Replace the first
          // object with the one we just found.
          first_import_lib_url = import_lib.url();
          found_obj = obj.ptr();
          found_obj_name = found_obj.DictionaryName();
        } else {
          // We found two different objects with the same name.
          // Note that we need to compare the names again because
          // looking up an unmangled name can return a getter or a
          // setter. A getter name is the same as the unmangled name,
          // but a setter name is different from an unmangled name or a
          // getter name.
          if (Field::IsGetterName(found_obj_name)) {
            found_obj_name = Field::NameFromGetter(found_obj_name);
          }
          String& second_obj_name = String::Handle(obj.DictionaryName());
          if (Field::IsGetterName(second_obj_name)) {
            second_obj_name = Field::NameFromGetter(second_obj_name);
          }
          if (found_obj_name.Equals(second_obj_name)) {
            return Object::null();
          }
        }
      }
    }
  }
  return found_obj.ptr();
}
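
// Illustrative example (made-up libraries) of the precedence rules above.
// Suppose "max" is exported both by dart:math and by an imported user
// library "package:mylib/mylib.dart":
//
//   - The user-library "max" hides the dart:math one; the lookup succeeds.
//   - Had "max" come from two user libraries instead, the lookup would
//     return Object::null(), and the caller reports the ambiguity.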

ClassPtr Library::LookupClass(const String& name) const {
  Object& obj = Object::Handle(LookupLocalObject(name));
  if (obj.IsNull() && !ShouldBePrivate(name)) {
    obj = LookupImportedObject(name);
  }
  if (obj.IsClass()) {
    return Class::Cast(obj).ptr();
  }
  return Class::null();
}

ClassPtr Library::LookupLocalClass(const String& name) const {
  Object& obj = Object::Handle(LookupLocalObject(name));
  if (obj.IsClass()) {
    return Class::Cast(obj).ptr();
  }
  return Class::null();
}

ClassPtr Library::LookupClassAllowPrivate(const String& name) const {
  // See if the class is available in this library or in the top level
  // scope of any imported library.
  Zone* zone = Thread::Current()->zone();
  const Class& cls = Class::Handle(zone, LookupClass(name));
  if (!cls.IsNull()) {
    return cls.ptr();
  }

  // Now try to lookup the class using its private name, but only in
  // this library (not in imported libraries).
  if (ShouldBePrivate(name)) {
    String& private_name = String::Handle(zone, PrivateName(name));
    const Object& obj = Object::Handle(LookupLocalObject(private_name));
    if (obj.IsClass()) {
      return Class::Cast(obj).ptr();
    }
  }
  return Class::null();
}

// Mixin applications can have multiple private keys from different libraries.
ClassPtr Library::SlowLookupClassAllowMultiPartPrivate(
    const String& name) const {
  Array& dict = Array::Handle(dictionary());
  Object& entry = Object::Handle();
  String& cls_name = String::Handle();
  for (intptr_t i = 0; i < dict.Length(); i++) {
    entry = dict.At(i);
    if (entry.IsClass()) {
      cls_name = Class::Cast(entry).Name();
      // Warning: comparison is not symmetric.
      if (String::EqualsIgnoringPrivateKey(cls_name, name)) {
        return Class::Cast(entry).ptr();
      }
    }
  }
  return Class::null();
}
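
// Illustrative note on the asymmetry warning above (keys made up): the first
// argument is the dictionary name, which may carry private-key suffixes that
// get skipped while comparing against the plain query name. Swapping the
// arguments therefore changes the result:
//
//   EqualsIgnoringPrivateKey("_A@1234&_B@5678", "_A&_B")  // match
//   EqualsIgnoringPrivateKey("_A&_B", "_A@1234&_B@5678")  // no match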

LibraryPrefixPtr Library::LookupLocalLibraryPrefix(const String& name) const {
  const Object& obj = Object::Handle(LookupLocalObject(name));
  if (obj.IsLibraryPrefix()) {
    return LibraryPrefix::Cast(obj).ptr();
  }
  return LibraryPrefix::null();
}

void Library::set_toplevel_class(const Class& value) const {
  ASSERT(untag()->toplevel_class() == Class::null());
  untag()->set_toplevel_class(value.ptr());
}

void Library::set_dependencies(const Array& deps) const {
  untag()->set_dependencies(deps.ptr());
}

void Library::set_metadata(const Array& value) const {
  if (untag()->metadata() != value.ptr()) {
    DEBUG_ASSERT(
        IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
    untag()->set_metadata(value.ptr());
  }
}

LibraryPtr Library::ImportLibraryAt(intptr_t index) const {
  Namespace& import = Namespace::Handle(ImportAt(index));
  if (import.IsNull()) {
    return Library::null();
  }
  return import.target();
}

NamespacePtr Library::ImportAt(intptr_t index) const {
  if ((index < 0) || index >= num_imports()) {
    return Namespace::null();
  }
  const Array& import_list = Array::Handle(imports());
  return Namespace::RawCast(import_list.At(index));
}

void Library::DropDependenciesAndCaches() const {
  untag()->set_imports(Object::empty_array().ptr());
  untag()->set_exports(Object::empty_array().ptr());
  StoreNonPointer(&untag()->num_imports_, 0);
  untag()->set_resolved_names(Array::null());
  untag()->set_exported_names(Array::null());
  untag()->set_loaded_scripts(Array::null());
  untag()->set_dependencies(Array::null());
#if defined(PRODUCT)
  // used_scripts is only used by vm-service.
  untag()->set_used_scripts(GrowableObjectArray::null());
#endif
}

void Library::AddImport(const Namespace& ns) const {
  Array& imports = Array::Handle(this->imports());
  intptr_t capacity = imports.Length();
  if (num_imports() == capacity) {
    capacity = capacity + kImportsCapacityIncrement + (capacity >> 2);
    imports = Array::Grow(imports, capacity);
    untag()->set_imports(imports.ptr());
  }
  intptr_t index = num_imports();
  imports.SetAt(index, ns);
  set_num_imports(index + 1);
}

// Convenience function to determine whether the export list is
// non-empty.
bool Library::HasExports() const {
  return exports() != Object::empty_array().ptr();
}

// We add one namespace at a time to the exports array and don't
// pre-allocate any unused capacity. The assumption is that
// re-exports are quite rare.
void Library::AddExport(const Namespace& ns) const {
  Array& exports = Array::Handle(this->exports());
  intptr_t num_exports = exports.Length();
  exports = Array::Grow(exports, num_exports + 1);
  untag()->set_exports(exports.ptr());
  exports.SetAt(num_exports, ns);
}

static ArrayPtr NewDictionary(intptr_t initial_size) {
  const Array& dict = Array::Handle(Array::New(initial_size + 1, Heap::kOld));
  // The last element of the dictionary specifies the number of in use slots.
  dict.SetAt(initial_size, Object::smi_zero());
  return dict.ptr();
}
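
// Layout sketch (illustrative only): for NewDictionary(4) the backing Array
// has 5 slots, the last one holding the used-slot count as a Smi:
//
//   index:   0      1      2      3      4
//   value: [null] [null] [null] [null] [ 0 ]   // slot 4 = slots in use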

void Library::InitResolvedNamesCache() const {
  Thread* thread = Thread::Current();
  ASSERT(thread->IsDartMutatorThread());
  REUSABLE_FUNCTION_HANDLESCOPE(thread);
  Array& cache = thread->ArrayHandle();
  cache = HashTables::New<ResolvedNamesMap>(64);
  untag()->set_resolved_names(cache.ptr());
}

void Library::ClearResolvedNamesCache() const {
  ASSERT(Thread::Current()->IsDartMutatorThread());
  untag()->set_resolved_names(Array::null());
}

void Library::InitExportedNamesCache() const {
  Thread* thread = Thread::Current();
  ASSERT(thread->IsDartMutatorThread());
  REUSABLE_FUNCTION_HANDLESCOPE(thread);
  Array& cache = thread->ArrayHandle();
  cache = HashTables::New<ResolvedNamesMap>(16);
  untag()->set_exported_names(cache.ptr());
}

void Library::ClearExportedNamesCache() const {
  untag()->set_exported_names(Array::null());
}

void Library::InitClassDictionary() const {
  Thread* thread = Thread::Current();
  ASSERT(thread->IsDartMutatorThread());
  REUSABLE_FUNCTION_HANDLESCOPE(thread);
  Array& dictionary = thread->ArrayHandle();
  // TODO(iposva): Find reasonable initial size.
  const int kInitialElementCount = 16;
  dictionary = NewDictionary(kInitialElementCount);
  untag()->set_dictionary(dictionary.ptr());
}

void Library::InitImportList() const {
  const Array& imports =
      Array::Handle(Array::New(kInitialImportsCapacity, Heap::kOld));
  untag()->set_imports(imports.ptr());
  StoreNonPointer(&untag()->num_imports_, 0);
}

LibraryPtr Library::New() {
  ASSERT(Object::library_class() != Class::null());
  return Object::Allocate<Library>(Heap::kOld);
}

LibraryPtr Library::NewLibraryHelper(const String& url, bool import_core_lib) {
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  ASSERT(thread->IsDartMutatorThread());
  // Force the url to have a hash code.
  url.Hash();
  const bool dart_scheme = url.StartsWith(Symbols::DartScheme());
  const Library& result = Library::Handle(zone, Library::New());
  result.untag()->set_name(Symbols::Empty().ptr());
  result.untag()->set_url(url.ptr());
  result.untag()->set_resolved_names(Array::null());
  result.untag()->set_exported_names(Array::null());
  result.untag()->set_dictionary(Object::empty_array().ptr());
  Array& array = Array::Handle(zone);
  array = HashTables::New<MetadataMap>(4, Heap::kOld);
  result.untag()->set_metadata(array.ptr());
  result.untag()->set_toplevel_class(Class::null());
  GrowableObjectArray& list = GrowableObjectArray::Handle(zone);
  list = GrowableObjectArray::New(Object::empty_array(), Heap::kOld);
  result.untag()->set_used_scripts(list.ptr());
  result.untag()->set_imports(Object::empty_array().ptr());
  result.untag()->set_exports(Object::empty_array().ptr());
  NOT_IN_PRECOMPILED_RUNTIME(
      result.untag()->set_kernel_program_info(KernelProgramInfo::null()));
  result.untag()->set_loaded_scripts(Array::null());
  result.set_native_entry_resolver(nullptr);
  result.set_native_entry_symbol_resolver(nullptr);
  result.set_ffi_native_resolver(nullptr);
  result.set_flags(0);
  result.set_is_in_fullsnapshot(false);
  result.set_is_nnbd(false);
  // This logic is also in the DAP debug adapter in DDS to avoid needing
  // to call setLibraryDebuggable for every library for every isolate.
  // If these defaults change, the same should be done there in
  // dap/IsolateManager._getIsLibraryDebuggableByDefault.
  if (dart_scheme) {
    // Only debug dart: libraries if we have been requested to show invisible
    // frames.
    result.set_debuggable(FLAG_show_invisible_frames);
  } else {
    // Default to debuggable for all other libraries.
    result.set_debuggable(true);
  }
  result.set_is_dart_scheme(dart_scheme);
  NOT_IN_PRECOMPILED(
      result.StoreNonPointer(&result.untag()->kernel_library_index_, -1));
  result.StoreNonPointer(&result.untag()->load_state_,
                         UntaggedLibrary::kAllocated);
  result.StoreNonPointer(&result.untag()->index_, -1);
  result.InitClassDictionary();
  result.InitImportList();
  result.AllocatePrivateKey();
  if (import_core_lib) {
    const Library& core_lib = Library::Handle(zone, Library::CoreLibrary());
    ASSERT(!core_lib.IsNull());
    const Namespace& ns =
        Namespace::Handle(zone, Namespace::New(core_lib, Object::null_array(),
                                               Object::null_array(), result));
    result.AddImport(ns);
  }
  return result.ptr();
}

LibraryPtr Library::New(const String& url) {
  return NewLibraryHelper(url, false);
}

void Library::set_flags(uint8_t flags) const {
  StoreNonPointer(&untag()->flags_, flags);
}

void Library::InitCoreLibrary(IsolateGroup* isolate_group) {
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  const String& core_lib_url = Symbols::DartCore();
  const Library& core_lib =
      Library::Handle(zone, Library::NewLibraryHelper(core_lib_url, false));
  core_lib.SetLoadRequested();
  core_lib.Register(thread);
  isolate_group->object_store()->set_bootstrap_library(ObjectStore::kCore,
                                                       core_lib);
  isolate_group->object_store()->set_root_library(Library::Handle());
}

// Invoke the function, or noSuchMethod if it is null.
static ObjectPtr InvokeInstanceFunction(
    Thread* thread,
    const Instance& receiver,
    const Function& function,
    const String& target_name,
    const Array& args,
    const Array& args_descriptor_array,
    bool respect_reflectable,
    const TypeArguments& instantiator_type_args) {
  // Note "args" is already the internal arguments with the receiver as the
  // first element.
  ArgumentsDescriptor args_descriptor(args_descriptor_array);
  if (function.IsNull() ||
      !function.AreValidArguments(args_descriptor, nullptr) ||
      (respect_reflectable && !function.is_reflectable())) {
    return DartEntry::InvokeNoSuchMethod(thread, receiver, target_name, args,
                                         args_descriptor_array);
  }
  ObjectPtr type_error = function.DoArgumentTypesMatch(args, args_descriptor,
                                                       instantiator_type_args);
  if (type_error != Error::null()) {
    return type_error;
  }
  return DartEntry::InvokeFunction(function, args, args_descriptor_array);
}

ObjectPtr Library::InvokeGetter(const String& getter_name,
                                bool throw_nsm_if_absent,
                                bool respect_reflectable,
                                bool check_is_entrypoint) const {
  Object& obj = Object::Handle(LookupLocalOrReExportObject(getter_name));
  Function& getter = Function::Handle();
  if (obj.IsField()) {
    const Field& field = Field::Cast(obj);
    if (check_is_entrypoint) {
      CHECK_ERROR(field.VerifyEntryPoint(EntryPointPragma::kGetterOnly));
    }
    if (!field.IsUninitialized()) {
      return field.StaticValue();
    }
    // An uninitialized field was found. Check for a getter in the field's
    // owner class.
    const Class& klass = Class::Handle(field.Owner());
    const String& internal_getter_name =
        String::Handle(Field::GetterName(getter_name));
    getter = klass.LookupStaticFunction(internal_getter_name);
  } else {
    // No field found. Check for a getter in the lib.
    const String& internal_getter_name =
        String::Handle(Field::GetterName(getter_name));
    obj = LookupLocalOrReExportObject(internal_getter_name);
    if (obj.IsFunction()) {
      getter = Function::Cast(obj).ptr();
      if (check_is_entrypoint) {
        CHECK_ERROR(getter.VerifyCallEntryPoint());
      }
    } else {
      obj = LookupLocalOrReExportObject(getter_name);
      // Normally static top-level methods cannot be closurized through the
      // native API even if they are marked as entry-points, with the one
      // exception of "main".
      if (obj.IsFunction() && check_is_entrypoint) {
        if (!getter_name.Equals(String::Handle(String::New("main"))) ||
            ptr() != IsolateGroup::Current()->object_store()->root_library()) {
          CHECK_ERROR(Function::Cast(obj).VerifyClosurizedEntryPoint());
        }
      }
      if (obj.IsFunction() && Function::Cast(obj).SafeToClosurize()) {
        // Looking for a getter but found a regular method: closurize it.
        const Function& closure_function =
            Function::Handle(Function::Cast(obj).ImplicitClosureFunction());
        return closure_function.ImplicitStaticClosure();
      }
    }
  }

  if (getter.IsNull() || (respect_reflectable && !getter.is_reflectable())) {
    if (throw_nsm_if_absent) {
      return ThrowNoSuchMethod(Object::null_string(), getter_name,
                               Object::null_array(), Object::null_array(),
                               InvocationMirror::kTopLevel,
                               InvocationMirror::kGetter);
    }

    // Fall through case: Indicate that we didn't find any function or field
    // using a special null instance. This is different from a field being
    // null. Callers make sure that this null does not leak into Dartland.
    return Object::sentinel().ptr();
  }

  // Invoke the getter and return the result.
  return DartEntry::InvokeFunction(getter, Object::empty_array());
}
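
// Illustrative caller sketch (not part of the VM build): with
// throw_nsm_if_absent == false the sentinel must be checked for explicitly,
// since it is distinct from a field that merely holds null.
//
//   const Object& result = Object::Handle(
//       lib.InvokeGetter(name, /*throw_nsm_if_absent=*/false,
//                        /*respect_reflectable=*/true,
//                        /*check_is_entrypoint=*/false));
//   if (result.ptr() == Object::sentinel().ptr()) {
//     // No getter, field, or closurizable function named `name` exists.
//   }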

ObjectPtr Library::InvokeSetter(const String& setter_name,
                                const Instance& value,
                                bool respect_reflectable,
                                bool check_is_entrypoint) const {
  Object& obj = Object::Handle(LookupLocalOrReExportObject(setter_name));
  const String& internal_setter_name =
      String::Handle(Field::SetterName(setter_name));
  AbstractType& setter_type = AbstractType::Handle();
  AbstractType& argument_type =
      AbstractType::Handle(value.GetType(Heap::kOld));
  if (obj.IsField()) {
    const Field& field = Field::Cast(obj);
    if (check_is_entrypoint) {
      CHECK_ERROR(field.VerifyEntryPoint(EntryPointPragma::kSetterOnly));
    }
    setter_type = field.type();
    if (!argument_type.IsNullType() && !setter_type.IsDynamicType() &&
        !value.IsInstanceOf(setter_type, Object::null_type_arguments(),
                            Object::null_type_arguments())) {
      return ThrowTypeError(field.token_pos(), value, setter_type,
                            setter_name);
    }
    if (field.is_final() || (respect_reflectable && !field.is_reflectable())) {
      const int kNumArgs = 1;
      const Array& args = Array::Handle(Array::New(kNumArgs));
      args.SetAt(0, value);

      return ThrowNoSuchMethod(Object::null_string(), internal_setter_name,
                               args, Object::null_array(),
                               InvocationMirror::kTopLevel,
                               InvocationMirror::kSetter);
    }
    field.SetStaticValue(value);
    return value.ptr();
  }

  Function& setter = Function::Handle();
  obj = LookupLocalOrReExportObject(internal_setter_name);
  if (obj.IsFunction()) {
    setter ^= obj.ptr();
  }

  if (!setter.IsNull() && check_is_entrypoint) {
    CHECK_ERROR(setter.VerifyCallEntryPoint());
  }

  const int kNumArgs = 1;
  const Array& args = Array::Handle(Array::New(kNumArgs));
  args.SetAt(0, value);
  if (setter.IsNull() || (respect_reflectable && !setter.is_reflectable())) {
    return ThrowNoSuchMethod(Object::null_string(), internal_setter_name, args,
                             Object::null_array(), InvocationMirror::kTopLevel,
                             InvocationMirror::kSetter);
  }

  setter_type = setter.ParameterTypeAt(0);
  if (!argument_type.IsNullType() && !setter_type.IsDynamicType() &&
      !value.IsInstanceOf(setter_type, Object::null_type_arguments(),
                          Object::null_type_arguments())) {
    return ThrowTypeError(setter.token_pos(), value, setter_type, setter_name);
  }

  return DartEntry::InvokeFunction(setter, args);
}

ObjectPtr Library::Invoke(const String& function_name,
                          const Array& args,
                          const Array& arg_names,
                          bool respect_reflectable,
                          bool check_is_entrypoint) const {
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();

  // We don't pass any explicit type arguments, which will be understood as
  // using dynamic for any function type arguments by lower layers.
  const int kTypeArgsLen = 0;
  const Array& args_descriptor_array = Array::Handle(
      zone, ArgumentsDescriptor::NewBoxed(kTypeArgsLen, args.Length(),
                                          arg_names, Heap::kNew));
  ArgumentsDescriptor args_descriptor(args_descriptor_array);

  auto& function = Function::Handle(zone);
  auto& result =
      Object::Handle(zone, LookupLocalOrReExportObject(function_name));
  if (result.IsFunction()) {
    function ^= result.ptr();
  }

  if (!function.IsNull() && check_is_entrypoint) {
    CHECK_ERROR(function.VerifyCallEntryPoint());
  }

  if (function.IsNull()) {
    // Didn't find a method: try to find a getter and invoke call on its
    // result.
    const Object& getter_result = Object::Handle(
        zone, InvokeGetter(function_name, false, respect_reflectable,
                           check_is_entrypoint));
    if (getter_result.ptr() != Object::sentinel().ptr()) {
      if (check_is_entrypoint) {
        CHECK_ERROR(EntryPointFieldInvocationError(function_name));
      }
      const auto& call_args_descriptor_array = Array::Handle(
          zone, ArgumentsDescriptor::NewBoxed(args_descriptor.TypeArgsLen(),
                                              args_descriptor.Count() + 1,
                                              arg_names, Heap::kNew));
      const auto& call_args = Array::Handle(
          zone,
          CreateCallableArgumentsFromStatic(zone,
                                            Instance::Cast(getter_result),
                                            args, arg_names, args_descriptor));
      return DartEntry::InvokeClosure(thread, call_args,
                                      call_args_descriptor_array);
    }
  }

  if (function.IsNull() ||
      (respect_reflectable && !function.is_reflectable())) {
    return ThrowNoSuchMethod(Object::null_string(), function_name, args,
                             arg_names, InvocationMirror::kTopLevel,
                             InvocationMirror::kMethod);
  }
  if (!function.AreValidArguments(args_descriptor, nullptr)) {
    return ThrowNoSuchMethod(
        String::Handle(function.UserVisibleSignature()), function_name, args,
        arg_names, InvocationMirror::kTopLevel, InvocationMirror::kMethod);
  }
  // This is a static function, so we pass an empty instantiator tav.
  ASSERT(function.is_static());
  ObjectPtr type_error = function.DoArgumentTypesMatch(
      args, args_descriptor, Object::empty_type_arguments());
  if (type_error != Error::null()) {
    return type_error;
  }
  return DartEntry::InvokeFunction(function, args, args_descriptor_array);
}

void Library::InitNativeWrappersLibrary(IsolateGroup* isolate_group,
                                        bool is_kernel) {
  const int kNumNativeWrappersClasses = 4;
  COMPILE_ASSERT((kNumNativeWrappersClasses > 0) &&
                 (kNumNativeWrappersClasses < 10));
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  const String& native_flds_lib_url = Symbols::DartNativeWrappers();
  const Library& native_flds_lib = Library::Handle(
      zone, Library::NewLibraryHelper(native_flds_lib_url, false));
  const String& native_flds_lib_name = Symbols::DartNativeWrappersLibName();
  native_flds_lib.SetName(native_flds_lib_name);
  native_flds_lib.SetLoadRequested();
  native_flds_lib.Register(thread);
  native_flds_lib.SetLoadInProgress();
  isolate_group->object_store()->set_native_wrappers_library(native_flds_lib);
  const char* const kNativeWrappersClass = "NativeFieldWrapperClass";
  const int kNameLength = 25;
  ASSERT(kNameLength == (strlen(kNativeWrappersClass) + 1 + 1));
  char name_buffer[kNameLength];
  String& cls_name = String::Handle(zone);
  for (int fld_cnt = 1; fld_cnt <= kNumNativeWrappersClasses; fld_cnt++) {
    Utils::SNPrint(name_buffer, kNameLength, "%s%d", kNativeWrappersClass,
                   fld_cnt);
    cls_name = Symbols::New(thread, name_buffer);
    Class::NewNativeWrapper(native_flds_lib, cls_name, fld_cnt);
  }
  // NOTE: If we bootstrap from a Kernel IR file we want to generate the
  // synthetic constructors for the native wrapper classes. We leave this up
  // to the [KernelLoader] who will take care of it later.
  if (!is_kernel) {
    native_flds_lib.SetLoaded();
  }
}
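
// Illustrative note: the loop above registers exactly four classes, named by
// appending the field count to the common prefix:
//
//   NativeFieldWrapperClass1, NativeFieldWrapperClass2,
//   NativeFieldWrapperClass3, NativeFieldWrapperClass4
//
// kNameLength is 25 because strlen("NativeFieldWrapperClass") == 23, plus
// one digit and the terminating NUL.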

// LibraryLookupSet maps URIs to libraries.
class LibraryLookupTraits {
 public:
  static const char* Name() { return "LibraryLookupTraits"; }
  static bool ReportStats() { return false; }

  static bool IsMatch(const Object& a, const Object& b) {
    const String& a_str = String::Cast(a);
    const String& b_str = String::Cast(b);

    ASSERT(a_str.HasHash() && b_str.HasHash());
    return a_str.Equals(b_str);
  }

  static uword Hash(const Object& key) { return String::Cast(key).Hash(); }

  static ObjectPtr NewKey(const String& str) { return str.ptr(); }
};
typedef UnorderedHashMap<LibraryLookupTraits> LibraryLookupMap;

// Returns library with given url in current isolate, or Library::null().
LibraryPtr Library::LookupLibrary(Thread* thread, const String& url) {
  Zone* zone = thread->zone();
  ObjectStore* object_store = thread->isolate_group()->object_store();

  // Make sure the URL string has an associated hash code
  // to speed up the repeated equality checks.
  url.Hash();

  // Use the libraries map to lookup the library by URL.
  Library& lib = Library::Handle(zone);
  if (object_store->libraries_map() == Array::null()) {
    return Library::null();
  } else {
    LibraryLookupMap map(object_store->libraries_map());
    lib ^= map.GetOrNull(url);
    ASSERT(map.Release().ptr() == object_store->libraries_map());
  }
  return lib.ptr();
}

bool Library::IsPrivate(const String& name) {
  if (ShouldBePrivate(name)) return true;
  // Factory names: List._fromLiteral.
  for (intptr_t i = 1; i < name.Length() - 1; i++) {
    if (name.CharAt(i) == '.') {
      if (name.CharAt(i + 1) == '_') {
        return true;
      }
    }
  }
  return false;
}

// Create a private key for this library. It is based on the hash of the
// library URI and the sequence number of the library to guarantee unique
// private keys without having to verify.
void Library::AllocatePrivateKey() const {
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  auto isolate_group = thread->isolate_group();

#if !defined(PRODUCT) && !defined(DART_PRECOMPILED_RUNTIME)
  if (isolate_group->IsReloading()) {
    // When reloading, we need to make sure we use the original private key
    // if this library previously existed.
    ProgramReloadContext* program_reload_context =
        isolate_group->program_reload_context();
    const String& original_key =
        String::Handle(program_reload_context->FindLibraryPrivateKey(*this));
    if (!original_key.IsNull()) {
      untag()->set_private_key(original_key.ptr());
      return;
    }
  }
#endif // !defined(PRODUCT) && !defined(DART_PRECOMPILED_RUNTIME)

  // Format of the private key is: "@<sequence number><6 digits of hash>".
  const intptr_t hash_mask = 0x7FFFF;

  const String& url = String::Handle(zone, this->url());
  intptr_t hash_value = url.Hash() & hash_mask;

  const GrowableObjectArray& libs = GrowableObjectArray::Handle(
      zone, isolate_group->object_store()->libraries());
  intptr_t sequence_value = libs.Length();

  char private_key[32];
  Utils::SNPrint(private_key, sizeof(private_key), "%c%" Pd "%06" Pd "",
                 kPrivateKeySeparator, sequence_value, hash_value);
  const String& key =
      String::Handle(zone, String::New(private_key, Heap::kOld));
  key.Hash();  // This string may end up in the VM isolate.
  untag()->set_private_key(key.ptr());
}
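
// Illustrative example (made-up numbers, and assuming kPrivateKeySeparator
// is '@'): the 7th library registered (sequence number 6) with a masked url
// hash of 93251 gets the key "@6093251", so a private name "_foo" declared
// in it is stored in the dictionary as "_foo@6093251".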

const String& Library::PrivateCoreLibName(const String& member) {
  const Library& core_lib = Library::Handle(Library::CoreLibrary());
  const String& private_name =
      String::ZoneHandle(core_lib.PrivateName(member));
  return private_name;
}

bool Library::IsPrivateCoreLibName(const String& name, const String& member) {
  Zone* zone = Thread::Current()->zone();
  const auto& core_lib = Library::Handle(zone, Library::CoreLibrary());
  const auto& private_key = String::Handle(zone, core_lib.private_key());

  ASSERT(core_lib.IsPrivate(member));
  return name.EqualsConcat(member, private_key);
}

ClassPtr Library::LookupCoreClass(const String& class_name) {
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  const Library& core_lib = Library::Handle(zone, Library::CoreLibrary());
  String& name = String::Handle(zone, class_name.ptr());
  if (class_name.CharAt(0) == kPrivateIdentifierStart) {
    // Private identifiers are mangled on a per library basis.
    name = Symbols::FromConcat(thread, name,
                               String::Handle(zone, core_lib.private_key()));
  }
  return core_lib.LookupClass(name);
}

// Cannot handle qualified names properly as it only appends private key to
// the end (e.g. _Alfa.foo -> _Alfa.foo@...).
StringPtr Library::PrivateName(const String& name) const {
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  ASSERT(IsPrivate(name));
  // ASSERT(strchr(name, '@') == nullptr);
  String& str = String::Handle(zone);
  str = name.ptr();
  str = Symbols::FromConcat(thread, str,
                            String::Handle(zone, this->private_key()));
  return str.ptr();
}
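
// Illustrative note on the caveat above (key made up): per-part mangling of
// a qualified name would attach the key to the private identifier itself,
// e.g. "_Alfa@123.foo", whereas PrivateName() produces "_Alfa.foo@123".
// Callers needing per-part keys must mangle each segment before joining.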

LibraryPtr Library::GetLibrary(intptr_t index) {
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  auto isolate_group = thread->isolate_group();
  const GrowableObjectArray& libs = GrowableObjectArray::Handle(
      zone, isolate_group->object_store()->libraries());
  ASSERT(!libs.IsNull());
  if ((0 <= index) && (index < libs.Length())) {
    Library& lib = Library::Handle(zone);
    lib ^= libs.At(index);
    return lib.ptr();
  }
  return Library::null();
}

void Library::Register(Thread* thread) const {
  Zone* zone = thread->zone();
  auto isolate_group = thread->isolate_group();
  ObjectStore* object_store = isolate_group->object_store();

  // A library is "registered" in two places:
  // - A growable array mapping from index to library.
  const String& lib_url = String::Handle(zone, url());
  ASSERT(Library::LookupLibrary(thread, lib_url) == Library::null());
  ASSERT(lib_url.HasHash());
  GrowableObjectArray& libs =
      GrowableObjectArray::Handle(zone, object_store->libraries());
  ASSERT(!libs.IsNull());
  set_index(libs.Length());
  libs.Add(*this);

  // - A map from URL string to library.
  if (object_store->libraries_map() == Array::null()) {
    LibraryLookupMap map(HashTables::New<LibraryLookupMap>(16, Heap::kOld));
    object_store->set_libraries_map(map.Release());
  }

  LibraryLookupMap map(object_store->libraries_map());
  bool present = map.UpdateOrInsert(lib_url, *this);
  ASSERT(!present);
  object_store->set_libraries_map(map.Release());
}

void Library::RegisterLibraries(Thread* thread,
                                const GrowableObjectArray& libs) {
  Zone* zone = thread->zone();
  auto isolate_group = thread->isolate_group();
  Library& lib = Library::Handle(zone);
  String& lib_url = String::Handle(zone);

  LibraryLookupMap map(HashTables::New<LibraryLookupMap>(16, Heap::kOld));

  intptr_t len = libs.Length();
  for (intptr_t i = 0; i < len; i++) {
    lib ^= libs.At(i);
    lib_url = lib.url();
    map.InsertNewOrGetValue(lib_url, lib);
  }
  // Now remember these in the isolate's object store.
  isolate_group->object_store()->set_libraries(libs);
  isolate_group->object_store()->set_libraries_map(map.Release());
}

LibraryPtr Library::AsyncLibrary() {
  return IsolateGroup::Current()->object_store()->async_library();
}

LibraryPtr Library::ConvertLibrary() {
  return IsolateGroup::Current()->object_store()->convert_library();
}

LibraryPtr Library::CoreLibrary() {
  return IsolateGroup::Current()->object_store()->core_library();
}

LibraryPtr Library::CollectionLibrary() {
  return IsolateGroup::Current()->object_store()->collection_library();
}

LibraryPtr Library::DeveloperLibrary() {
  return IsolateGroup::Current()->object_store()->developer_library();
}

LibraryPtr Library::FfiLibrary() {
  return IsolateGroup::Current()->object_store()->ffi_library();
}

LibraryPtr Library::InternalLibrary() {
  return IsolateGroup::Current()->object_store()->_internal_library();
}

LibraryPtr Library::IsolateLibrary() {
  return IsolateGroup::Current()->object_store()->isolate_library();
}

LibraryPtr Library::MathLibrary() {
  return IsolateGroup::Current()->object_store()->math_library();
}

#if !defined(DART_PRECOMPILED_RUNTIME)
LibraryPtr Library::MirrorsLibrary() {
  return IsolateGroup::Current()->object_store()->mirrors_library();
}
#endif

LibraryPtr Library::NativeWrappersLibrary() {
  return IsolateGroup::Current()->object_store()->native_wrappers_library();
}

LibraryPtr Library::TypedDataLibrary() {
  return IsolateGroup::Current()->object_store()->typed_data_library();
}

LibraryPtr Library::VMServiceLibrary() {
  return IsolateGroup::Current()->object_store()->_vmservice_library();
}

const char* Library::ToCString() const {
  NoSafepointScope no_safepoint;
  const String& name = String::Handle(url());
  return OS::SCreate(Thread::Current()->zone(), "Library:'%s'",
                     name.ToCString());
}
LibraryPtr LibraryPrefix::GetLibrary(int index) const {
  if ((index >= 0) && (index < num_imports())) {
    const Array& imports = Array::Handle(this->imports());
    Namespace& import = Namespace::Handle();
    import ^= imports.At(index);
    return import.target();
  }
  return Library::null();
}

void LibraryPrefix::AddImport(const Namespace& import) const {
  intptr_t num_current_imports = num_imports();

  // Prefixes with deferred libraries can only contain one library.
  ASSERT((num_current_imports == 0) || !is_deferred_load());

  // The library needs to be added to the list.
  Array& imports = Array::Handle(this->imports());
  const intptr_t length = (imports.IsNull()) ? 0 : imports.Length();
  // Grow the list if it is full.
  if (num_current_imports >= length) {
    const intptr_t new_length = length + kIncrementSize + (length >> 2);
    imports = Array::Grow(imports, new_length, Heap::kOld);
    set_imports(imports);
  }
  imports.SetAt(num_current_imports, import);
  set_num_imports(num_current_imports + 1);
}

LibraryPrefixPtr LibraryPrefix::New() {
  return Object::Allocate<LibraryPrefix>(Heap::kOld);
}

LibraryPrefixPtr LibraryPrefix::New(const String& name,
                                    const Namespace& import,
                                    bool deferred_load,
                                    const Library& importer) {
  const LibraryPrefix& result = LibraryPrefix::Handle(LibraryPrefix::New());
  result.set_name(name);
  result.set_num_imports(0);
  result.set_importer(importer);
  result.StoreNonPointer(&result.untag()->is_deferred_load_, deferred_load);
  result.set_imports(Array::Handle(Array::New(kInitialSize)));
  result.AddImport(import);
  return result.ptr();
}

void LibraryPrefix::set_name(const String& value) const {
  ASSERT(value.IsSymbol());
  untag()->set_name(value.ptr());
}

void LibraryPrefix::set_imports(const Array& value) const {
  untag()->set_imports(value.ptr());
}

void LibraryPrefix::set_num_imports(intptr_t value) const {
  if (!Utils::IsUint(16, value)) {
    ReportTooManyImports(Library::Handle(importer()));
  }
  StoreNonPointer(&untag()->num_imports_, value);
}

void LibraryPrefix::set_importer(const Library& value) const {
  untag()->set_importer(value.ptr());
}

const char* LibraryPrefix::ToCString() const {
  const String& prefix = String::Handle(name());
  return prefix.ToCString();
}

const char* Namespace::ToCString() const {
  const Library& lib = Library::Handle(target());
  return OS::SCreate(Thread::Current()->zone(), "Namespace for library '%s'",
                     lib.ToCString());
}

bool Namespace::HidesName(const String& name) const {
  // Quick check for common case with no combinators.
  if (hide_names() == show_names()) {
    ASSERT(hide_names() == Array::null());
    return false;
  }
  const String* plain_name = &name;
  if (Field::IsGetterName(name)) {
    plain_name = &String::Handle(Field::NameFromGetter(name));
  } else if (Field::IsSetterName(name)) {
    plain_name = &String::Handle(Field::NameFromSetter(name));
  }
  // Check whether the name is in the list of explicitly hidden names.
  if (hide_names() != Array::null()) {
    const Array& names = Array::Handle(hide_names());
    String& hidden = String::Handle();
    intptr_t num_names = names.Length();
    for (intptr_t i = 0; i < num_names; i++) {
      hidden ^= names.At(i);
      if (plain_name->Equals(hidden)) {
        return true;
      }
    }
  }
  // The name is not explicitly hidden. Now check whether it is in the
  // list of explicitly visible names, if there is one.
  if (show_names() != Array::null()) {
    const Array& names = Array::Handle(show_names());
    String& shown = String::Handle();
    intptr_t num_names = names.Length();
    for (intptr_t i = 0; i < num_names; i++) {
      shown ^= names.At(i);
      if (plain_name->Equals(shown)) {
        return false;
      }
    }
    // There is a list of visible names. The name we're looking for is not
    // contained in the list, so it is hidden.
    return true;
  }
  // The name is not filtered out.
  return false;
}
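
// Illustrative mapping to Dart combinators (made-up names): for a namespace
// built from `export 'lib.dart' show a, b hide b;` the hide list is checked
// first, then the show list acts as an allow-list:
//
//   HidesName("b")  // true: explicitly hidden
//   HidesName("a")  // false: on the show list
//   HidesName("c")  // true: a show list exists and "c" is not on it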

// Look up object with given name in library and filter out hidden
// names. Also look up getters and setters.
ObjectPtr Namespace::Lookup(const String& name,
                            ZoneGrowableArray<intptr_t>* trail) const {
  Zone* zone = Thread::Current()->zone();
  const Library& lib = Library::Handle(zone, target());

  if (trail != nullptr) {
    // Look for cycle in reexport graph.
    for (int i = 0; i < trail->length(); i++) {
      if (trail->At(i) == lib.index()) {
        for (int j = i + 1; j < trail->length(); j++) {
          (*trail)[j] = -1;
        }
        return Object::null();
      }
    }
  }

  lib.EnsureTopLevelClassIsFinalized();

  intptr_t ignore = 0;
  // Lookup the name in the library's symbols.
  Object& obj = Object::Handle(zone, lib.LookupEntry(name, &ignore));
  if (!Field::IsGetterName(name) && !Field::IsSetterName(name) &&
      (obj.IsNull() || obj.IsLibraryPrefix())) {
    String& accessor_name = String::Handle(zone);
    accessor_name = Field::LookupGetterSymbol(name);
    if (!accessor_name.IsNull()) {
      obj = lib.LookupEntry(accessor_name, &ignore);
    }
    if (obj.IsNull()) {
      accessor_name = Field::LookupSetterSymbol(name);
      if (!accessor_name.IsNull()) {
        obj = lib.LookupEntry(accessor_name, &ignore);
      }
    }
  }

  // Library prefixes are not exported.
  if (obj.IsNull() || obj.IsLibraryPrefix()) {
    // Lookup in the re-exported symbols.
    obj = lib.LookupReExport(name, trail);
    if (obj.IsNull() && !Field::IsSetterName(name)) {
      // LookupReExport() only returns objects that match the given name.
      // If there is no field/func/getter, try finding a setter.
      const String& setter_name =
          String::Handle(zone, Field::LookupSetterSymbol(name));
      if (!setter_name.IsNull()) {
        obj = lib.LookupReExport(setter_name, trail);
      }
    }
  }
  if (obj.IsNull() || HidesName(name) || obj.IsLibraryPrefix()) {
    return Object::null();
  }
  return obj.ptr();
}

NamespacePtr Namespace::New() {
  ASSERT(Object::namespace_class() != Class::null());
  return Object::Allocate<Namespace>(Heap::kOld);
}

NamespacePtr Namespace::New(const Library& target,
                            const Array& show_names,
                            const Array& hide_names,
                            const Library& owner) {
  ASSERT(show_names.IsNull() || (show_names.Length() > 0));
  ASSERT(hide_names.IsNull() || (hide_names.Length() > 0));
  const Namespace& result = Namespace::Handle(Namespace::New());
  result.untag()->set_target(target.ptr());
  result.untag()->set_show_names(show_names.ptr());
  result.untag()->set_hide_names(hide_names.ptr());
  result.untag()->set_owner(owner.ptr());
  return result.ptr();
}

KernelProgramInfoPtr KernelProgramInfo::New() {
  return Object::Allocate<KernelProgramInfo>(Heap::kOld);
}

KernelProgramInfoPtr KernelProgramInfo::New(
    const TypedDataBase& kernel_component,
    const TypedDataView& string_data,
    const TypedDataView& metadata_payloads,
    const TypedDataView& metadata_mappings,
    const TypedDataView& constants_table,
    const TypedData& string_offsets,
    const TypedData& canonical_names,
    const Array& scripts,
    const Array& libraries_cache,
    const Array& classes_cache) {
  ASSERT(kernel_component.IsExternalOrExternalView());
  ASSERT(string_data.IsExternalOrExternalView());
  ASSERT(metadata_payloads.IsExternalOrExternalView());
  ASSERT(metadata_mappings.IsExternalOrExternalView());
  ASSERT(constants_table.IsExternalOrExternalView());

  const auto& info = KernelProgramInfo::Handle(KernelProgramInfo::New());
  info.untag()->set_kernel_component(kernel_component.ptr());
  info.untag()->set_string_offsets(string_offsets.ptr());
  info.untag()->set_string_data(string_data.ptr());
  info.untag()->set_canonical_names(canonical_names.ptr());
  info.untag()->set_metadata_payloads(metadata_payloads.ptr());
  info.untag()->set_metadata_mappings(metadata_mappings.ptr());
  info.untag()->set_scripts(scripts.ptr());
  info.untag()->set_constants_table(constants_table.ptr());
  info.untag()->set_libraries_cache(libraries_cache.ptr());
  info.untag()->set_classes_cache(classes_cache.ptr());
  return info.ptr();
}

const char* KernelProgramInfo::ToCString() const {
  return "[KernelProgramInfo]";
}

ScriptPtr KernelProgramInfo::ScriptAt(intptr_t index) const {
  const Array& all_scripts = Array::Handle(scripts());
  ObjectPtr script = all_scripts.At(index);
  return Script::RawCast(script);
}

void KernelProgramInfo::set_scripts(const Array& scripts) const {
  untag()->set_scripts(scripts.ptr());
}

void KernelProgramInfo::set_constants(const Array& constants) const {
  untag()->set_constants(constants.ptr());
}

intptr_t KernelProgramInfo::KernelLibraryStartOffset(
    intptr_t library_index) const {
  const auto& blob = TypedDataBase::Handle(kernel_component());
  const intptr_t library_count =
      Utils::BigEndianToHost32(*reinterpret_cast<uint32_t*>(
          blob.DataAddr(blob.LengthInBytes() - 2 * 4)));
  const intptr_t library_start =
      Utils::BigEndianToHost32(*reinterpret_cast<uint32_t*>(
          blob.DataAddr(blob.LengthInBytes() -
                        (2 + 1 + (library_count - library_index)) * 4)));
  return library_start;
}

TypedDataViewPtr KernelProgramInfo::KernelLibrary(
    intptr_t library_index) const {
  const intptr_t start_offset = KernelLibraryStartOffset(library_index);
  const intptr_t end_offset = KernelLibraryEndOffset(library_index);
  const auto& component = TypedDataBase::Handle(kernel_component());
  return component.ViewFromTo(start_offset, end_offset);
}

intptr_t KernelProgramInfo::KernelLibraryEndOffset(
    intptr_t library_index) const {
  const auto& blob = TypedDataBase::Handle(kernel_component());
  const intptr_t library_count =
      Utils::BigEndianToHost32(*reinterpret_cast<uint32_t*>(
          blob.DataAddr(blob.LengthInBytes() - 2 * 4)));
  const intptr_t library_end =
      Utils::BigEndianToHost32(*reinterpret_cast<uint32_t*>(blob.DataAddr(
          blob.LengthInBytes() - (2 + (library_count - library_index)) * 4)));
  return library_end;
}
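
// Sketch of the trailing words of the kernel component read above (each a
// big-endian uint32; layout inferred from the two accessors, illustrative
// only). For library_count == N, counting back from the end of the blob:
//
//   [libraryOffset[0] ... libraryOffset[N]]  // N+1 offsets
//   [libraryCount == N]                      // at LengthInBytes() - 2 * 4
//   [one more trailing word]                 // skipped by these accessors
//
// KernelLibraryStartOffset(i) reads libraryOffset[i], and
// KernelLibraryEndOffset(i) reads libraryOffset[i + 1].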
15290
15291void KernelProgramInfo::set_constants_table(const TypedDataView& value) const {
15292 untag()->set_constants_table(value.ptr());
15293}
15294
15295void KernelProgramInfo::set_libraries_cache(const Array& cache) const {
15296 untag()->set_libraries_cache(cache.ptr());
15297}
15298
15299LibraryPtr KernelProgramInfo::LookupLibrary(Thread* thread,
15300 const Smi& name_index) const {
15301 REUSABLE_ARRAY_HANDLESCOPE(thread);
15302 REUSABLE_LIBRARY_HANDLESCOPE(thread);
15303 REUSABLE_OBJECT_HANDLESCOPE(thread);
15304 REUSABLE_SMI_HANDLESCOPE(thread);
15305 Array& data = thread->ArrayHandle();
15306 Library& result = thread->LibraryHandle();
15307 Object& key = thread->ObjectHandle();
15308 Smi& value = thread->SmiHandle();
15309 {
15310 SafepointMutexLocker ml(
15311 thread->isolate_group()->kernel_data_lib_cache_mutex());
15312 data = libraries_cache();
15313 ASSERT(!data.IsNull());
15314 IntHashMap table(&key, &value, &data);
15315 result ^= table.GetOrNull(key: name_index);
15316 table.Release();
15317 }
15318 return result.ptr();
15319}
15320
15321LibraryPtr KernelProgramInfo::InsertLibrary(Thread* thread,
15322 const Smi& name_index,
15323 const Library& lib) const {
15324 REUSABLE_ARRAY_HANDLESCOPE(thread);
15325 REUSABLE_LIBRARY_HANDLESCOPE(thread);
15326 REUSABLE_OBJECT_HANDLESCOPE(thread);
15327 REUSABLE_SMI_HANDLESCOPE(thread);
15328 Array& data = thread->ArrayHandle();
15329 Library& result = thread->LibraryHandle();
15330 Object& key = thread->ObjectHandle();
15331 Smi& value = thread->SmiHandle();
15332 {
15333 SafepointMutexLocker ml(
15334 thread->isolate_group()->kernel_data_lib_cache_mutex());
15335 data = libraries_cache();
15336 ASSERT(!data.IsNull());
15337 IntHashMap table(&key, &value, &data);
15338 result ^= table.InsertOrGetValue(key: name_index, value_if_absent: lib);
15339 set_libraries_cache(table.Release());
15340 }
15341 return result.ptr();
15342}
15343
15344void KernelProgramInfo::set_classes_cache(const Array& cache) const {
15345 untag()->set_classes_cache(cache.ptr());
15346}
15347
15348ClassPtr KernelProgramInfo::LookupClass(Thread* thread,
15349 const Smi& name_index) const {
15350 REUSABLE_ARRAY_HANDLESCOPE(thread);
15351 REUSABLE_CLASS_HANDLESCOPE(thread);
15352 REUSABLE_OBJECT_HANDLESCOPE(thread);
15353 REUSABLE_SMI_HANDLESCOPE(thread);
15354 Array& data = thread->ArrayHandle();
15355 Class& result = thread->ClassHandle();
15356 Object& key = thread->ObjectHandle();
15357 Smi& value = thread->SmiHandle();
15358 {
15359 SafepointMutexLocker ml(
15360 thread->isolate_group()->kernel_data_class_cache_mutex());
15361 data = classes_cache();
15362 ASSERT(!data.IsNull());
15363 IntHashMap table(&key, &value, &data);
15364 result ^= table.GetOrNull(key: name_index);
15365 table.Release();
15366 }
15367 return result.ptr();
15368}
15369
15370ClassPtr KernelProgramInfo::InsertClass(Thread* thread,
15371 const Smi& name_index,
15372 const Class& klass) const {
15373 REUSABLE_ARRAY_HANDLESCOPE(thread);
15374 REUSABLE_CLASS_HANDLESCOPE(thread);
15375 REUSABLE_OBJECT_HANDLESCOPE(thread);
15376 REUSABLE_SMI_HANDLESCOPE(thread);
15377 Array& data = thread->ArrayHandle();
15378 Class& result = thread->ClassHandle();
15379 Object& key = thread->ObjectHandle();
15380 Smi& value = thread->SmiHandle();
15381 {
15382 SafepointMutexLocker ml(
15383 thread->isolate_group()->kernel_data_class_cache_mutex());
15384 data = classes_cache();
15385 ASSERT(!data.IsNull());
15386 IntHashMap table(&key, &value, &data);
15387 result ^= table.InsertOrGetValue(key: name_index, value_if_absent: klass);
15388 set_classes_cache(table.Release());
15389 }
15390 return result.ptr();
15391}
15392
15393ErrorPtr Library::CompileAll(bool ignore_error /* = false */) {
15394 Thread* thread = Thread::Current();
15395 Zone* zone = thread->zone();
15396 Error& error = Error::Handle(zone);
15397 const GrowableObjectArray& libs = GrowableObjectArray::Handle(
15398 ptr: IsolateGroup::Current()->object_store()->libraries());
15399 Library& lib = Library::Handle(zone);
15400 Class& cls = Class::Handle(zone);
15401 for (int i = 0; i < libs.Length(); i++) {
15402 lib ^= libs.At(index: i);
15403 ClassDictionaryIterator it(lib, ClassDictionaryIterator::kIteratePrivate);
15404 while (it.HasNext()) {
15405 cls = it.GetNextClass();
15406 error = cls.EnsureIsFinalized(thread);
15407 if (!error.IsNull()) {
15408 if (ignore_error) continue;
15409 return error.ptr();
15410 }
15411 error = Compiler::CompileAllFunctions(cls);
15412 if (!error.IsNull()) {
15413 if (ignore_error) continue;
15414 return error.ptr();
15415 }
15416 }
15417 }
15418
15419 Object& result = Object::Handle(zone);
15420 ClosureFunctionsCache::ForAllClosureFunctions(callback: [&](const Function& func) {
15421 if (!func.HasCode()) {
15422 result = Compiler::CompileFunction(thread, function: func);
15423 if (result.IsError()) {
15424 error = Error::Cast(obj: result).ptr();
15425 return false; // Stop iteration.
15426 }
15427 }
15428 return true; // Continue iteration.
15429 });
15430 return error.ptr();
15431}
15432
15433#if !defined(DART_PRECOMPILED_RUNTIME)
15434
15435ErrorPtr Library::FinalizeAllClasses() {
15436 Thread* thread = Thread::Current();
15437 ASSERT(thread->IsDartMutatorThread());
15438 Zone* zone = thread->zone();
15439 Error& error = Error::Handle(zone);
15440 const GrowableObjectArray& libs = GrowableObjectArray::Handle(
15441 ptr: IsolateGroup::Current()->object_store()->libraries());
15442 Library& lib = Library::Handle(zone);
15443 Class& cls = Class::Handle(zone);
15444 for (int i = 0; i < libs.Length(); i++) {
15445 lib ^= libs.At(index: i);
15446 if (!lib.Loaded()) {
15447 String& uri = String::Handle(zone, ptr: lib.url());
15448 String& msg = String::Handle(
15449 zone,
15450 ptr: String::NewFormatted(format: "Library '%s' is not loaded. "
15451 "Did you forget to call Dart_FinalizeLoading?",
15452 uri.ToCString()));
15453 return ApiError::New(message: msg);
15454 }
15455 ClassDictionaryIterator it(lib, ClassDictionaryIterator::kIteratePrivate);
15456 while (it.HasNext()) {
15457 cls = it.GetNextClass();
15458 error = cls.EnsureIsFinalized(thread);
15459 if (!error.IsNull()) {
15460 return error.ptr();
15461 }
15462 }
15463 }
15464 return Error::null();
15465}
15466
15467#endif // !defined(DART_PRECOMPILED_RUNTIME)
15468
// Return Function::null() if function does not exist in libs.
// A class_name of "::" denotes a library-level (top-level) function lookup.
FunctionPtr Library::GetFunction(const GrowableArray<Library*>& libs,
                                 const char* class_name,
                                 const char* function_name) {
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  Function& func = Function::Handle(zone);
  String& class_str = String::Handle(zone);
  String& func_str = String::Handle(zone);
  Class& cls = Class::Handle(zone);
  for (intptr_t l = 0; l < libs.length(); l++) {
    const Library& lib = *libs[l];
    if (strcmp(class_name, "::") == 0) {
      func_str = Symbols::New(thread, function_name);
      func = lib.LookupFunctionAllowPrivate(func_str);
    } else {
      class_str = String::New(class_name);
      cls = lib.LookupClassAllowPrivate(class_str);
      if (!cls.IsNull()) {
        if (cls.EnsureIsFinalized(thread) == Error::null()) {
          func_str = String::New(function_name);
          if (function_name[0] == '.') {
            func_str = String::Concat(class_str, func_str);
          }
          func = cls.LookupFunctionAllowPrivate(func_str);
        }
      }
    }
    if (!func.IsNull()) {
      return func.ptr();
    }
  }
  return Function::null();
}

ObjectPtr Library::GetFunctionClosure(const String& name) const {
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  Function& func = Function::Handle(zone, LookupFunctionAllowPrivate(name));
  if (func.IsNull()) {
    // Check whether the function is reexported into the library.
    const Object& obj = Object::Handle(zone, LookupReExport(name));
    if (obj.IsFunction()) {
      func ^= obj.ptr();
    } else {
      // Check if there is a getter of 'name', in which case invoke it
      // and return the result.
      const String& getter_name =
          String::Handle(zone, Field::GetterName(name));
      func = LookupFunctionAllowPrivate(getter_name);
      if (func.IsNull()) {
        return Closure::null();
      }
      // Invoke the getter and return the result.
      return DartEntry::InvokeFunction(func, Object::empty_array());
    }
  }
  func = func.ImplicitClosureFunction();
  return func.ImplicitStaticClosure();
}

#if defined(DEBUG) && !defined(DART_PRECOMPILED_RUNTIME)
void Library::CheckFunctionFingerprints() {
  GrowableArray<Library*> all_libs;
  Function& func = Function::Handle();
  bool fingerprints_match = true;

#define CHECK_FINGERPRINTS_INNER(class_name, function_name, dest, fp, kind)   \
  func = GetFunction(all_libs, #class_name, #function_name);                  \
  if (func.IsNull()) {                                                        \
    fingerprints_match = false;                                               \
    OS::PrintErr("Function not found %s.%s\n", #class_name, #function_name);  \
  } else {                                                                    \
    fingerprints_match =                                                      \
        func.CheckSourceFingerprint(fp, kind) && fingerprints_match;          \
  }

#define CHECK_FINGERPRINTS(class_name, function_name, dest, fp)               \
  CHECK_FINGERPRINTS_INNER(class_name, function_name, dest, fp, nullptr)
#define CHECK_FINGERPRINTS_ASM_INTRINSIC(class_name, function_name, dest, fp) \
  CHECK_FINGERPRINTS_INNER(class_name, function_name, dest, fp, "asm-intrinsic")
#define CHECK_FINGERPRINTS_GRAPH_INTRINSIC(class_name, function_name, dest,   \
                                           fp)                                \
  CHECK_FINGERPRINTS_INNER(class_name, function_name, dest, fp,               \
                           "graph-intrinsic")
#define CHECK_FINGERPRINTS_OTHER(class_name, function_name, dest, fp)         \
  CHECK_FINGERPRINTS_INNER(class_name, function_name, dest, fp, "other")

  all_libs.Add(&Library::ZoneHandle(Library::CoreLibrary()));
  CORE_LIB_INTRINSIC_LIST(CHECK_FINGERPRINTS_ASM_INTRINSIC);
  CORE_INTEGER_LIB_INTRINSIC_LIST(CHECK_FINGERPRINTS_ASM_INTRINSIC);
  GRAPH_CORE_INTRINSICS_LIST(CHECK_FINGERPRINTS_GRAPH_INTRINSIC);

  all_libs.Add(&Library::ZoneHandle(Library::AsyncLibrary()));
  all_libs.Add(&Library::ZoneHandle(Library::MathLibrary()));
  all_libs.Add(&Library::ZoneHandle(Library::TypedDataLibrary()));
  all_libs.Add(&Library::ZoneHandle(Library::CollectionLibrary()));
  all_libs.Add(&Library::ZoneHandle(Library::ConvertLibrary()));
  all_libs.Add(&Library::ZoneHandle(Library::InternalLibrary()));
  all_libs.Add(&Library::ZoneHandle(Library::IsolateLibrary()));
  all_libs.Add(&Library::ZoneHandle(Library::FfiLibrary()));
  all_libs.Add(&Library::ZoneHandle(Library::NativeWrappersLibrary()));
  all_libs.Add(&Library::ZoneHandle(Library::DeveloperLibrary()));
  INTERNAL_LIB_INTRINSIC_LIST(CHECK_FINGERPRINTS_ASM_INTRINSIC);
  OTHER_RECOGNIZED_LIST(CHECK_FINGERPRINTS_OTHER);
  POLYMORPHIC_TARGET_LIST(CHECK_FINGERPRINTS);
  GRAPH_TYPED_DATA_INTRINSICS_LIST(CHECK_FINGERPRINTS_GRAPH_INTRINSIC);

  all_libs.Clear();
  all_libs.Add(&Library::ZoneHandle(Library::DeveloperLibrary()));
  DEVELOPER_LIB_INTRINSIC_LIST(CHECK_FINGERPRINTS_ASM_INTRINSIC);

#undef CHECK_FINGERPRINTS_INNER
#undef CHECK_FINGERPRINTS
#undef CHECK_FINGERPRINTS_ASM_INTRINSIC
#undef CHECK_FINGERPRINTS_GRAPH_INTRINSIC
#undef CHECK_FINGERPRINTS_OTHER

#define CHECK_FACTORY_FINGERPRINTS(symbol, class_name, factory_name, cid, fp) \
  func = GetFunction(all_libs, #class_name, #factory_name);                   \
  if (func.IsNull()) {                                                        \
    fingerprints_match = false;                                               \
    OS::PrintErr("Function not found %s.%s\n", #class_name, #factory_name);   \
  } else {                                                                    \
    fingerprints_match =                                                      \
        func.CheckSourceFingerprint(fp) && fingerprints_match;                \
  }

  all_libs.Clear();
  all_libs.Add(&Library::ZoneHandle(Library::CoreLibrary()));
  all_libs.Add(&Library::ZoneHandle(Library::TypedDataLibrary()));
  RECOGNIZED_LIST_FACTORY_LIST(CHECK_FACTORY_FINGERPRINTS);

#undef CHECK_FACTORY_FINGERPRINTS

  if (!fingerprints_match) {
    // Private names are mangled. Mangling depends on Library::private_key_.
    // If registering a new bootstrap library, add at the end.
    FATAL(
        "FP mismatch while recognizing methods. If the behavior of "
        "these functions has changed, then changes are also needed in "
        "the VM's compiler. Otherwise the fingerprint can simply be "
        "updated in recognized_methods_list.h\n");
  }
}
#endif  // defined(DEBUG) && !defined(DART_PRECOMPILED_RUNTIME)

InstructionsPtr Instructions::New(intptr_t size, bool has_monomorphic_entry) {
  ASSERT(size >= 0);
  ASSERT(Object::instructions_class() != Class::null());
  if (size < 0 || size > kMaxElements) {
    // This should be caught before we reach here.
    FATAL("Fatal error in Instructions::New: invalid size %" Pd "\n", size);
  }
  Instructions& result = Instructions::Handle();
  {
    auto raw = Object::Allocate<Instructions>(Heap::kCode, size);
    NoSafepointScope no_safepoint;
    result = raw;
    result.SetSize(size);
    // Set this within the NoSafepointScope as well since it is contained in
    // the same bitfield as the size.
    result.SetHasMonomorphicEntry(has_monomorphic_entry);
  }
  ASSERT(result.stats() == nullptr);
  return result.ptr();
}

const char* Instructions::ToCString() const {
  return "Instructions";
}

CodeStatistics* Instructions::stats() const {
#if defined(DART_PRECOMPILER)
  return reinterpret_cast<CodeStatistics*>(
      Thread::Current()->heap()->GetPeer(ptr()));
#else
  return nullptr;
#endif
}

void Instructions::set_stats(CodeStatistics* stats) const {
#if defined(DART_PRECOMPILER)
  Thread::Current()->heap()->SetPeer(ptr(), stats);
#endif
}

const char* InstructionsSection::ToCString() const {
  return "InstructionsSection";
}

void InstructionsTable::set_length(intptr_t value) const {
  StoreNonPointer(&untag()->length_, value);
}

void InstructionsTable::set_start_pc(uword value) const {
  StoreNonPointer(&untag()->start_pc_, value);
}

void InstructionsTable::set_end_pc(uword value) const {
  StoreNonPointer(&untag()->end_pc_, value);
}

void InstructionsTable::set_code_objects(const Array& value) const {
  untag()->set_code_objects(value.ptr());
}

void InstructionsTable::set_rodata(uword value) const {
  StoreNonPointer(
      &untag()->rodata_,
      reinterpret_cast<const UntaggedInstructionsTable::Data*>(value));
}

InstructionsTablePtr InstructionsTable::New(intptr_t length,
                                            uword start_pc,
                                            uword end_pc,
                                            uword rodata) {
  ASSERT(Object::instructions_table_class() != Class::null());
  ASSERT(length >= 0);
  ASSERT(start_pc <= end_pc);
  auto* const zone = Thread::Current()->zone();
  const Array& code_objects =
      (length == 0) ? Object::empty_array()
                    : Array::Handle(zone, Array::New(length, Heap::kOld));
  const auto& result = InstructionsTable::Handle(
      zone, Object::Allocate<InstructionsTable>(Heap::kOld));
  result.set_code_objects(code_objects);
  result.set_length(length);
  result.set_start_pc(start_pc);
  result.set_end_pc(end_pc);
  result.set_rodata(rodata);
  return result.ptr();
}

void InstructionsTable::SetCodeAt(intptr_t index, CodePtr code) const {
  ASSERT((0 <= index) &&
         (index < Smi::Value(code_objects()->untag()->length())));
  code_objects()->untag()->set_element(index, code);
}

bool InstructionsTable::ContainsPc(InstructionsTablePtr table, uword pc) {
  return (InstructionsTable::start_pc(table) <= pc) &&
         (pc < InstructionsTable::end_pc(table));
}

uint32_t InstructionsTable::ConvertPcToOffset(InstructionsTablePtr table,
                                              uword pc) {
  ASSERT(InstructionsTable::ContainsPc(table, pc));
  const uint32_t pc_offset =
      static_cast<uint32_t>(pc - InstructionsTable::start_pc(table));
  ASSERT(InstructionsTable::start_pc(table) + pc_offset == pc);  // No overflow.
  return pc_offset;
}

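// Note on FindEntry below: it binary-searches the rodata entries, which are
// sorted by pc_offset, and returns the index i of the entry whose half-open
// range [entries[i].pc_offset, entries[i + 1].pc_offset) contains the given
// pc, or -1 when the pc lies outside the table. Illustrative example with
// hypothetical offsets: for entries at pc_offsets {0x0, 0x40, 0x100}, a pc of
// start_pc + 0x44 resolves to index 1.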
intptr_t InstructionsTable::FindEntry(InstructionsTablePtr table,
                                      uword pc,
                                      intptr_t start_index /* = 0 */) {
  // This can run in the middle of GC and must not allocate handles.
  NoSafepointScope no_safepoint;
  if (!InstructionsTable::ContainsPc(table, pc)) return -1;
  const uint32_t pc_offset = InstructionsTable::ConvertPcToOffset(table, pc);

  const auto rodata = table.untag()->rodata_;
  const auto entries = rodata->entries();
  intptr_t lo = start_index;
  intptr_t hi = rodata->length - 1;
  while (lo <= hi) {
    intptr_t mid = (hi - lo + 1) / 2 + lo;
    ASSERT(mid >= lo);
    ASSERT(mid <= hi);
    if (pc_offset < entries[mid].pc_offset) {
      hi = mid - 1;
    } else if ((mid != hi) && (pc_offset >= entries[mid + 1].pc_offset)) {
      lo = mid + 1;
    } else {
      return mid;
    }
  }
  return -1;
}

const UntaggedCompressedStackMaps::Payload*
InstructionsTable::GetCanonicalStackMap(InstructionsTablePtr table) {
  const auto rodata = table.untag()->rodata_;
  return rodata->canonical_stack_map_entries_offset != 0
             ? rodata->StackMapAt(rodata->canonical_stack_map_entries_offset)
             : nullptr;
}

const UntaggedCompressedStackMaps::Payload* InstructionsTable::FindStackMap(
    InstructionsTablePtr table,
    uword pc,
    uword* start_pc) {
  // This can run in the middle of GC and must not allocate handles.
  NoSafepointScope no_safepoint;
  const intptr_t idx = FindEntry(table, pc);
  if (idx != -1) {
    const auto rodata = table.untag()->rodata_;
    const auto entries = rodata->entries();
    *start_pc = InstructionsTable::start_pc(table) + entries[idx].pc_offset;
    return rodata->StackMapAt(entries[idx].stack_map_offset);
  }
  return nullptr;
}

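// Note on FindCode below: pc_offsets at or below the offset of the first
// entry that has an associated Code object are attributed to the shared
// UnknownDartCode stub rather than to a specific code object (see the
// first_entry_with_code check).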
CodePtr InstructionsTable::FindCode(InstructionsTablePtr table, uword pc) {
  // This can run in the middle of GC and must not allocate handles.
  NoSafepointScope no_safepoint;
  if (!InstructionsTable::ContainsPc(table, pc)) return Code::null();

  const auto rodata = table.untag()->rodata_;

  const auto pc_offset = InstructionsTable::ConvertPcToOffset(table, pc);

  if (pc_offset <= rodata->entries()[rodata->first_entry_with_code].pc_offset) {
    return StubCode::UnknownDartCode().ptr();
  }

  const auto idx =
      FindEntry(table, pc, table.untag()->rodata_->first_entry_with_code);
  if (idx != -1) {
    const intptr_t code_index = idx - rodata->first_entry_with_code;
    ASSERT(code_index >= 0);
    ASSERT(code_index <
           Smi::Value(table.untag()->code_objects()->untag()->length()));
    ObjectPtr result =
        table.untag()->code_objects()->untag()->element(code_index);
    ASSERT(result->IsCode());
    // Note: can't use Code::RawCast(...) here because it allocates handles
    // in DEBUG mode.
    return static_cast<CodePtr>(result);
  }

  return Code::null();
}

uword InstructionsTable::EntryPointAt(intptr_t code_index) const {
  ASSERT(0 <= code_index);
  ASSERT(code_index < static_cast<intptr_t>(rodata()->length));
  return InstructionsTable::start_pc(this->ptr()) +
         rodata()->entries()[code_index].pc_offset;
}

const char* InstructionsTable::ToCString() const {
  return "InstructionsTable";
}

ObjectPoolPtr ObjectPool::New(intptr_t len) {
  ASSERT(Object::object_pool_class() != Class::null());
  if (len < 0 || len > kMaxElements) {
    // This should be caught before we reach here.
    FATAL("Fatal error in ObjectPool::New: invalid length %" Pd "\n", len);
  }
  // We only verify the entry bits in DEBUG, so only allocate a handle there.
  DEBUG_ONLY(auto& result = ObjectPool::Handle());
  auto raw = Object::Allocate<ObjectPool>(Heap::kOld, len);
  NoSafepointScope no_safepoint;
  raw->untag()->length_ = len;
#if defined(DEBUG)
  result = raw;
  for (intptr_t i = 0; i < len; i++) {
    // Verify that InitializeObject() already set the payload as expected.
    ASSERT_EQUAL(result.PatchableAt(i), ObjectPool::Patchability::kPatchable);
    ASSERT_EQUAL(result.TypeAt(i), ObjectPool::EntryType::kImmediate);
    ASSERT_EQUAL(result.RawValueAt(i), 0);
  }
#endif
  return raw;
}

#if !defined(DART_PRECOMPILED_RUNTIME)
ObjectPoolPtr ObjectPool::NewFromBuilder(
    const compiler::ObjectPoolBuilder& builder) {
  const intptr_t len = builder.CurrentLength();
  if (len == 0) {
    return Object::empty_object_pool().ptr();
  }
  const ObjectPool& result = ObjectPool::Handle(ObjectPool::New(len));
  for (intptr_t i = 0; i < len; i++) {
    auto entry = builder.EntryAt(i);
    auto type = entry.type();
    auto patchable = entry.patchable();
    result.SetTypeAt(i, type, patchable);
    if (type == EntryType::kTaggedObject) {
      result.SetObjectAt(i, *entry.obj_);
    } else {
#if defined(TARGET_ARCH_IS_32_BIT)
      ASSERT(type != EntryType::kImmediate64);
#endif
      ASSERT(type != EntryType::kImmediate128);
      result.SetRawValueAt(i, entry.imm_);
    }
  }
  return result.ptr();
}

void ObjectPool::CopyInto(compiler::ObjectPoolBuilder* builder) const {
  ASSERT(builder->CurrentLength() == 0);
  for (intptr_t i = 0; i < Length(); i++) {
    auto type = TypeAt(i);
    auto patchable = PatchableAt(i);
    switch (type) {
      case compiler::ObjectPoolBuilderEntry::kTaggedObject: {
        compiler::ObjectPoolBuilderEntry entry(&Object::ZoneHandle(ObjectAt(i)),
                                               patchable);
        builder->AddObject(entry);
        break;
      }
      case compiler::ObjectPoolBuilderEntry::kImmediate:
      case compiler::ObjectPoolBuilderEntry::kNativeFunction: {
        compiler::ObjectPoolBuilderEntry entry(RawValueAt(i), type, patchable);
        builder->AddObject(entry);
        break;
      }
      default:
        UNREACHABLE();
    }
  }
  ASSERT(builder->CurrentLength() == Length());
}
#endif  // !defined(DART_PRECOMPILED_RUNTIME)

const char* ObjectPool::ToCString() const {
  Zone* zone = Thread::Current()->zone();
  return zone->PrintToString("ObjectPool len:%" Pd, Length());
}

void ObjectPool::DebugPrint() const {
  THR_Print("ObjectPool len:%" Pd " {\n", Length());
  for (intptr_t i = 0; i < Length(); i++) {
#if defined(DART_PRECOMPILED_RUNTIME)
    intptr_t offset = ObjectPool::element_offset(i);
#else
    intptr_t offset = compiler::target::ObjectPool::element_offset(i);
#endif
#if defined(TARGET_ARCH_RISCV32) || defined(TARGET_ARCH_RISCV64)
    THR_Print("  %" Pd "(pp)  ", offset);  // PP is untagged
#elif defined(TARGET_ARCH_ARM64)
    THR_Print("  [pp, #%" Pd "]  ", offset);  // PP is untagged
#elif defined(TARGET_ARCH_ARM)
    THR_Print("  [pp, #%" Pd "]  ", offset - kHeapObjectTag);  // PP is tagged
#else
    THR_Print("  [pp+0x%" Px "]  ", offset - kHeapObjectTag);  // PP is tagged
#endif
    if (TypeAt(i) == EntryType::kTaggedObject) {
      const Object& obj = Object::Handle(ObjectAt(i));
      THR_Print("%s (obj)\n", obj.ToCString());
    } else if (TypeAt(i) == EntryType::kNativeFunction) {
      uword pc = RawValueAt(i);
      uintptr_t start = 0;
      char* name = NativeSymbolResolver::LookupSymbolName(pc, &start);
      char* dso_name;
      uword dso_base;
      if (name != nullptr) {
        THR_Print("%s (native function)\n", name);
        NativeSymbolResolver::FreeSymbolName(name);
      } else if (NativeSymbolResolver::LookupSharedObject(pc, &dso_base,
                                                          &dso_name)) {
        uword dso_offset = pc - dso_base;
        THR_Print("%s+0x%" Px " (native function)\n", dso_name, dso_offset);
        NativeSymbolResolver::FreeSymbolName(dso_name);
      } else {
        THR_Print("0x%" Px " (native function)\n", pc);
      }
    } else {
      THR_Print("0x%" Px " (raw)\n", RawValueAt(i));
    }
  }
  THR_Print("}\n");
}

intptr_t PcDescriptors::Length() const {
  return untag()->length_;
}

void PcDescriptors::SetLength(intptr_t value) const {
  StoreNonPointer(&untag()->length_, value);
}

void PcDescriptors::CopyData(const void* bytes, intptr_t size) {
  NoSafepointScope no_safepoint;
  uint8_t* data = UnsafeMutableNonPointer(&untag()->data()[0]);
  // We're guaranteed these memory spaces do not overlap.
  memcpy(data, bytes, size);  // NOLINT
}

PcDescriptorsPtr PcDescriptors::New(const void* delta_encoded_data,
                                    intptr_t size) {
  ASSERT(Object::pc_descriptors_class() != Class::null());
  Thread* thread = Thread::Current();
  PcDescriptors& result = PcDescriptors::Handle(thread->zone());
  {
    auto raw = Object::Allocate<PcDescriptors>(Heap::kOld, size);
    NoSafepointScope no_safepoint;
    result = raw;
    result.SetLength(size);
  }
  result.CopyData(delta_encoded_data, size);
  return result.ptr();
}

PcDescriptorsPtr PcDescriptors::New(intptr_t length) {
  ASSERT(Object::pc_descriptors_class() != Class::null());
  Thread* thread = Thread::Current();
  PcDescriptors& result = PcDescriptors::Handle(thread->zone());
  {
    auto raw = Object::Allocate<PcDescriptors>(Heap::kOld, length);
    NoSafepointScope no_safepoint;
    result = raw;
    result.SetLength(length);
  }
  return result.ptr();
}

const char* PcDescriptors::KindAsStr(UntaggedPcDescriptors::Kind kind) {
  switch (kind) {
    case UntaggedPcDescriptors::kDeopt:
      return "deopt        ";
    case UntaggedPcDescriptors::kIcCall:
      return "ic-call      ";
    case UntaggedPcDescriptors::kUnoptStaticCall:
      return "unopt-call   ";
    case UntaggedPcDescriptors::kRuntimeCall:
      return "runtime-call ";
    case UntaggedPcDescriptors::kOsrEntry:
      return "osr-entry    ";
    case UntaggedPcDescriptors::kRewind:
      return "rewind       ";
    case UntaggedPcDescriptors::kBSSRelocation:
      return "bss reloc    ";
    case UntaggedPcDescriptors::kOther:
      return "other        ";
    case UntaggedPcDescriptors::kAnyKind:
      UNREACHABLE();
      break;
  }
  UNREACHABLE();
  return "";
}

void PcDescriptors::PrintHeaderString() {
  // 4 bits per hex digit + 2 for "0x".
  const int addr_width = (kBitsPerWord / 4) + 2;
  // "*" in a printf format specifier tells it to read the field width from
  // the printf argument list.
  THR_Print("%-*s\tkind    \tdeopt-id\ttok-ix\ttry-ix\tyield-idx\n", addr_width,
            "pc");
}

const char* PcDescriptors::ToCString() const {
// "*" in a printf format specifier tells it to read the field width from
// the printf argument list.
#define FORMAT "%#-*" Px "\t%s\t%" Pd "\t\t%s\t%" Pd "\t%" Pd "\n"
  if (Length() == 0) {
    return "empty PcDescriptors\n";
  }
  // 4 bits per hex digit.
  const int addr_width = kBitsPerWord / 4;
  // First compute the buffer size required.
  intptr_t len = 1;  // Trailing '\0'.
  {
    Iterator iter(*this, UntaggedPcDescriptors::kAnyKind);
    while (iter.MoveNext()) {
      len += Utils::SNPrint(nullptr, 0, FORMAT, addr_width, iter.PcOffset(),
                            KindAsStr(iter.Kind()), iter.DeoptId(),
                            iter.TokenPos().ToCString(), iter.TryIndex(),
                            iter.YieldIndex());
    }
  }
  // Allocate the buffer.
  char* buffer = Thread::Current()->zone()->Alloc<char>(len);
  // Layout the fields in the buffer.
  intptr_t index = 0;
  Iterator iter(*this, UntaggedPcDescriptors::kAnyKind);
  while (iter.MoveNext()) {
    index += Utils::SNPrint((buffer + index), (len - index), FORMAT, addr_width,
                            iter.PcOffset(), KindAsStr(iter.Kind()),
                            iter.DeoptId(), iter.TokenPos().ToCString(),
                            iter.TryIndex(), iter.YieldIndex());
  }
  return buffer;
#undef FORMAT
}

// Verify assumptions (in debug mode only).
// - No two deopt descriptors have the same deoptimization id.
// - No two ic-call descriptors have the same deoptimization id (type feedback).
// A function without unique ids is marked as non-optimizable (e.g., because of
// finally blocks).
void PcDescriptors::Verify(const Function& function) const {
#if defined(DEBUG)
  // Only check ids for unoptimized code that is optimizable.
  if (!function.IsOptimizable()) {
    return;
  }
  intptr_t max_deopt_id = 0;
  Iterator max_iter(
      *this, UntaggedPcDescriptors::kDeopt | UntaggedPcDescriptors::kIcCall);
  while (max_iter.MoveNext()) {
    if (max_iter.DeoptId() > max_deopt_id) {
      max_deopt_id = max_iter.DeoptId();
    }
  }

  Zone* zone = Thread::Current()->zone();
  BitVector* deopt_ids = new (zone) BitVector(zone, max_deopt_id + 1);
  BitVector* iccall_ids = new (zone) BitVector(zone, max_deopt_id + 1);
  Iterator iter(*this,
                UntaggedPcDescriptors::kDeopt | UntaggedPcDescriptors::kIcCall);
  while (iter.MoveNext()) {
    // 'deopt_id' is set for kDeopt and kIcCall and must be unique for one kind.
    if (DeoptId::IsDeoptAfter(iter.DeoptId())) {
      // TODO(vegorov): some instructions contain multiple calls and have
      // multiple "after" targets recorded. Right now it is benign but might
      // lead to issues in the future. Fix that and enable verification.
      continue;
    }
    if (iter.Kind() == UntaggedPcDescriptors::kDeopt) {
      ASSERT(!deopt_ids->Contains(iter.DeoptId()));
      deopt_ids->Add(iter.DeoptId());
    } else {
      ASSERT(!iccall_ids->Contains(iter.DeoptId()));
      iccall_ids->Add(iter.DeoptId());
    }
  }
#endif  // DEBUG
}

void CodeSourceMap::SetLength(intptr_t value) const {
  StoreNonPointer(&untag()->length_, value);
}

CodeSourceMapPtr CodeSourceMap::New(intptr_t length) {
  ASSERT(Object::code_source_map_class() != Class::null());
  Thread* thread = Thread::Current();
  CodeSourceMap& result = CodeSourceMap::Handle(thread->zone());
  {
    auto raw = Object::Allocate<CodeSourceMap>(Heap::kOld, length);
    NoSafepointScope no_safepoint;
    result = raw;
    result.SetLength(length);
  }
  return result.ptr();
}

const char* CodeSourceMap::ToCString() const {
  return "CodeSourceMap";
}

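// Note on Hash below: it is a simple, order-dependent fold of the payload
// bytes seeded with the payload size, so equal payloads always produce equal
// hashes. It is presumably used when canonicalizing stack map entries, where
// a cheap, deterministic hash over raw bytes suffices.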
uword CompressedStackMaps::Hash() const {
  NoSafepointScope scope;
  uint8_t* data = UnsafeMutableNonPointer(&untag()->payload()->data()[0]);
  uint8_t* end = data + payload_size();
  uint32_t hash = payload_size();
  for (uint8_t* cursor = data; cursor < end; cursor++) {
    hash = CombineHashes(hash, *cursor);
  }
  return FinalizeHash(hash, kHashBits);
}

void CompressedStackMaps::WriteToBuffer(BaseTextBuffer* buffer,
                                        const char* separator) const {
  auto it = iterator(Thread::Current());
  bool first_entry = true;
  while (it.MoveNext()) {
    if (!first_entry) {
      buffer->AddString(separator);
    }
    buffer->Printf("0x%.8" Px32 ": ", it.pc_offset());
    for (intptr_t i = 0, n = it.Length(); i < n; i++) {
      buffer->AddString(it.IsObject(i) ? "1" : "0");
    }
    first_entry = false;
  }
}

CompressedStackMaps::Iterator<CompressedStackMaps>
CompressedStackMaps::iterator(Thread* thread) const {
  return Iterator<CompressedStackMaps>(
      *this, CompressedStackMaps::Handle(
                 thread->zone(), thread->isolate_group()
                                     ->object_store()
                                     ->canonicalized_stack_map_entries()));
}

CompressedStackMapsPtr CompressedStackMaps::New(const void* payload,
                                                intptr_t size,
                                                bool is_global_table,
                                                bool uses_global_table) {
  ASSERT(Object::compressed_stackmaps_class() != Class::null());
  // We don't currently allow both flags to be true.
  ASSERT(!is_global_table || !uses_global_table);
  // The canonical empty instance should be used instead.
  ASSERT(size != 0);

  if (!UntaggedCompressedStackMaps::SizeField::is_valid(size)) {
    FATAL(
        "Fatal error in CompressedStackMaps::New: "
        "invalid payload size %" Pu "\n",
        size);
  }

  auto& result = CompressedStackMaps::Handle();
  {
    // CompressedStackMaps data objects are associated with a code object,
    // allocate them in old generation.
    auto raw = Object::Allocate<CompressedStackMaps>(Heap::kOld, size);
    NoSafepointScope no_safepoint;
    result = raw;
    result.untag()->payload()->set_flags_and_size(
        UntaggedCompressedStackMaps::GlobalTableBit::encode(is_global_table) |
        UntaggedCompressedStackMaps::UsesTableBit::encode(uses_global_table) |
        UntaggedCompressedStackMaps::SizeField::encode(size));
    // Perform the copy under the NoSafepointScope since it uses a raw pointer
    // to the payload, and so the object should not move during the copy.
    auto cursor =
        result.UnsafeMutableNonPointer(result.untag()->payload()->data());
    memcpy(cursor, payload, size);  // NOLINT
  }

  ASSERT(!result.IsGlobalTable() || !result.UsesGlobalTable());

  return result.ptr();
}

const char* CompressedStackMaps::ToCString() const {
  ASSERT(!IsGlobalTable());
  if (payload_size() == 0) {
    return "CompressedStackMaps()";
  }
  auto const t = Thread::Current();
  ZoneTextBuffer buffer(t->zone(), 100);
  buffer.AddString("CompressedStackMaps(");
  WriteToBuffer(&buffer, ", ");
  buffer.AddString(")");
  return buffer.buffer();
}

StringPtr LocalVarDescriptors::GetName(intptr_t var_index) const {
  ASSERT(var_index < Length());
  ASSERT(Object::Handle(ptr()->untag()->name(var_index)).IsString());
  return ptr()->untag()->name(var_index);
}

void LocalVarDescriptors::SetVar(
    intptr_t var_index,
    const String& name,
    UntaggedLocalVarDescriptors::VarInfo* info) const {
  ASSERT(var_index < Length());
  ASSERT(!name.IsNull());
  ptr()->untag()->set_name(var_index, name.ptr());
  ptr()->untag()->data()[var_index] = *info;
}

void LocalVarDescriptors::GetInfo(
    intptr_t var_index,
    UntaggedLocalVarDescriptors::VarInfo* info) const {
  ASSERT(var_index < Length());
  *info = ptr()->untag()->data()[var_index];
}

static int PrintVarInfo(char* buffer,
                        int len,
                        intptr_t i,
                        const String& var_name,
                        const UntaggedLocalVarDescriptors::VarInfo& info) {
  const UntaggedLocalVarDescriptors::VarInfoKind kind = info.kind();
  const int32_t index = info.index();
  if (kind == UntaggedLocalVarDescriptors::kContextLevel) {
    return Utils::SNPrint(buffer, len,
                          "%2" Pd
                          " %-13s level=%-3d"
                          " begin=%-3d end=%d\n",
                          i, LocalVarDescriptors::KindToCString(kind), index,
                          static_cast<int>(info.begin_pos.Pos()),
                          static_cast<int>(info.end_pos.Pos()));
  } else if (kind == UntaggedLocalVarDescriptors::kContextVar) {
    return Utils::SNPrint(
        buffer, len,
        "%2" Pd
        " %-13s level=%-3d index=%-3d"
        " begin=%-3d end=%-3d name=%s\n",
        i, LocalVarDescriptors::KindToCString(kind), info.scope_id, index,
        static_cast<int>(info.begin_pos.Pos()),
        static_cast<int>(info.end_pos.Pos()), var_name.ToCString());
  } else {
    return Utils::SNPrint(
        buffer, len,
        "%2" Pd
        " %-13s scope=%-3d index=%-3d"
        " begin=%-3d end=%-3d name=%s\n",
        i, LocalVarDescriptors::KindToCString(kind), info.scope_id, index,
        static_cast<int>(info.begin_pos.Pos()),
        static_cast<int>(info.end_pos.Pos()), var_name.ToCString());
  }
}

const char* LocalVarDescriptors::ToCString() const {
  if (IsNull()) {
    return "LocalVarDescriptors: null";
  }
  if (Length() == 0) {
    return "empty LocalVarDescriptors";
  }
  intptr_t len = 1;  // Trailing '\0'.
  String& var_name = String::Handle();
  for (intptr_t i = 0; i < Length(); i++) {
    UntaggedLocalVarDescriptors::VarInfo info;
    var_name = GetName(i);
    GetInfo(i, &info);
    len += PrintVarInfo(nullptr, 0, i, var_name, info);
  }
  char* buffer = Thread::Current()->zone()->Alloc<char>(len + 1);
  buffer[0] = '\0';
  intptr_t num_chars = 0;
  for (intptr_t i = 0; i < Length(); i++) {
    UntaggedLocalVarDescriptors::VarInfo info;
    var_name = GetName(i);
    GetInfo(i, &info);
    num_chars += PrintVarInfo((buffer + num_chars), (len - num_chars), i,
                              var_name, info);
  }
  return buffer;
}

const char* LocalVarDescriptors::KindToCString(
    UntaggedLocalVarDescriptors::VarInfoKind kind) {
  switch (kind) {
    case UntaggedLocalVarDescriptors::kStackVar:
      return "StackVar";
    case UntaggedLocalVarDescriptors::kContextVar:
      return "ContextVar";
    case UntaggedLocalVarDescriptors::kContextLevel:
      return "ContextLevel";
    case UntaggedLocalVarDescriptors::kSavedCurrentContext:
      return "CurrentCtx";
    default:
      UNIMPLEMENTED();
      return nullptr;
  }
}

LocalVarDescriptorsPtr LocalVarDescriptors::New(intptr_t num_variables) {
  ASSERT(Object::var_descriptors_class() != Class::null());
  if (num_variables < 0 || num_variables > kMaxElements) {
    // This should be caught before we reach here.
    FATAL(
        "Fatal error in LocalVarDescriptors::New: "
        "invalid num_variables %" Pd ". Maximum is: %d\n",
        num_variables, UntaggedLocalVarDescriptors::kMaxIndex);
  }
  auto raw = Object::Allocate<LocalVarDescriptors>(Heap::kOld, num_variables);
  NoSafepointScope no_safepoint;
  raw->untag()->num_entries_ = num_variables;
  return raw;
}

intptr_t LocalVarDescriptors::Length() const {
  return untag()->num_entries_;
}

intptr_t ExceptionHandlers::num_entries() const {
  return untag()->num_entries();
}

bool ExceptionHandlers::has_async_handler() const {
  return UntaggedExceptionHandlers::AsyncHandlerBit::decode(
      untag()->packed_fields_);
}

void ExceptionHandlers::set_has_async_handler(bool value) const {
  StoreNonPointer(&untag()->packed_fields_,
                  UntaggedExceptionHandlers::AsyncHandlerBit::update(
                      value, untag()->packed_fields_));
}

void ExceptionHandlers::SetHandlerInfo(intptr_t try_index,
                                       intptr_t outer_try_index,
                                       uword handler_pc_offset,
                                       bool needs_stacktrace,
                                       bool has_catch_all,
                                       bool is_generated) const {
  ASSERT((try_index >= 0) && (try_index < num_entries()));
  NoSafepointScope no_safepoint;
  ExceptionHandlerInfo* info =
      UnsafeMutableNonPointer(&untag()->data()[try_index]);
  info->outer_try_index = outer_try_index;
  // Some C compilers warn about the comparison always being true when using <=
  // due to limited range of data type.
  ASSERT((handler_pc_offset == static_cast<uword>(kMaxUint32)) ||
         (handler_pc_offset < static_cast<uword>(kMaxUint32)));
  info->handler_pc_offset = handler_pc_offset;
  info->needs_stacktrace = static_cast<int8_t>(needs_stacktrace);
  info->has_catch_all = static_cast<int8_t>(has_catch_all);
  info->is_generated = static_cast<int8_t>(is_generated);
}

void ExceptionHandlers::GetHandlerInfo(intptr_t try_index,
                                       ExceptionHandlerInfo* info) const {
  ASSERT((try_index >= 0) && (try_index < num_entries()));
  ASSERT(info != nullptr);
  *info = untag()->data()[try_index];
}

uword ExceptionHandlers::HandlerPCOffset(intptr_t try_index) const {
  ASSERT((try_index >= 0) && (try_index < num_entries()));
  return untag()->data()[try_index].handler_pc_offset;
}

intptr_t ExceptionHandlers::OuterTryIndex(intptr_t try_index) const {
  ASSERT((try_index >= 0) && (try_index < num_entries()));
  return untag()->data()[try_index].outer_try_index;
}

bool ExceptionHandlers::NeedsStackTrace(intptr_t try_index) const {
  ASSERT((try_index >= 0) && (try_index < num_entries()));
  return untag()->data()[try_index].needs_stacktrace != 0;
}

bool ExceptionHandlers::IsGenerated(intptr_t try_index) const {
  ASSERT((try_index >= 0) && (try_index < num_entries()));
  return untag()->data()[try_index].is_generated != 0;
}

bool ExceptionHandlers::HasCatchAll(intptr_t try_index) const {
  ASSERT((try_index >= 0) && (try_index < num_entries()));
  return untag()->data()[try_index].has_catch_all != 0;
}

void ExceptionHandlers::SetHandledTypes(intptr_t try_index,
                                        const Array& handled_types) const {
  ASSERT((try_index >= 0) && (try_index < num_entries()));
  ASSERT(!handled_types.IsNull());
  const Array& handled_types_data =
      Array::Handle(untag()->handled_types_data());
  handled_types_data.SetAt(try_index, handled_types);
}

ArrayPtr ExceptionHandlers::GetHandledTypes(intptr_t try_index) const {
  ASSERT((try_index >= 0) && (try_index < num_entries()));
  Array& array = Array::Handle(untag()->handled_types_data());
  array ^= array.At(try_index);
  return array.ptr();
}

void ExceptionHandlers::set_handled_types_data(const Array& value) const {
  untag()->set_handled_types_data(value.ptr());
}

ExceptionHandlersPtr ExceptionHandlers::New(intptr_t num_handlers) {
  ASSERT(Object::exception_handlers_class() != Class::null());
  if ((num_handlers < 0) || (num_handlers >= kMaxHandlers)) {
    FATAL(
        "Fatal error in ExceptionHandlers::New(): "
        "invalid num_handlers %" Pd "\n",
        num_handlers);
  }
  const Array& handled_types_data =
      (num_handlers == 0) ? Object::empty_array()
                          : Array::Handle(Array::New(num_handlers, Heap::kOld));
  return ExceptionHandlers::New(handled_types_data);
}

ExceptionHandlersPtr ExceptionHandlers::New(const Array& handled_types_data) {
  ASSERT(Object::exception_handlers_class() != Class::null());
  const intptr_t num_handlers = handled_types_data.Length();
  if ((num_handlers < 0) || (num_handlers >= kMaxHandlers)) {
    FATAL(
        "Fatal error in ExceptionHandlers::New(): "
        "invalid num_handlers %" Pd "\n",
        num_handlers);
  }
  ExceptionHandlers& result = ExceptionHandlers::Handle();
  {
    auto raw = Object::Allocate<ExceptionHandlers>(Heap::kOld, num_handlers);
    NoSafepointScope no_safepoint;
    result = raw;
    result.untag()->packed_fields_ =
        UntaggedExceptionHandlers::NumEntriesBits::encode(num_handlers);
  }
  result.set_handled_types_data(handled_types_data);
  return result.ptr();
}

const char* ExceptionHandlers::ToCString() const {
#define FORMAT1 "%" Pd " => %#x  (%" Pd " types) (outer %d)%s%s\n"
#define FORMAT2 "  %d. %s\n"
#define FORMAT3 "<async handler>\n"
  if (num_entries() == 0) {
    return has_async_handler()
               ? "empty ExceptionHandlers (with <async handler>)\n"
               : "empty ExceptionHandlers\n";
  }
  auto& handled_types = Array::Handle();
  auto& type = AbstractType::Handle();
  ExceptionHandlerInfo info;
  // First compute the buffer size required.
  intptr_t len = 1;  // Trailing '\0'.
  for (intptr_t i = 0; i < num_entries(); i++) {
    GetHandlerInfo(i, &info);
    handled_types = GetHandledTypes(i);
    const intptr_t num_types =
        handled_types.IsNull() ? 0 : handled_types.Length();
    len += Utils::SNPrint(
        nullptr, 0, FORMAT1, i, info.handler_pc_offset, num_types,
        info.outer_try_index,
        ((info.needs_stacktrace != 0) ? " (needs stack trace)" : ""),
        ((info.is_generated != 0) ? " (generated)" : ""));
    for (int k = 0; k < num_types; k++) {
      type ^= handled_types.At(k);
      ASSERT(!type.IsNull());
      len += Utils::SNPrint(nullptr, 0, FORMAT2, k, type.ToCString());
    }
  }
  if (has_async_handler()) {
    len += Utils::SNPrint(nullptr, 0, FORMAT3);
  }
  // Allocate the buffer.
  char* buffer = Thread::Current()->zone()->Alloc<char>(len);
  // Layout the fields in the buffer.
  intptr_t num_chars = 0;
  for (intptr_t i = 0; i < num_entries(); i++) {
    GetHandlerInfo(i, &info);
    handled_types = GetHandledTypes(i);
    const intptr_t num_types =
        handled_types.IsNull() ? 0 : handled_types.Length();
    num_chars += Utils::SNPrint(
        (buffer + num_chars), (len - num_chars), FORMAT1, i,
        info.handler_pc_offset, num_types, info.outer_try_index,
        ((info.needs_stacktrace != 0) ? " (needs stack trace)" : ""),
        ((info.is_generated != 0) ? " (generated)" : ""));
    for (int k = 0; k < num_types; k++) {
      type ^= handled_types.At(k);
      num_chars += Utils::SNPrint((buffer + num_chars), (len - num_chars),
                                  FORMAT2, k, type.ToCString());
    }
  }
  if (has_async_handler()) {
    num_chars +=
        Utils::SNPrint((buffer + num_chars), (len - num_chars), FORMAT3);
  }
  return buffer;
#undef FORMAT1
#undef FORMAT2
#undef FORMAT3
}

void SingleTargetCache::set_target(const Code& value) const {
  untag()->set_target(value.ptr());
}

const char* SingleTargetCache::ToCString() const {
  return "SingleTargetCache";
}

SingleTargetCachePtr SingleTargetCache::New() {
  return Object::Allocate<SingleTargetCache>(Heap::kOld);
}

void UnlinkedCall::set_can_patch_to_monomorphic(bool value) const {
  StoreNonPointer(&untag()->can_patch_to_monomorphic_, value);
}

uword UnlinkedCall::Hash() const {
  return String::Handle(target_name()).Hash();
}

bool UnlinkedCall::Equals(const UnlinkedCall& other) const {
  return (target_name() == other.target_name()) &&
         (arguments_descriptor() == other.arguments_descriptor()) &&
         (can_patch_to_monomorphic() == other.can_patch_to_monomorphic());
}

const char* UnlinkedCall::ToCString() const {
  return "UnlinkedCall";
}

UnlinkedCallPtr UnlinkedCall::New() {
  const auto& result =
      UnlinkedCall::Handle(Object::Allocate<UnlinkedCall>(Heap::kOld));
  result.set_can_patch_to_monomorphic(!FLAG_precompiled_mode);
  return result.ptr();
}

MonomorphicSmiableCallPtr MonomorphicSmiableCall::New(classid_t expected_cid,
                                                      const Code& target) {
  const auto& result = MonomorphicSmiableCall::Handle(
      Object::Allocate<MonomorphicSmiableCall>(Heap::kOld));
  result.StoreNonPointer(&result.untag()->expected_cid_, expected_cid);
  result.StoreNonPointer(&result.untag()->entrypoint_, target.EntryPoint());
  return result.ptr();
}

const char* MonomorphicSmiableCall::ToCString() const {
  return "MonomorphicSmiableCall";
}

const char* CallSiteData::ToCString() const {
  // CallSiteData is an abstract class. We should never reach here.
  UNREACHABLE();
  return "CallSiteData";
}

void CallSiteData::set_target_name(const String& value) const {
  ASSERT(!value.IsNull());
  ASSERT(value.IsCanonical());
  untag()->set_target_name(value.ptr());
}

void CallSiteData::set_arguments_descriptor(const Array& value) const {
  ASSERT(!value.IsNull());
  untag()->set_args_descriptor(value.ptr());
}

#if !defined(DART_PRECOMPILED_RUNTIME)
void ICData::SetReceiversStaticType(const AbstractType& type) const {
  untag()->set_receivers_static_type(type.ptr());

#if defined(TARGET_ARCH_X64)
  if (!type.IsNull() && type.HasTypeClass() && (NumArgsTested() == 1) &&
      type.IsInstantiated() && !type.IsFutureOrType()) {
    const Class& cls = Class::Handle(type.type_class());
    if (cls.IsGeneric()) {
      set_tracking_exactness(true);
    }
  }
#endif  // defined(TARGET_ARCH_X64)
}
#endif  // !defined(DART_PRECOMPILED_RUNTIME)

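// Note on SetTargetAtPos below: in JIT mode the entry's target slot holds the
// target Function directly, while in AOT mode the entry records the target's
// current Code object alongside the function (see CodeIndexFor and
// EntryPointIndexFor), presumably so AOT call sites can dispatch without a
// separate code lookup.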
void ICData::SetTargetAtPos(const Array& data,
                            intptr_t data_pos,
                            intptr_t num_args_tested,
                            const Function& target) {
#if !defined(DART_PRECOMPILED_RUNTIME)
  // JIT
  data.SetAt(data_pos + TargetIndexFor(num_args_tested), target);
#else
  // AOT
  ASSERT(target.HasCode());
  const Code& code = Code::Handle(target.CurrentCode());
  data.SetAt(data_pos + CodeIndexFor(num_args_tested), code);
  data.SetAt(data_pos + EntryPointIndexFor(num_args_tested), target);
#endif
}

uword ICData::Hash() const {
  return String::HashRawSymbol(target_name()) ^ deopt_id();
}

const char* ICData::ToCString() const {
  Zone* zone = Thread::Current()->zone();
  const String& name = String::Handle(zone, target_name());
  return zone->PrintToString("ICData(%s num-args: %" Pd " num-checks: %" Pd
                             " type-args-len: %" Pd ", deopt-id: %" Pd ")",
                             name.ToCString(), NumArgsTested(),
                             NumberOfChecks(), TypeArgsLen(), deopt_id());
}

FunctionPtr ICData::Owner() const {
  Object& obj = Object::Handle(untag()->owner());
  if (obj.IsNull()) {
    ASSERT(Dart::vm_snapshot_kind() == Snapshot::kFullAOT);
    return Function::null();
  } else if (obj.IsFunction()) {
    return Function::Cast(obj).ptr();
  } else {
    ICData& original = ICData::Handle();
    original ^= obj.ptr();
    return original.Owner();
  }
}

ICDataPtr ICData::Original() const {
  if (IsNull()) {
    return ICData::null();
  }
  if (untag()->owner()->IsICData()) {
    return static_cast<ICDataPtr>(untag()->owner());
  }
  return this->ptr();
}

void ICData::SetOriginal(const ICData& value) const {
  ASSERT(value.IsOriginal());
  ASSERT(!value.IsNull());
  untag()->set_owner(static_cast<ObjectPtr>(value.ptr()));
}

void ICData::set_owner(const Function& value) const {
  untag()->set_owner(static_cast<ObjectPtr>(value.ptr()));
}

void ICData::set_deopt_id(intptr_t value) const {
#if defined(DART_PRECOMPILED_RUNTIME)
  UNREACHABLE();
#else
  ASSERT(value <= kMaxInt32);
  StoreNonPointer(&untag()->deopt_id_, value);
#endif
}

void ICData::set_entries(const Array& value) const {
  ASSERT(!value.IsNull());
  untag()->set_entries<std::memory_order_release>(value.ptr());
}

intptr_t ICData::NumArgsTested() const {
  return untag()->state_bits_.Read<NumArgsTestedBits>();
}

void ICData::SetNumArgsTested(intptr_t value) const {
  ASSERT(Utils::IsUint(2, value));
  untag()->state_bits_.Update<NumArgsTestedBits>(value);
}

intptr_t CallSiteData::TypeArgsLen() const {
  ArgumentsDescriptor args_desc(Array::Handle(arguments_descriptor()));
  return args_desc.TypeArgsLen();
}

intptr_t CallSiteData::CountWithTypeArgs() const {
  ArgumentsDescriptor args_desc(Array::Handle(arguments_descriptor()));
  return args_desc.CountWithTypeArgs();
}

intptr_t CallSiteData::CountWithoutTypeArgs() const {
  ArgumentsDescriptor args_desc(Array::Handle(arguments_descriptor()));
  return args_desc.Count();
}

intptr_t CallSiteData::SizeWithoutTypeArgs() const {
  ArgumentsDescriptor args_desc(Array::Handle(arguments_descriptor()));
  return args_desc.Size();
}

intptr_t CallSiteData::SizeWithTypeArgs() const {
  ArgumentsDescriptor args_desc(Array::Handle(arguments_descriptor()));
  return args_desc.SizeWithTypeArgs();
}

uint32_t ICData::DeoptReasons() const {
  return untag()->state_bits_.Read<DeoptReasonBits>();
}

void ICData::SetDeoptReasons(uint32_t reasons) const {
  untag()->state_bits_.Update<DeoptReasonBits>(reasons);
}

bool ICData::HasDeoptReason(DeoptReasonId reason) const {
  ASSERT(reason <= kLastRecordedDeoptReason);
  return (DeoptReasons() & (1 << reason)) != 0;
}

void ICData::AddDeoptReason(DeoptReasonId reason) const {
  if (reason <= kLastRecordedDeoptReason) {
    untag()->state_bits_.FetchOr<DeoptReasonBits>(1 << reason);
  }
}

const char* ICData::RebindRuleToCString(RebindRule r) {
  switch (r) {
#define RULE_CASE(Name)                                                       \
  case RebindRule::k##Name:                                                   \
    return #Name;
    FOR_EACH_REBIND_RULE(RULE_CASE)
#undef RULE_CASE
    default:
      return nullptr;
  }
}

bool ICData::ParseRebindRule(const char* str, RebindRule* out) {
#define RULE_CASE(Name)                                                       \
  if (strcmp(str, #Name) == 0) {                                              \
    *out = RebindRule::k##Name;                                               \
    return true;                                                              \
  }
  FOR_EACH_REBIND_RULE(RULE_CASE)
#undef RULE_CASE
  return false;
}

ICData::RebindRule ICData::rebind_rule() const {
  return RebindRule(untag()->state_bits_.Read<RebindRuleBits>());
}

void ICData::set_rebind_rule(uint32_t rebind_rule) const {
  untag()->state_bits_.Update<ICData::RebindRuleBits>(rebind_rule);
}

bool ICData::is_static_call() const {
  return rebind_rule() != kInstance;
}

void ICData::clear_state_bits() const {
  untag()->state_bits_ = 0;
}

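// Entry layout illustration (derived from TestEntryLengthFor below): with one
// tested argument and no exactness tracking, each check occupies three slots,
// [receiver cid, target function, count]. Enabling exactness tracking appends
// one extra slot for the exactness state.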
intptr_t ICData::TestEntryLengthFor(intptr_t num_args,
                                    bool tracking_exactness) {
  return num_args + 1 /* target function*/ + 1 /* frequency */ +
         (tracking_exactness ? 1 : 0) /* exactness state */;
}

intptr_t ICData::TestEntryLength() const {
  return TestEntryLengthFor(NumArgsTested(), is_tracking_exactness());
}

intptr_t ICData::Length() const {
  return (Smi::Value(entries()->untag()->length()) / TestEntryLength());
}

intptr_t ICData::NumberOfChecks() const {
  DEBUG_ONLY(AssertInvariantsAreSatisfied());
  return Length() - 1;
}

bool ICData::NumberOfChecksIs(intptr_t n) const {
  DEBUG_ONLY(AssertInvariantsAreSatisfied());
  return NumberOfChecks() == n;
}

#if defined(DEBUG)
void ICData::AssertInvariantsAreSatisfied() const {
  // See layout and invariant of [ICData] in class comment in object.h.
  //
  // This method can be called without holding any locks: it grabs a
  // snapshot of `entries()` and runs its verification logic on that.
  auto zone = Thread::Current()->zone();
  const auto& array = Array::Handle(zone, entries());

  const intptr_t entry_length = TestEntryLength();
  const intptr_t num_checks = array.Length() / entry_length - 1;
  const intptr_t num_args = NumArgsTested();

  /// Backing store must be multiple of entry length.
  ASSERT((array.Length() % entry_length) == 0);

  /// Entries must be valid.
  for (intptr_t i = 0; i < num_checks; ++i) {
    // Should be valid entry.
    const intptr_t start = entry_length * i;
    for (intptr_t i = 0; i < num_args; ++i) {
      ASSERT(!array.At(start + i)->IsHeapObject());
      ASSERT(array.At(start + i) != smi_illegal_cid().ptr());
    }
    ASSERT(array.At(start + TargetIndexFor(num_args))->IsHeapObject());
    if (is_tracking_exactness()) {
      ASSERT(!array.At(start + ExactnessIndexFor(num_args))->IsHeapObject());
    }
  }

  /// Sentinel at end must be valid.
  const intptr_t sentinel_start = num_checks * entry_length;
  for (intptr_t i = 0; i < entry_length - 1; ++i) {
    ASSERT(array.At(sentinel_start + i) == smi_illegal_cid().ptr());
  }
  if (num_checks == 0) {
    ASSERT(array.At(sentinel_start + entry_length - 1) ==
           smi_illegal_cid().ptr());
    ASSERT(ICData::CachedEmptyICDataArray(num_args, is_tracking_exactness()) ==
           array.ptr());
  } else {
    ASSERT(array.At(sentinel_start + entry_length - 1) == ptr());
  }

  // Invariants for ICData of static calls.
  if (num_args == 0) {
    ASSERT(Length() == 2);
    ASSERT(TestEntryLength() == 2);
  }
}
#endif  // defined(DEBUG)

// Discounts any checks with usage of zero.
intptr_t ICData::NumberOfUsedChecks() const {
  const intptr_t n = NumberOfChecks();
  intptr_t count = 0;
  for (intptr_t i = 0; i < n; i++) {
    if (GetCountAt(i) > 0) {
      count++;
    }
  }
  return count;
}

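// Note on WriteSentinel below: the final entry of the backing array is filled
// with kIllegalCid smis in every slot except the last, which stores the given
// back_ref (the owning ICData at the call sites in this file), so a reader
// scanning entries can detect the end and, via the back reference, the owner.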
void ICData::WriteSentinel(const Array& data,
                           intptr_t test_entry_length,
                           const Object& back_ref) {
  ASSERT(!data.IsNull());
  RELEASE_ASSERT(smi_illegal_cid().Value() == kIllegalCid);
  const intptr_t entry_start = data.Length() - test_entry_length;
  for (intptr_t i = 0; i < test_entry_length - 1; i++) {
    data.SetAt(entry_start + i, smi_illegal_cid());
  }
  data.SetAt(entry_start + test_entry_length - 1, back_ref);
}

#if defined(DEBUG)
// Used in asserts to verify that a check is not added twice.
bool ICData::HasCheck(const GrowableArray<intptr_t>& cids) const {
  return FindCheck(cids) != -1;
}
#endif  // DEBUG

intptr_t ICData::FindCheck(const GrowableArray<intptr_t>& cids) const {
  const intptr_t len = NumberOfChecks();
  GrowableArray<intptr_t> class_ids;
  for (intptr_t i = 0; i < len; i++) {
    GetClassIdsAt(i, &class_ids);
    bool matches = true;
    for (intptr_t k = 0; k < class_ids.length(); k++) {
      ASSERT(class_ids[k] != kIllegalCid);
      if (class_ids[k] != cids[k]) {
        matches = false;
        break;
      }
    }
    if (matches) {
      return i;
    }
  }
  return -1;
}

void ICData::TruncateTo(intptr_t num_checks,
                        const CallSiteResetter& proof_of_reload) const {
  USE(proof_of_reload);  // This method can only be called during reload.

  DEBUG_ONLY(AssertInvariantsAreSatisfied());
  ASSERT(num_checks <= NumberOfChecks());

  // Nothing to do.
  if (NumberOfChecks() == num_checks) return;

  auto thread = Thread::Current();
  REUSABLE_ARRAY_HANDLESCOPE(thread);
  auto& array = thread->ArrayHandle();

  // If we make the ICData empty, use the pre-allocated shared backing stores.
  const intptr_t num_args = NumArgsTested();
  if (num_checks == 0) {
    array = ICData::CachedEmptyICDataArray(num_args, is_tracking_exactness());
    set_entries(array);
    return;
  }

  // Otherwise truncate array and initialize sentinel.
  // Use kSmiCid for all slots in the entry except the last, which is a backref
  // to ICData.
  const intptr_t entry_length = TestEntryLength();
  array = entries();
  array.Truncate((num_checks + 1) * entry_length);
  WriteSentinel(array, entry_length, *this);
}

void ICData::ClearCountAt(intptr_t index,
                          const CallSiteResetter& proof_of_reload) const {
  USE(proof_of_reload);  // This method can only be called during reload.

  ASSERT(index >= 0);
  ASSERT(index < NumberOfChecks());
  SetCountAt(index, 0);
}

void ICData::ClearAndSetStaticTarget(
    const Function& func,
    const CallSiteResetter& proof_of_reload) const {
  USE(proof_of_reload);  // This method can only be called during reload.

  // The final entry is always the sentinel.
  DEBUG_ONLY(AssertInvariantsAreSatisfied());

  if (IsImmutable()) return;
  if (NumberOfChecks() == 0) return;

  // Leave one entry.
  TruncateTo(/*num_checks=*/1, proof_of_reload);

  // Reinitialize the one and only entry.
  const intptr_t num_args = NumArgsTested();
  Thread* thread = Thread::Current();
  REUSABLE_ARRAY_HANDLESCOPE(thread);
  Array& data = thread->ArrayHandle();
  data = entries();
  const Smi& object_cid = Smi::Handle(Smi::New(kObjectCid));
  for (intptr_t i = 0; i < num_args; i++) {
    data.SetAt(i, object_cid);
  }
  data.SetAt(TargetIndexFor(num_args), func);
  data.SetAt(CountIndexFor(num_args), Object::smi_zero());
}

bool ICData::ValidateInterceptor(const Function& target) const {
#if !defined(DART_PRECOMPILED_RUNTIME)
  const String& name = String::Handle(target_name());
  if (Function::IsDynamicInvocationForwarderName(name)) {
    return Function::DemangleDynamicInvocationForwarderName(name) ==
           target.name();
  }
#endif
  ObjectStore* store = IsolateGroup::Current()->object_store();
  ASSERT((target.ptr() == store->simple_instance_of_true_function()) ||
         (target.ptr() == store->simple_instance_of_false_function()));
  const String& instance_of_name = String::Handle(
      Library::PrivateCoreLibName(Symbols::_simpleInstanceOf()).ptr());
  ASSERT(target_name() == instance_of_name.ptr());
  return true;
}

16979void ICData::EnsureHasCheck(const GrowableArray<intptr_t>& class_ids,
16980 const Function& target,
16981 intptr_t count) const {
16982 SafepointMutexLocker ml(IsolateGroup::Current()->type_feedback_mutex());
16983
16984 if (FindCheck(cids: class_ids) != -1) return;
16985 AddCheckInternal(class_ids, target, count);
16986}
16987
16988void ICData::AddCheck(const GrowableArray<intptr_t>& class_ids,
16989 const Function& target,
16990 intptr_t count) const {
16991 SafepointMutexLocker ml(IsolateGroup::Current()->type_feedback_mutex());
16992 AddCheckInternal(class_ids, target, count);
16993}
16994
16995void ICData::AddCheckInternal(const GrowableArray<intptr_t>& class_ids,
16996 const Function& target,
16997 intptr_t count) const {
16998 ASSERT(
16999 IsolateGroup::Current()->type_feedback_mutex()->IsOwnedByCurrentThread());
17000
17001 ASSERT(!is_tracking_exactness());
17002 ASSERT(!target.IsNull());
17003 ASSERT((target.name() == target_name()) || ValidateInterceptor(target));
17004 DEBUG_ASSERT(!HasCheck(class_ids));
17005 ASSERT(NumArgsTested() > 1); // Otherwise use 'AddReceiverCheck'.
17006 const intptr_t num_args_tested = NumArgsTested();
17007 ASSERT(class_ids.length() == num_args_tested);
17008 const intptr_t old_num = NumberOfChecks();
17009 Array& data = Array::Handle(ptr: entries());
17010
17011 // ICData of static calls with NumArgsTested() > 0 have initially a
17012 // dummy set of cids entered (see ICData::NewForStaticCall). That entry is
17013 // overwritten by first real type feedback data.
17014 if (old_num == 1 && num_args_tested == 2) {
17015 const bool has_dummy_entry =
17016 Smi::Value(raw_smi: Smi::RawCast(raw: data.At(index: 0))) == kObjectCid &&
17017 Smi::Value(raw_smi: Smi::RawCast(raw: data.At(index: 1))) == kObjectCid;
17018 if (has_dummy_entry) {
17019 ASSERT(target.ptr() == data.At(TargetIndexFor(num_args_tested)));
17020 // Replace dummy entry.
17021 Smi& value = Smi::Handle();
17022 for (intptr_t i = 0; i < NumArgsTested(); i++) {
17023 ASSERT(class_ids[i] != kIllegalCid);
17024 value = Smi::New(value: class_ids[i]);
17025 data.SetAt(index: i, value);
17026 }
17027 return;
17028 }
17029 }
17030 intptr_t index = -1;
17031 data = Grow(index: &index);
17032 ASSERT(!data.IsNull());
17033 intptr_t data_pos = index * TestEntryLength();
17034 Smi& value = Smi::Handle();
17035 for (intptr_t i = 0; i < class_ids.length(); i++) {
17036 // kIllegalCid is used as terminating value, do not add it.
17037 ASSERT(class_ids[i] != kIllegalCid);
17038 value = Smi::New(value: class_ids[i]);
17039 data.SetAt(index: data_pos + i, value);
17040 }
17041 ASSERT(!target.IsNull());
17042 data.SetAt(index: data_pos + TargetIndexFor(num_args: num_args_tested), value: target);
17043 value = Smi::New(value: count);
17044 data.SetAt(index: data_pos + CountIndexFor(num_args: num_args_tested), value);
17045 // Multithreaded access to ICData requires setting of array to be the last
17046 // operation.
17047 set_entries(data);
17048}
17049
17050ArrayPtr ICData::Grow(intptr_t* index) const {
17051 DEBUG_ONLY(AssertInvariantsAreSatisfied());
17052
17053 *index = NumberOfChecks();
17054 Array& data = Array::Handle(ptr: entries());
17055 const intptr_t new_len = data.Length() + TestEntryLength();
17056 data = Array::Grow(source: data, new_length: new_len, space: Heap::kOld);
17057 WriteSentinel(data, test_entry_length: TestEntryLength(), back_ref: *this);
17058 return data.ptr();
17059}
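
// Growth arithmetic sketch (illustrative only): a 1-arg ICData with one
// existing check has Length() == 2 * TestEntryLength() (one entry plus the
// sentinel). Grow() appends TestEntryLength() slots, reports the insertion
// point *index == NumberOfChecks() == 1 (the slots the old sentinel
// occupied), and WriteSentinel() re-creates the sentinel in the freshly
// added tail.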

void ICData::DebugDump() const {
  const Function& owner = Function::Handle(Owner());
  THR_Print("ICData::DebugDump\n");
  THR_Print("Owner = %s [deopt=%" Pd "]\n", owner.ToCString(), deopt_id());
  THR_Print("NumArgsTested = %" Pd "\n", NumArgsTested());
  THR_Print("Length = %" Pd "\n", Length());
  THR_Print("NumberOfChecks = %" Pd "\n", NumberOfChecks());

  GrowableArray<intptr_t> class_ids;
  for (intptr_t i = 0; i < NumberOfChecks(); i++) {
    THR_Print("Check[%" Pd "]:", i);
    GetClassIdsAt(i, &class_ids);
    for (intptr_t c = 0; c < class_ids.length(); c++) {
      THR_Print(" %" Pd "", class_ids[c]);
    }
    THR_Print("--- %" Pd " hits\n", GetCountAt(i));
  }
}

void ICData::EnsureHasReceiverCheck(intptr_t receiver_class_id,
                                    const Function& target,
                                    intptr_t count,
                                    StaticTypeExactnessState exactness) const {
  SafepointMutexLocker ml(IsolateGroup::Current()->type_feedback_mutex());

  GrowableArray<intptr_t> class_ids(1);
  class_ids.Add(receiver_class_id);
  if (FindCheck(class_ids) != -1) return;

  AddReceiverCheckInternal(receiver_class_id, target, count, exactness);
}

void ICData::AddReceiverCheck(intptr_t receiver_class_id,
                              const Function& target,
                              intptr_t count,
                              StaticTypeExactnessState exactness) const {
  SafepointMutexLocker ml(IsolateGroup::Current()->type_feedback_mutex());
  AddReceiverCheckInternal(receiver_class_id, target, count, exactness);
}

void ICData::AddReceiverCheckInternal(
    intptr_t receiver_class_id,
    const Function& target,
    intptr_t count,
    StaticTypeExactnessState exactness) const {
#if defined(DEBUG)
  GrowableArray<intptr_t> class_ids(1);
  class_ids.Add(receiver_class_id);
  ASSERT(!HasCheck(class_ids));
#endif  // DEBUG
  ASSERT(!target.IsNull());
  const intptr_t kNumArgsTested = 1;
  ASSERT(NumArgsTested() == kNumArgsTested);  // Otherwise use 'AddCheck'.
  ASSERT(receiver_class_id != kIllegalCid);

  intptr_t index = -1;
  Array& data = Array::Handle(Grow(&index));
  intptr_t data_pos = index * TestEntryLength();
  if ((receiver_class_id == kSmiCid) && (data_pos > 0)) {
    ASSERT(GetReceiverClassIdAt(0) != kSmiCid);
    // Move the class occupying position 0 to data_pos.
    for (intptr_t i = 0; i < TestEntryLength(); i++) {
      data.SetAt(data_pos + i, Object::Handle(data.At(i)));
    }
    // Insert kSmiCid in position 0.
    data_pos = 0;
  }
  data.SetAt(data_pos, Smi::Handle(Smi::New(receiver_class_id)));
  SetTargetAtPos(data, data_pos, kNumArgsTested, target);

#if !defined(DART_PRECOMPILED_RUNTIME)
  data.SetAt(data_pos + CountIndexFor(kNumArgsTested),
             Smi::Handle(Smi::New(count)));
  if (is_tracking_exactness()) {
    data.SetAt(data_pos + ExactnessIndexFor(kNumArgsTested),
               Smi::Handle(Smi::New(exactness.Encode())));
  }
#endif

  // Multithreaded access to ICData requires setting of array to be the last
  // operation.
  set_entries(data);
}
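
// Note on the kSmiCid special case above (an observation about this code, not
// an extra guarantee): inline caches scan entries in order, so keeping the
// Smi check in slot 0 tests the most common receiver first. E.g. adding
// receiver checks in the order kDoubleCid, kSmiCid still yields entries
// ordered [kSmiCid, kDoubleCid], because the existing entry 0 is moved to the
// new slot and the kSmiCid entry is written at position 0.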

StaticTypeExactnessState ICData::GetExactnessAt(intptr_t index) const {
  if (!is_tracking_exactness()) {
    return StaticTypeExactnessState::NotTracking();
  }
  Thread* thread = Thread::Current();
  REUSABLE_ARRAY_HANDLESCOPE(thread);
  Array& data = thread->ArrayHandle();
  data = entries();
  intptr_t data_pos =
      index * TestEntryLength() + ExactnessIndexFor(NumArgsTested());
  return StaticTypeExactnessState::Decode(
      Smi::Value(Smi::RawCast(data.At(data_pos))));
}

void ICData::GetCheckAt(intptr_t index,
                        GrowableArray<intptr_t>* class_ids,
                        Function* target) const {
  ASSERT(index < NumberOfChecks());
  ASSERT(class_ids != nullptr);
  ASSERT(target != nullptr);
  class_ids->Clear();
  Thread* thread = Thread::Current();
  REUSABLE_ARRAY_HANDLESCOPE(thread);
  Array& data = thread->ArrayHandle();
  data = entries();
  intptr_t data_pos = index * TestEntryLength();
  for (intptr_t i = 0; i < NumArgsTested(); i++) {
    class_ids->Add(Smi::Value(Smi::RawCast(data.At(data_pos + i))));
  }
  (*target) ^= data.At(data_pos + TargetIndexFor(NumArgsTested()));
}

void ICData::GetClassIdsAt(intptr_t index,
                           GrowableArray<intptr_t>* class_ids) const {
  ASSERT(index < Length());
  ASSERT(class_ids != nullptr);
  ASSERT(IsValidEntryIndex(index));
  class_ids->Clear();
  Thread* thread = Thread::Current();
  REUSABLE_ARRAY_HANDLESCOPE(thread);
  Array& data = thread->ArrayHandle();
  data = entries();
  intptr_t data_pos = index * TestEntryLength();
  for (intptr_t i = 0; i < NumArgsTested(); i++) {
    class_ids->Add(Smi::Value(Smi::RawCast(data.At(data_pos++))));
  }
}

void ICData::GetOneClassCheckAt(intptr_t index,
                                intptr_t* class_id,
                                Function* target) const {
  ASSERT(class_id != nullptr);
  ASSERT(target != nullptr);
  ASSERT(NumArgsTested() == 1);
  Thread* thread = Thread::Current();
  REUSABLE_ARRAY_HANDLESCOPE(thread);
  Array& data = thread->ArrayHandle();
  data = entries();
  const intptr_t data_pos = index * TestEntryLength();
  *class_id = Smi::Value(Smi::RawCast(data.At(data_pos)));
  *target ^= data.At(data_pos + TargetIndexFor(NumArgsTested()));
}

intptr_t ICData::GetCidAt(intptr_t index) const {
  ASSERT(NumArgsTested() == 1);
  Thread* thread = Thread::Current();
  REUSABLE_ARRAY_HANDLESCOPE(thread);
  Array& data = thread->ArrayHandle();
  data = entries();
  const intptr_t data_pos = index * TestEntryLength();
  return Smi::Value(Smi::RawCast(data.At(data_pos)));
}

intptr_t ICData::GetClassIdAt(intptr_t index, intptr_t arg_nr) const {
  GrowableArray<intptr_t> class_ids;
  GetClassIdsAt(index, &class_ids);
  return class_ids[arg_nr];
}

intptr_t ICData::GetReceiverClassIdAt(intptr_t index) const {
  ASSERT(index < Length());
  ASSERT(IsValidEntryIndex(index));
  const intptr_t data_pos = index * TestEntryLength();
  NoSafepointScope no_safepoint;
  ArrayPtr raw_data = entries();
  return Smi::Value(Smi::RawCast(raw_data->untag()->element(data_pos)));
}

FunctionPtr ICData::GetTargetAt(intptr_t index) const {
#if defined(DART_PRECOMPILED_RUNTIME)
  UNREACHABLE();
  return nullptr;
#else
  const intptr_t data_pos =
      index * TestEntryLength() + TargetIndexFor(NumArgsTested());
  ASSERT(Object::Handle(Array::Handle(entries()).At(data_pos)).IsFunction());

  NoSafepointScope no_safepoint;
  ArrayPtr raw_data = entries();
  return static_cast<FunctionPtr>(raw_data->untag()->element(data_pos));
#endif
}

void ICData::IncrementCountAt(intptr_t index, intptr_t value) const {
  ASSERT(0 <= value);
  ASSERT(value <= Smi::kMaxValue);
  SetCountAt(index, Utils::Minimum(GetCountAt(index) + value, Smi::kMaxValue));
}

void ICData::SetCountAt(intptr_t index, intptr_t value) const {
  ASSERT(0 <= value);
  ASSERT(value <= Smi::kMaxValue);

  Thread* thread = Thread::Current();
  REUSABLE_ARRAY_HANDLESCOPE(thread);
  Array& data = thread->ArrayHandle();
  data = entries();
  const intptr_t data_pos =
      index * TestEntryLength() + CountIndexFor(NumArgsTested());
  data.SetAt(data_pos, Smi::Handle(Smi::New(value)));
}

intptr_t ICData::GetCountAt(intptr_t index) const {
#if defined(DART_PRECOMPILED_RUNTIME)
  UNREACHABLE();
  return 0;
#else
  Thread* thread = Thread::Current();
  REUSABLE_ARRAY_HANDLESCOPE(thread);
  Array& data = thread->ArrayHandle();
  data = entries();
  const intptr_t data_pos =
      index * TestEntryLength() + CountIndexFor(NumArgsTested());
  intptr_t value = Smi::Value(Smi::RawCast(data.At(data_pos)));
  if (value >= 0) return value;

  // The counter very rarely overflows to a negative value, but if it does, we
  // would rather just reset it to zero.
  SetCountAt(index, 0);
  return 0;
#endif
}

intptr_t ICData::AggregateCount() const {
  if (IsNull()) return 0;
  const intptr_t len = NumberOfChecks();
  intptr_t count = 0;
  for (intptr_t i = 0; i < len; i++) {
    count += GetCountAt(i);
  }
  return count;
}

#if !defined(DART_PRECOMPILED_RUNTIME)
ICDataPtr ICData::AsUnaryClassChecksForArgNr(intptr_t arg_nr) const {
  ASSERT(!IsNull());
  ASSERT(NumArgsTested() > arg_nr);
  if ((arg_nr == 0) && (NumArgsTested() == 1)) {
    // Frequent case.
    return ptr();
  }
  const intptr_t kNumArgsTested = 1;
  ICData& result = ICData::Handle(ICData::NewFrom(*this, kNumArgsTested));
  const intptr_t len = NumberOfChecks();
  for (intptr_t i = 0; i < len; i++) {
    const intptr_t class_id = GetClassIdAt(i, arg_nr);
    const intptr_t count = GetCountAt(i);
    if (count == 0) {
      continue;
    }
    intptr_t duplicate_class_id = -1;
    const intptr_t result_len = result.NumberOfChecks();
    for (intptr_t k = 0; k < result_len; k++) {
      if (class_id == result.GetReceiverClassIdAt(k)) {
        duplicate_class_id = k;
        break;
      }
    }
    if (duplicate_class_id >= 0) {
      // This check is valid only when checking the receiver.
      ASSERT((arg_nr != 0) ||
             (result.GetTargetAt(duplicate_class_id) == GetTargetAt(i)));
      result.IncrementCountAt(duplicate_class_id, count);
    } else {
      // This will make sure that Smi is first if it exists.
      result.AddReceiverCheckInternal(class_id,
                                      Function::Handle(GetTargetAt(i)), count,
                                      StaticTypeExactnessState::NotTracking());
    }
  }

  return result.ptr();
}
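
// Worked example for the collapse above (illustrative only): a 2-arg ICData
// with checks {(kSmiCid, kSmiCid) -> f, count 3} and
// {(kDoubleCid, kSmiCid) -> g, count 2}, viewed through
// AsUnaryClassChecksForArgNr(1), yields the single unary check
// {kSmiCid, count 5}: both rows carry kSmiCid at argument 1, so the second
// row merges into the first via IncrementCountAt().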

// (cid, count) tuple used to sort ICData by count.
struct CidCount {
  CidCount(intptr_t cid_, intptr_t count_, Function* f_)
      : cid(cid_), count(count_), function(f_) {}

  static int HighestCountFirst(const CidCount* a, const CidCount* b);

  intptr_t cid;
  intptr_t count;
  Function* function;
};

int CidCount::HighestCountFirst(const CidCount* a, const CidCount* b) {
  if (a->count > b->count) {
    return -1;
  }
  return (a->count < b->count) ? 1 : 0;
}
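
// The comparator follows the usual qsort convention: a negative result sorts
// `a` before `b`. Worked example (illustrative only): counts [2, 7, 7, 1]
// sort to [7, 7, 2, 1]; equal counts compare as 0, so their relative order
// is unspecified.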

ICDataPtr ICData::AsUnaryClassChecksSortedByCount() const {
  ASSERT(!IsNull());
  const intptr_t kNumArgsTested = 1;
  const intptr_t len = NumberOfChecks();
  if (len <= 1) {
    // No sorting needed.
    return AsUnaryClassChecks();
  }
  GrowableArray<CidCount> aggregate;
  for (intptr_t i = 0; i < len; i++) {
    const intptr_t class_id = GetClassIdAt(i, 0);
    const intptr_t count = GetCountAt(i);
    if (count == 0) {
      continue;
    }
    bool found = false;
    for (intptr_t r = 0; r < aggregate.length(); r++) {
      if (aggregate[r].cid == class_id) {
        aggregate[r].count += count;
        found = true;
        break;
      }
    }
    if (!found) {
      aggregate.Add(
          CidCount(class_id, count, &Function::ZoneHandle(GetTargetAt(i))));
    }
  }
  aggregate.Sort(CidCount::HighestCountFirst);

  ICData& result = ICData::Handle(ICData::NewFrom(*this, kNumArgsTested));
  ASSERT(result.NumberOfChecksIs(0));
  // Room for all entries and the sentinel.
  const intptr_t data_len =
      result.TestEntryLength() * (aggregate.length() + 1);
  // Allocate the array but do not assign it to result until we have populated
  // it with the aggregate data and the terminating sentinel.
  const Array& data = Array::Handle(Array::New(data_len, Heap::kOld));
  intptr_t pos = 0;
  for (intptr_t i = 0; i < aggregate.length(); i++) {
    data.SetAt(pos + 0, Smi::Handle(Smi::New(aggregate[i].cid)));
    data.SetAt(pos + TargetIndexFor(1), *aggregate[i].function);
    data.SetAt(pos + CountIndexFor(1),
               Smi::Handle(Smi::New(aggregate[i].count)));

    pos += result.TestEntryLength();
  }
  WriteSentinel(data, result.TestEntryLength(), result);
  result.set_entries(data);
  ASSERT(result.NumberOfChecksIs(aggregate.length()));
  return result.ptr();
}

UnlinkedCallPtr ICData::AsUnlinkedCall() const {
  ASSERT(NumArgsTested() == 1);
  ASSERT(!is_tracking_exactness());
  const UnlinkedCall& result = UnlinkedCall::Handle(UnlinkedCall::New());
  result.set_target_name(String::Handle(target_name()));
  result.set_arguments_descriptor(Array::Handle(arguments_descriptor()));
  result.set_can_patch_to_monomorphic(!FLAG_precompiled_mode ||
                                      receiver_cannot_be_smi());
  return result.ptr();
}

bool ICData::HasReceiverClassId(intptr_t class_id) const {
  ASSERT(NumArgsTested() > 0);
  const intptr_t len = NumberOfChecks();
  for (intptr_t i = 0; i < len; i++) {
    if (IsUsedAt(i)) {
      const intptr_t test_class_id = GetReceiverClassIdAt(i);
      if (test_class_id == class_id) {
        return true;
      }
    }
  }
  return false;
}
#endif

bool ICData::IsUsedAt(intptr_t i) const {
  if (GetCountAt(i) <= 0) {
    // Do not mistake unoptimized static call ICData for unused.
    // See ICData::AddTarget.
    // TODO(srdjan): Make this test more robust.
    if (NumArgsTested() > 0) {
      const intptr_t cid = GetReceiverClassIdAt(i);
      if (cid == kObjectCid) {
        return true;
      }
    }
    return false;
  }
  return true;
}

void ICData::Init() {
  for (int i = 0; i <= kCachedICDataMaxArgsTestedWithoutExactnessTracking;
       i++) {
    cached_icdata_arrays_
        [kCachedICDataZeroArgTestedWithoutExactnessTrackingIdx + i] =
            ICData::NewNonCachedEmptyICDataArray(i, false);
  }
  cached_icdata_arrays_[kCachedICDataOneArgWithExactnessTrackingIdx] =
      ICData::NewNonCachedEmptyICDataArray(1, true);
}

void ICData::Cleanup() {
  for (int i = 0; i < kCachedICDataArrayCount; ++i) {
    cached_icdata_arrays_[i] = nullptr;
  }
}

ArrayPtr ICData::NewNonCachedEmptyICDataArray(intptr_t num_args_tested,
                                              bool tracking_exactness) {
  // The IC data array must be terminated by a sentinel entry.
  const intptr_t len = TestEntryLengthFor(num_args_tested, tracking_exactness);
  const Array& array = Array::Handle(Array::New(len, Heap::kOld));
  // Only empty [ICData]s are allowed to have a non-ICData backref.
  WriteSentinel(array, len, /*back_ref=*/smi_illegal_cid());
  array.MakeImmutable();
  return array.ptr();
}

ArrayPtr ICData::CachedEmptyICDataArray(intptr_t num_args_tested,
                                        bool tracking_exactness) {
  if (tracking_exactness) {
    ASSERT(num_args_tested == 1);
    return cached_icdata_arrays_[kCachedICDataOneArgWithExactnessTrackingIdx];
  } else {
    ASSERT(num_args_tested >= 0);
    ASSERT(num_args_tested <=
           kCachedICDataMaxArgsTestedWithoutExactnessTracking);
    return cached_icdata_arrays_
        [kCachedICDataZeroArgTestedWithoutExactnessTrackingIdx +
         num_args_tested];
  }
}

bool ICData::IsCachedEmptyEntry(const Array& array) {
  for (int i = 0; i < kCachedICDataArrayCount; ++i) {
    if (cached_icdata_arrays_[i] == array.ptr()) return true;
  }
  return false;
}
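
// Design note (summarizing the code above): the cached empty arrays are
// shared, immutable backing stores, so resetting an ICData to "empty" is a
// single pointer store and never allocates. IsCachedEmptyEntry() is also what
// lets ICDataOfEntriesArray() below distinguish a shared empty array (whose
// backref slot holds the kIllegalCid Smi) from a live, per-ICData entries
// array (whose backref slot holds the owning ICData).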

// Does not initialize the ICData array.
ICDataPtr ICData::NewDescriptor(Zone* zone,
                                const Function& owner,
                                const String& target_name,
                                const Array& arguments_descriptor,
                                intptr_t deopt_id,
                                intptr_t num_args_tested,
                                RebindRule rebind_rule,
                                const AbstractType& receivers_static_type) {
#if !defined(DART_PRECOMPILED_RUNTIME)
  // We should only have null owners in the precompiled runtime, if the
  // owning function for a Code object was optimized out.
  ASSERT(!owner.IsNull());
#endif
  ASSERT(!target_name.IsNull());
  ASSERT(!arguments_descriptor.IsNull());
  ASSERT(Object::icdata_class() != Class::null());
  ASSERT(num_args_tested >= 0);
  // IC data objects are long-lived, so allocate them in old generation.
  const auto& result =
      ICData::Handle(zone, Object::Allocate<ICData>(Heap::kOld));
  result.set_owner(owner);
  result.set_target_name(target_name);
  result.set_arguments_descriptor(arguments_descriptor);
  NOT_IN_PRECOMPILED(result.set_deopt_id(deopt_id));
  ASSERT_EQUAL(result.untag()->state_bits_, 0);
  result.set_rebind_rule(rebind_rule);
  result.SetNumArgsTested(num_args_tested);
  NOT_IN_PRECOMPILED(result.SetReceiversStaticType(receivers_static_type));
  return result.ptr();
}

bool ICData::IsImmutable() const {
  return entries()->IsImmutableArray();
}

ICDataPtr ICData::New() {
  // IC data objects are long-lived, so allocate them in old generation.
  const auto& result = ICData::Handle(Object::Allocate<ICData>(Heap::kOld));
  ASSERT_EQUAL(result.untag()->state_bits_, 0);
  result.set_deopt_id(DeoptId::kNone);
  return result.ptr();
}

ICDataPtr ICData::New(const Function& owner,
                      const String& target_name,
                      const Array& arguments_descriptor,
                      intptr_t deopt_id,
                      intptr_t num_args_tested,
                      RebindRule rebind_rule,
                      const AbstractType& receivers_static_type) {
  Zone* zone = Thread::Current()->zone();
  const ICData& result = ICData::Handle(
      zone,
      NewDescriptor(zone, owner, target_name, arguments_descriptor, deopt_id,
                    num_args_tested, rebind_rule, receivers_static_type));
  result.set_entries(Array::Handle(
      zone,
      CachedEmptyICDataArray(num_args_tested, result.is_tracking_exactness())));
  return result.ptr();
}

ICDataPtr ICData::NewWithCheck(const Function& owner,
                               const String& target_name,
                               const Array& arguments_descriptor,
                               intptr_t deopt_id,
                               intptr_t num_args_tested,
                               RebindRule rebind_rule,
                               GrowableArray<intptr_t>* cids,
                               const Function& target,
                               const AbstractType& receiver_type) {
  ASSERT((cids != nullptr) && !target.IsNull());
  ASSERT(cids->length() == num_args_tested);

  Zone* zone = Thread::Current()->zone();
  const auto& result = ICData::Handle(
      zone,
      NewDescriptor(zone, owner, target_name, arguments_descriptor, deopt_id,
                    num_args_tested, rebind_rule, receiver_type));

  const intptr_t kNumEntries = 2;  // 1 entry and a sentinel.
  const intptr_t entry_len =
      TestEntryLengthFor(num_args_tested, result.is_tracking_exactness());
  const auto& array =
      Array::Handle(zone, Array::New(kNumEntries * entry_len, Heap::kOld));

  auto& cid = Smi::Handle(zone);
  for (intptr_t i = 0; i < num_args_tested; ++i) {
    cid = Smi::New((*cids)[i]);
    array.SetAt(i, cid);
  }

  SetTargetAtPos(array, 0, num_args_tested, target);
#if !defined(DART_PRECOMPILED_RUNTIME)
  array.SetAt(CountIndexFor(num_args_tested), Object::smi_zero());
#endif
  WriteSentinel(array, entry_len, result);

  result.set_entries(array);

  return result.ptr();
}

ICDataPtr ICData::NewForStaticCall(const Function& owner,
                                   const Function& target,
                                   const Array& arguments_descriptor,
                                   intptr_t deopt_id,
                                   intptr_t num_args_tested,
                                   RebindRule rebind_rule) {
  // See `MethodRecognizer::NumArgsCheckedForStaticCall`.
  ASSERT(num_args_tested == 0 || num_args_tested == 2);
  ASSERT(!target.IsNull());

  Zone* zone = Thread::Current()->zone();
  const auto& target_name = String::Handle(zone, target.name());
  GrowableArray<intptr_t> cids(num_args_tested);
  if (num_args_tested == 2) {
    cids.Add(kObjectCid);
    cids.Add(kObjectCid);
  }
  return ICData::NewWithCheck(owner, target_name, arguments_descriptor,
                              deopt_id, num_args_tested, rebind_rule, &cids,
                              target, Object::null_abstract_type());
}

#if !defined(DART_PRECOMPILED_RUNTIME)
ICDataPtr ICData::NewFrom(const ICData& from, intptr_t num_args_tested) {
  // See the comment in [ICData::Clone] for why we access the megamorphic bit
  // first.
  const bool is_megamorphic = from.is_megamorphic();

  const ICData& result = ICData::Handle(ICData::New(
      Function::Handle(from.Owner()), String::Handle(from.target_name()),
      Array::Handle(from.arguments_descriptor()), from.deopt_id(),
      num_args_tested, from.rebind_rule(),
      AbstractType::Handle(from.receivers_static_type())));
  // Copy deoptimization reasons.
  result.SetDeoptReasons(from.DeoptReasons());
  result.set_is_megamorphic(is_megamorphic);
  return result.ptr();
}

ICDataPtr ICData::Clone(const ICData& from) {
  Zone* zone = Thread::Current()->zone();

  // We have to check the megamorphic bit before accessing the entries of the
  // ICData to ensure all writes to the entries have been flushed and are
  // visible at this point.
  //
  // This allows us to maintain the invariant that if the megamorphic bit is
  // set, the number of entries in the ICData has reached the limit.
  const bool is_megamorphic = from.is_megamorphic();

  const ICData& result = ICData::Handle(
      zone, ICData::NewDescriptor(
                zone, Function::Handle(zone, from.Owner()),
                String::Handle(zone, from.target_name()),
                Array::Handle(zone, from.arguments_descriptor()),
                from.deopt_id(), from.NumArgsTested(), from.rebind_rule(),
                AbstractType::Handle(zone, from.receivers_static_type())));
  // Clone the entry array.
  const Array& from_array = Array::Handle(zone, from.entries());
  if (ICData::IsCachedEmptyEntry(from_array)) {
    result.set_entries(from_array);
  } else {
    const intptr_t len = from_array.Length();
    const Array& cloned_array =
        Array::Handle(zone, Array::New(len, Heap::kOld));
    Object& obj = Object::Handle(zone);
    for (intptr_t i = 0; i < len; i++) {
      obj = from_array.At(i);
      cloned_array.SetAt(i, obj);
    }
    // Update the backref in our clone.
    cloned_array.SetAt(cloned_array.Length() - 1, result);
    result.set_entries(cloned_array);
  }
  // Copy deoptimization reasons.
  result.SetDeoptReasons(from.DeoptReasons());
  result.set_is_megamorphic(is_megamorphic);

  RELEASE_ASSERT(!is_megamorphic ||
                 result.NumberOfChecks() >= FLAG_max_polymorphic_checks);

  DEBUG_ONLY(result.AssertInvariantsAreSatisfied());

  return result.ptr();
}
#endif

ICDataPtr ICData::ICDataOfEntriesArray(const Array& array) {
  const auto& back_ref = Object::Handle(array.At(array.Length() - 1));
  if (back_ref.ptr() == smi_illegal_cid().ptr()) {
    ASSERT(IsCachedEmptyEntry(array));
    return ICData::null();
  }

  const auto& ic_data = ICData::Cast(back_ref);
  DEBUG_ONLY(ic_data.AssertInvariantsAreSatisfied());
  return ic_data.ptr();
}
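
// Putting the constructors and the backref together (illustrative usage only;
// `owner`, `name`, `args_desc`, and `deopt_id` are hypothetical values):
//
//   const ICData& ic = ICData::Handle(ICData::New(
//       owner, name, args_desc, deopt_id, /*num_args_tested=*/1,
//       ICData::kInstance, Object::null_abstract_type()));
//   const Array& entries = Array::Handle(ic.entries());
//   // A freshly created ICData shares a cached empty array, so this returns
//   // ICData::null(); after the first AddReceiverCheck() it returns ic.ptr().
//   ICDataPtr backref = ICData::ICDataOfEntriesArray(entries);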

const char* WeakSerializationReference::ToCString() const {
  return Object::Handle(target()).ToCString();
}

ObjectPtr WeakSerializationReference::New(const Object& target,
                                          const Object& replacement) {
  ASSERT(Object::weak_serialization_reference_class() != Class::null());
  // Don't wrap any object in the VM heap, as all objects in the VM isolate
  // heap are currently serialized.
  //
  // Note that we _do_ wrap Smis if requested. Smis are serialized in the Mint
  // cluster, and so dropping them if not strongly referenced saves space in
  // the snapshot.
  if (target.ptr()->IsHeapObject() && target.InVMIsolateHeap()) {
    return target.ptr();
  }
  // If the target is a WSR that already uses the replacement, then return it.
  if (target.IsWeakSerializationReference() &&
      WeakSerializationReference::Cast(target).replacement() ==
          replacement.ptr()) {
    return target.ptr();
  }
  const auto& result = WeakSerializationReference::Handle(
      Object::Allocate<WeakSerializationReference>(Heap::kOld));
  // Don't nest WSRs; instead just use the old WSR's target.
  result.untag()->set_target(target.IsWeakSerializationReference()
                                 ? WeakSerializationReference::Unwrap(target)
                                 : target.ptr());
  result.untag()->set_replacement(replacement.ptr());
  return result.ptr();
}

const char* WeakArray::ToCString() const {
  return Thread::Current()->zone()->PrintToString("WeakArray len:%" Pd,
                                                  Length());
}

WeakArrayPtr WeakArray::New(intptr_t length, Heap::Space space) {
  ASSERT(Object::weak_array_class() != Class::null());
  if (!IsValidLength(length)) {
    // This should be caught before we reach here.
    FATAL("Fatal error in WeakArray::New: invalid len %" Pd "\n", length);
  }
  auto raw = Object::Allocate<WeakArray>(space, length);
  NoSafepointScope no_safepoint;
  raw->untag()->set_length(Smi::New(length));
  return raw;
}

#if defined(INCLUDE_IL_PRINTER)
Code::Comments& Code::Comments::New(intptr_t count) {
  Comments* comments;
  if (count < 0 || count > (kIntptrMax / kNumberOfEntries)) {
    // This should be caught before we reach here.
    FATAL("Fatal error in Code::Comments::New: invalid count %" Pd "\n", count);
  }
  if (count == 0) {
    comments = new Comments(Object::empty_array());
  } else {
    const Array& data =
        Array::Handle(Array::New(count * kNumberOfEntries, Heap::kOld));
    comments = new Comments(data);
  }
  return *comments;
}

intptr_t Code::Comments::Length() const {
  if (comments_.IsNull()) {
    return 0;
  }
  return comments_.Length() / kNumberOfEntries;
}

intptr_t Code::Comments::PCOffsetAt(intptr_t idx) const {
  return Smi::Value(
      Smi::RawCast(comments_.At(idx * kNumberOfEntries + kPCOffsetEntry)));
}

void Code::Comments::SetPCOffsetAt(intptr_t idx, intptr_t pc) {
  comments_.SetAt(idx * kNumberOfEntries + kPCOffsetEntry,
                  Smi::Handle(Smi::New(pc)));
}

const char* Code::Comments::CommentAt(intptr_t idx) const {
  string_ ^= comments_.At(idx * kNumberOfEntries + kCommentEntry);
  return string_.ToCString();
}

void Code::Comments::SetCommentAt(intptr_t idx, const String& comment) {
  comments_.SetAt(idx * kNumberOfEntries + kCommentEntry, comment);
}

Code::Comments::Comments(const Array& comments)
    : comments_(comments), string_(String::Handle()) {}
#endif  // defined(INCLUDE_IL_PRINTER)

const char* Code::EntryKindToCString(EntryKind kind) {
  switch (kind) {
    case EntryKind::kNormal:
      return "Normal";
    case EntryKind::kUnchecked:
      return "Unchecked";
    case EntryKind::kMonomorphic:
      return "Monomorphic";
    case EntryKind::kMonomorphicUnchecked:
      return "MonomorphicUnchecked";
    default:
      UNREACHABLE();
      return nullptr;
  }
}

bool Code::ParseEntryKind(const char* str, EntryKind* out) {
  if (strcmp(str, "Normal") == 0) {
    *out = EntryKind::kNormal;
    return true;
  } else if (strcmp(str, "Unchecked") == 0) {
    *out = EntryKind::kUnchecked;
    return true;
  } else if (strcmp(str, "Monomorphic") == 0) {
    *out = EntryKind::kMonomorphic;
    return true;
  } else if (strcmp(str, "MonomorphicUnchecked") == 0) {
    *out = EntryKind::kMonomorphicUnchecked;
    return true;
  }
  return false;
}
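
// Round-trip sketch for the two helpers above (illustrative only):
//
//   Code::EntryKind kind;
//   if (Code::ParseEntryKind("Unchecked", &kind)) {
//     ASSERT(kind == Code::EntryKind::kUnchecked);
//     ASSERT(strcmp(Code::EntryKindToCString(kind), "Unchecked") == 0);
//   }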

LocalVarDescriptorsPtr Code::GetLocalVarDescriptors() const {
  const LocalVarDescriptors& v =
      LocalVarDescriptors::Handle(var_descriptors());
  if (v.IsNull()) {
    ASSERT(!is_optimized());
    const Function& f = Function::Handle(function());
    ASSERT(!f.IsIrregexpFunction());  // Not yet implemented.
    Compiler::ComputeLocalVarDescriptors(*this);
  }
  return var_descriptors();
}

void Code::set_owner(const Object& owner) const {
#if defined(DEBUG)
  const auto& unwrapped_owner =
      Object::Handle(WeakSerializationReference::Unwrap(owner));
  ASSERT(unwrapped_owner.IsFunction() || unwrapped_owner.IsClass() ||
         unwrapped_owner.IsAbstractType());
#endif
  untag()->set_owner(owner.ptr());
}

void Code::set_state_bits(intptr_t bits) const {
  StoreNonPointer(&untag()->state_bits_, bits);
}

void Code::set_is_optimized(bool value) const {
  set_state_bits(OptimizedBit::update(value, untag()->state_bits_));
}

void Code::set_is_force_optimized(bool value) const {
  set_state_bits(ForceOptimizedBit::update(value, untag()->state_bits_));
}

void Code::set_is_alive(bool value) const {
  set_state_bits(AliveBit::update(value, untag()->state_bits_));
}

void Code::set_is_discarded(bool value) const {
  set_state_bits(DiscardedBit::update(value, untag()->state_bits_));
}

void Code::set_compressed_stackmaps(const CompressedStackMaps& maps) const {
  ASSERT(maps.IsOld());
  untag()->set_compressed_stackmaps(maps.ptr());
}

#if !defined(DART_PRECOMPILED_RUNTIME)
intptr_t Code::num_variables() const {
  ASSERT(!FLAG_precompiled_mode);
  return Smi::Value(Smi::RawCast(untag()->catch_entry()));
}
void Code::set_num_variables(intptr_t num_variables) const {
  ASSERT(!FLAG_precompiled_mode);
  untag()->set_catch_entry(Smi::New(num_variables));
}
#endif

#if defined(DART_PRECOMPILED_RUNTIME) || defined(DART_PRECOMPILER)
TypedDataPtr Code::catch_entry_moves_maps() const {
  ASSERT(FLAG_precompiled_mode);
  return TypedData::RawCast(untag()->catch_entry());
}
void Code::set_catch_entry_moves_maps(const TypedData& maps) const {
  ASSERT(FLAG_precompiled_mode);
  untag()->set_catch_entry(maps.ptr());
}
#endif

void Code::set_deopt_info_array(const Array& array) const {
#if defined(DART_PRECOMPILED_RUNTIME)
  UNREACHABLE();
#else
  ASSERT(array.IsOld());
  untag()->set_deopt_info_array(array.ptr());
#endif
}

void Code::set_static_calls_target_table(const Array& value) const {
#if defined(DART_PRECOMPILED_RUNTIME)
  UNREACHABLE();
#else
  untag()->set_static_calls_target_table(value.ptr());
#endif
#if defined(DEBUG)
  // Check that the table is sorted by pc offsets.
  // FlowGraphCompiler::AddStaticCallTarget adds pc-offsets to the table while
  // emitting assembly. This guarantees that every succeeding pc-offset is
  // larger than the previously added one.
  StaticCallsTable entries(value);
  const intptr_t count = entries.Length();
  for (intptr_t i = 0; i < count - 1; ++i) {
    auto left = Smi::Value(entries[i].Get<kSCallTableKindAndOffset>());
    auto right = Smi::Value(entries[i + 1].Get<kSCallTableKindAndOffset>());
    ASSERT(OffsetField::decode(left) < OffsetField::decode(right));
  }
#endif  // DEBUG
}

ObjectPoolPtr Code::GetObjectPool() const {
#if defined(DART_PRECOMPILER) || defined(DART_PRECOMPILED_RUNTIME)
  if (FLAG_precompiled_mode) {
    return IsolateGroup::Current()->object_store()->global_object_pool();
  }
#endif
  return object_pool();
}

bool Code::HasBreakpoint() const {
#if defined(PRODUCT)
  return false;
#else
  return IsolateGroup::Current()->debugger()->HasBreakpointInCode(*this);
#endif
}

TypedDataPtr Code::GetDeoptInfoAtPc(uword pc,
                                    ICData::DeoptReasonId* deopt_reason,
                                    uint32_t* deopt_flags) const {
#if defined(DART_PRECOMPILED_RUNTIME)
  ASSERT(Dart::vm_snapshot_kind() == Snapshot::kFullAOT);
  return TypedData::null();
#else
  ASSERT(is_optimized());
  const Instructions& instrs = Instructions::Handle(instructions());
  uword code_entry = instrs.PayloadStart();
  const Array& table = Array::Handle(deopt_info_array());
  if (table.IsNull()) {
    ASSERT(Dart::vm_snapshot_kind() == Snapshot::kFullAOT);
    return TypedData::null();
  }
  // Linear search for the PC offset matching the target PC.
  intptr_t length = DeoptTable::GetLength(table);
  Smi& offset = Smi::Handle();
  Smi& reason_and_flags = Smi::Handle();
  TypedData& info = TypedData::Handle();
  for (intptr_t i = 0; i < length; ++i) {
    DeoptTable::GetEntry(table, i, &offset, &info, &reason_and_flags);
    if (pc == (code_entry + offset.Value())) {
      ASSERT(!info.IsNull());
      *deopt_reason = DeoptTable::ReasonField::decode(reason_and_flags.Value());
      *deopt_flags = DeoptTable::FlagsField::decode(reason_and_flags.Value());
      return info.ptr();
    }
  }
  *deopt_reason = ICData::kDeoptUnknown;
  return TypedData::null();
#endif  // defined(DART_PRECOMPILED_RUNTIME)
}
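
// Caller-side sketch for GetDeoptInfoAtPc (illustrative only; `code` and `pc`
// are hypothetical values obtained elsewhere, e.g. from a stack walk):
//
//   ICData::DeoptReasonId reason;
//   uint32_t flags;
//   const TypedData& info =
//       TypedData::Handle(code.GetDeoptInfoAtPc(pc, &reason, &flags));
//   if (!info.IsNull()) {
//     // `pc` is a deopt point; `reason` and `flags` describe why and how.
//   }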

intptr_t Code::BinarySearchInSCallTable(uword pc) const {
#if defined(DART_PRECOMPILED_RUNTIME)
  UNREACHABLE();
#else
  NoSafepointScope no_safepoint;
  const Array& table = Array::Handle(untag()->static_calls_target_table());
  StaticCallsTable entries(table);
  const intptr_t pc_offset = pc - PayloadStart();
  intptr_t imin = 0;
  intptr_t imax = (table.Length() / kSCallTableEntryLength) - 1;
  while (imax >= imin) {
    const intptr_t imid = imin + (imax - imin) / 2;
    const auto offset = OffsetField::decode(
        Smi::Value(entries[imid].Get<kSCallTableKindAndOffset>()));
    if (offset < pc_offset) {
      imin = imid + 1;
    } else if (offset > pc_offset) {
      imax = imid - 1;
    } else {
      return imid;
    }
  }
#endif
  return -1;
}
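
// The search relies on the sortedness invariant asserted in
// set_static_calls_target_table() above. Worked trace (illustrative): for
// entry pc offsets [4, 16, 32, 48] and pc_offset 32, the probes are
// imid = 1 (16 < 32, so imin becomes 2) and imid = 2 (match), i.e. the
// lookup takes O(log n) probes.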

FunctionPtr Code::GetStaticCallTargetFunctionAt(uword pc) const {
#if defined(DART_PRECOMPILED_RUNTIME)
  UNREACHABLE();
  return Function::null();
#else
  const intptr_t i = BinarySearchInSCallTable(pc);
  if (i < 0) {
    return Function::null();
  }
  const Array& array = Array::Handle(untag()->static_calls_target_table());
  StaticCallsTable entries(array);
  return entries[i].Get<kSCallTableFunctionTarget>();
#endif
}

void Code::SetStaticCallTargetCodeAt(uword pc, const Code& code) const {
#if defined(DART_PRECOMPILED_RUNTIME)
  UNREACHABLE();
#else
  const intptr_t i = BinarySearchInSCallTable(pc);
  ASSERT(i >= 0);
  const Array& array = Array::Handle(untag()->static_calls_target_table());
  StaticCallsTable entries(array);
  ASSERT(code.IsNull() ||
         (code.function() == entries[i].Get<kSCallTableFunctionTarget>()));
  return entries[i].Set<kSCallTableCodeOrTypeTarget>(code);
#endif
}

void Code::SetStubCallTargetCodeAt(uword pc, const Code& code) const {
#if defined(DART_PRECOMPILED_RUNTIME)
  UNREACHABLE();
#else
  const intptr_t i = BinarySearchInSCallTable(pc);
  ASSERT(i >= 0);
  const Array& array = Array::Handle(untag()->static_calls_target_table());
  StaticCallsTable entries(array);
#if defined(DEBUG)
  if (entries[i].Get<kSCallTableFunctionTarget>() == Function::null()) {
    ASSERT(!code.IsNull() && Object::Handle(code.owner()).IsClass());
  } else {
    ASSERT(code.IsNull() ||
           (code.function() == entries[i].Get<kSCallTableFunctionTarget>()));
  }
#endif
  return entries[i].Set<kSCallTableCodeOrTypeTarget>(code);
#endif
}

void Code::Disassemble(DisassemblyFormatter* formatter) const {
#if !defined(PRODUCT) || defined(FORCE_INCLUDE_DISASSEMBLER)
  if (!FLAG_support_disassembler) {
    return;
  }
  const uword start = PayloadStart();
  if (formatter == nullptr) {
    Disassembler::Disassemble(start, start + Size(), *this);
  } else {
    Disassembler::Disassemble(start, start + Size(), formatter, *this);
  }
#endif  // !defined(PRODUCT) || defined(FORCE_INCLUDE_DISASSEMBLER)
}

#if defined(INCLUDE_IL_PRINTER)
#if defined(PRODUCT)
// In PRODUCT builds we don't have space in the Code object to store code
// comments, so we move them into the malloced heap (and leak them). This
// functionality is only intended to be used in the AOT compiler, so leaking
// is fine.
class MallocCodeComments final : public CodeComments {
 public:
  explicit MallocCodeComments(const CodeComments& comments)
      : length_(comments.Length()), comments_(new Comment[comments.Length()]) {
    for (intptr_t i = 0; i < length_; i++) {
      comments_[i].pc_offset = comments.PCOffsetAt(i);
      comments_[i].comment =
          Utils::CreateCStringUniquePtr(Utils::StrDup(comments.CommentAt(i)));
    }
  }

  intptr_t Length() const override { return length_; }

  intptr_t PCOffsetAt(intptr_t i) const override {
    return comments_[i].pc_offset;
  }

  const char* CommentAt(intptr_t i) const override {
    return comments_[i].comment.get();
  }

 private:
  struct Comment {
    intptr_t pc_offset;
    Utils::CStringUniquePtr comment{nullptr, std::free};
  };

  intptr_t length_;
  std::unique_ptr<Comment[]> comments_;
};
#endif

const CodeComments& Code::comments() const {
#if defined(PRODUCT)
  auto comments =
      static_cast<CodeComments*>(Thread::Current()->heap()->GetPeer(ptr()));
  return (comments != nullptr) ? *comments : Code::Comments::New(0);
#else
  return *new Code::Comments(Array::Handle(untag()->comments()));
#endif
}

void Code::set_comments(const CodeComments& comments) const {
#if !defined(PRODUCT)
  auto& wrapper = static_cast<const Code::Comments&>(comments);
  ASSERT(wrapper.comments_.IsOld());
  untag()->set_comments(wrapper.comments_.ptr());
#else
  if (FLAG_code_comments && comments.Length() > 0) {
    Thread::Current()->heap()->SetPeer(ptr(), new MallocCodeComments(comments));
  } else {
    Thread::Current()->heap()->SetPeer(ptr(), nullptr);
  }
#endif
}
#endif  // defined(INCLUDE_IL_PRINTER)

void Code::SetPrologueOffset(intptr_t offset) const {
#if defined(PRODUCT)
  UNREACHABLE();
#else
  ASSERT(offset >= 0);
  untag()->set_return_address_metadata(Smi::New(offset));
#endif
}

intptr_t Code::GetPrologueOffset() const {
#if defined(PRODUCT)
  UNREACHABLE();
  return -1;
#else
  const Object& object = Object::Handle(untag()->return_address_metadata());
  // In the future we may put something other than a smi in
  // |return_address_metadata_|.
  if (object.IsNull() || !object.IsSmi()) {
    return -1;
  }
  return Smi::Cast(object).Value();
#endif
}

ArrayPtr Code::inlined_id_to_function() const {
  return untag()->inlined_id_to_function();
}

void Code::set_inlined_id_to_function(const Array& value) const {
  ASSERT(value.IsOld());
  untag()->set_inlined_id_to_function(value.ptr());
}

CodePtr Code::New(intptr_t pointer_offsets_length) {
  if (pointer_offsets_length < 0 || pointer_offsets_length > kMaxElements) {
    // This should be caught before we reach here.
    FATAL("Fatal error in Code::New: invalid pointer_offsets_length %" Pd "\n",
          pointer_offsets_length);
  }
  ASSERT(Object::code_class() != Class::null());
  Code& result = Code::Handle();
  {
    auto raw = Object::Allocate<Code>(Heap::kOld, pointer_offsets_length);
    NoSafepointScope no_safepoint;
    result = raw;
    ASSERT_EQUAL(result.untag()->state_bits_, 0);
    result.set_pointer_offsets_length(pointer_offsets_length);
  }
  DEBUG_ASSERT(result.compile_timestamp() == 0);
#if defined(INCLUDE_IL_PRINTER)
  result.set_comments(Comments::New(0));
#endif
  result.set_pc_descriptors(Object::empty_descriptors());
  result.set_compressed_stackmaps(Object::empty_compressed_stackmaps());
  return result.ptr();
}

#if !defined(DART_PRECOMPILED_RUNTIME)
CodePtr Code::FinalizeCodeAndNotify(const Function& function,
                                    FlowGraphCompiler* compiler,
                                    compiler::Assembler* assembler,
                                    PoolAttachment pool_attachment,
                                    bool optimized,
                                    CodeStatistics* stats) {
  auto thread = Thread::Current();
  ASSERT(thread->isolate_group()->program_lock()->IsCurrentThreadWriter());

  const auto& code = Code::Handle(
      FinalizeCode(compiler, assembler, pool_attachment, optimized, stats));
  NotifyCodeObservers(function, code, optimized);
  return code.ptr();
}

CodePtr Code::FinalizeCodeAndNotify(const char* name,
                                    FlowGraphCompiler* compiler,
                                    compiler::Assembler* assembler,
                                    PoolAttachment pool_attachment,
                                    bool optimized,
                                    CodeStatistics* stats) {
  auto thread = Thread::Current();
  ASSERT(thread->isolate_group()->program_lock()->IsCurrentThreadWriter());

  const auto& code = Code::Handle(
      FinalizeCode(compiler, assembler, pool_attachment, optimized, stats));
  NotifyCodeObservers(name, code, optimized);
  return code.ptr();
}

#if defined(DART_PRECOMPILER)
DECLARE_FLAG(charp, write_v8_snapshot_profile_to);
DECLARE_FLAG(charp, trace_precompiler_to);
#endif  // defined(DART_PRECOMPILER)

CodePtr Code::FinalizeCode(FlowGraphCompiler* compiler,
                           compiler::Assembler* assembler,
                           PoolAttachment pool_attachment,
                           bool optimized,
                           CodeStatistics* stats /* = nullptr */) {
  auto thread = Thread::Current();
  ASSERT(thread->isolate_group()->program_lock()->IsCurrentThreadWriter());

  ASSERT(assembler != nullptr);
  ObjectPool& object_pool = ObjectPool::Handle();

  if (pool_attachment == PoolAttachment::kAttachPool) {
    if (assembler->HasObjectPoolBuilder()) {
      object_pool =
          ObjectPool::NewFromBuilder(assembler->object_pool_builder());
    } else {
      object_pool = ObjectPool::empty_object_pool().ptr();
    }
  } else {
#if defined(DART_PRECOMPILER)
    if (assembler->HasObjectPoolBuilder() &&
        assembler->object_pool_builder().HasParent()) {
      // We are not going to write this pool into the snapshot, but we will
      // use it to emit references from this code object to other objects in
      // the snapshot that it uses.
      object_pool =
          ObjectPool::NewFromBuilder(assembler->object_pool_builder());
    }
#endif  // defined(DART_PRECOMPILER)
  }

  // Allocate the Code and Instructions objects. Code is allocated first
  // because a GC during allocation of the code will leave the instruction
  // pages read-only.
  intptr_t pointer_offset_count = assembler->CountPointerOffsets();
  Code& code = Code::ZoneHandle(Code::New(pointer_offset_count));
#ifdef TARGET_ARCH_IA32
  assembler->GetSelfHandle() = code.ptr();
#endif
  Instructions& instrs = Instructions::ZoneHandle(Instructions::New(
      assembler->CodeSize(), assembler->has_monomorphic_entry()));

  {
    // Important: if a GC is triggered at any point between Instructions::New
    // and here, it would write-protect the instructions object that we are
    // trying to fill in.
    NoSafepointScope no_safepoint;

    // Copy the instructions into the instruction area and apply all fixups.
    // Embedded pointers are still in handles at this point.
    MemoryRegion region(reinterpret_cast<void*>(instrs.PayloadStart()),
                        instrs.Size());
    assembler->FinalizeInstructions(region);

    const auto& pointer_offsets = assembler->GetPointerOffsets();
    ASSERT(pointer_offsets.length() == pointer_offset_count);
    ASSERT(code.pointer_offsets_length() == pointer_offsets.length());

    // Set the pointer offsets list in the Code object and resolve all handles
    // in the instruction stream to raw objects.
    for (intptr_t i = 0; i < pointer_offsets.length(); i++) {
      intptr_t offset_in_instrs = pointer_offsets[i];
      code.SetPointerOffsetAt(i, offset_in_instrs);
      uword addr = region.start() + offset_in_instrs;
      ASSERT(instrs.PayloadStart() <= addr);
      ASSERT((instrs.PayloadStart() + instrs.Size()) > addr);
      const Object* object = LoadUnaligned(reinterpret_cast<Object**>(addr));
      ASSERT(object->IsOld());
      // N.B. The pointer is embedded in the Instructions object, but visited
      // through the Code object.
      code.StorePointerUnaligned(reinterpret_cast<ObjectPtr*>(addr),
                                 object->ptr(), thread);
    }

    // Write protect instructions and, if supported by the OS, use dual
    // mapping for execution.
    if (FLAG_write_protect_code) {
      uword address = UntaggedObject::ToAddr(instrs.ptr());
      // Check if a dual mapping exists.
      instrs = Instructions::RawCast(Page::ToExecutable(instrs.ptr()));
      uword exec_address = UntaggedObject::ToAddr(instrs.ptr());
      const bool use_dual_mapping = exec_address != address;
      ASSERT(use_dual_mapping == FLAG_dual_map_code);

      // When dual mapping is enabled the executable mapping is RX from the
      // point of allocation and never changes protection.
      // Yet the writable mapping is still turned back from RW to R.
      if (use_dual_mapping) {
        VirtualMemory::Protect(reinterpret_cast<void*>(address),
                               instrs.ptr()->untag()->HeapSize(),
                               VirtualMemory::kReadOnly);
        address = exec_address;
      } else {
        // If dual mapping is disabled and we write protect, then we have to
        // change the single mapping from RW -> RX.
        VirtualMemory::Protect(reinterpret_cast<void*>(address),
                               instrs.ptr()->untag()->HeapSize(),
                               VirtualMemory::kReadExecute);
      }
    }

    // Hook up the Code and Instructions objects.
    const uword unchecked_offset = assembler->UncheckedEntryOffset();
    code.SetActiveInstructions(instrs, unchecked_offset);
    code.set_instructions(instrs);
    NOT_IN_PRECOMPILED(code.set_unchecked_offset(unchecked_offset));
    code.set_is_alive(true);

    // Set the object pool in the Instructions object.
    if (!object_pool.IsNull()) {
      code.set_object_pool(object_pool.ptr());
    }

#if defined(DART_PRECOMPILER)
    if (stats != nullptr) {
      stats->Finalize();
      instrs.set_stats(stats);
    }
#endif

    CPU::FlushICache(instrs.PayloadStart(), instrs.Size());
  }

#if defined(INCLUDE_IL_PRINTER)
  code.set_comments(CreateCommentsFrom(assembler));
#endif  // defined(INCLUDE_IL_PRINTER)

#ifndef PRODUCT
  code.set_compile_timestamp(OS::GetCurrentMonotonicMicros());
  if (assembler->prologue_offset() >= 0) {
    code.SetPrologueOffset(assembler->prologue_offset());
  } else {
    // No prologue was ever entered; optimistically assume nothing was ever
    // pushed onto the stack.
    code.SetPrologueOffset(assembler->CodeSize());
  }
#endif
  return code.ptr();
}
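
// Summary of the protection states set up in FinalizeCode (derived from the
// code above; illustrative, not an extra guarantee):
//
//   FLAG_write_protect_code  FLAG_dual_map_code  writable view  executable view
//   false                    -                   stays RW       (same mapping)
//   true                     false               RW -> RX       (same mapping)
//   true                     true                RW -> R        RX from start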

void Code::NotifyCodeObservers(const Code& code, bool optimized) {
#if !defined(PRODUCT)
  ASSERT(!Thread::Current()->OwnsGCSafepoint());
  if (CodeObservers::AreActive()) {
    if (code.IsFunctionCode()) {
      const auto& function = Function::Handle(code.function());
      if (!function.IsNull()) {
        return NotifyCodeObservers(function, code, optimized);
      }
    }
    NotifyCodeObservers(code.Name(), code, optimized);
  }
#endif
}

void Code::NotifyCodeObservers(const Function& function,
                               const Code& code,
                               bool optimized) {
#if !defined(PRODUCT)
  ASSERT(!function.IsNull());
  ASSERT(!Thread::Current()->OwnsGCSafepoint());
  // Calling ToLibNamePrefixedQualifiedCString is very expensive,
  // try to avoid it.
  if (CodeObservers::AreActive()) {
    const char* name = function.ToLibNamePrefixedQualifiedCString();
    NotifyCodeObservers(name, code, optimized);
  }
#endif
}

void Code::NotifyCodeObservers(const char* name,
                               const Code& code,
                               bool optimized) {
#if !defined(PRODUCT)
  ASSERT(name != nullptr);
  ASSERT(!code.IsNull());
  ASSERT(!Thread::Current()->OwnsGCSafepoint());
  if (CodeObservers::AreActive()) {
    const auto& instrs = Instructions::Handle(code.instructions());
    CodeObservers::NotifyAll(name, instrs.PayloadStart(),
                             code.GetPrologueOffset(), instrs.Size(),
                             optimized, &code.comments());
  }
#endif
}
#endif  // !defined(DART_PRECOMPILED_RUNTIME)

CodePtr Code::FindCode(uword pc, int64_t timestamp) {
  class SlowFindCodeVisitor : public ObjectVisitor {
   public:
    SlowFindCodeVisitor(uword pc, int64_t timestamp)
        : pc_(pc), timestamp_(timestamp), result_(Code::null()) {}

    void VisitObject(ObjectPtr obj) {
      if (!obj->IsCode()) return;
      CodePtr code = static_cast<CodePtr>(obj);
      if (Code::PayloadStartOf(code) != pc_) return;
#if !defined(PRODUCT)
      if (code->untag()->compile_timestamp_ != timestamp_) return;
#endif
      ASSERT(result_ == Code::null());
      result_ = code;
    }

    CodePtr result() const { return result_; }

   private:
    uword pc_;
    int64_t timestamp_;
    CodePtr result_;
  };

  HeapIterationScope iteration(Thread::Current());
  SlowFindCodeVisitor visitor(pc, timestamp);
  iteration.IterateVMIsolateObjects(&visitor);
  iteration.IterateOldObjectsNoImagePages(&visitor);
  return visitor.result();
}
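
// Sketch of the visitor pattern FindCode uses (illustrative only): any
// ObjectVisitor subclass can be driven over the heap the same way, e.g. to
// count Code objects:
//
//   class CountCodeVisitor : public ObjectVisitor {
//    public:
//     void VisitObject(ObjectPtr obj) override {
//       if (obj->IsCode()) count_++;
//     }
//     intptr_t count() const { return count_; }
//
//    private:
//     intptr_t count_ = 0;
//   };
//
//   HeapIterationScope iteration(Thread::Current());
//   CountCodeVisitor visitor;
//   iteration.IterateOldObjectsNoImagePages(&visitor);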

TokenPosition Code::GetTokenIndexOfPC(uword pc) const {
  uword pc_offset = pc - PayloadStart();
  const PcDescriptors& descriptors = PcDescriptors::Handle(pc_descriptors());
  PcDescriptors::Iterator iter(descriptors, UntaggedPcDescriptors::kAnyKind);
  while (iter.MoveNext()) {
    if (iter.PcOffset() == pc_offset) {
      return iter.TokenPos();
    }
  }
  return TokenPosition::kNoSource;
}

uword Code::GetPcForDeoptId(intptr_t deopt_id,
                            UntaggedPcDescriptors::Kind kind) const {
  const PcDescriptors& descriptors = PcDescriptors::Handle(pc_descriptors());
  PcDescriptors::Iterator iter(descriptors, kind);
  while (iter.MoveNext()) {
    if (iter.DeoptId() == deopt_id) {
      uword pc_offset = iter.PcOffset();
      uword pc = PayloadStart() + pc_offset;
      ASSERT(ContainsInstructionAt(pc));
      return pc;
    }
  }
  return 0;
}

intptr_t Code::GetDeoptIdForOsr(uword pc) const {
  uword pc_offset = pc - PayloadStart();
  const PcDescriptors& descriptors = PcDescriptors::Handle(pc_descriptors());
  PcDescriptors::Iterator iter(descriptors, UntaggedPcDescriptors::kOsrEntry);
  while (iter.MoveNext()) {
    if (iter.PcOffset() == pc_offset) {
      return iter.DeoptId();
    }
  }
  return DeoptId::kNone;
}

const char* Code::ToCString() const {
  return OS::SCreate(Thread::Current()->zone(), "Code(%s)",
                     QualifiedName(NameFormattingParams(
                         kScrubbedName, NameDisambiguation::kYes)));
}

uint32_t Code::Hash() const {
  // PayloadStart() is a tempting hash as Instructions are not moved by the
  // compactor, but Instructions are effectively moved between the process
  // creating an AppJIT/AOT snapshot and the process loading the snapshot.
  const Object& obj =
      Object::Handle(WeakSerializationReference::UnwrapIfTarget(owner()));
  if (obj.IsClass()) {
    return Class::Cast(obj).Hash();
  } else if (obj.IsAbstractType()) {
    return AbstractType::Cast(obj).Hash();
  } else if (obj.IsFunction()) {
    return Function::Cast(obj).Hash();
  } else {
    // E.g., VM stub.
    return 42;
  }
}

const char* Code::Name() const {
  Zone* zone = Thread::Current()->zone();
  if (IsStubCode()) {
    // Regular stub.
    const char* name = StubCode::NameOfStub(EntryPoint());
    if (name == nullptr) {
      return "[unknown stub]";  // Not yet recorded.
    }
    return OS::SCreate(zone, "[Stub] %s", name);
  }
  const auto& obj =
      Object::Handle(zone, WeakSerializationReference::UnwrapIfTarget(owner()));
  if (obj.IsClass()) {
    // Allocation stub.
    return OS::SCreate(zone, "[Stub] Allocate %s",
                       Class::Cast(obj).ScrubbedNameCString());
  } else if (obj.IsAbstractType()) {
    // Type test stub.
    return OS::SCreate(zone, "[Stub] Type Test %s",
                       AbstractType::Cast(obj).ToCString());
  } else {
    ASSERT(IsFunctionCode());
    // Dart function.
    const char* opt = is_optimized() ? "[Optimized]" : "[Unoptimized]";
    const char* function_name =
        obj.IsFunction()
            ? String::Handle(zone, Function::Cast(obj).UserVisibleName())
                  .ToCString()
            : WeakSerializationReference::Cast(obj).ToCString();
    return OS::SCreate(zone, "%s %s", opt, function_name);
  }
}

const char* Code::QualifiedName(const NameFormattingParams& params) const {
  Zone* zone = Thread::Current()->zone();
  const Object& obj =
      Object::Handle(zone, WeakSerializationReference::UnwrapIfTarget(owner()));
  if (obj.IsFunction()) {
    ZoneTextBuffer printer(zone);
    printer.AddString(is_optimized() ? "[Optimized] " : "[Unoptimized] ");
    Function::Cast(obj).PrintName(params, &printer);
    return printer.buffer();
  }
  return Name();
}

bool Code::IsStubCode() const {
  // We should _not_ unwrap any possible WSRs here, as the null value is never
  // wrapped by a WSR.
  return owner() == Object::null();
}

bool Code::IsAllocationStubCode() const {
  return OwnerClassId() == kClassCid;
}

bool Code::IsTypeTestStubCode() const {
  auto const cid = OwnerClassId();
  return cid == kAbstractTypeCid || cid == kTypeCid ||
         cid == kFunctionTypeCid || cid == kRecordTypeCid ||
         cid == kTypeParameterCid;
}

bool Code::IsFunctionCode() const {
  return OwnerClassId() == kFunctionCid;
}

bool Code::IsUnknownDartCode(CodePtr code) {
  return StubCode::HasBeenInitialized() &&
         (code == StubCode::UnknownDartCode().ptr());
}

void Code::DisableDartCode() const {
  GcSafepointOperationScope safepoint(Thread::Current());
  ASSERT(IsFunctionCode());
  ASSERT(instructions() == active_instructions());
  const Code& new_code = StubCode::FixCallersTarget();
  SetActiveInstructions(Instructions::Handle(new_code.instructions()),
                        new_code.UncheckedEntryPointOffset());
}

void Code::DisableStubCode(bool is_cls_parameterized) const {
  GcSafepointOperationScope safepoint(Thread::Current());
  ASSERT(IsAllocationStubCode());
  ASSERT(instructions() == active_instructions());
  const Code& new_code = is_cls_parameterized
                             ? StubCode::FixParameterizedAllocationStubTarget()
                             : StubCode::FixAllocationStubTarget();
  SetActiveInstructions(Instructions::Handle(new_code.instructions()),
                        new_code.UncheckedEntryPointOffset());
}
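
// Writes the four cached entry points for [code] based on [instructions]:
// the normal and monomorphic entry points, plus their "unchecked" variants,
// which sit [unchecked_offset] bytes past the corresponding checked entry
// and are used for calls that can skip entry-time argument checks.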
void Code::InitializeCachedEntryPointsFrom(CodePtr code,
                                           InstructionsPtr instructions,
                                           uint32_t unchecked_offset) {
  NoSafepointScope _;
  const uword entry_point = Instructions::EntryPoint(instructions);
  const uword monomorphic_entry_point =
      Instructions::MonomorphicEntryPoint(instructions);
  code->untag()->entry_point_ = entry_point;
  code->untag()->monomorphic_entry_point_ = monomorphic_entry_point;
  code->untag()->unchecked_entry_point_ = entry_point + unchecked_offset;
  code->untag()->monomorphic_unchecked_entry_point_ =
      monomorphic_entry_point + unchecked_offset;
}

void Code::SetActiveInstructions(const Instructions& instructions,
                                 uint32_t unchecked_offset) const {
#if defined(DART_PRECOMPILED_RUNTIME)
  UNREACHABLE();
#else
  ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
  SetActiveInstructionsSafe(instructions, unchecked_offset);
#endif
}

void Code::SetActiveInstructionsSafe(const Instructions& instructions,
                                     uint32_t unchecked_offset) const {
#if defined(DART_PRECOMPILED_RUNTIME)
  UNREACHABLE();
#else
  // RawInstructions are never allocated in New space and hence a
  // store buffer update is not needed here.
  untag()->set_active_instructions(instructions.ptr());
  Code::InitializeCachedEntryPointsFrom(ptr(), instructions.ptr(),
                                        unchecked_offset);
#endif
}

void Code::ResetActiveInstructions() const {
#if defined(DART_PRECOMPILED_RUNTIME)
  UNREACHABLE();
#else
  SetActiveInstructions(Instructions::Handle(instructions()),
                        untag()->unchecked_offset_);
#endif
}

void Code::GetInlinedFunctionsAtInstruction(
    intptr_t pc_offset,
    GrowableArray<const Function*>* functions,
    GrowableArray<TokenPosition>* token_positions) const {
  const CodeSourceMap& map = CodeSourceMap::Handle(code_source_map());
  if (map.IsNull()) {
    ASSERT(!IsFunctionCode());
    return;  // VM stub, allocation stub, or type testing stub.
  }
  const Array& id_map = Array::Handle(inlined_id_to_function());
  const Function& root = Function::Handle(function());
  CodeSourceMapReader reader(map, id_map, root);
  reader.GetInlinedFunctionsAt(pc_offset, functions, token_positions);
}

#ifndef PRODUCT
void Code::PrintJSONInlineIntervals(JSONObject* jsobj) const {
  if (!is_optimized()) {
    return;  // No inlining.
  }
  const CodeSourceMap& map = CodeSourceMap::Handle(code_source_map());
  const Array& id_map = Array::Handle(inlined_id_to_function());
  const Function& root = Function::Handle(function());
  CodeSourceMapReader reader(map, id_map, root);
  reader.PrintJSONInlineIntervals(jsobj);
}
#endif

void Code::DumpInlineIntervals() const {
  const CodeSourceMap& map = CodeSourceMap::Handle(code_source_map());
  if (map.IsNull()) {
    // Stub code.
    return;
  }
  const Array& id_map = Array::Handle(inlined_id_to_function());
  const Function& root = Function::Handle(function());
  CodeSourceMapReader reader(map, id_map, root);
  reader.DumpInlineIntervals(PayloadStart());
}

void Code::DumpSourcePositions(bool relative_addresses) const {
  const CodeSourceMap& map = CodeSourceMap::Handle(code_source_map());
  if (map.IsNull()) {
    // Stub code.
    return;
  }
  const Array& id_map = Array::Handle(inlined_id_to_function());
  const Function& root = Function::Handle(function());
  CodeSourceMapReader reader(map, id_map, root);
  reader.DumpSourcePositions(relative_addresses ? 0 : PayloadStart());
}

intptr_t Context::GetLevel() const {
  intptr_t level = 0;
  Context& parent_ctx = Context::Handle(parent());
  while (!parent_ctx.IsNull()) {
    level++;
    parent_ctx = parent_ctx.parent();
  }
  return level;
}

ContextPtr Context::New(intptr_t num_variables, Heap::Space space) {
  ASSERT(num_variables >= 0);
  ASSERT(Object::context_class() != Class::null());

  if (!IsValidLength(num_variables)) {
    // This should be caught before we reach here.
    FATAL("Fatal error in Context::New: invalid num_variables %" Pd "\n",
          num_variables);
  }
  auto raw = Object::Allocate<Context>(space, num_variables);
  NoSafepointScope no_safepoint;
  raw->untag()->num_variables_ = num_variables;
  return raw;
}

const char* Context::ToCString() const {
  if (IsNull()) {
    return "Context: null";
  }
  Zone* zone = Thread::Current()->zone();
  const Context& parent_ctx = Context::Handle(parent());
  if (parent_ctx.IsNull()) {
    return zone->PrintToString("Context num_variables: %" Pd "",
                               num_variables());
  } else {
    const char* parent_str = parent_ctx.ToCString();
    return zone->PrintToString("Context num_variables: %" Pd " parent:{ %s }",
                               num_variables(), parent_str);
  }
}

static void IndentN(int count) {
  for (int i = 0; i < count; i++) {
    THR_Print(" ");
  }
}

void Context::Dump(int indent) const {
  if (IsNull()) {
    IndentN(indent);
    THR_Print("Context@null\n");
    return;
  }

  IndentN(indent);
  THR_Print("Context vars(%" Pd ") {\n", num_variables());
  Object& obj = Object::Handle();
  for (intptr_t i = 0; i < num_variables(); i++) {
    IndentN(indent + 2);
    obj = At(i);
    const char* s = obj.ToCString();
    if (strlen(s) > 50) {
      THR_Print("[%" Pd "] = [first 50 chars:] %.50s...\n", i, s);
    } else {
      THR_Print("[%" Pd "] = %s\n", i, s);
    }
  }

  const Context& parent_ctx = Context::Handle(parent());
  if (!parent_ctx.IsNull()) {
    parent_ctx.Dump(indent + 2);
  }
  IndentN(indent);
  THR_Print("}\n");
}

ContextScopePtr ContextScope::New(intptr_t num_variables, bool is_implicit) {
  ASSERT(Object::context_scope_class() != Class::null());
  if (num_variables < 0 || num_variables > kMaxElements) {
    // This should be caught before we reach here.
    FATAL("Fatal error in ContextScope::New: invalid num_variables %" Pd "\n",
          num_variables);
  }
  ContextScope& result = ContextScope::Handle();
  {
    auto raw = Object::Allocate<ContextScope>(Heap::kOld, num_variables);
    NoSafepointScope no_safepoint;
    result = raw;
    result.set_num_variables(num_variables);
  }
  result.set_is_implicit(is_implicit);
  return result.ptr();
}

TokenPosition ContextScope::TokenIndexAt(intptr_t scope_index) const {
  return TokenPosition::Deserialize(
      Smi::Value(untag()->token_pos_at(scope_index)));
}

void ContextScope::SetTokenIndexAt(intptr_t scope_index,
                                   TokenPosition token_pos) const {
  untag()->set_token_pos_at(scope_index, Smi::New(token_pos.Serialize()));
}

TokenPosition ContextScope::DeclarationTokenIndexAt(
    intptr_t scope_index) const {
  return TokenPosition::Deserialize(
      Smi::Value(untag()->declaration_token_pos_at(scope_index)));
}

void ContextScope::SetDeclarationTokenIndexAt(
    intptr_t scope_index,
    TokenPosition declaration_token_pos) const {
  untag()->set_declaration_token_pos_at(
      scope_index, Smi::New(declaration_token_pos.Serialize()));
}

StringPtr ContextScope::NameAt(intptr_t scope_index) const {
  return untag()->name_at(scope_index);
}

void ContextScope::SetNameAt(intptr_t scope_index, const String& name) const {
  untag()->set_name_at(scope_index, name.ptr());
}

void ContextScope::ClearFlagsAt(intptr_t scope_index) const {
  untag()->set_flags_at(scope_index, Smi::New(0));
}

bool ContextScope::GetFlagAt(intptr_t scope_index, intptr_t bit_index) const {
  const intptr_t mask = 1 << bit_index;
  return (Smi::Value(untag()->flags_at(scope_index)) & mask) != 0;
}

void ContextScope::SetFlagAt(intptr_t scope_index,
                             intptr_t bit_index,
                             bool value) const {
  const intptr_t mask = 1 << bit_index;
  intptr_t flags = Smi::Value(untag()->flags_at(scope_index));
  untag()->set_flags_at(scope_index,
                        Smi::New(value ? flags | mask : flags & ~mask));
}
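
// The macro below expands once per flag in
// CONTEXT_SCOPE_VARIABLE_DESC_FLAG_LIST, defining paired Is<Name>At /
// SetIs<Name>At accessors that read and write one bit of a variable's flags
// Smi via GetFlagAt and SetFlagAt.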
#define DEFINE_FLAG_ACCESSORS(Name)                                            \
  bool ContextScope::Is##Name##At(intptr_t scope_index) const {                \
    return GetFlagAt(scope_index,                                              \
                     UntaggedContextScope::VariableDesc::kIs##Name);           \
  }                                                                            \
                                                                               \
  void ContextScope::SetIs##Name##At(intptr_t scope_index, bool value) const { \
    SetFlagAt(scope_index, UntaggedContextScope::VariableDesc::kIs##Name,      \
              value);                                                          \
  }

CONTEXT_SCOPE_VARIABLE_DESC_FLAG_LIST(DEFINE_FLAG_ACCESSORS)
#undef DEFINE_FLAG_ACCESSORS

intptr_t ContextScope::LateInitOffsetAt(intptr_t scope_index) const {
  return Smi::Value(untag()->late_init_offset_at(scope_index));
}

void ContextScope::SetLateInitOffsetAt(intptr_t scope_index,
                                       intptr_t late_init_offset) const {
  untag()->set_late_init_offset_at(scope_index, Smi::New(late_init_offset));
}

AbstractTypePtr ContextScope::TypeAt(intptr_t scope_index) const {
  ASSERT(!IsConstAt(scope_index));
  return untag()->type_at(scope_index);
}

void ContextScope::SetTypeAt(intptr_t scope_index,
                             const AbstractType& type) const {
  untag()->set_type_at(scope_index, type.ptr());
}

InstancePtr ContextScope::ConstValueAt(intptr_t scope_index) const {
  ASSERT(IsConstAt(scope_index));
  return untag()->value_at(scope_index);
}

void ContextScope::SetConstValueAt(intptr_t scope_index,
                                   const Instance& value) const {
  ASSERT(IsConstAt(scope_index));
  untag()->set_value_at(scope_index, value.ptr());
}

intptr_t ContextScope::ContextIndexAt(intptr_t scope_index) const {
  return Smi::Value(untag()->context_index_at(scope_index));
}

void ContextScope::SetContextIndexAt(intptr_t scope_index,
                                     intptr_t context_index) const {
  untag()->set_context_index_at(scope_index, Smi::New(context_index));
}

intptr_t ContextScope::ContextLevelAt(intptr_t scope_index) const {
  return Smi::Value(untag()->context_level_at(scope_index));
}

void ContextScope::SetContextLevelAt(intptr_t scope_index,
                                     intptr_t context_level) const {
  untag()->set_context_level_at(scope_index, Smi::New(context_level));
}

intptr_t ContextScope::KernelOffsetAt(intptr_t scope_index) const {
  return Smi::Value(untag()->kernel_offset_at(scope_index));
}

void ContextScope::SetKernelOffsetAt(intptr_t scope_index,
                                     intptr_t kernel_offset) const {
  untag()->set_kernel_offset_at(scope_index, Smi::New(kernel_offset));
}

const char* ContextScope::ToCString() const {
  const char* prev_cstr = "ContextScope:";
  String& name = String::Handle();
  for (int i = 0; i < num_variables(); i++) {
    name = NameAt(i);
    const char* cname = name.ToCString();
    TokenPosition pos = TokenIndexAt(i);
    intptr_t idx = ContextIndexAt(i);
    intptr_t lvl = ContextLevelAt(i);
    char* chars =
        OS::SCreate(Thread::Current()->zone(),
                    "%s\nvar %s token-pos %s ctx lvl %" Pd " index %" Pd "",
                    prev_cstr, cname, pos.ToCString(), lvl, idx);
    prev_cstr = chars;
  }
  return prev_cstr;
}

SentinelPtr Sentinel::New() {
  return Object::Allocate<Sentinel>(Heap::kOld);
}

const char* Sentinel::ToCString() const {
  if (ptr() == Object::sentinel().ptr()) {
    return "sentinel";
  } else if (ptr() == Object::transition_sentinel().ptr()) {
    return "transition_sentinel";
  } else if (ptr() == Object::unknown_constant().ptr()) {
    return "unknown_constant";
  } else if (ptr() == Object::non_constant().ptr()) {
    return "non_constant";
  } else if (ptr() == Object::optimized_out().ptr()) {
    return "<optimized out>";
  }
  return "Sentinel(unknown)";
}

ArrayPtr MegamorphicCache::buckets() const {
  return untag()->buckets();
}

void MegamorphicCache::set_buckets(const Array& buckets) const {
  untag()->set_buckets(buckets.ptr());
}

// Class IDs in the table are smi-tagged, so we use a smi-tagged mask
// and target class ID to avoid untagging (on each iteration of the
// test loop) in generated code.
intptr_t MegamorphicCache::mask() const {
  return Smi::Value(untag()->mask());
}

void MegamorphicCache::set_mask(intptr_t mask) const {
  untag()->set_mask(Smi::New(mask));
}

intptr_t MegamorphicCache::filled_entry_count() const {
  return untag()->filled_entry_count_;
}

void MegamorphicCache::set_filled_entry_count(intptr_t count) const {
  StoreNonPointer(&untag()->filled_entry_count_, count);
}

MegamorphicCachePtr MegamorphicCache::New() {
  return Object::Allocate<MegamorphicCache>(Heap::kOld);
}

MegamorphicCachePtr MegamorphicCache::New(const String& target_name,
                                          const Array& arguments_descriptor) {
  auto* const zone = Thread::Current()->zone();
  const auto& result = MegamorphicCache::Handle(
      zone, Object::Allocate<MegamorphicCache>(Heap::kOld));
  const intptr_t capacity = kInitialCapacity;
  const Array& buckets =
      Array::Handle(zone, Array::New(kEntryLength * capacity, Heap::kOld));
  const Object& handler = Object::Handle(zone);
  for (intptr_t i = 0; i < capacity; ++i) {
    SetEntry(buckets, i, smi_illegal_cid(), handler);
  }
  result.set_buckets(buckets);
  result.set_mask(capacity - 1);
  result.set_target_name(target_name);
  result.set_arguments_descriptor(arguments_descriptor);
  result.set_filled_entry_count(0);
  return result.ptr();
}

void MegamorphicCache::EnsureContains(const Smi& class_id,
                                      const Object& target) const {
  SafepointMutexLocker ml(IsolateGroup::Current()->type_feedback_mutex());

  if (LookupLocked(class_id) == Object::null()) {
    InsertLocked(class_id, target);
  }

#if defined(DEBUG)
  ASSERT(LookupLocked(class_id) == target.ptr());
#endif  // defined(DEBUG)
}

ObjectPtr MegamorphicCache::Lookup(const Smi& class_id) const {
  SafepointMutexLocker ml(IsolateGroup::Current()->type_feedback_mutex());
  return LookupLocked(class_id);
}

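// Open-addressing lookup over the bucket array: the class id is scattered
// with kSpreadFactor, masked to a starting slot, then probed linearly with
// wraparound until the class id matches (hit) or a slot still holding
// kIllegalCid is reached (miss).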
ObjectPtr MegamorphicCache::LookupLocked(const Smi& class_id) const {
  auto thread = Thread::Current();
  auto isolate_group = thread->isolate_group();
  auto zone = thread->zone();
  ASSERT(thread->IsDartMutatorThread());
  ASSERT(isolate_group->type_feedback_mutex()->IsOwnedByCurrentThread());

  const auto& backing_array = Array::Handle(zone, buckets());
  intptr_t id_mask = mask();
  intptr_t index = (class_id.Value() * kSpreadFactor) & id_mask;
  intptr_t i = index;
  do {
    const classid_t current_cid =
        Smi::Value(Smi::RawCast(GetClassId(backing_array, i)));
    if (current_cid == class_id.Value()) {
      return GetTargetFunction(backing_array, i);
    } else if (current_cid == kIllegalCid) {
      return Object::null();
    }
    i = (i + 1) & id_mask;
  } while (i != index);
  UNREACHABLE();
}

void MegamorphicCache::InsertLocked(const Smi& class_id,
                                    const Object& target) const {
  auto isolate_group = IsolateGroup::Current();
  ASSERT(isolate_group->type_feedback_mutex()->IsOwnedByCurrentThread());

  // As opposed to ICData we are stopping mutator threads from other isolates
  // while modifying the megamorphic cache, since updates are not atomic.
  //
  // NOTE: In the future we might change the megamorphic cache insertions to
  // carefully use store-release barriers on the writer as well as
  // load-acquire barriers on the reader, ...
  isolate_group->RunWithStoppedMutators(
      [&]() {
        EnsureCapacityLocked();
        InsertEntryLocked(class_id, target);
      },
      /*use_force_growth=*/true);
}

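// Doubles the bucket array once one more insertion would push the fill count
// past kLoadFactor of the capacity, then re-inserts every occupied entry,
// since each entry's probe sequence depends on the new mask.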
void MegamorphicCache::EnsureCapacityLocked() const {
  auto thread = Thread::Current();
  auto zone = thread->zone();
  auto isolate_group = thread->isolate_group();
  ASSERT(isolate_group->type_feedback_mutex()->IsOwnedByCurrentThread());

  intptr_t old_capacity = mask() + 1;
  double load_limit = kLoadFactor * static_cast<double>(old_capacity);
  if (static_cast<double>(filled_entry_count() + 1) > load_limit) {
    const Array& old_buckets = Array::Handle(zone, buckets());
    intptr_t new_capacity = old_capacity * 2;
    const Array& new_buckets =
        Array::Handle(zone, Array::New(kEntryLength * new_capacity));

    auto& target = Object::Handle(zone);
    for (intptr_t i = 0; i < new_capacity; ++i) {
      SetEntry(new_buckets, i, smi_illegal_cid(), target);
    }
    set_buckets(new_buckets);
    set_mask(new_capacity - 1);
    set_filled_entry_count(0);

    // Rehash the valid entries.
    Smi& class_id = Smi::Handle(zone);
    for (intptr_t i = 0; i < old_capacity; ++i) {
      class_id ^= GetClassId(old_buckets, i);
      if (class_id.Value() != kIllegalCid) {
        target = GetTargetFunction(old_buckets, i);
        InsertEntryLocked(class_id, target);
      }
    }
  }
}

void MegamorphicCache::InsertEntryLocked(const Smi& class_id,
                                         const Object& target) const {
  auto thread = Thread::Current();
  auto isolate_group = thread->isolate_group();
  ASSERT(isolate_group->type_feedback_mutex()->IsOwnedByCurrentThread());

  ASSERT(Thread::Current()->IsDartMutatorThread());
  ASSERT(static_cast<double>(filled_entry_count() + 1) <=
         (kLoadFactor * static_cast<double>(mask() + 1)));
  const Array& backing_array = Array::Handle(buckets());
  intptr_t id_mask = mask();
  intptr_t index = (class_id.Value() * kSpreadFactor) & id_mask;
  intptr_t i = index;
  do {
    if (Smi::Value(Smi::RawCast(GetClassId(backing_array, i))) ==
        kIllegalCid) {
      SetEntry(backing_array, i, class_id, target);
      set_filled_entry_count(filled_entry_count() + 1);
      return;
    }
    i = (i + 1) & id_mask;
  } while (i != index);
  UNREACHABLE();
}

const char* MegamorphicCache::ToCString() const {
  const String& name = String::Handle(target_name());
  return OS::SCreate(Thread::Current()->zone(), "MegamorphicCache(%s)",
                     name.ToCString());
}

SubtypeTestCachePtr SubtypeTestCache::New(intptr_t num_inputs) {
  ASSERT(Object::subtypetestcache_class() != Class::null());
  ASSERT(num_inputs >= 1);
  ASSERT(num_inputs <= kMaxInputs);
  // SubtypeTestCache objects are long-lived objects, so allocate them in the
  // old generation.
  const auto& result =
      SubtypeTestCache::Handle(Object::Allocate<SubtypeTestCache>(Heap::kOld));
  ASSERT_EQUAL(result.num_occupied(), 0);
  result.untag()->num_inputs_ = num_inputs;
  result.set_cache(Object::empty_subtype_test_cache_array());
  return result.ptr();
}

ArrayPtr SubtypeTestCache::cache() const {
  return untag()->cache<std::memory_order_acquire>();
}

void SubtypeTestCache::set_cache(const Array& value) const {
  // We have to ensure that initializing stores to the array are available
  // when releasing the pointer to the array pointer.
  // => We have to use store-release here.
  untag()->set_cache<std::memory_order_release>(value.ptr());
}

void SubtypeTestCache::set_num_occupied(intptr_t value) const {
  ASSERT(Utils::IsUint(32, value));
  untag()->num_occupied_ = value;
}

intptr_t SubtypeTestCache::NumberOfChecks() const {
  ASSERT(!IsNull());
  return num_occupied();
}

intptr_t SubtypeTestCache::NumEntries() const {
  ASSERT(!IsNull());
  return Array::LengthOf(cache()) / kTestEntryLength;
}

intptr_t SubtypeTestCache::NumEntries(const Array& array) {
  SubtypeTestCacheTable table(array);
  return table.Length();
}

bool SubtypeTestCache::IsHash() const {
  if (IsNull()) return false;
  return Array::LengthOf(cache()) > kMaxLinearCacheSize;
}

bool SubtypeTestCache::IsHash(const Array& array) {
  return array.Length() > kMaxLinearCacheSize;
}

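// Adds a new check: grows the backing array if needed, finds the slot for
// the key inputs, and fills the entry so that the occupancy field
// (kInstanceCidOrSignature) is written last. If the array is already visible
// to concurrent readers, that final write uses store-release so readers
// never observe a partially initialized entry.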
intptr_t SubtypeTestCache::AddCheck(
    const Object& instance_class_id_or_signature,
    const AbstractType& destination_type,
    const TypeArguments& instance_type_arguments,
    const TypeArguments& instantiator_type_arguments,
    const TypeArguments& function_type_arguments,
    const TypeArguments& instance_parent_function_type_arguments,
    const TypeArguments& instance_delayed_type_arguments,
    const Bool& test_result) const {
  ASSERT(Thread::Current()
             ->isolate_group()
             ->subtype_test_cache_mutex()
             ->IsOwnedByCurrentThread());
  ASSERT(!test_result.IsNull());
  ASSERT(Smi::New(kRecordCid) != instance_class_id_or_signature.ptr());

  const intptr_t old_num = NumberOfChecks();
  Zone* const zone = Thread::Current()->zone();
  Array& data = Array::Handle(zone, cache());
  bool was_grown;
  data = EnsureCapacity(zone, data, old_num + 1, &was_grown);
  ASSERT(data.ptr() != Object::empty_subtype_test_cache_array().ptr());

  const auto& loc = FindKeyOrUnused(
      data, num_inputs(), instance_class_id_or_signature, destination_type,
      instance_type_arguments, instantiator_type_arguments,
      function_type_arguments, instance_parent_function_type_arguments,
      instance_delayed_type_arguments);
  SubtypeTestCacheTable entries(data);
  const auto& entry = entries[loc.entry];
  if (loc.present) {
    if (entry.Get<kTestResult>() != test_result.ptr()) {
      const auto& old_result = Bool::Handle(zone, entry.Get<kTestResult>());
      FATAL("Existing subtype test cache entry has result %s, not %s",
            old_result.ToCString(), test_result.ToCString());
    }
    return loc.entry;
  }

  // Set the used elements in the entry in reverse order, so that the instance
  // cid or signature is last, then increment the number of entries.
  entry.Set<kTestResult>(test_result);
  switch (num_inputs()) {
    case 7:
      entry.Set<kDestinationType>(destination_type);
      FALL_THROUGH;
    case 6:
      entry.Set<kInstanceDelayedFunctionTypeArguments>(
          instance_delayed_type_arguments);
      FALL_THROUGH;
    case 5:
      entry.Set<kInstanceParentFunctionTypeArguments>(
          instance_parent_function_type_arguments);
      FALL_THROUGH;
    case 4:
      entry.Set<kFunctionTypeArguments>(function_type_arguments);
      FALL_THROUGH;
    case 3:
      entry.Set<kInstantiatorTypeArguments>(instantiator_type_arguments);
      FALL_THROUGH;
    case 2:
      entry.Set<kInstanceTypeArguments>(instance_type_arguments);
      FALL_THROUGH;
    case 1:
      // If this is a new backing array, we don't need store-release barriers,
      // as no reader has access to the array until it is set as the backing
      // store (which is done with a store-release barrier).
      //
      // Otherwise, the instance cid or signature must be set last with a
      // store-release barrier, so concurrent readers can depend on a non-null
      // value meaning the rest of the entry is safe to load without barriers.
      if (was_grown) {
        entry.Set<kInstanceCidOrSignature>(instance_class_id_or_signature);
      } else {
        entry.Set<kInstanceCidOrSignature, std::memory_order_release>(
            instance_class_id_or_signature);
      }
      break;
    default:
      UNREACHABLE();
  }
  set_num_occupied(old_num + 1);
  if (was_grown) {
    set_cache(data);
  }
  return loc.entry;
}

static inline bool SubtypeTestCacheEntryMatches(
    const SubtypeTestCacheTable::TupleView& t,
    intptr_t num_inputs,
    const Object& instance_class_id_or_signature,
    const AbstractType& destination_type,
    const TypeArguments& instance_type_arguments,
    const TypeArguments& instantiator_type_arguments,
    const TypeArguments& function_type_arguments,
    const TypeArguments& instance_parent_function_type_arguments,
    const TypeArguments& instance_delayed_type_arguments) {
  switch (num_inputs) {
    case 7:
      if (t.Get<SubtypeTestCache::kDestinationType>() !=
          destination_type.ptr()) {
        return false;
      }
      FALL_THROUGH;
    case 6:
      if (t.Get<SubtypeTestCache::kInstanceDelayedFunctionTypeArguments>() !=
          instance_delayed_type_arguments.ptr()) {
        return false;
      }
      FALL_THROUGH;
    case 5:
      if (t.Get<SubtypeTestCache::kInstanceParentFunctionTypeArguments>() !=
          instance_parent_function_type_arguments.ptr()) {
        return false;
      }
      FALL_THROUGH;
    case 4:
      if (t.Get<SubtypeTestCache::kFunctionTypeArguments>() !=
          function_type_arguments.ptr()) {
        return false;
      }
      FALL_THROUGH;
    case 3:
      if (t.Get<SubtypeTestCache::kInstantiatorTypeArguments>() !=
          instantiator_type_arguments.ptr()) {
        return false;
      }
      FALL_THROUGH;
    case 2:
      if (t.Get<SubtypeTestCache::kInstanceTypeArguments>() !=
          instance_type_arguments.ptr()) {
        return false;
      }
      FALL_THROUGH;
    case 1:
      // We don't need to perform load-acquire semantics when re-retrieving
      // the kInstanceCidOrSignature field, as this is performed only if the
      // entry is occupied, and occupied entries never change.
      return t.Get<SubtypeTestCache::kInstanceCidOrSignature>() ==
             instance_class_id_or_signature.ptr();
    default:
      UNREACHABLE();
  }
}

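// Returns where the given key lives in [array]: {entry, true} if an occupied
// entry matches all used inputs, otherwise {entry, false} with the
// unoccupied slot at which such a key would be inserted.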
SubtypeTestCache::KeyLocation SubtypeTestCache::FindKeyOrUnused(
    const Array& array,
    intptr_t num_inputs,
    const Object& instance_class_id_or_signature,
    const AbstractType& destination_type,
    const TypeArguments& instance_type_arguments,
    const TypeArguments& instantiator_type_arguments,
    const TypeArguments& function_type_arguments,
    const TypeArguments& instance_parent_function_type_arguments,
    const TypeArguments& instance_delayed_type_arguments) {
  // Fast case for empty STCs.
  if (array.ptr() == Object::empty_subtype_test_cache_array().ptr()) {
    return {0, false};
  }
  const bool is_hash = IsHash(array);
  SubtypeTestCacheTable table(array);
  const intptr_t num_entries = table.Length();
  // For a linear cache, start at the first entry and probe linearly. This can
  // be done because a linear cache always has at least one unoccupied entry
  // after all the occupied ones.
  intptr_t probe = 0;
  intptr_t probe_distance = 1;
  if (is_hash) {
    // For a hash-based cache, instead start at an entry determined by the hash
    // of the keys.
    //
    // If we have an instance cid, then just use that as our starting hash.
    uint32_t hash =
        instance_class_id_or_signature.IsFunctionType()
            ? FunctionType::Cast(instance_class_id_or_signature).Hash()
            : Smi::Cast(instance_class_id_or_signature).Value();
    switch (num_inputs) {
      case 7:
        hash = CombineHashes(hash, destination_type.Hash());
        FALL_THROUGH;
      case 6:
        hash = CombineHashes(hash, instance_delayed_type_arguments.Hash());
        FALL_THROUGH;
      case 5:
        hash = CombineHashes(hash,
                             instance_parent_function_type_arguments.Hash());
        FALL_THROUGH;
      case 4:
        hash = CombineHashes(hash, function_type_arguments.Hash());
        FALL_THROUGH;
      case 3:
        hash = CombineHashes(hash, instantiator_type_arguments.Hash());
        FALL_THROUGH;
      case 2:
        hash = CombineHashes(hash, instance_type_arguments.Hash());
        FALL_THROUGH;
      case 1:
        break;
      default:
        UNREACHABLE();
    }
    hash = FinalizeHash(hash);
    probe = hash & (num_entries - 1);
  }
  while (true) {
    const auto& tuple = table.At(probe);
    if (tuple.Get<kInstanceCidOrSignature, std::memory_order_acquire>() ==
        Object::null()) {
      break;
    }
    if (SubtypeTestCacheEntryMatches(
            tuple, num_inputs, instance_class_id_or_signature,
            destination_type, instance_type_arguments,
            instantiator_type_arguments, function_type_arguments,
            instance_parent_function_type_arguments,
            instance_delayed_type_arguments)) {
      return {probe, true};
    }
    // Advance probe by the current probing distance.
    probe = probe + probe_distance;
    if (is_hash) {
      // Wrap around if the probe goes off the end of the entries array.
      probe = probe & (num_entries - 1);
      // We had a collision, so increase the probe distance. See comment in
      // EnsureCapacityLocked for an explanation of how this hits all slots.
      probe_distance++;
    }
  }
  return {probe, false};
}

ArrayPtr SubtypeTestCache::EnsureCapacity(Zone* zone,
                                          const Array& array,
                                          intptr_t new_occupied,
                                          bool* was_grown) const {
  ASSERT(new_occupied > NumberOfChecks());
  ASSERT(was_grown != nullptr);
  // How many entries are in the current array (including unoccupied entries).
  const intptr_t current_capacity = NumEntries(array);

  // Early returns for cases where no growth is needed.
  *was_grown = false;
  const bool is_linear = IsLinear(array);
  if (is_linear) {
    // We need at least one unoccupied entry in addition to the occupied ones.
    if (current_capacity > new_occupied) return array.ptr();
  } else {
    if (LoadFactor(new_occupied, current_capacity) < kMaxLoadFactor) {
      return array.ptr();
    }
  }

  // Every path from here should result in a new backing array.
  *was_grown = true;
  // Initially null for initializing unoccupied entries.
  auto& instance_cid_or_signature = Object::Handle(zone);
  if (new_occupied <= kMaxLinearCacheEntries) {
    ASSERT(is_linear);
    // Not enough room for both the new entry and at least one unoccupied
    // entry, so grow the tuple capacity of the linear cache by about 50%,
    // ensuring that space for at least one new tuple is added, capping the
    // total number of occupied entries to the max allowed.
    const intptr_t new_capacity =
        Utils::Minimum(current_capacity + (current_capacity >> 1),
                       kMaxLinearCacheEntries) +
        1;
    const intptr_t cache_size = new_capacity * kTestEntryLength;
    ASSERT(cache_size <= kMaxLinearCacheSize);
    const auto& new_data =
        Array::Handle(zone, Array::Grow(array, cache_size, Heap::kOld));
    ASSERT(!new_data.IsNull());
    // No need to adjust old entries, as they are copied over by Array::Grow.
    // Just mark any new entries as unoccupied.
    SubtypeTestCacheTable table(new_data);
    for (intptr_t i = current_capacity; i < new_capacity; i++) {
      const auto& tuple = table.At(i);
      tuple.Set<kInstanceCidOrSignature>(instance_cid_or_signature);
    }
    return new_data.ptr();
  }

  // Either we're converting a linear cache into a hash-based cache, or the
  // load factor of the hash-based cache has increased to the point where we
  // need to grow it.
  const intptr_t new_capacity =
      is_linear ? kNumInitialHashCacheEntries : 2 * current_capacity;
  // Because we use quadratic (actually triangle number) probing it is
  // important that the size is a power of two (otherwise we could fail to
  // find an empty slot). This is described in Knuth's The Art of Computer
  // Programming Volume 2, Chapter 6.4, exercise 20 (solution in the
  // appendix, 2nd edition).
  //
  // This is also important because when we do hash probing, we take the
  // calculated hash from the inputs and then calculate (hash % capacity) to
  // get the initial probe index. To ensure this is a fast calculation in the
  // stubs, we ensure the capacity is a power of 2, which allows
  // (hash % capacity) to be calculated as (hash & (capacity - 1)).
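  //
  // For example, with capacity 8 and initial probe index p, the probe
  // distance grows by one after each collision, so the visited slots are
  // p, p+1, p+3, p+6, p+2, p+7, p+5, p+4 (mod 8): the offsets are the
  // triangular numbers, and they cover all eight slots before any repeat.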
  ASSERT(Utils::IsPowerOfTwo(new_capacity));
  ASSERT(LoadFactor(new_occupied, new_capacity) < kMaxLoadFactor);
  const intptr_t new_size = new_capacity * kTestEntryLength;
  const auto& new_data =
      Array::Handle(zone, Array::NewUninitialized(new_size, Heap::kOld));
  ASSERT(!new_data.IsNull());
  // Mark all the entries in new_data as unoccupied.
  SubtypeTestCacheTable to_table(new_data);
  for (const auto& tuple : to_table) {
    tuple.Set<kInstanceCidOrSignature>(instance_cid_or_signature);
  }
  // Finally, copy over the entries.
  auto& destination_type = AbstractType::Handle(zone);
  auto& instance_type_arguments = TypeArguments::Handle(zone);
  auto& instantiator_type_arguments = TypeArguments::Handle(zone);
  auto& function_type_arguments = TypeArguments::Handle(zone);
  auto& instance_parent_function_type_arguments = TypeArguments::Handle(zone);
  auto& instance_delayed_type_arguments = TypeArguments::Handle(zone);
  auto& test_result = Bool::Handle(zone);
  const SubtypeTestCacheTable from_table(array);
  const intptr_t used_inputs = num_inputs();
  for (intptr_t i = 0; i < current_capacity; i++) {
    const auto& from_tuple = from_table.At(i);
    // Skip unoccupied entries.
    if (from_tuple.Get<kInstanceCidOrSignature>() == Object::null()) continue;
    GetCheckFromArray(array, used_inputs, i, &instance_cid_or_signature,
                      &destination_type, &instance_type_arguments,
                      &instantiator_type_arguments, &function_type_arguments,
                      &instance_parent_function_type_arguments,
                      &instance_delayed_type_arguments, &test_result);
    // Since new_data has a different total capacity, we can't use the old
    // entry indexes, but must recalculate them.
    auto loc = FindKeyOrUnused(
        new_data, used_inputs, instance_cid_or_signature, destination_type,
        instance_type_arguments, instantiator_type_arguments,
        function_type_arguments, instance_parent_function_type_arguments,
        instance_delayed_type_arguments);
    ASSERT(!loc.present);
    const auto& to_tuple = to_table.At(loc.entry);
    to_tuple.Set<kTestResult>(test_result);
    switch (used_inputs) {
      case 7:
        to_tuple.Set<kDestinationType>(destination_type);
        FALL_THROUGH;
      case 6:
        to_tuple.Set<kInstanceDelayedFunctionTypeArguments>(
            instance_delayed_type_arguments);
        FALL_THROUGH;
      case 5:
        to_tuple.Set<kInstanceParentFunctionTypeArguments>(
            instance_parent_function_type_arguments);
        FALL_THROUGH;
      case 4:
        to_tuple.Set<kFunctionTypeArguments>(function_type_arguments);
        FALL_THROUGH;
      case 3:
        to_tuple.Set<kInstantiatorTypeArguments>(instantiator_type_arguments);
        FALL_THROUGH;
      case 2:
        to_tuple.Set<kInstanceTypeArguments>(instance_type_arguments);
        FALL_THROUGH;
      case 1:
        to_tuple.Set<kInstanceCidOrSignature>(instance_cid_or_signature);
        break;
      default:
        UNREACHABLE();
    }
  }
  return new_data.ptr();
}

void SubtypeTestCache::GetCheck(
    intptr_t ix,
    Object* instance_class_id_or_signature,
    AbstractType* destination_type,
    TypeArguments* instance_type_arguments,
    TypeArguments* instantiator_type_arguments,
    TypeArguments* function_type_arguments,
    TypeArguments* instance_parent_function_type_arguments,
    TypeArguments* instance_delayed_type_arguments,
    Bool* test_result) const {
  ASSERT(Thread::Current()
             ->isolate_group()
             ->subtype_test_cache_mutex()
             ->IsOwnedByCurrentThread());
  GetCurrentCheck(ix, instance_class_id_or_signature, destination_type,
                  instance_type_arguments, instantiator_type_arguments,
                  function_type_arguments,
                  instance_parent_function_type_arguments,
                  instance_delayed_type_arguments, test_result);
}

void SubtypeTestCache::GetCurrentCheck(
    intptr_t ix,
    Object* instance_class_id_or_signature,
    AbstractType* destination_type,
    TypeArguments* instance_type_arguments,
    TypeArguments* instantiator_type_arguments,
    TypeArguments* function_type_arguments,
    TypeArguments* instance_parent_function_type_arguments,
    TypeArguments* instance_delayed_type_arguments,
    Bool* test_result) const {
  const Array& array = Array::Handle(cache());
  GetCheckFromArray(array, num_inputs(), ix, instance_class_id_or_signature,
                    destination_type, instance_type_arguments,
                    instantiator_type_arguments, function_type_arguments,
                    instance_parent_function_type_arguments,
                    instance_delayed_type_arguments, test_result);
}

void SubtypeTestCache::GetCheckFromArray(
    const Array& array,
    intptr_t num_inputs,
    intptr_t ix,
    Object* instance_class_id_or_signature,
    AbstractType* destination_type,
    TypeArguments* instance_type_arguments,
    TypeArguments* instantiator_type_arguments,
    TypeArguments* function_type_arguments,
    TypeArguments* instance_parent_function_type_arguments,
    TypeArguments* instance_delayed_type_arguments,
    Bool* test_result) {
  ASSERT(array.ptr() != Object::empty_subtype_test_cache_array().ptr());
  SubtypeTestCacheTable entries(array);
  auto entry = entries[ix];
  // First get the field that determines occupancy. We have to do this with
  // load-acquire because some callers may not have the subtype test cache
  // lock.
  *instance_class_id_or_signature =
      entry.Get<kInstanceCidOrSignature, std::memory_order_acquire>();
  // We should not be retrieving unoccupied entries.
  ASSERT(!instance_class_id_or_signature->IsNull());
  switch (num_inputs) {
    case 7:
      *destination_type = entry.Get<kDestinationType>();
      FALL_THROUGH;
    case 6:
      *instance_delayed_type_arguments =
          entry.Get<kInstanceDelayedFunctionTypeArguments>();
      FALL_THROUGH;
    case 5:
      *instance_parent_function_type_arguments =
          entry.Get<kInstanceParentFunctionTypeArguments>();
      FALL_THROUGH;
    case 4:
      *function_type_arguments = entry.Get<kFunctionTypeArguments>();
      FALL_THROUGH;
    case 3:
      *instantiator_type_arguments = entry.Get<kInstantiatorTypeArguments>();
      FALL_THROUGH;
    case 2:
      *instance_type_arguments = entry.Get<kInstanceTypeArguments>();
      FALL_THROUGH;
    case 1:
      break;
    default:
      UNREACHABLE();
  }
  *test_result = entry.Get<kTestResult>();
}

bool SubtypeTestCache::GetNextCheck(
    intptr_t* ix,
    Object* instance_class_id_or_signature,
    AbstractType* destination_type,
    TypeArguments* instance_type_arguments,
    TypeArguments* instantiator_type_arguments,
    TypeArguments* function_type_arguments,
    TypeArguments* instance_parent_function_type_arguments,
    TypeArguments* instance_delayed_type_arguments,
    Bool* test_result) const {
  ASSERT(ix != nullptr);
  for (intptr_t i = *ix; i < NumEntries(); i++) {
    ASSERT(Thread::Current()
               ->isolate_group()
               ->subtype_test_cache_mutex()
               ->IsOwnedByCurrentThread());
    if (IsOccupied(i)) {
      GetCurrentCheck(i, instance_class_id_or_signature, destination_type,
                      instance_type_arguments, instantiator_type_arguments,
                      function_type_arguments,
                      instance_parent_function_type_arguments,
                      instance_delayed_type_arguments, test_result);
      *ix = i + 1;
      return true;
    }
  }
  return false;
}

bool SubtypeTestCache::HasCheck(
    const Object& instance_class_id_or_signature,
    const AbstractType& destination_type,
    const TypeArguments& instance_type_arguments,
    const TypeArguments& instantiator_type_arguments,
    const TypeArguments& function_type_arguments,
    const TypeArguments& instance_parent_function_type_arguments,
    const TypeArguments& instance_delayed_type_arguments,
    intptr_t* index,
    Bool* result) const {
  const auto& data = Array::Handle(cache());
  auto loc = FindKeyOrUnused(
      data, num_inputs(), instance_class_id_or_signature, destination_type,
      instance_type_arguments, instantiator_type_arguments,
      function_type_arguments, instance_parent_function_type_arguments,
      instance_delayed_type_arguments);
  if (loc.present) {
    if (index != nullptr) {
      *index = loc.entry;
    }
    if (result != nullptr) {
      SubtypeTestCacheTable entries(data);
      const auto& entry = entries[loc.entry];
      // A positive result from FindKeyOrUnused means that load-acquire is not
      // needed, as an occupied entry never changes for a given backing array.
      *result = entry.Get<kTestResult>();
      ASSERT(!result->IsNull());
    }
  }
  return loc.present;
}

void SubtypeTestCache::WriteEntryToBuffer(Zone* zone,
                                          BaseTextBuffer* buffer,
                                          intptr_t index,
                                          const char* line_prefix) const {
  ASSERT(Thread::Current()
             ->isolate_group()
             ->subtype_test_cache_mutex()
             ->IsOwnedByCurrentThread());
  WriteCurrentEntryToBuffer(zone, buffer, index, line_prefix);
}

void SubtypeTestCache::WriteToBuffer(Zone* zone,
                                     BaseTextBuffer* buffer,
                                     const char* line_prefix) const {
  ASSERT(Thread::Current()
             ->isolate_group()
             ->subtype_test_cache_mutex()
             ->IsOwnedByCurrentThread());
  WriteToBufferUnlocked(zone, buffer, line_prefix);
}

void SubtypeTestCache::WriteCurrentEntryToBuffer(
    Zone* zone,
    BaseTextBuffer* buffer,
    intptr_t index,
    const char* line_prefix) const {
  const char* separator =
      line_prefix == nullptr ? ", " : OS::SCreate(zone, "\n%s", line_prefix);
  auto& instance_class_id_or_signature = Object::Handle(zone);
  auto& destination_type = AbstractType::Handle(zone);
  auto& instance_type_arguments = TypeArguments::Handle(zone);
  auto& instantiator_type_arguments = TypeArguments::Handle(zone);
  auto& function_type_arguments = TypeArguments::Handle(zone);
  auto& instance_parent_function_type_arguments = TypeArguments::Handle(zone);
  auto& instance_delayed_type_arguments = TypeArguments::Handle(zone);
  auto& result = Bool::Handle(zone);
  GetCurrentCheck(index, &instance_class_id_or_signature, &destination_type,
                  &instance_type_arguments, &instantiator_type_arguments,
                  &function_type_arguments,
                  &instance_parent_function_type_arguments,
                  &instance_delayed_type_arguments, &result);
  buffer->Printf(
      "%" Pd ": [ %#" Px ", %#" Px ", %#" Px ", %#" Px ", %#" Px ", %#" Px
      ", %#" Px ", %#" Px " ]",
      index, static_cast<uword>(instance_class_id_or_signature.ptr()),
      static_cast<uword>(instance_type_arguments.ptr()),
      static_cast<uword>(instantiator_type_arguments.ptr()),
      static_cast<uword>(function_type_arguments.ptr()),
      static_cast<uword>(instance_parent_function_type_arguments.ptr()),
      static_cast<uword>(instance_delayed_type_arguments.ptr()),
      static_cast<uword>(destination_type.ptr()),
      static_cast<uword>(result.ptr()));
  if (instance_class_id_or_signature.IsSmi()) {
    buffer->Printf("%sclass id: %" Pd "", separator,
                   Smi::Cast(instance_class_id_or_signature).Value());
  } else {
    buffer->Printf(
        "%ssignature: %s", separator,
        FunctionType::Cast(instance_class_id_or_signature).ToCString());
  }
  if (!instance_type_arguments.IsNull()) {
    if (instance_class_id_or_signature.IsSmi()) {
      buffer->Printf("%sinstance type arguments: %s", separator,
                     instance_type_arguments.ToCString());
    } else {
      ASSERT(instance_class_id_or_signature.IsFunctionType());
      buffer->Printf("%sclosure instantiator function type arguments: %s",
                     separator, instance_type_arguments.ToCString());
    }
  }
  if (!instantiator_type_arguments.IsNull()) {
    buffer->Printf("%sinstantiator type arguments: %s", separator,
                   instantiator_type_arguments.ToCString());
  }
  if (!function_type_arguments.IsNull()) {
    buffer->Printf("%sfunction type arguments: %s", separator,
                   function_type_arguments.ToCString());
  }
  if (!instance_parent_function_type_arguments.IsNull()) {
    buffer->Printf("%sclosure parent function type arguments: %s", separator,
                   instance_parent_function_type_arguments.ToCString());
  }
  if (!instance_delayed_type_arguments.IsNull()) {
    buffer->Printf("%sclosure delayed function type arguments: %s", separator,
                   instance_delayed_type_arguments.ToCString());
  }
  if (!destination_type.IsNull()) {
    buffer->Printf("%sdestination type: %s", separator,
                   destination_type.ToCString());
    if (!destination_type.IsInstantiated()) {
      AbstractType& test_type = AbstractType::Handle(
          zone, destination_type.InstantiateFrom(instantiator_type_arguments,
                                                 function_type_arguments,
                                                 kAllFree, Heap::kNew));
      const auto type_class_id = test_type.type_class_id();
      buffer->Printf("%sinstantiated type: %s", separator,
                     test_type.ToCString());
      buffer->Printf("%sinstantiated type class id: %d", separator,
                     type_class_id);
    }
  }
  buffer->Printf("%sresult: %s", separator, result.ToCString());
}

void SubtypeTestCache::WriteToBufferUnlocked(Zone* zone,
                                             BaseTextBuffer* buffer,
                                             const char* line_prefix) const {
  const char* separator =
      line_prefix == nullptr ? " " : OS::SCreate(zone, "\n%s", line_prefix);
  const char* internal_line_prefix =
      line_prefix == nullptr
          ? nullptr
          : OS::SCreate(zone, "%s%s", line_prefix, line_prefix);
  const intptr_t num_entries = NumEntries();
  buffer->Printf("SubtypeTestCache(%" Pd ", %" Pd "", num_inputs(),
                 num_occupied());
  for (intptr_t i = 0; i < num_entries; i++) {
    if (!IsOccupied(i)) continue;
    buffer->Printf(",%s{", separator);
    WriteCurrentEntryToBuffer(zone, buffer, i, internal_line_prefix);
    buffer->Printf(line_prefix != nullptr ? "}" : " }");
  }
  buffer->AddString(line_prefix != nullptr && num_entries != 0 ? "\n)" : ")");
}

void SubtypeTestCache::Reset() const {
  set_num_occupied(0);
  set_cache(Object::empty_subtype_test_cache_array());
}

bool SubtypeTestCache::Equals(const SubtypeTestCache& other) const {
  ASSERT(Thread::Current()
             ->isolate_group()
             ->subtype_test_cache_mutex()
             ->IsOwnedByCurrentThread());
  if (ptr() == other.ptr()) {
    return true;
  }
  if (num_inputs() != other.num_inputs()) return false;
  if (num_occupied() != other.num_occupied()) return false;
  return Array::Handle(cache()).Equals(Array::Handle(other.cache()));
}

SubtypeTestCachePtr SubtypeTestCache::Copy(Thread* thread) const {
  ASSERT(thread->isolate_group()
             ->subtype_test_cache_mutex()
             ->IsOwnedByCurrentThread());
  if (IsNull()) {
    return SubtypeTestCache::null();
  }
  Zone* const zone = thread->zone();
  // STC caches are only copied on write if there are not enough unoccupied
  // entries to store a new one, so we need to copy the array.
  const auto& result =
      SubtypeTestCache::Handle(zone, SubtypeTestCache::New(num_inputs()));
  auto& entry_cache = Array::Handle(zone, cache());
  entry_cache = entry_cache.Copy();
  result.set_cache(entry_cache);
  result.set_num_occupied(num_occupied());
  return result.ptr();
}

bool SubtypeTestCache::IsOccupied(intptr_t index) const {
  ASSERT(!IsNull());
  ASSERT(index < NumEntries());
  const intptr_t cache_index =
      index * kTestEntryLength + kInstanceCidOrSignature;
  NoSafepointScope no_safepoint;
  return cache()->untag()->element<std::memory_order_acquire>(cache_index) !=
         Object::null();
}

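// Returns the number of cache inputs a check against [type] must key on:
// 2 if the type is fully instantiated, 3 if its only free variables are
// class type parameters (so the instantiator type arguments also matter),
// and 4 if it also depends on function type arguments. Other kinds of types
// conservatively use every input except the statically-known destination
// type.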
19868intptr_t SubtypeTestCache::UsedInputsForType(const AbstractType& type) {
19869 if (type.IsType()) {
19870 if (type.IsInstantiated()) return 2;
19871 if (type.IsInstantiated(genericity: kFunctions)) return 3;
19872 return 4;
19873 }
  // Default to all inputs except for the destination type, which must be
  // statically known; otherwise this method would not have been called.
  static_assert(kDestinationType == kMaxInputs - 1,
                "destination type is not last input");
  return kMaxInputs - 1;
}

const char* SubtypeTestCache::ToCString() const {
  auto const zone = Thread::Current()->zone();
  ZoneTextBuffer buffer(zone);
  WriteToBufferUnlocked(zone, &buffer);
  return buffer.buffer();
}

LoadingUnitPtr LoadingUnit::New() {
  ASSERT(Object::loadingunit_class() != Class::null());
  // LoadingUnit objects are long-lived, so allocate them in the old
  // generation.
  return Object::Allocate<LoadingUnit>(Heap::kOld);
}

LoadingUnitPtr LoadingUnit::parent() const {
  return untag()->parent();
}
void LoadingUnit::set_parent(const LoadingUnit& value) const {
  untag()->set_parent(value.ptr());
}

ArrayPtr LoadingUnit::base_objects() const {
  return untag()->base_objects();
}
void LoadingUnit::set_base_objects(const Array& value) const {
  untag()->set_base_objects(value.ptr());
}

const char* LoadingUnit::ToCString() const {
  return "LoadingUnit";
}

ObjectPtr LoadingUnit::IssueLoad() const {
  ASSERT(!loaded());
  ASSERT(!load_outstanding());
  set_load_outstanding(true);
  return Isolate::Current()->CallDeferredLoadHandler(id());
}

ObjectPtr LoadingUnit::CompleteLoad(const String& error_message,
                                    bool transient_error) const {
  ASSERT(!loaded());
  ASSERT(load_outstanding());
  set_loaded(error_message.IsNull());
  set_load_outstanding(false);

  const Library& lib = Library::Handle(Library::CoreLibrary());
  const String& sel = String::Handle(String::New("_completeLoads"));
  const Function& func = Function::Handle(lib.LookupFunctionAllowPrivate(sel));
  ASSERT(!func.IsNull());
  const Array& args = Array::Handle(Array::New(3));
  args.SetAt(0, Smi::Handle(Smi::New(id())));
  args.SetAt(1, error_message);
  args.SetAt(2, Bool::Get(transient_error));
  return DartEntry::InvokeFunction(func, args);
}

// The assignment to loading units here must match that in
// AssignLoadingUnitsCodeVisitor, which runs after compilation is done.
intptr_t LoadingUnit::LoadingUnitOf(const Function& function) {
  Thread* thread = Thread::Current();
  REUSABLE_CLASS_HANDLESCOPE(thread);
  REUSABLE_LIBRARY_HANDLESCOPE(thread);
  REUSABLE_LOADING_UNIT_HANDLESCOPE(thread);

  Class& cls = thread->ClassHandle();
  Library& lib = thread->LibraryHandle();
  LoadingUnit& unit = thread->LoadingUnitHandle();

  cls = function.Owner();
  lib = cls.library();
  unit = lib.loading_unit();
  ASSERT(!unit.IsNull());
  return unit.id();
}

intptr_t LoadingUnit::LoadingUnitOf(const Code& code) {
  if (code.IsStubCode() || code.IsTypeTestStubCode() ||
      code.IsAllocationStubCode()) {
    return LoadingUnit::kRootId;
  } else {
    Thread* thread = Thread::Current();
    REUSABLE_FUNCTION_HANDLESCOPE(thread);
    REUSABLE_CLASS_HANDLESCOPE(thread);
    REUSABLE_LIBRARY_HANDLESCOPE(thread);
    REUSABLE_LOADING_UNIT_HANDLESCOPE(thread);

    Class& cls = thread->ClassHandle();
    Library& lib = thread->LibraryHandle();
    LoadingUnit& unit = thread->LoadingUnitHandle();
    Function& func = thread->FunctionHandle();

    if (code.IsFunctionCode()) {
      func ^= code.function();
      cls = func.Owner();
      lib = cls.library();
      unit = lib.loading_unit();
      ASSERT(!unit.IsNull());
      return unit.id();
    } else {
      UNREACHABLE();
      return LoadingUnit::kIllegalId;
    }
  }
}

const char* Error::ToErrorCString() const {
  if (IsNull()) {
    return "Error: null";
  }
  UNREACHABLE();
  return "Error";
}

const char* Error::ToCString() const {
  if (IsNull()) {
    return "Error: null";
  }
  // Error is an abstract class. We should never reach here.
  UNREACHABLE();
  return "Error";
}

ApiErrorPtr ApiError::New() {
  ASSERT(Object::api_error_class() != Class::null());
  return Object::Allocate<ApiError>(Heap::kOld);
}

ApiErrorPtr ApiError::New(const String& message, Heap::Space space) {
#ifndef PRODUCT
  if (FLAG_print_stacktrace_at_api_error) {
    OS::PrintErr("ApiError: %s\n", message.ToCString());
    Profiler::DumpStackTrace(false /* for_crash */);
  }
#endif  // !PRODUCT

  ASSERT(Object::api_error_class() != Class::null());
  const auto& result = ApiError::Handle(Object::Allocate<ApiError>(space));
  result.set_message(message);
  return result.ptr();
}

void ApiError::set_message(const String& message) const {
  untag()->set_message(message.ptr());
}

const char* ApiError::ToErrorCString() const {
  const String& msg_str = String::Handle(message());
  return msg_str.ToCString();
}

const char* ApiError::ToCString() const {
  return "ApiError";
}

LanguageErrorPtr LanguageError::New() {
  ASSERT(Object::language_error_class() != Class::null());
  return Object::Allocate<LanguageError>(Heap::kOld);
}

LanguageErrorPtr LanguageError::NewFormattedV(const Error& prev_error,
                                              const Script& script,
                                              TokenPosition token_pos,
                                              bool report_after_token,
                                              Report::Kind kind,
                                              Heap::Space space,
                                              const char* format,
                                              va_list args) {
  ASSERT(Object::language_error_class() != Class::null());
  const auto& result =
      LanguageError::Handle(Object::Allocate<LanguageError>(space));
  result.set_previous_error(prev_error);
  result.set_script(script);
  result.set_token_pos(token_pos);
  result.set_report_after_token(report_after_token);
  result.set_kind(kind);
  result.set_message(
      String::Handle(String::NewFormattedV(format, args, space)));
  return result.ptr();
}

LanguageErrorPtr LanguageError::NewFormatted(const Error& prev_error,
                                             const Script& script,
                                             TokenPosition token_pos,
                                             bool report_after_token,
                                             Report::Kind kind,
                                             Heap::Space space,
                                             const char* format,
                                             ...) {
  va_list args;
  va_start(args, format);
  LanguageErrorPtr result = LanguageError::NewFormattedV(
      prev_error, script, token_pos, report_after_token, kind, space, format,
      args);
  NoSafepointScope no_safepoint;
  va_end(args);
  return result;
}

LanguageErrorPtr LanguageError::New(const String& formatted_message,
                                    Report::Kind kind,
                                    Heap::Space space) {
  ASSERT(Object::language_error_class() != Class::null());
  const auto& result =
      LanguageError::Handle(Object::Allocate<LanguageError>(space));
  result.set_formatted_message(formatted_message);
  result.set_kind(kind);
  return result.ptr();
}

void LanguageError::set_previous_error(const Error& value) const {
  untag()->set_previous_error(value.ptr());
}

void LanguageError::set_script(const Script& value) const {
  untag()->set_script(value.ptr());
}

void LanguageError::set_token_pos(TokenPosition token_pos) const {
  ASSERT(!token_pos.IsClassifying());
  StoreNonPointer(&untag()->token_pos_, token_pos);
}

void LanguageError::set_report_after_token(bool value) const {
  StoreNonPointer(&untag()->report_after_token_, value);
}

void LanguageError::set_kind(uint8_t value) const {
  StoreNonPointer(&untag()->kind_, value);
}

void LanguageError::set_message(const String& value) const {
  untag()->set_message(value.ptr());
}

void LanguageError::set_formatted_message(const String& value) const {
  untag()->set_formatted_message(value.ptr());
}

StringPtr LanguageError::FormatMessage() const {
  if (formatted_message() != String::null()) {
    return formatted_message();
  }
  String& result = String::Handle(
      Report::PrependSnippet(kind(), Script::Handle(script()), token_pos(),
                             report_after_token(), String::Handle(message())));
  // Prepend previous error message.
  const Error& prev_error = Error::Handle(previous_error());
  if (!prev_error.IsNull()) {
    result = String::Concat(
        String::Handle(String::New(prev_error.ToErrorCString())), result);
  }
  set_formatted_message(result);
  return result.ptr();
}

const char* LanguageError::ToErrorCString() const {
  const String& msg_str = String::Handle(FormatMessage());
  return msg_str.ToCString();
}

const char* LanguageError::ToCString() const {
  return "LanguageError";
}

UnhandledExceptionPtr UnhandledException::New(const Instance& exception,
                                              const Instance& stacktrace,
                                              Heap::Space space) {
  ASSERT(Object::unhandled_exception_class() != Class::null());
  const auto& result =
      UnhandledException::Handle(Object::Allocate<UnhandledException>(space));
  result.set_exception(exception);
  result.set_stacktrace(stacktrace);
  return result.ptr();
}

UnhandledExceptionPtr UnhandledException::New(Heap::Space space) {
  ASSERT(Object::unhandled_exception_class() != Class::null());
  return Object::Allocate<UnhandledException>(space);
}

void UnhandledException::set_exception(const Instance& exception) const {
  untag()->set_exception(exception.ptr());
}

void UnhandledException::set_stacktrace(const Instance& stacktrace) const {
  untag()->set_stacktrace(stacktrace.ptr());
}

const char* UnhandledException::ToErrorCString() const {
  Thread* thread = Thread::Current();
  auto isolate_group = thread->isolate_group();
  NoReloadScope no_reload_scope(thread);
  HANDLESCOPE(thread);
  Object& strtmp = Object::Handle();
  const char* exc_str;
  if (exception() == isolate_group->object_store()->out_of_memory()) {
    exc_str = "Out of Memory";
  } else if (exception() == isolate_group->object_store()->stack_overflow()) {
    exc_str = "Stack Overflow";
  } else {
    const Instance& exc = Instance::Handle(exception());
    strtmp = DartLibraryCalls::ToString(exc);
    if (!strtmp.IsError()) {
      exc_str = strtmp.ToCString();
    } else {
      exc_str = "<Received error while converting exception to string>";
    }
  }
  const Instance& stack = Instance::Handle(stacktrace());
  const char* stack_str;
  if (stack.IsNull()) {
    stack_str = "null";
  } else if (stack.IsStackTrace()) {
    stack_str = StackTrace::Cast(stack).ToCString();
  } else {
    strtmp = DartLibraryCalls::ToString(stack);
    if (!strtmp.IsError()) {
      stack_str = strtmp.ToCString();
    } else {
      stack_str = "<Received error while converting stack trace to string>";
    }
  }
  return OS::SCreate(thread->zone(), "Unhandled exception:\n%s\n%s", exc_str,
                     stack_str);
}

const char* UnhandledException::ToCString() const {
  return "UnhandledException";
}

UnwindErrorPtr UnwindError::New(const String& message, Heap::Space space) {
  ASSERT(Object::unwind_error_class() != Class::null());
  const auto& result =
      UnwindError::Handle(Object::Allocate<UnwindError>(space));
  result.set_message(message);
  ASSERT_EQUAL(result.is_user_initiated(), false);
  return result.ptr();
}

void UnwindError::set_message(const String& message) const {
  untag()->set_message(message.ptr());
}

void UnwindError::set_is_user_initiated(bool value) const {
  StoreNonPointer(&untag()->is_user_initiated_, value);
}

const char* UnwindError::ToErrorCString() const {
  const String& msg_str = String::Handle(message());
  return msg_str.ToCString();
}

const char* UnwindError::ToCString() const {
  return "UnwindError";
}

ObjectPtr Instance::InvokeGetter(const String& getter_name,
                                 bool respect_reflectable,
                                 bool check_is_entrypoint) const {
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();

  Class& klass = Class::Handle(zone, clazz());
  CHECK_ERROR(klass.EnsureIsFinalized(thread));
  const auto& inst_type_args =
      klass.NumTypeArguments() > 0
          ? TypeArguments::Handle(zone, GetTypeArguments())
          : Object::null_type_arguments();

  const String& internal_getter_name =
      String::Handle(zone, Field::GetterName(getter_name));
  Function& function = Function::Handle(
      zone, Resolver::ResolveDynamicAnyArgs(zone, klass, internal_getter_name));

  if (!function.IsNull() && check_is_entrypoint) {
    // The getter must correspond to either an entry-point field or a getter
    // method explicitly marked as an entry point.
    Field& field = Field::Handle(zone);
    if (function.kind() == UntaggedFunction::kImplicitGetter) {
      field = function.accessor_field();
    }
    if (!field.IsNull()) {
      CHECK_ERROR(field.VerifyEntryPoint(EntryPointPragma::kGetterOnly));
    } else {
      CHECK_ERROR(function.VerifyCallEntryPoint());
    }
  }

  // Check for method extraction when method extractors are not created.
  if (function.IsNull() && !FLAG_lazy_dispatchers) {
    function = Resolver::ResolveDynamicAnyArgs(zone, klass, getter_name);

    if (!function.IsNull() && check_is_entrypoint) {
      CHECK_ERROR(function.VerifyClosurizedEntryPoint());
    }

    if (!function.IsNull() && function.SafeToClosurize()) {
      const Function& closure_function =
          Function::Handle(zone, function.ImplicitClosureFunction());
      return closure_function.ImplicitInstanceClosure(*this);
    }
  }

  const int kTypeArgsLen = 0;
  const int kNumArgs = 1;
  const Array& args = Array::Handle(zone, Array::New(kNumArgs));
  args.SetAt(0, *this);
  const Array& args_descriptor = Array::Handle(
      zone,
      ArgumentsDescriptor::NewBoxed(kTypeArgsLen, args.Length(), Heap::kNew));

  return InvokeInstanceFunction(thread, *this, function, internal_getter_name,
                                args, args_descriptor, respect_reflectable,
                                inst_type_args);
}

ObjectPtr Instance::InvokeSetter(const String& setter_name,
                                 const Instance& value,
                                 bool respect_reflectable,
                                 bool check_is_entrypoint) const {
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();

  const Class& klass = Class::Handle(zone, clazz());
  CHECK_ERROR(klass.EnsureIsFinalized(thread));
  const auto& inst_type_args =
      klass.NumTypeArguments() > 0
          ? TypeArguments::Handle(zone, GetTypeArguments())
          : Object::null_type_arguments();

  const String& internal_setter_name =
      String::Handle(zone, Field::SetterName(setter_name));
  const Function& setter = Function::Handle(
      zone, Resolver::ResolveDynamicAnyArgs(zone, klass, internal_setter_name));

  if (check_is_entrypoint) {
    // The setter must correspond to either an entry-point field or a setter
    // method explicitly marked as an entry point.
    Field& field = Field::Handle(zone);
    if (setter.kind() == UntaggedFunction::kImplicitSetter) {
      field = setter.accessor_field();
    }
    if (!field.IsNull()) {
      CHECK_ERROR(field.VerifyEntryPoint(EntryPointPragma::kSetterOnly));
    } else if (!setter.IsNull()) {
      CHECK_ERROR(setter.VerifyCallEntryPoint());
    }
  }

  const int kTypeArgsLen = 0;
  const int kNumArgs = 2;
  const Array& args = Array::Handle(zone, Array::New(kNumArgs));
  args.SetAt(0, *this);
  args.SetAt(1, value);
  const Array& args_descriptor = Array::Handle(
      zone,
      ArgumentsDescriptor::NewBoxed(kTypeArgsLen, args.Length(), Heap::kNew));

  return InvokeInstanceFunction(thread, *this, setter, internal_setter_name,
                                args, args_descriptor, respect_reflectable,
                                inst_type_args);
}

ObjectPtr Instance::Invoke(const String& function_name,
                           const Array& args,
                           const Array& arg_names,
                           bool respect_reflectable,
                           bool check_is_entrypoint) const {
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  Class& klass = Class::Handle(zone, clazz());
  CHECK_ERROR(klass.EnsureIsFinalized(thread));

  Function& function = Function::Handle(
      zone, Resolver::ResolveDynamicAnyArgs(zone, klass, function_name));

  if (!function.IsNull() && check_is_entrypoint) {
    CHECK_ERROR(function.VerifyCallEntryPoint());
  }

  // We don't pass any explicit type arguments, which will be understood as
  // using dynamic for any function type arguments by lower layers.
  const int kTypeArgsLen = 0;
  const Array& args_descriptor = Array::Handle(
      zone, ArgumentsDescriptor::NewBoxed(kTypeArgsLen, args.Length(),
                                          arg_names, Heap::kNew));

  const auto& inst_type_args =
      klass.NumTypeArguments() > 0
          ? TypeArguments::Handle(zone, GetTypeArguments())
          : Object::null_type_arguments();

  if (function.IsNull()) {
    // Didn't find a method: try to find a getter and invoke call on its
    // result.
    const String& getter_name =
        String::Handle(zone, Field::GetterName(function_name));
    function = Resolver::ResolveDynamicAnyArgs(zone, klass, getter_name);
    if (!function.IsNull()) {
      if (check_is_entrypoint) {
        CHECK_ERROR(EntryPointFieldInvocationError(function_name));
      }
      ASSERT(function.kind() != UntaggedFunction::kMethodExtractor);
      // Invoke the getter.
      const int kNumArgs = 1;
      const Array& getter_args = Array::Handle(zone, Array::New(kNumArgs));
      getter_args.SetAt(0, *this);
      const Array& getter_args_descriptor = Array::Handle(
          zone, ArgumentsDescriptor::NewBoxed(
                    kTypeArgsLen, getter_args.Length(), Heap::kNew));
      const Object& getter_result = Object::Handle(
          zone, InvokeInstanceFunction(thread, *this, function, getter_name,
                                       getter_args, getter_args_descriptor,
                                       respect_reflectable, inst_type_args));
      if (getter_result.IsError()) {
        return getter_result.ptr();
      }
      // Replace the receiver in the arguments list with the closure returned
      // by the getter.
      args.SetAt(0, getter_result);
      return DartEntry::InvokeClosure(thread, args, args_descriptor);
    }
  }

  // Found an ordinary method.
  return InvokeInstanceFunction(thread, *this, function, function_name, args,
                                args_descriptor, respect_reflectable,
                                inst_type_args);
}

ObjectPtr Instance::HashCode() const {
  // TODO(koda): Optimize for all builtin classes and all classes
  // that do not override hashCode.
  return DartLibraryCalls::HashCode(*this);
}

// Keep in sync with AsmIntrinsifier::Object_getHash.
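// Identity hash sketch (a descriptive summary of the code below): integers
// are their own identity hash; a double whose value is exactly representable
// as an int64 reuses that integer's hash, while other doubles fold the two
// 32-bit halves of their bit pattern together; null, true, and false use
// fixed constants; every other instance gets a random non-zero hash that is
// cached on the object, either in the header or in a heap side table,
// depending on HASH_IN_OBJECT_HEADER.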
IntegerPtr Instance::IdentityHashCode(Thread* thread) const {
  if (IsInteger()) return Integer::Cast(*this).ptr();

#if defined(HASH_IN_OBJECT_HEADER)
  intptr_t hash = Object::GetCachedHash(ptr());
#else
  intptr_t hash = thread->heap()->GetHash(ptr());
#endif
  if (hash == 0) {
    if (IsNull()) {
      hash = kNullIdentityHash;
    } else if (IsBool()) {
      hash = Bool::Cast(*this).value() ? kTrueIdentityHash
                                       : kFalseIdentityHash;
    } else if (IsDouble()) {
      double val = Double::Cast(*this).value();
      if ((val >= kMinInt64RepresentableAsDouble) &&
          (val <= kMaxInt64RepresentableAsDouble)) {
        int64_t ival = static_cast<int64_t>(val);
        if (static_cast<double>(ival) == val) {
          return Integer::New(ival);
        }
      }

      uint64_t uval = bit_cast<uint64_t>(val);
      hash = ((uval >> 32) ^ (uval)) & kSmiMax;
    } else {
      do {
        hash = thread->random()->NextUInt32() & 0x3FFFFFFF;
      } while (hash == 0);
    }

#if defined(HASH_IN_OBJECT_HEADER)
    hash = Object::SetCachedHashIfNotSet(ptr(), hash);
#else
    hash = thread->heap()->SetHashIfNotSet(ptr(), hash);
#endif
  }
  return Smi::New(hash);
}

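// Used during canonicalization: the fields of a canonicalization candidate
// are themselves canonicalized before the canonical-instance lookup (see
// CanonicalizeFieldsLocked and CanonicalizeLocked below), so equal instances
// have pointer-identical field contents and a raw bitwise comparison of the
// instances' words suffices here.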
bool Instance::CanonicalizeEquals(const Instance& other) const {
  if (this->ptr() == other.ptr()) {
    return true;  // "===".
  }

  if (other.IsNull() || (this->clazz() != other.clazz())) {
    return false;
  }

  {
    NoSafepointScope no_safepoint;
    // Raw bits compare.
    const intptr_t instance_size = SizeFromClass();
    ASSERT(instance_size != 0);
    const intptr_t other_instance_size = other.SizeFromClass();
    ASSERT(other_instance_size != 0);
    if (instance_size != other_instance_size) {
      return false;
    }
    uword this_addr = reinterpret_cast<uword>(this->untag());
    uword other_addr = reinterpret_cast<uword>(other.untag());
    for (intptr_t offset = Instance::NextFieldOffset(); offset < instance_size;
         offset += kCompressedWordSize) {
      if ((reinterpret_cast<CompressedObjectPtr*>(this_addr + offset)
               ->Decompress(untag()->heap_base())) !=
          (reinterpret_cast<CompressedObjectPtr*>(other_addr + offset)
               ->Decompress(untag()->heap_base()))) {
        return false;
      }
    }
  }
  return true;
}

bool Symbol::IsSymbolCid(Thread* thread, classid_t class_id) {
  auto object_store = thread->isolate_group()->object_store();
  return Class::GetClassId(object_store->symbol_class()) == class_id;
}

// Must be kept in sync with Symbol.hashCode in symbol_patch.dart
uint32_t Symbol::CanonicalizeHash(Thread* thread, const Instance& instance) {
  ASSERT(IsSymbolCid(thread, instance.GetClassId()));

  auto zone = thread->zone();
  auto object_store = thread->isolate_group()->object_store();

  const auto& symbol_name_field =
      Field::Handle(zone, object_store->symbol_name_field());
  ASSERT(!symbol_name_field.IsNull());

  // Keep in sync with sdk/lib/_internal/vm/lib/symbol_patch.dart.
  const auto& name =
      String::Cast(Object::Handle(zone, instance.GetField(symbol_name_field)));
  const uint32_t arbitrary_prime = 664597;
  return 0x1fffffff & (arbitrary_prime * name.CanonicalizeHash());
}

uint32_t Instance::CanonicalizeHash() const {
  if (GetClassId() == kNullCid) {
    return kNullIdentityHash;
  }
  Thread* thread = Thread::Current();
  uint32_t hash = thread->heap()->GetCanonicalHash(ptr());
  if (hash != 0) {
    return hash;
  }
  Zone* zone = thread->zone();
  const Class& cls = Class::Handle(zone, clazz());
  const bool is_symbol = Symbol::IsSymbolCid(thread, cls.id());

  NoSafepointScope no_safepoint(thread);

  if (is_symbol) {
    hash = Symbol::CanonicalizeHash(thread, *this);
  } else {
    const intptr_t class_id = cls.id();
    ASSERT(class_id != 0);
    hash = class_id;
    uword this_addr = reinterpret_cast<uword>(this->untag());
    Object& obj = Object::Handle(zone);
    Instance& instance = Instance::Handle(zone);

    const auto unboxed_fields_bitmap =
        thread->isolate_group()->class_table()->GetUnboxedFieldsMapAt(
            GetClassId());

    for (intptr_t offset = Instance::NextFieldOffset();
         offset < cls.host_next_field_offset();
         offset += kCompressedWordSize) {
      if (unboxed_fields_bitmap.Get(offset / kCompressedWordSize)) {
        if (kCompressedWordSize == 8) {
          hash = CombineHashes(
              hash, *reinterpret_cast<uint32_t*>(this_addr + offset));
          hash = CombineHashes(
              hash, *reinterpret_cast<uint32_t*>(this_addr + offset + 4));
        } else {
          hash = CombineHashes(
              hash, *reinterpret_cast<uint32_t*>(this_addr + offset));
        }
      } else {
        obj = reinterpret_cast<CompressedObjectPtr*>(this_addr + offset)
                  ->Decompress(untag()->heap_base());
        if (obj.IsSentinel()) {
          hash = CombineHashes(hash, 11);
        } else {
          instance ^= obj.ptr();
          hash = CombineHashes(hash, instance.CanonicalizeHash());
        }
      }
    }
    hash = FinalizeHash(hash, String::kHashBits);
  }
  thread->heap()->SetCanonicalHash(ptr(), hash);
  return hash;
}

#if defined(DEBUG)
class CheckForPointers : public ObjectPointerVisitor {
 public:
  explicit CheckForPointers(IsolateGroup* isolate_group)
      : ObjectPointerVisitor(isolate_group), has_pointers_(false) {}

  bool has_pointers() const { return has_pointers_; }

  void VisitPointers(ObjectPtr* first, ObjectPtr* last) override {
    if (last >= first) {
      has_pointers_ = true;
    }
  }

#if defined(DART_COMPRESSED_POINTERS)
  void VisitCompressedPointers(uword heap_base,
                               CompressedObjectPtr* first,
                               CompressedObjectPtr* last) override {
    if (last >= first) {
      has_pointers_ = true;
    }
  }
#endif

 private:
  bool has_pointers_;

  DISALLOW_COPY_AND_ASSIGN(CheckForPointers);
};
#endif  // DEBUG

void Instance::CanonicalizeFieldsLocked(Thread* thread) const {
  const intptr_t class_id = GetClassId();
  if (class_id >= kNumPredefinedCids) {
    // Iterate over all fields, canonicalizing each value: numbers and strings
    // are canonicalized directly, and any other instances are canonicalized
    // recursively.
    Zone* zone = thread->zone();
    Instance& obj = Instance::Handle(zone);
    const intptr_t instance_size = SizeFromClass();
    ASSERT(instance_size != 0);
    const auto unboxed_fields_bitmap =
        thread->isolate_group()->class_table()->GetUnboxedFieldsMapAt(
            class_id);
    for (intptr_t offset = Instance::NextFieldOffset(); offset < instance_size;
         offset += kCompressedWordSize) {
      if (unboxed_fields_bitmap.Get(offset / kCompressedWordSize)) {
        continue;
      }
      obj ^= this->FieldAddrAtOffset(offset)->Decompress(untag()->heap_base());
      obj = obj.CanonicalizeLocked(thread);
      this->SetFieldAtOffset(offset, obj);
    }
  } else {
#if defined(DEBUG) && !defined(DART_COMPRESSED_POINTERS)
    // Make sure that we are not missing any fields.
    IsolateGroup* group = IsolateGroup::Current();
    CheckForPointers has_pointers(group);
    this->ptr()->untag()->VisitPointersPrecise(&has_pointers);
    ASSERT(!has_pointers.has_pointers());
#endif  // DEBUG
  }
}

InstancePtr Instance::CopyShallowToOldSpace(Thread* thread) const {
  return Instance::RawCast(Object::Clone(*this, Heap::kOld));
}

InstancePtr Instance::Canonicalize(Thread* thread) const {
  SafepointMutexLocker ml(
      thread->isolate_group()->constant_canonicalization_mutex());
  return CanonicalizeLocked(thread);
}

InstancePtr Instance::CanonicalizeLocked(Thread* thread) const {
  if (!this->ptr()->IsHeapObject() || this->IsCanonical()) {
    return this->ptr();
  }
  ASSERT(!IsNull());
  CanonicalizeFieldsLocked(thread);
  Zone* zone = thread->zone();
  const Class& cls = Class::Handle(zone, this->clazz());
  Instance& result =
      Instance::Handle(zone, cls.LookupCanonicalInstance(zone, *this));
  if (!result.IsNull()) {
    return result.ptr();
  }
  if (IsNew()) {
    ASSERT((thread->isolate() == Dart::vm_isolate()) || !InVMIsolateHeap());
    // Create a canonical object in old space.
    result ^= Object::Clone(*this, Heap::kOld);
  } else {
    result = this->ptr();
  }
  ASSERT(result.IsOld());
  result.SetCanonical();
  return cls.InsertCanonicalConstant(zone, result);
}

ObjectPtr Instance::GetField(const Field& field) const {
  if (field.is_unboxed()) {
    switch (field.guarded_cid()) {
      case kDoubleCid:
        return Double::New(*reinterpret_cast<double_t*>(FieldAddr(field)));
      case kFloat32x4Cid:
        return Float32x4::New(
            *reinterpret_cast<simd128_value_t*>(FieldAddr(field)));
      case kFloat64x2Cid:
        return Float64x2::New(
            *reinterpret_cast<simd128_value_t*>(FieldAddr(field)));
      default:
        return Integer::New(*reinterpret_cast<int64_t*>(FieldAddr(field)));
    }
  } else {
    return FieldAddr(field)->Decompress(untag()->heap_base());
  }
}

void Instance::SetField(const Field& field, const Object& value) const {
  if (field.is_unboxed()) {
    switch (field.guarded_cid()) {
      case kDoubleCid:
        StoreNonPointer(reinterpret_cast<double_t*>(FieldAddr(field)),
                        Double::Cast(value).value());
        break;
      case kFloat32x4Cid:
        StoreNonPointer(reinterpret_cast<simd128_value_t*>(FieldAddr(field)),
                        Float32x4::Cast(value).value());
        break;
      case kFloat64x2Cid:
        StoreNonPointer(reinterpret_cast<simd128_value_t*>(FieldAddr(field)),
                        Float64x2::Cast(value).value());
        break;
      default:
        StoreNonPointer(reinterpret_cast<int64_t*>(FieldAddr(field)),
                        Integer::Cast(value).AsInt64Value());
        break;
    }
  } else {
    field.RecordStore(value);
    StoreCompressedPointer(FieldAddr(field), value.ptr());
  }
}

AbstractTypePtr Instance::GetType(Heap::Space space) const {
  if (IsNull()) {
    return Type::NullType();
  }
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  const Class& cls = Class::Handle(zone, clazz());
  if (!cls.is_finalized()) {
    // Various predefined classes can be instantiated by the VM or
    // Dart_NewString/Integer/TypedData/... before the class is finalized.
    ASSERT(cls.is_prefinalized());
    cls.EnsureDeclarationLoaded();
  }
  if (cls.IsClosureClass()) {
    FunctionType& signature = FunctionType::Handle(
        zone, Closure::Cast(*this).GetInstantiatedSignature(zone));
    if (!signature.IsFinalized()) {
      signature.SetIsFinalized();
    }
    signature ^= signature.Canonicalize(thread);
    return signature.ptr();
  }
  if (IsRecord()) {
    ASSERT(cls.IsRecordClass());
    auto& record_type =
        RecordType::Handle(zone, Record::Cast(*this).GetRecordType());
    ASSERT(record_type.IsFinalized());
    ASSERT(record_type.IsCanonical());
    return record_type.ptr();
  }
  Type& type = Type::Handle(zone);
  if (!cls.IsGeneric()) {
    type = cls.DeclarationType();
  }
  if (type.IsNull()) {
    TypeArguments& type_arguments = TypeArguments::Handle(zone);
    const intptr_t num_type_arguments = cls.NumTypeArguments();
    if (num_type_arguments > 0) {
      type_arguments = GetTypeArguments();
      if (!type_arguments.IsNull()) {
        type_arguments = type_arguments.FromInstanceTypeArguments(thread, cls);
      }
    }
    type = Type::New(cls, type_arguments, Nullability::kNonNullable, space);
    type.SetIsFinalized();
    type ^= type.Canonicalize(thread);
  }
  return type.ptr();
}

TypeArgumentsPtr Instance::GetTypeArguments() const {
  ASSERT(!IsType());
  const Class& cls = Class::Handle(clazz());
  intptr_t field_offset = cls.host_type_arguments_field_offset();
  ASSERT(field_offset != Class::kNoTypeArguments);
  TypeArguments& type_arguments = TypeArguments::Handle();
  type_arguments ^=
      FieldAddrAtOffset(field_offset)->Decompress(untag()->heap_base());
  return type_arguments.ptr();
}

void Instance::SetTypeArguments(const TypeArguments& value) const {
  ASSERT(!IsType());
  ASSERT(value.IsNull() || value.IsCanonical());
  const Class& cls = Class::Handle(clazz());
  intptr_t field_offset = cls.host_type_arguments_field_offset();
  ASSERT(field_offset != Class::kNoTypeArguments);
  SetFieldAtOffset(field_offset, value);
}

/*
Specification of instance checks (e is T) and casts (e as T), where e evaluates
to a value v and v has runtime type S:

Instance checks (e is T) in weak checking mode in a legacy or opted-in library:
  If v == null and T is a legacy type
    return LEGACY_SUBTYPE(T, Null) || LEGACY_SUBTYPE(Object, T)
  If v == null and T is not a legacy type, return NNBD_SUBTYPE(Null, T)
  Otherwise return LEGACY_SUBTYPE(S, T)

Instance checks (e is T) in strong checking mode in a legacy or opted-in lib:
  If v == null and T is a legacy type
    return LEGACY_SUBTYPE(T, Null) || LEGACY_SUBTYPE(Object, T)
  Otherwise return NNBD_SUBTYPE(S, T)

Casts (e as T) in weak checking mode in a legacy or opted-in library:
  If LEGACY_SUBTYPE(S, T) then e as T evaluates to v.
  Otherwise a TypeError is thrown.

Casts (e as T) in strong checking mode in a legacy or opted-in library:
  If NNBD_SUBTYPE(S, T) then e as T evaluates to v.
  Otherwise a TypeError is thrown.
*/

bool Instance::IsInstanceOf(
    const AbstractType& other,
    const TypeArguments& other_instantiator_type_arguments,
    const TypeArguments& other_function_type_arguments) const {
  ASSERT(!other.IsDynamicType());
  if (IsNull()) {
    return Instance::NullIsInstanceOf(other, other_instantiator_type_arguments,
                                      other_function_type_arguments);
  }
  // In strong mode, compute NNBD_SUBTYPE(runtimeType, other).
  // In weak mode, compute LEGACY_SUBTYPE(runtimeType, other).
  return RuntimeTypeIsSubtypeOf(other, other_instantiator_type_arguments,
                                other_function_type_arguments);
}

bool Instance::IsAssignableTo(
    const AbstractType& other,
    const TypeArguments& other_instantiator_type_arguments,
    const TypeArguments& other_function_type_arguments) const {
  ASSERT(!other.IsDynamicType());
  // In weak mode type casts, whether in legacy or opted-in libraries, the null
  // instance is detected and handled in inlined code and therefore cannot be
  // encountered here as a Dart null receiver.
  ASSERT(IsolateGroup::Current()->use_strict_null_safety_checks() || !IsNull());
  // In strong mode, compute NNBD_SUBTYPE(runtimeType, other).
  // In weak mode, compute LEGACY_SUBTYPE(runtimeType, other).
  return RuntimeTypeIsSubtypeOf(other, other_instantiator_type_arguments,
                                other_function_type_arguments);
}

// If 'other' type (once instantiated) is a legacy type:
// return LEGACY_SUBTYPE(other, Null) || LEGACY_SUBTYPE(Object, other).
// Otherwise return NNBD_SUBTYPE(Null, T).
// The value of the strong flag is ignored.
bool Instance::NullIsInstanceOf(
    const AbstractType& other,
    const TypeArguments& other_instantiator_type_arguments,
    const TypeArguments& other_function_type_arguments) {
  ASSERT(other.IsFinalized());
  if (other.IsNullable()) {
    // This case includes top types (void, dynamic, Object?).
    // The uninstantiated nullable type will remain nullable after
    // instantiation.
    return true;
  }
  if (other.IsFutureOrType()) {
    const auto& type = AbstractType::Handle(other.UnwrapFutureOr());
    return NullIsInstanceOf(type, other_instantiator_type_arguments,
                            other_function_type_arguments);
  }
  // No need to instantiate type, unless it is a type parameter.
  // Note that a typeref cannot refer to a type parameter.
  if (other.IsTypeParameter()) {
    auto& type = AbstractType::Handle(other.InstantiateFrom(
        other_instantiator_type_arguments, other_function_type_arguments,
        kAllFree, Heap::kOld));
    return Instance::NullIsInstanceOf(type, Object::null_type_arguments(),
                                      Object::null_type_arguments());
  }
  return other.IsLegacy() && (other.IsObjectType() || other.IsNeverType());
}

// Must be kept in sync with GenerateNullIsAssignableToType in
// stub_code_compiler.cc if any changes are made.
bool Instance::NullIsAssignableTo(const AbstractType& other) {
  Thread* thread = Thread::Current();
  auto isolate_group = thread->isolate_group();

  // In weak mode, Null is a bottom type (according to LEGACY_SUBTYPE).
  if (!isolate_group->use_strict_null_safety_checks()) {
    return true;
  }
20880 // "Left Null" rule: null is assignable when destination type is either
20881 // legacy or nullable. Otherwise it is not assignable or we cannot tell
20882 // without instantiating type parameter.
  if (other.IsLegacy() || other.IsNullable()) {
    return true;
  }
  if (other.IsFutureOrType()) {
    return NullIsAssignableTo(
        AbstractType::Handle(thread->zone(), other.UnwrapFutureOr()));
  }
  // Since the TAVs are not available, for non-nullable type parameters this
  // returns the conservative approximation "not assignable".
  return false;
}

// Must be kept in sync with GenerateNullIsAssignableToType in
// stub_code_compiler.cc if any changes are made.
bool Instance::NullIsAssignableTo(
    const AbstractType& other,
    const TypeArguments& other_instantiator_type_arguments,
    const TypeArguments& other_function_type_arguments) {
  // Do checks that don't require instantiation first.
  if (NullIsAssignableTo(other)) return true;
  if (!other.IsTypeParameter()) return false;
  const auto& type = AbstractType::Handle(other.InstantiateFrom(
      other_instantiator_type_arguments, other_function_type_arguments,
      kAllFree, Heap::kNew));
  return NullIsAssignableTo(type);
}

bool Instance::RuntimeTypeIsSubtypeOf(
    const AbstractType& other,
    const TypeArguments& other_instantiator_type_arguments,
    const TypeArguments& other_function_type_arguments) const {
  ASSERT(other.IsFinalized());
  ASSERT(ptr() != Object::sentinel().ptr());
  // An instance can never have runtime type dynamic, void, or Never.
  if (other.IsTopTypeForSubtyping()) {
    return true;
  }
  Thread* thread = Thread::Current();
  auto isolate_group = thread->isolate_group();
  // In weak testing mode, Null type is a subtype of any type.
  if (IsNull() && !isolate_group->use_strict_null_safety_checks()) {
    return true;
  }
  Zone* zone = thread->zone();
  const Class& cls = Class::Handle(zone, clazz());
  if (cls.IsClosureClass()) {
    if (other.IsDartFunctionType() || other.IsDartClosureType() ||
        other.IsObjectType()) {
      return true;
    }
    AbstractType& instantiated_other = AbstractType::Handle(zone, other.ptr());
    if (!other.IsInstantiated()) {
      instantiated_other = other.InstantiateFrom(
          other_instantiator_type_arguments, other_function_type_arguments,
          kAllFree, Heap::kOld);
      if (instantiated_other.IsTopTypeForSubtyping() ||
          instantiated_other.IsObjectType() ||
          instantiated_other.IsDartFunctionType()) {
        return true;
      }
    }
    if (RuntimeTypeIsSubtypeOfFutureOr(zone, instantiated_other)) {
      return true;
    }
    if (!instantiated_other.IsFunctionType()) {
      return false;
    }
    const FunctionType& sig = FunctionType::Handle(
        Closure::Cast(*this).GetInstantiatedSignature(zone));
    return sig.IsSubtypeOf(FunctionType::Cast(instantiated_other), Heap::kOld);
  }
  if (cls.IsRecordClass()) {
    if (other.IsDartRecordType() || other.IsObjectType()) {
      return true;
    }
    AbstractType& instantiated_other = AbstractType::Handle(zone, other.ptr());
    if (!other.IsInstantiated()) {
      instantiated_other = other.InstantiateFrom(
          other_instantiator_type_arguments, other_function_type_arguments,
          kAllFree, Heap::kOld);
      if (instantiated_other.IsTopTypeForSubtyping() ||
          instantiated_other.IsObjectType() ||
          instantiated_other.IsDartRecordType()) {
        return true;
      }
    }
    if (RuntimeTypeIsSubtypeOfFutureOr(zone, instantiated_other)) {
      return true;
    }
    if (!instantiated_other.IsRecordType()) {
      return false;
    }
    const Record& record = Record::Cast(*this);
    const RecordType& record_type = RecordType::Cast(instantiated_other);
    if (record.shape() != record_type.shape()) {
      return false;
    }
    Instance& field_value = Instance::Handle(zone);
    AbstractType& field_type = AbstractType::Handle(zone);
    const intptr_t num_fields = record.num_fields();
    for (intptr_t i = 0; i < num_fields; ++i) {
      field_value ^= record.FieldAt(i);
      field_type = record_type.FieldTypeAt(i);
      if (!field_value.RuntimeTypeIsSubtypeOf(field_type,
                                              Object::null_type_arguments(),
                                              Object::null_type_arguments())) {
        return false;
      }
    }
    return true;
  }
  TypeArguments& type_arguments = TypeArguments::Handle(zone);
  const intptr_t num_type_arguments = cls.NumTypeArguments();
  if (num_type_arguments > 0) {
    type_arguments = GetTypeArguments();
    ASSERT(type_arguments.IsNull() || type_arguments.IsCanonical());
    // The number of type arguments in the instance must be greater or equal
    // to the number of type arguments expected by the instance class.
    // A discrepancy is allowed for closures, which borrow the type argument
    // vector of their instantiator, which may be of a subclass of the class
    // defining the closure. Truncating the vector to the correct length on
    // instantiation is unnecessary. The vector may therefore be longer.
    // Also, an optimization reuses the type argument vector of the
    // instantiator of generic instances when its layout is compatible.
    ASSERT(type_arguments.IsNull() ||
           (type_arguments.Length() >= num_type_arguments));
  }
  AbstractType& instantiated_other = AbstractType::Handle(zone, other.ptr());
  if (!other.IsInstantiated()) {
    instantiated_other = other.InstantiateFrom(
        other_instantiator_type_arguments, other_function_type_arguments,
        kAllFree, Heap::kOld);
    if (instantiated_other.IsTopTypeForSubtyping()) {
      return true;
    }
  }
  if (IsNull()) {
    ASSERT(isolate_group->use_strict_null_safety_checks());
    if (instantiated_other.IsNullType()) {
      return true;
    }
    if (RuntimeTypeIsSubtypeOfFutureOr(zone, instantiated_other)) {
      return true;
    }
    // At this point, instantiated_other can be a function type.
    return !instantiated_other.IsNonNullable();
  }
  if (!instantiated_other.IsType()) {
    return false;
  }
  // The runtime type of a non-null instance is non-nullable, so there is no
  // need to check the nullability of the other type.
  return Class::IsSubtypeOf(cls, type_arguments, Nullability::kNonNullable,
                            instantiated_other, Heap::kOld);
}

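// Handles the FutureOr cases of the subtype checks above: the runtime type S
// is a subtype of FutureOr<T> if T is a top type, if S is Future<U> with U a
// subtype of T, or if S is a subtype of T itself. For example, an instance of
// Future<int> and the integer 3 are both subtypes of FutureOr<int>.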
bool Instance::RuntimeTypeIsSubtypeOfFutureOr(Zone* zone,
                                              const AbstractType& other) const {
  if (other.IsFutureOrType()) {
    const TypeArguments& other_type_arguments =
        TypeArguments::Handle(zone, other.arguments());
    const AbstractType& other_type_arg =
        AbstractType::Handle(zone, other_type_arguments.TypeAtNullSafe(0));
    if (other_type_arg.IsTopTypeForSubtyping()) {
      return true;
    }
    if (Class::Handle(zone, clazz()).IsFutureClass()) {
      const TypeArguments& type_arguments =
          TypeArguments::Handle(zone, GetTypeArguments());
      const AbstractType& type_arg =
          AbstractType::Handle(zone, type_arguments.TypeAtNullSafe(0));
      if (type_arg.IsSubtypeOf(other_type_arg, Heap::kOld)) {
        return true;
      }
    }
    // Retry RuntimeTypeIsSubtypeOf after unwrapping type arg of FutureOr.
    if (RuntimeTypeIsSubtypeOf(other_type_arg, Object::null_type_arguments(),
                               Object::null_type_arguments())) {
      return true;
    }
  }
  return false;
}

bool Instance::OperatorEquals(const Instance& other) const {
  // TODO(koda): Optimize for all builtin classes and all classes
  // that do not override operator==.
  return DartLibraryCalls::Equals(*this, other) == Object::bool_true().ptr();
}

bool Instance::IsIdenticalTo(const Instance& other) const {
  if (ptr() == other.ptr()) return true;
  if (IsInteger() && other.IsInteger()) {
    return Integer::Cast(*this).Equals(other);
  }
  if (IsDouble() && other.IsDouble()) {
    double other_value = Double::Cast(other).value();
    return Double::Cast(*this).BitwiseEqualsToDouble(other_value);
  }
  return false;
}

intptr_t* Instance::NativeFieldsDataAddr() const {
  ASSERT(Thread::Current()->no_safepoint_scope_depth() > 0);
  TypedDataPtr native_fields = static_cast<TypedDataPtr>(
      NativeFieldsAddr()->Decompress(untag()->heap_base()));
  if (native_fields == TypedData::null()) {
    return nullptr;
  }
  return reinterpret_cast<intptr_t*>(native_fields->untag()->data());
}

void Instance::SetNativeField(int index, intptr_t value) const {
  ASSERT(IsValidNativeIndex(index));
  Object& native_fields =
      Object::Handle(NativeFieldsAddr()->Decompress(untag()->heap_base()));
  if (native_fields.IsNull()) {
    // Allocate backing storage for the native fields.
    native_fields = TypedData::New(kIntPtrCid, NumNativeFields());
    StoreCompressedPointer(NativeFieldsAddr(), native_fields.ptr());
  }
  intptr_t byte_offset = index * sizeof(intptr_t);
  TypedData::Cast(native_fields).SetIntPtr(byte_offset, value);
}

void Instance::SetNativeFields(uint16_t num_native_fields,
                               const intptr_t* field_values) const {
  ASSERT(num_native_fields == NumNativeFields());
  ASSERT(field_values != nullptr);
  Object& native_fields =
      Object::Handle(NativeFieldsAddr()->Decompress(untag()->heap_base()));
  if (native_fields.IsNull()) {
    // Allocate backing storage for the native fields.
    native_fields = TypedData::New(kIntPtrCid, NumNativeFields());
    StoreCompressedPointer(NativeFieldsAddr(), native_fields.ptr());
  }
  for (uint16_t i = 0; i < num_native_fields; i++) {
    intptr_t byte_offset = i * sizeof(intptr_t);
    TypedData::Cast(native_fields).SetIntPtr(byte_offset, field_values[i]);
  }
}

bool Instance::IsCallable(Function* function) const {
  Class& cls = Class::Handle(clazz());
  if (cls.IsClosureClass()) {
    if (function != nullptr) {
      *function = Closure::Cast(*this).function();
    }
    return true;
  }
  // Try to resolve a "call" method.
  Zone* zone = Thread::Current()->zone();
  Function& call_function = Function::Handle(
      zone, Resolver::ResolveDynamicAnyArgs(zone, cls, Symbols::DynamicCall(),
                                            /*allow_add=*/false));
  if (call_function.IsNull()) {
    return false;
  }
  if (function != nullptr) {
    *function = call_function.ptr();
  }
  return true;
}

InstancePtr Instance::New(const Class& cls, Heap::Space space) {
  Thread* thread = Thread::Current();
  if (cls.EnsureIsAllocateFinalized(thread) != Error::null()) {
    return Instance::null();
  }
  return NewAlreadyFinalized(cls, space);
}

InstancePtr Instance::NewAlreadyFinalized(const Class& cls, Heap::Space space) {
  ASSERT(cls.is_allocate_finalized());
  intptr_t instance_size = cls.host_instance_size();
  ASSERT(instance_size > 0);
  // Initialize everything after the object header with Object::null(), since
  // this isn't a predefined class.
  const uword ptr_field_end_offset =
      instance_size - (Instance::ContainsCompressedPointers()
                           ? kCompressedWordSize
                           : kWordSize);
  return static_cast<InstancePtr>(Object::Allocate(
      cls.id(), instance_size, space, Instance::ContainsCompressedPointers(),
      from_offset<Instance>(), ptr_field_end_offset));
}

bool Instance::IsValidFieldOffset(intptr_t offset) const {
  Thread* thread = Thread::Current();
  REUSABLE_CLASS_HANDLESCOPE(thread);
  Class& cls = thread->ClassHandle();
  cls = clazz();
  return (offset >= 0 &&
          offset <= (cls.host_instance_size() - kCompressedWordSize));
}

intptr_t Instance::ElementSizeFor(intptr_t cid) {
  if (IsExternalTypedDataClassId(cid) || IsTypedDataClassId(cid) ||
      IsTypedDataViewClassId(cid) || IsUnmodifiableTypedDataViewClassId(cid)) {
    return TypedDataBase::ElementSizeInBytes(cid);
  }
  switch (cid) {
    case kArrayCid:
    case kImmutableArrayCid:
      return Array::kBytesPerElement;
    case kTypeArgumentsCid:
      return TypeArguments::ArrayTraits::kElementSize;
    case kOneByteStringCid:
      return OneByteString::kBytesPerElement;
    case kTwoByteStringCid:
      return TwoByteString::kBytesPerElement;
    case kExternalOneByteStringCid:
      return ExternalOneByteString::kBytesPerElement;
    case kExternalTwoByteStringCid:
      return ExternalTwoByteString::kBytesPerElement;
    default:
      UNIMPLEMENTED();
      return 0;
  }
}

intptr_t Instance::DataOffsetFor(intptr_t cid) {
  if (IsExternalTypedDataClassId(cid) || IsExternalStringClassId(cid)) {
    // Elements start at offset 0 of the external data.
    return 0;
  }
  if (IsTypedDataClassId(cid)) {
    return TypedData::payload_offset();
  }
  switch (cid) {
    case kArrayCid:
    case kImmutableArrayCid:
      return Array::data_offset();
    case kTypeArgumentsCid:
      return TypeArguments::types_offset();
    case kOneByteStringCid:
      return OneByteString::data_offset();
    case kTwoByteStringCid:
      return TwoByteString::data_offset();
    case kRecordCid:
      return Record::field_offset(0);
    default:
      UNIMPLEMENTED();
      return Array::data_offset();
  }
}

const char* Instance::ToCString() const {
  if (IsNull()) {
    return "null";
  } else if (Thread::Current()->no_safepoint_scope_depth() > 0) {
    // Can occur when running disassembler.
    return "Instance";
  } else {
    if (IsClosure()) {
      return Closure::Cast(*this).ToCString();
    }
    // Background compiler disassembly of instructions referring to pool
    // objects calls this function and requires allocation of Type in old
    // space.
    const AbstractType& type = AbstractType::Handle(GetType(Heap::kOld));
    const String& type_name = String::Handle(type.UserVisibleName());
    return OS::SCreate(Thread::Current()->zone(), "Instance of '%s'",
                       type_name.ToCString());
  }
}

classid_t AbstractType::type_class_id() const {
  // All subclasses should implement this appropriately, so the only value that
  // should reach this implementation should be the null value.
  ASSERT(IsNull());
  // AbstractType is an abstract class.
  UNREACHABLE();
  return kIllegalCid;
}

ClassPtr AbstractType::type_class() const {
  // All subclasses should implement this appropriately, so the only value that
  // should reach this implementation should be the null value.
  ASSERT(IsNull());
  // AbstractType is an abstract class.
  UNREACHABLE();
  return Class::null();
}

TypeArgumentsPtr AbstractType::arguments() const {
  // All subclasses should implement this appropriately, so the only value that
  // should reach this implementation should be the null value.
  ASSERT(IsNull());
  // AbstractType is an abstract class.
  UNREACHABLE();
  return nullptr;
}

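// A type is strictly non-nullable when, in sound null-safety mode, null can
// never be assigned to it under any instantiation. For example, `int` and
// `FutureOr<int>` are strictly non-nullable, while `int?`, the legacy type
// `int*`, and a type parameter `T extends Object?` are not (the parameter's
// nullable bound admits null).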
21276bool AbstractType::IsStrictlyNonNullable() const {
21277 // Null can be assigned to legacy and nullable types.
21278 if (!IsNonNullable()) {
21279 return false;
21280 }
21281
21282 Thread* thread = Thread::Current();
21283 Zone* zone = thread->zone();
21284
21285 // In weak mode null can be assigned to any type.
21286 if (!thread->isolate_group()->null_safety()) {
21287 return false;
21288 }
21289
21290 if (IsTypeParameter()) {
21291 const auto& bound =
21292 AbstractType::Handle(zone, ptr: TypeParameter::Cast(obj: *this).bound());
21293 ASSERT(!bound.IsNull());
21294 return bound.IsStrictlyNonNullable();
21295 }
21296 if (IsFutureOrType()) {
21297 return AbstractType::Handle(zone, ptr: UnwrapFutureOr()).IsStrictlyNonNullable();
21298 }
21299 return true;
21300}
21301
AbstractTypePtr AbstractType::SetInstantiatedNullability(
    const TypeParameter& type_param,
    Heap::Space space) const {
  Nullability result_nullability;
  const Nullability arg_nullability = nullability();
  const Nullability var_nullability = type_param.nullability();
  // Adjust nullability of result 'arg' instantiated from 'var'.
  // arg/var ! ? *
  //  !      ! ? *
  //  ?      ? ? ?
  //  *      * ? *
  if (var_nullability == Nullability::kNullable ||
      arg_nullability == Nullability::kNullable) {
    result_nullability = Nullability::kNullable;
  } else if (var_nullability == Nullability::kLegacy ||
             arg_nullability == Nullability::kLegacy) {
    result_nullability = Nullability::kLegacy;
  } else {
    // Keep arg nullability.
    return ptr();
  }
  if (arg_nullability == result_nullability) {
    return ptr();
  }
  if (IsType()) {
    return Type::Cast(*this).ToNullability(result_nullability, space);
  }
  if (IsFunctionType()) {
    return FunctionType::Cast(*this).ToNullability(result_nullability, space);
  }
  if (IsRecordType()) {
    return RecordType::Cast(*this).ToNullability(result_nullability, space);
  }
  if (IsTypeParameter()) {
    return TypeParameter::Cast(*this).ToNullability(result_nullability, space);
  }
  UNREACHABLE();
}

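// FutureOr normalization examples for the cases handled below (illustrative):
// FutureOr<dynamic> and FutureOr<void> normalize to their type argument,
// FutureOr<Object> normalizes to Object (merging nullability from both
// sides), FutureOr<Never> normalizes to Future<Never>, FutureOr<Null>
// normalizes to Future<Null>?, and FutureOr<int?>? drops the redundant
// outer '?'.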
AbstractTypePtr AbstractType::NormalizeFutureOrType(Heap::Space space) const {
  if (IsFutureOrType()) {
    Zone* zone = Thread::Current()->zone();
    const AbstractType& unwrapped_type =
        AbstractType::Handle(zone, UnwrapFutureOr());
    const classid_t cid = unwrapped_type.type_class_id();
    if (cid == kDynamicCid || cid == kVoidCid) {
      return unwrapped_type.ptr();
    }
    if (cid == kInstanceCid) {
      if (IsNonNullable()) {
        return unwrapped_type.ptr();
      }
      if (IsNullable() || unwrapped_type.IsNullable()) {
        return Type::Cast(unwrapped_type)
            .ToNullability(Nullability::kNullable, space);
      }
      return Type::Cast(unwrapped_type)
          .ToNullability(Nullability::kLegacy, space);
    }
    if (cid == kNeverCid && unwrapped_type.IsNonNullable()) {
      ObjectStore* object_store = IsolateGroup::Current()->object_store();
      const Type& future_never_type =
          Type::Handle(zone, object_store->non_nullable_future_never_type());
      ASSERT(!future_never_type.IsNull());
      return future_never_type.ToNullability(nullability(), space);
    }
    if (cid == kNullCid) {
      ObjectStore* object_store = IsolateGroup::Current()->object_store();
      ASSERT(object_store->nullable_future_null_type() != Type::null());
      return object_store->nullable_future_null_type();
    }
    if (IsNullable() && unwrapped_type.IsNullable()) {
      return Type::Cast(*this).ToNullability(Nullability::kNonNullable, space);
    }
  }
  return ptr();
}

bool AbstractType::IsInstantiated(Genericity genericity,
                                  intptr_t num_free_fun_type_params) const {
  // All subclasses should implement this appropriately, so the only value that
  // should reach this implementation should be the null value.
  ASSERT(IsNull());
  // AbstractType is an abstract class.
  UNREACHABLE();
  return false;
}

void AbstractType::SetIsFinalized() const {
  ASSERT(!IsFinalized());
  set_type_state(IsInstantiated()
                     ? UntaggedAbstractType::kFinalizedInstantiated
                     : UntaggedAbstractType::kFinalizedUninstantiated);
}

void AbstractType::set_flags(uint32_t value) const {
  untag()->set_flags(value);
}

void AbstractType::set_type_state(UntaggedAbstractType::TypeState value) const {
  ASSERT(!IsCanonical());
  set_flags(
      UntaggedAbstractType::TypeStateBits::update(value, untag()->flags()));
}

void AbstractType::set_nullability(Nullability value) const {
  ASSERT(!IsCanonical());
  set_flags(UntaggedAbstractType::NullabilityBits::update(
      static_cast<uint8_t>(value), untag()->flags()));
}

bool AbstractType::IsEquivalent(
    const Instance& other,
    TypeEquality kind,
    FunctionTypeMapping* function_type_equivalence) const {
  // All subclasses should implement this appropriately, so the only value that
  // should reach this implementation should be the null value.
  ASSERT(IsNull());
  // AbstractType is an abstract class.
  UNREACHABLE();
  return false;
}

bool AbstractType::IsNullabilityEquivalent(Thread* thread,
                                           const AbstractType& other_type,
                                           TypeEquality kind) const {
  Nullability this_type_nullability = nullability();
  Nullability other_type_nullability = other_type.nullability();
  if (kind == TypeEquality::kInSubtypeTest) {
    if (thread->isolate_group()->use_strict_null_safety_checks() &&
        this_type_nullability == Nullability::kNullable &&
        other_type_nullability == Nullability::kNonNullable) {
      return false;
    }
  } else {
    if (kind == TypeEquality::kSyntactical) {
      if (this_type_nullability == Nullability::kLegacy) {
        this_type_nullability = Nullability::kNonNullable;
      }
      if (other_type_nullability == Nullability::kLegacy) {
        other_type_nullability = Nullability::kNonNullable;
      }
    } else {
      ASSERT(kind == TypeEquality::kCanonical);
    }
    if (this_type_nullability != other_type_nullability) {
      return false;
    }
  }
  return true;
}

bool AbstractType::RequireConstCanonicalTypeErasure(Zone* zone) const {
  // All subclasses should implement this appropriately, so the only value that
  // should reach this implementation should be the null value.
  ASSERT(IsNull());
  // AbstractType is an abstract class.
  UNREACHABLE();
  return false;
}

AbstractTypePtr AbstractType::InstantiateFrom(
    const TypeArguments& instantiator_type_arguments,
    const TypeArguments& function_type_arguments,
    intptr_t num_free_fun_type_params,
    Heap::Space space,
    FunctionTypeMapping* function_type_mapping,
    intptr_t num_parent_type_args_adjustment) const {
  // All subclasses should implement this appropriately, so the only value that
  // should reach this implementation should be the null value.
  ASSERT(IsNull());
  // AbstractType is an abstract class.
  UNREACHABLE();
  return nullptr;
}

AbstractTypePtr AbstractType::UpdateFunctionTypes(
    intptr_t num_parent_type_args_adjustment,
    intptr_t num_free_fun_type_params,
    Heap::Space space,
    FunctionTypeMapping* function_type_mapping) const {
  UNREACHABLE();
  return nullptr;
}

AbstractTypePtr AbstractType::Canonicalize(Thread* thread) const {
  // All subclasses should implement this appropriately, so the only value that
  // should reach this implementation should be the null value.
  ASSERT(IsNull());
  // AbstractType is an abstract class.
  UNREACHABLE();
  return nullptr;
}

void AbstractType::EnumerateURIs(URIs* uris) const {
  // All subclasses should implement this appropriately, so the only value that
  // should reach this implementation should be the null value.
  ASSERT(IsNull());
  // AbstractType is an abstract class.
  UNREACHABLE();
}

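// The 'uris' array below is a flat list of (name, uri, print-flag) triples,
// which is why it is traversed in steps of three. If the same class name is
// added from two different libraries, both triples are flagged with the
// print marker so that PrintURIs can emit both URIs and disambiguate the
// name in error messages.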
void AbstractType::AddURI(URIs* uris, const String& name, const String& uri) {
  ASSERT(uris != nullptr);
  const intptr_t len = uris->length();
  ASSERT((len % 3) == 0);
  bool print_uri = false;
  for (intptr_t i = 0; i < len; i += 3) {
    if (uris->At(i).Equals(name)) {
      if (uris->At(i + 1).Equals(uri)) {
        // Same name and same URI: no need to add this already listed URI.
        return;  // No state change is possible.
      } else {
        // Same name and different URI: the name is ambiguous, print both URIs.
        print_uri = true;
        uris->SetAt(i + 2, Symbols::print());
      }
    }
  }
  uris->Add(name);
  uris->Add(uri);
  if (print_uri) {
    uris->Add(Symbols::print());
  } else {
    uris->Add(Symbols::Empty());
  }
}

StringPtr AbstractType::PrintURIs(URIs* uris) {
  ASSERT(uris != nullptr);
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  const intptr_t len = uris->length();
  ASSERT((len % 3) == 0);
  GrowableHandlePtrArray<const String> pieces(zone, 5 * (len / 3));
  for (intptr_t i = 0; i < len; i += 3) {
    // Only print URIs that have been marked.
    if (uris->At(i + 2).ptr() == Symbols::print().ptr()) {
      pieces.Add(Symbols::TwoSpaces());
      pieces.Add(uris->At(i));
      pieces.Add(Symbols::SpaceIsFromSpace());
      pieces.Add(uris->At(i + 1));
      pieces.Add(Symbols::NewLine());
    }
  }
  return Symbols::FromConcatAll(thread, pieces);
}

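// Examples (illustrative): `int?` gets the "?" suffix and `int` gets none;
// a legacy `int*` gets the "*" suffix only in internal names or when
// --show_internal_names is given, so user-visible output hides legacy
// nullability.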
const char* AbstractType::NullabilitySuffix(
    NameVisibility name_visibility) const {
  if (IsDynamicType() || IsVoidType() || IsNullType()) {
    // Hide nullable suffix.
    return "";
  }
  // Keep in sync with Nullability enum in runtime/vm/object.h.
  switch (nullability()) {
    case Nullability::kNullable:
      return "?";
    case Nullability::kNonNullable:
      return "";
    case Nullability::kLegacy:
      return (FLAG_show_internal_names || name_visibility != kUserVisibleName)
                 ? "*"
                 : "";
    default:
      UNREACHABLE();
  }
}

StringPtr AbstractType::Name() const {
  Thread* thread = Thread::Current();
  ZoneTextBuffer printer(thread->zone());
  PrintName(kInternalName, &printer);
  return Symbols::New(thread, printer.buffer());
}

StringPtr AbstractType::UserVisibleName() const {
  Thread* thread = Thread::Current();
  ZoneTextBuffer printer(thread->zone());
  PrintName(kUserVisibleName, &printer);
  return Symbols::New(thread, printer.buffer());
}

StringPtr AbstractType::ScrubbedName() const {
  Thread* thread = Thread::Current();
  ZoneTextBuffer printer(thread->zone());
  PrintName(kScrubbedName, &printer);
  return Symbols::New(thread, printer.buffer());
}

void AbstractType::PrintName(NameVisibility name_visibility,
                             BaseTextBuffer* printer) const {
  // All subclasses should implement this appropriately, so the only value that
  // should reach this implementation should be the null value.
  ASSERT(IsNull());
  // AbstractType is an abstract class.
  UNREACHABLE();
}

StringPtr AbstractType::ClassName() const {
  ASSERT(!IsFunctionType() && !IsRecordType());
  return Class::Handle(type_class()).Name();
}

bool AbstractType::IsNullType() const {
  return type_class_id() == kNullCid;
}

bool AbstractType::IsNeverType() const {
  return type_class_id() == kNeverCid;
}

bool AbstractType::IsSentinelType() const {
  return type_class_id() == kSentinelCid;
}

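// Illustrative consequences of the top-type predicates below: `x is Object?`,
// `x is dynamic`, and `x is FutureOr<void>` are true for every value of x,
// while `x is Object` with non-nullable Object is still false for null.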
bool AbstractType::IsTopTypeForInstanceOf() const {
  const classid_t cid = type_class_id();
  if (cid == kDynamicCid || cid == kVoidCid) {
    return true;
  }
  if (cid == kInstanceCid) {  // Object type.
    return !IsNonNullable();  // kLegacy or kNullable.
  }
  if (cid == kFutureOrCid) {
    // FutureOr<T> where T is a top type behaves as a top type.
    return AbstractType::Handle(UnwrapFutureOr()).IsTopTypeForInstanceOf();
  }
  return false;
}

// Must be kept in sync with GenerateTypeIsTopTypeForSubtyping in
// stub_code_compiler.cc if any changes are made.
bool AbstractType::IsTopTypeForSubtyping() const {
  const classid_t cid = type_class_id();
  if (cid == kDynamicCid || cid == kVoidCid) {
    return true;
  }
  if (cid == kInstanceCid) {  // Object type.
    // NNBD weak mode uses LEGACY_SUBTYPE for assignability / 'as' tests,
    // and non-nullable Object is a top type according to LEGACY_SUBTYPE.
    return !IsNonNullable() ||
           !IsolateGroup::Current()->use_strict_null_safety_checks();
  }
  if (cid == kFutureOrCid) {
    // FutureOr<T> where T is a top type behaves as a top type.
    return AbstractType::Handle(UnwrapFutureOr()).IsTopTypeForSubtyping();
  }
  return false;
}

bool AbstractType::IsIntType() const {
  return HasTypeClass() &&
         (type_class() == Type::Handle(Type::IntType()).type_class());
}

bool AbstractType::IsIntegerImplementationType() const {
  return HasTypeClass() &&
         (type_class() == IsolateGroup::Current()
                              ->object_store()
                              ->integer_implementation_class());
}

bool AbstractType::IsDoubleType() const {
  return HasTypeClass() &&
         (type_class() == Type::Handle(Type::Double()).type_class());
}

bool AbstractType::IsFloat32x4Type() const {
  // kFloat32x4Cid refers to the private class and cannot be used here.
  return HasTypeClass() &&
         (type_class() == Type::Handle(Type::Float32x4()).type_class());
}

bool AbstractType::IsFloat64x2Type() const {
  // kFloat64x2Cid refers to the private class and cannot be used here.
  return HasTypeClass() &&
         (type_class() == Type::Handle(Type::Float64x2()).type_class());
}

bool AbstractType::IsInt32x4Type() const {
  // kInt32x4Cid refers to the private class and cannot be used here.
  return HasTypeClass() &&
         (type_class() == Type::Handle(Type::Int32x4()).type_class());
}

bool AbstractType::IsStringType() const {
  return HasTypeClass() &&
         (type_class() == Type::Handle(Type::StringType()).type_class());
}

bool AbstractType::IsDartFunctionType() const {
  return HasTypeClass() &&
         (type_class() == Type::Handle(Type::DartFunctionType()).type_class());
}

bool AbstractType::IsDartClosureType() const {
  return (type_class_id() == kClosureCid);
}

bool AbstractType::IsDartRecordType() const {
  if (!HasTypeClass()) return false;
  const auto cid = type_class_id();
  return ((cid == kRecordCid) ||
          (cid == Class::Handle(
                      IsolateGroup::Current()->object_store()->record_class())
                      .id()));
}

bool AbstractType::IsFfiPointerType() const {
  return HasTypeClass() && type_class_id() == kPointerCid;
}

bool AbstractType::IsTypeClassAllowedBySpawnUri() const {
  if (!HasTypeClass()) return false;

  intptr_t cid = type_class_id();

  if (cid == kBoolCid) return true;
  if (cid == kDynamicCid) return true;
  if (cid == kInstanceCid) return true;  // Object.
  if (cid == kNeverCid) return true;
  if (cid == kNullCid) return true;
  if (cid == kVoidCid) return true;

  // These are not constant CID checks because kDoubleCid refers to _Double,
  // not double, etc.
  ObjectStore* object_store = IsolateGroup::Current()->object_store();
  Type& candidate_type = Type::Handle();
  candidate_type = object_store->int_type();
  if (cid == candidate_type.type_class_id()) return true;
  candidate_type = object_store->double_type();
  if (cid == candidate_type.type_class_id()) return true;
  candidate_type = object_store->number_type();
  if (cid == candidate_type.type_class_id()) return true;
  candidate_type = object_store->string_type();
  if (cid == candidate_type.type_class_id()) return true;

  Class& candidate_cls = Class::Handle();
  candidate_cls = object_store->list_class();
  if (cid == candidate_cls.id()) return true;
  candidate_cls = object_store->map_class();
  if (cid == candidate_cls.id()) return true;
  candidate_cls = object_store->set_class();
  if (cid == candidate_cls.id()) return true;
  candidate_cls = object_store->capability_class();
  if (cid == candidate_cls.id()) return true;
  candidate_cls = object_store->send_port_class();
  if (cid == candidate_cls.id()) return true;
  candidate_cls = object_store->transferable_class();
  if (cid == candidate_cls.id()) return true;

  return false;
}

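// Examples for UnwrapFutureOr below (illustrative): FutureOr<int> unwraps to
// int, nesting unwraps transitively so FutureOr<FutureOr<String>> unwraps to
// String, and a raw FutureOr with no type arguments unwraps to dynamic.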
AbstractTypePtr AbstractType::UnwrapFutureOr() const {
  if (!IsFutureOrType()) {
    return ptr();
  }
  if (arguments() == TypeArguments::null()) {
    return Type::dynamic_type().ptr();
  }
  Thread* thread = Thread::Current();
  REUSABLE_TYPE_ARGUMENTS_HANDLESCOPE(thread);
  TypeArguments& type_args = thread->TypeArgumentsHandle();
  type_args = arguments();
  REUSABLE_ABSTRACT_TYPE_HANDLESCOPE(thread);
  AbstractType& type_arg = thread->AbstractTypeHandle();
  type_arg = type_args.TypeAt(0);
  while (type_arg.IsFutureOrType()) {
    if (type_arg.arguments() == TypeArguments::null()) {
      return Type::dynamic_type().ptr();
    }
    type_args = type_arg.arguments();
    type_arg = type_args.TypeAt(0);
  }
  return type_arg.ptr();
}

bool AbstractType::NeedsNullAssertion() const {
  if (!IsNonNullable()) {
    return false;
  }
  if (IsTypeParameter()) {
    return AbstractType::Handle(TypeParameter::Cast(*this).bound())
        .NeedsNullAssertion();
  }
  if (IsFutureOrType()) {
    return AbstractType::Handle(UnwrapFutureOr()).NeedsNullAssertion();
  }
  return true;
}

bool AbstractType::IsSubtypeOf(
    const AbstractType& other,
    Heap::Space space,
    FunctionTypeMapping* function_type_equivalence) const {
  TRACE_TYPE_CHECKS_VERBOSE("  AbstractType::IsSubtypeOf(%s, %s)\n",
                            ToCString(), other.ToCString());
  ASSERT(IsFinalized());
  ASSERT(other.IsFinalized());
  // Reflexivity.
  if (ptr() == other.ptr()) {
    TRACE_TYPE_CHECKS_VERBOSE("   - result: true (same types)\n");
    return true;
  }
  // Right top type.
  if (other.IsTopTypeForSubtyping()) {
    TRACE_TYPE_CHECKS_VERBOSE("   - result: true (right is top)\n");
    return true;
  }
  // Left bottom type.
  // Any form of Never in weak mode maps to Null and Null is a bottom type in
  // weak mode. In strong mode, Never and Never* are bottom types. Therefore,
  // Never and Never* are bottom types regardless of weak/strong mode.
  // Note that we cannot encounter Never?, as it is normalized to Null.
  if (IsNeverType()) {
    ASSERT(!IsNullable());
    TRACE_TYPE_CHECKS_VERBOSE("   - result: true (left is Never)\n");
    return true;
  }
  // Left top type.
  if (IsDynamicType() || IsVoidType()) {
    TRACE_TYPE_CHECKS_VERBOSE("   - result: false (left is top)\n");
    return false;
  }
  // Left Null type.
  if (IsNullType()) {
    const bool result = Instance::NullIsAssignableTo(other);
    TRACE_TYPE_CHECKS_VERBOSE("   - result: %s (left is Null)\n",
                              (result ? "true" : "false"));
    return result;
  }
  Thread* thread = Thread::Current();
  auto isolate_group = thread->isolate_group();
  Zone* zone = thread->zone();
  // Type parameters cannot be handled by Class::IsSubtypeOf().
  // When comparing two uninstantiated function types, one returning type
  // parameter K, the other returning type parameter V, we cannot assume that
  // K is a subtype of V, or vice versa. We only return true if K equals V, as
  // defined by TypeParameter::Equals.
  // The same rule applies when checking the upper bound of a still
  // uninstantiated type at compile time. Returning false will defer the test
  // to run time.
  // There are however some cases that can be decided at compile time.
  // For example, with class A<K, V extends K>, new A<T, T> called from within
  // a class B<T> will never require a run time bound check, even if T is
  // uninstantiated at compile time.
  if (IsTypeParameter()) {
    const TypeParameter& type_param = TypeParameter::Cast(*this);
    if (other.IsTypeParameter()) {
      const TypeParameter& other_type_param = TypeParameter::Cast(other);
      if (type_param.IsEquivalent(other_type_param,
                                  TypeEquality::kInSubtypeTest,
                                  function_type_equivalence)) {
        TRACE_TYPE_CHECKS_VERBOSE(
            "   - result: true (equivalent type parameters)\n");
        return true;
      }
    }
    const AbstractType& bound = AbstractType::Handle(zone, type_param.bound());
    ASSERT(bound.IsFinalized());
    if (bound.IsSubtypeOf(other, space, function_type_equivalence)) {
      TRACE_TYPE_CHECKS_VERBOSE("   - result: true (bound is a subtype)\n");
      return true;
    }
    // Apply additional subtyping rules if 'other' is 'FutureOr'.
    if (IsSubtypeOfFutureOr(zone, other, space, function_type_equivalence)) {
      TRACE_TYPE_CHECKS_VERBOSE(
          "   - result: true (type parameter is a subtype of FutureOr)\n");
      return true;
    }
    TRACE_TYPE_CHECKS_VERBOSE(
        "   - result: false (left is a type parameter)\n");
    return false;
  }
  if (other.IsTypeParameter()) {
    TRACE_TYPE_CHECKS_VERBOSE(
        "   - result: false (right is a type parameter)\n");
    return false;
  }
  // Function types cannot be handled by Class::IsSubtypeOf().
  if (IsFunctionType()) {
    // Any type that can be the type of a closure is a subtype of Function or
    // non-nullable Object.
    if (other.IsObjectType() || other.IsDartFunctionType()) {
      const bool result = !isolate_group->use_strict_null_safety_checks() ||
                          !IsNullable() || !other.IsNonNullable();
      TRACE_TYPE_CHECKS_VERBOSE("   - result: %s (function vs non-function)\n",
                                (result ? "true" : "false"));
      return result;
    }
    if (other.IsFunctionType()) {
      // Check for two function types.
      if (isolate_group->use_strict_null_safety_checks() && IsNullable() &&
          other.IsNonNullable()) {
        TRACE_TYPE_CHECKS_VERBOSE(
            "   - result: false (function nullability)\n");
        return false;
      }
      const bool result = FunctionType::Cast(*this).IsSubtypeOf(
          FunctionType::Cast(other), space, function_type_equivalence);
      TRACE_TYPE_CHECKS_VERBOSE("   - result: %s (function types)\n",
                                (result ? "true" : "false"));
      return result;
    }
    // Apply additional subtyping rules if 'other' is 'FutureOr'.
    if (IsSubtypeOfFutureOr(zone, other, space, function_type_equivalence)) {
      TRACE_TYPE_CHECKS_VERBOSE(
          "   - result: true (function type is a subtype of FutureOr)\n");
      return true;
    }
    // All possible supertypes for FunctionType have been checked.
    TRACE_TYPE_CHECKS_VERBOSE("   - result: false (function type)\n");
    return false;
  } else if (other.IsFunctionType()) {
    // FunctionTypes can only be subtyped by other FunctionTypes, so don't
    // fall through to class-based type tests.
    TRACE_TYPE_CHECKS_VERBOSE(
        "   - result: false (right is a function type)\n");
    return false;
  }
  // Record types cannot be handled by Class::IsSubtypeOf().
  if (IsRecordType()) {
    if (other.IsObjectType() || other.IsDartRecordType()) {
      const bool result = !isolate_group->use_strict_null_safety_checks() ||
                          !IsNullable() || !other.IsNonNullable();
      TRACE_TYPE_CHECKS_VERBOSE("   - result: %s (record vs non-record)\n",
                                (result ? "true" : "false"));
      return result;
    }
    if (other.IsRecordType()) {
      // Check for two record types.
      if (isolate_group->use_strict_null_safety_checks() && IsNullable() &&
          other.IsNonNullable()) {
        TRACE_TYPE_CHECKS_VERBOSE("   - result: false (record nullability)\n");
        return false;
      }
      const bool result = RecordType::Cast(*this).IsSubtypeOf(
          RecordType::Cast(other), space, function_type_equivalence);
      TRACE_TYPE_CHECKS_VERBOSE("   - result: %s (record types)\n",
                                (result ? "true" : "false"));
      return result;
    }
    // Apply additional subtyping rules if 'other' is 'FutureOr'.
    if (IsSubtypeOfFutureOr(zone, other, space, function_type_equivalence)) {
      TRACE_TYPE_CHECKS_VERBOSE(
          "   - result: true (record type is a subtype of FutureOr)\n");
      return true;
    }
    // All possible supertypes for record type have been checked.
    TRACE_TYPE_CHECKS_VERBOSE("   - result: false (record type)\n");
    return false;
  } else if (other.IsRecordType()) {
    // RecordTypes can only be subtyped by other RecordTypes, so don't
    // fall through to class-based type tests.
    TRACE_TYPE_CHECKS_VERBOSE("   - result: false (right is a record type)\n");
    return false;
  }
  ASSERT(IsType());
  const Class& type_cls = Class::Handle(zone, type_class());
  const bool result = Class::IsSubtypeOf(
      type_cls,
      TypeArguments::Handle(zone, Type::Cast(*this).GetInstanceTypeArguments(
                                      thread, /*canonicalize=*/false)),
      nullability(), other, space, function_type_equivalence);
  TRACE_TYPE_CHECKS_VERBOSE("   - result: %s (class type check)\n",
                            (result ? "true" : "false"));
  return result;
}

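// Example for the FutureOr rule below (illustrative): `void Function()` is a
// subtype of FutureOr<Function>; the receiver is a function type rather than
// a Future, so the check reduces to `void Function()` <: Function after
// unwrapping the FutureOr type argument.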
bool AbstractType::IsSubtypeOfFutureOr(
    Zone* zone,
    const AbstractType& other,
    Heap::Space space,
    FunctionTypeMapping* function_type_equivalence) const {
  if (other.IsFutureOrType()) {
    // This function is only called with a receiver that is either a function
    // type, record type, or an uninstantiated type parameter.
    // Therefore, it cannot be of class Future and we can spare the check.
    ASSERT(IsFunctionType() || IsRecordType() || IsTypeParameter());
    const TypeArguments& other_type_arguments =
        TypeArguments::Handle(zone, other.arguments());
    const AbstractType& other_type_arg =
        AbstractType::Handle(zone, other_type_arguments.TypeAtNullSafe(0));
    if (other_type_arg.IsTopTypeForSubtyping()) {
      return true;
    }
    // Retry the IsSubtypeOf check after unwrapping type arg of FutureOr.
    if (IsSubtypeOf(other_type_arg, space, function_type_equivalence)) {
      return true;
    }
  }
  return false;
}

uword AbstractType::ComputeHash() const {
  // AbstractType is an abstract class.
  UNREACHABLE();
  return 0;
}

const char* AbstractType::ToCString() const {
  // All subclasses should implement this appropriately, so the only value that
  // should reach this implementation should be the null value.
  ASSERT(IsNull());
  return "AbstractType: null";
}

void AbstractType::SetTypeTestingStub(const Code& stub) const {
  if (stub.IsNull()) {
    InitializeTypeTestingStubNonAtomic(stub);
    return;
  }

  auto& old = Code::Handle(Thread::Current()->zone());
  while (true) {
    // We load the old TTS and its entry point.
    old = untag()->type_test_stub<std::memory_order_acquire>();
    uword old_entry_point = old.IsNull() ? 0 : old.EntryPoint();

    // If we can successfully update the entry point of the TTS, we will
    // unconditionally also set the [Code] of the TTS.
    //
    // Any competing writer would do the same, lose the compare-exchange, loop
    // around and continue loading the old [Code] TTS and continue to lose the
    // race until we have finally also updated the [Code] TTS.
    if (untag()->type_test_stub_entry_point_.compare_exchange_strong(
            old_entry_point, stub.EntryPoint())) {
      untag()->set_type_test_stub<std::memory_order_release>(stub.ptr());
      return;
    }
  }
}

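// Note on memory ordering in SetTypeTestingStub above (explanatory note on
// the apparent intent, not from the original source): the release store of
// the [Code] object pairs with the acquire load at the top of the loop, so a
// reader that observes the new code also observes the entry point written
// before it. The non-atomic variant below skips this protocol and is only
// safe while the type is not yet visible to other threads.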
void AbstractType::InitializeTypeTestingStubNonAtomic(const Code& stub) const {
  if (stub.IsNull()) {
    // This only happens during bootstrapping when creating Type objects before
    // we have the instructions.
    ASSERT(type_class_id() == kDynamicCid || type_class_id() == kVoidCid);
    StoreNonPointer(&untag()->type_test_stub_entry_point_, 0);
    untag()->set_type_test_stub(stub.ptr());
    return;
  }

  StoreNonPointer(&untag()->type_test_stub_entry_point_, stub.EntryPoint());
  untag()->set_type_test_stub(stub.ptr());
}

TypePtr Type::NullType() {
  return IsolateGroup::Current()->object_store()->null_type();
}

TypePtr Type::DynamicType() {
  return Object::dynamic_type().ptr();
}

TypePtr Type::VoidType() {
  return Object::void_type().ptr();
}

TypePtr Type::NeverType() {
  return IsolateGroup::Current()->object_store()->never_type();
}

TypePtr Type::ObjectType() {
  return IsolateGroup::Current()->object_store()->object_type();
}

TypePtr Type::BoolType() {
  return IsolateGroup::Current()->object_store()->bool_type();
}

TypePtr Type::IntType() {
  return IsolateGroup::Current()->object_store()->int_type();
}

TypePtr Type::NullableIntType() {
  return IsolateGroup::Current()->object_store()->nullable_int_type();
}

TypePtr Type::SmiType() {
  return IsolateGroup::Current()->object_store()->smi_type();
}

TypePtr Type::MintType() {
  return IsolateGroup::Current()->object_store()->mint_type();
}

TypePtr Type::Double() {
  return IsolateGroup::Current()->object_store()->double_type();
}

TypePtr Type::NullableDouble() {
  return IsolateGroup::Current()->object_store()->nullable_double_type();
}

TypePtr Type::Float32x4() {
  return IsolateGroup::Current()->object_store()->float32x4_type();
}

TypePtr Type::Float64x2() {
  return IsolateGroup::Current()->object_store()->float64x2_type();
}

TypePtr Type::Int32x4() {
  return IsolateGroup::Current()->object_store()->int32x4_type();
}

TypePtr Type::Number() {
  return IsolateGroup::Current()->object_store()->number_type();
}

TypePtr Type::StringType() {
  return IsolateGroup::Current()->object_store()->string_type();
}

TypePtr Type::ArrayType() {
  return IsolateGroup::Current()->object_store()->array_type();
}

TypePtr Type::DartFunctionType() {
  return IsolateGroup::Current()->object_store()->function_type();
}

TypePtr Type::DartTypeType() {
  return IsolateGroup::Current()->object_store()->type_type();
}

TypePtr Type::NewNonParameterizedType(const Class& type_class) {
  ASSERT(type_class.NumTypeArguments() == 0);
  if (type_class.IsNullClass()) {
    return Type::NullType();
  }
  if (type_class.IsDynamicClass()) {
    return Type::DynamicType();
  }
  if (type_class.IsVoidClass()) {
    return Type::VoidType();
  }
  // It is too early to use the class finalizer, as type_class may not be named
  // yet, so do not call DeclarationType().
  Type& type = Type::Handle(type_class.declaration_type());
  if (type.IsNull()) {
    type = Type::New(Class::Handle(type_class.ptr()),
                     Object::null_type_arguments(), Nullability::kNonNullable);
    type.SetIsFinalized();
    type ^= type.Canonicalize(Thread::Current());
    type_class.set_declaration_type(type);
  }
  ASSERT(type.IsFinalized());
  return type.ptr();
}

TypePtr Type::ToNullability(Nullability value, Heap::Space space) const {
  if (nullability() == value) {
    return ptr();
  }
  // Type parameter instantiation may request a nullability change, which
  // should be ignored for types dynamic and void. Type Null cannot be the
  // result of instantiating a non-nullable type parameter (TypeError thrown).
  const classid_t cid = type_class_id();
  if (cid == kDynamicCid || cid == kVoidCid || cid == kNullCid) {
    return ptr();
  }
  if (cid == kNeverCid && value == Nullability::kNullable) {
    // Normalize Never? to Null.
    return Type::NullType();
  }
  // Clone type and set new nullability.
  Type& type = Type::Handle();
  // Always cloning in old space and removing space parameter would not satisfy
  // currently existing requests for type instantiation in new space.
  // Load with relaxed atomics to prevent data race with updating type
  // testing stub.
  type ^= Object::Clone(*this, space, /*load_with_relaxed_atomics=*/true);
  type.set_nullability(value);
  type.SetHash(0);
  type.InitializeTypeTestingStubNonAtomic(
      Code::Handle(TypeTestingStubGenerator::DefaultCodeForType(type)));
  if (IsCanonical()) {
    // Object::Clone does not clone canonical bit.
    ASSERT(!type.IsCanonical());
    type ^= type.Canonicalize(Thread::Current());
  }
  return type.ptr();
}

FunctionTypePtr FunctionType::ToNullability(Nullability value,
                                            Heap::Space space) const {
  if (nullability() == value) {
    return ptr();
  }
  // Clone function type and set new nullability.
  FunctionType& type = FunctionType::Handle(FunctionType::Clone(*this, space));
  type.set_nullability(value);
  type.SetHash(0);
  type.InitializeTypeTestingStubNonAtomic(
      Code::Handle(TypeTestingStubGenerator::DefaultCodeForType(type)));
  if (IsCanonical()) {
    // Object::Clone does not clone canonical bit.
    ASSERT(!type.IsCanonical());
    type ^= type.Canonicalize(Thread::Current());
  }
  return type.ptr();
}

classid_t Type::type_class_id() const {
  return untag()->type_class_id();
}

ClassPtr Type::type_class() const {
  return IsolateGroup::Current()->class_table()->At(type_class_id());
}

bool Type::IsInstantiated(Genericity genericity,
                          intptr_t num_free_fun_type_params) const {
  if (type_state() == UntaggedType::kFinalizedInstantiated) {
    return true;
  }
  if ((genericity == kAny) && (num_free_fun_type_params == kAllFree) &&
      (type_state() == UntaggedType::kFinalizedUninstantiated)) {
    return false;
  }
  if (arguments() == TypeArguments::null()) {
    return true;
  }
  const TypeArguments& args = TypeArguments::Handle(arguments());
  return args.IsSubvectorInstantiated(0, args.Length(), genericity,
                                      num_free_fun_type_params);
}

AbstractTypePtr Type::InstantiateFrom(
    const TypeArguments& instantiator_type_arguments,
    const TypeArguments& function_type_arguments,
    intptr_t num_free_fun_type_params,
    Heap::Space space,
    FunctionTypeMapping* function_type_mapping,
    intptr_t num_parent_type_args_adjustment) const {
  Zone* zone = Thread::Current()->zone();
  ASSERT(IsFinalized());
  ASSERT(!IsInstantiated());
  // Note that the type class has to be resolved at this time, but not
  // necessarily finalized yet. We may be checking bounds at compile time or
  // finalizing the type argument vector of a recursive type.
  const Class& cls = Class::Handle(zone, type_class());
  TypeArguments& type_arguments = TypeArguments::Handle(zone, arguments());
  ASSERT(type_arguments.Length() == cls.NumTypeParameters());
  type_arguments = type_arguments.InstantiateFrom(
      instantiator_type_arguments, function_type_arguments,
      num_free_fun_type_params, space, function_type_mapping,
      num_parent_type_args_adjustment);
  // A returned empty_type_arguments indicates a failed instantiation in dead
  // code that must be propagated up to the caller, the optimizing compiler.
  if (type_arguments.ptr() == Object::empty_type_arguments().ptr()) {
    return Type::null();
  }
  // This uninstantiated type is not modified, as it can be instantiated
  // with different instantiators. Allocate a new instantiated version of it.
  const Type& instantiated_type =
      Type::Handle(zone, Type::New(cls, type_arguments, nullability(), space));
  instantiated_type.SetIsFinalized();
  // Canonicalization is not part of instantiation.
  return instantiated_type.NormalizeFutureOrType(space);
}

AbstractTypePtr Type::UpdateFunctionTypes(
    intptr_t num_parent_type_args_adjustment,
    intptr_t num_free_fun_type_params,
    Heap::Space space,
    FunctionTypeMapping* function_type_mapping) const {
  ASSERT(IsFinalized());
  ASSERT(num_parent_type_args_adjustment >= 0);
  if (arguments() == Object::null()) {
    return ptr();
  }
  Zone* zone = Thread::Current()->zone();
  const auto& type_args = TypeArguments::Handle(zone, arguments());
  const auto& updated_type_args = TypeArguments::Handle(
      zone, type_args.UpdateFunctionTypes(num_parent_type_args_adjustment,
                                          num_free_fun_type_params, space,
                                          function_type_mapping));
  if (type_args.ptr() == updated_type_args.ptr()) {
    return ptr();
  }
  const Class& cls = Class::Handle(zone, type_class());
  const Type& new_type = Type::Handle(
      zone, Type::New(cls, updated_type_args, nullability(), space));
  new_type.SetIsFinalized();
  return new_type.ptr();
}

// Certain built-in classes are treated as syntactically equivalent.
static classid_t NormalizeClassIdForSyntacticalTypeEquality(classid_t cid) {
  if (IsIntegerClassId(cid)) {
    return Type::Handle(Type::IntType()).type_class_id();
  } else if (IsStringClassId(cid)) {
    return Type::Handle(Type::StringType()).type_class_id();
  } else if (cid == kDoubleCid) {
    return Type::Handle(Type::Double()).type_class_id();
  } else if (IsTypeClassId(cid)) {
    return Type::Handle(Type::DartTypeType()).type_class_id();
  } else if (IsArrayClassId(cid)) {
    return Class::Handle(IsolateGroup::Current()->object_store()->list_class())
        .id();
  }
  return cid;
}

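// E.g. (illustrative): NormalizeClassIdForSyntacticalTypeEquality maps the
// internal _Smi and _Mint cids to the cid of `int` and the internal string
// cids to the cid of `String`, which Type::IsEquivalent below relies on so
// that `_Smi` and `int` compare equal under kSyntactical equality even
// though their cids differ.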
bool Type::IsEquivalent(const Instance& other,
                        TypeEquality kind,
                        FunctionTypeMapping* function_type_equivalence) const {
  ASSERT(!IsNull());
  if (ptr() == other.ptr()) {
    return true;
  }
  if (!other.IsType()) {
    return false;
  }
  const Type& other_type = Type::Cast(other);
  const classid_t type_cid = type_class_id();
  const classid_t other_type_cid = other_type.type_class_id();
  if (type_cid != other_type_cid) {
    if ((kind != TypeEquality::kSyntactical) ||
        (NormalizeClassIdForSyntacticalTypeEquality(type_cid) !=
         NormalizeClassIdForSyntacticalTypeEquality(other_type_cid))) {
      return false;
    }
  }
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  ASSERT(
      Class::Handle(zone, type_class()).NumTypeParameters(thread) ==
      Class::Handle(zone, other_type.type_class()).NumTypeParameters(thread));

  if (!IsNullabilityEquivalent(thread, other_type, kind)) {
    return false;
  }
  if (!IsFinalized() || !other_type.IsFinalized()) {
    ASSERT(kind != TypeEquality::kCanonical);
    return false;  // Too early to decide if equal.
  }
  if (arguments() == other_type.arguments()) {
    return true;
  }
  const TypeArguments& type_args =
      TypeArguments::Handle(zone, this->arguments());
  const TypeArguments& other_type_args =
      TypeArguments::Handle(zone, other_type.arguments());
  return type_args.IsEquivalent(other_type_args, kind,
                                function_type_equivalence);
}

bool FunctionType::IsEquivalent(
    const Instance& other,
    TypeEquality kind,
    FunctionTypeMapping* function_type_equivalence) const {
  ASSERT(!IsNull());
  if (ptr() == other.ptr()) {
    return true;
  }
  if (!other.IsFunctionType()) {
    return false;
  }
  const FunctionType& other_type = FunctionType::Cast(other);
  if ((packed_parameter_counts() != other_type.packed_parameter_counts()) ||
      (packed_type_parameter_counts() !=
       other_type.packed_type_parameter_counts())) {
    // Different number of type parameters or parameters.
    return false;
  }
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  if (!IsNullabilityEquivalent(thread, other_type, kind)) {
    return false;
  }
  if (!IsFinalized() || !other_type.IsFinalized()) {
    ASSERT(kind != TypeEquality::kCanonical);
    return false;  // Too early to decide if equal.
  }
  FunctionTypeMapping scope(zone, &function_type_equivalence, *this,
                            other_type);

  // Equal function types must have equal signature types and equal optional
  // named arguments.
  // Compare function type parameters and their bounds.
  // Check the type parameters and bounds of generic functions.
  if (!HasSameTypeParametersAndBounds(other_type, kind,
                                      function_type_equivalence)) {
    return false;
  }
  AbstractType& param_type = Type::Handle(zone);
  AbstractType& other_param_type = Type::Handle(zone);
  // Check the result type.
  param_type = result_type();
  other_param_type = other_type.result_type();
  if (!param_type.IsEquivalent(other_param_type, kind,
                               function_type_equivalence)) {
    return false;
  }
  // Check the types of all parameters.
  const intptr_t num_params = NumParameters();
  ASSERT(other_type.NumParameters() == num_params);
  for (intptr_t i = 0; i < num_params; i++) {
    param_type = ParameterTypeAt(i);
    other_param_type = other_type.ParameterTypeAt(i);
    // Use contravariant order in case we test for subtyping.
    if (!other_param_type.IsEquivalent(param_type, kind,
                                       function_type_equivalence)) {
      return false;
    }
  }
  if (HasOptionalNamedParameters()) {
    ASSERT(other_type.HasOptionalNamedParameters());  // Same packed counts.
    for (intptr_t i = num_fixed_parameters(); i < num_params; i++) {
      if (ParameterNameAt(i) != other_type.ParameterNameAt(i)) {
        return false;
      }
      if (IsRequiredAt(i) != other_type.IsRequiredAt(i)) {
        return false;
      }
    }
  }
  return true;
}

bool Type::RequireConstCanonicalTypeErasure(Zone* zone) const {
  if (IsNonNullable()) {
    return true;
  }
  if (IsLegacy()) {
    // It is not possible for a legacy type parameter to have a non-nullable
    // bound or non-nullable default argument.
    return false;
  }
  const auto& type_args = TypeArguments::Handle(zone, this->arguments());
  return type_args.RequireConstCanonicalTypeErasure(zone, 0,
                                                    type_args.Length());
}

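// Illustrative note: for a non-generic class C, the non-nullable type `C` is
// its declaration type and can be cached on the class; `Null` is the
// declaration type of the Null class despite being nullable. Generic classes
// and the closure class are excluded because their declaration types carry
// type parameters.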
bool Type::IsDeclarationTypeOf(const Class& cls) const {
  ASSERT(type_class() == cls.ptr());
  if (cls.IsNullClass()) {
    return true;
  }
  if (cls.IsGeneric() || cls.IsClosureClass()) {
    return false;
  }
  return nullability() == Nullability::kNonNullable;
}

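// Canonicalization below uses a probe/recheck pattern: the canonical table is
// probed under the type_canonicalization_mutex, the lock is dropped while the
// type arguments are canonicalized (which can itself register this type), and
// the table is rechecked under the mutex before a clone is inserted. This
// guarantees a single canonical instance per type without holding the lock
// during recursive canonicalization.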
22453// Keep in sync with TypeSerializationCluster::IsInCanonicalSet.
22454AbstractTypePtr Type::Canonicalize(Thread* thread) const {
22455 Zone* zone = thread->zone();
22456 ASSERT(IsFinalized());
22457 if (IsCanonical()) {
22458#ifdef DEBUG
22459 TypeArguments& type_args = TypeArguments::Handle(zone, arguments());
22460 ASSERT(type_args.IsCanonical());
22461 ASSERT(type_args.IsOld());
22462#endif
22463 return this->ptr();
22464 }
22465 auto isolate_group = thread->isolate_group();
22466 const classid_t cid = type_class_id();
22467 if (cid == kDynamicCid) {
22468 ASSERT(Object::dynamic_type().IsCanonical());
22469 return Object::dynamic_type().ptr();
22470 }
22471
22472 if (cid == kVoidCid) {
22473 ASSERT(Object::void_type().IsCanonical());
22474 return Object::void_type().ptr();
22475 }
22476
22477 const Class& cls = Class::Handle(zone, ptr: type_class());
22478
22479 // Fast canonical lookup/registry for simple types.
22480 if (IsDeclarationTypeOf(cls)) {
22481 ASSERT(!cls.IsNullClass() || IsNullable());
22482 Type& type = Type::Handle(zone, ptr: cls.declaration_type());
22483 if (type.IsNull()) {
22484 ASSERT(!cls.ptr()->untag()->InVMIsolateHeap() ||
22485 (isolate_group == Dart::vm_isolate_group()));
22486 // Canonicalize the type arguments of the supertype, if any.
22487 TypeArguments& type_args = TypeArguments::Handle(zone, ptr: arguments());
22488 type_args = type_args.Canonicalize(thread);
22489 set_arguments(type_args);
22490 type = cls.declaration_type();
22491 // May be set while canonicalizing type args.
22492 if (type.IsNull()) {
22493 SafepointMutexLocker ml(isolate_group->type_canonicalization_mutex());
22494 // Recheck if type exists.
22495 type = cls.declaration_type();
22496 if (type.IsNull()) {
22497 if (this->IsNew()) {
22498 type ^= Object::Clone(orig: *this, space: Heap::kOld);
22499 } else {
22500 type = this->ptr();
22501 }
22502 ASSERT(type.IsOld());
22503 type.ComputeHash();
22504 type.SetCanonical();
22505 cls.set_declaration_type(type);
22506 return type.ptr();
22507 }
22508 }
22509 }
22510 ASSERT(this->Equals(type));
22511 ASSERT(type.IsOld());
22512 if (type.IsCanonical()) {
22513 return type.ptr();
22514 }
22515 }
22516
22517 Type& type = Type::Handle(zone);
22518 ObjectStore* object_store = isolate_group->object_store();
22519 {
22520 SafepointMutexLocker ml(isolate_group->type_canonicalization_mutex());
22521 CanonicalTypeSet table(zone, object_store->canonical_types());
22522 type ^= table.GetOrNull(key: CanonicalTypeKey(*this));
22523 ASSERT(object_store->canonical_types() == table.Release().ptr());
22524 }
22525 if (type.IsNull()) {
22526 // The type was not found in the table. It is not canonical yet.
22527
22528 // Canonicalize the type arguments.
22529 TypeArguments& type_args = TypeArguments::Handle(zone, ptr: arguments());
22530 ASSERT(type_args.IsNull() ||
22531 (type_args.Length() == cls.NumTypeParameters()));
22532 type_args = type_args.Canonicalize(thread);
22533 set_arguments(type_args);
22534 ASSERT(type_args.IsNull() || type_args.IsOld());
22535
22536 // Check to see if the type got added to canonical table as part of the
22537 // type arguments canonicalization.
22538 SafepointMutexLocker ml(isolate_group->type_canonicalization_mutex());
22539 CanonicalTypeSet table(zone, object_store->canonical_types());
22540 type ^= table.GetOrNull(key: CanonicalTypeKey(*this));
22541 if (type.IsNull()) {
22542 // Add this type into the canonical table of types.
22543 if (this->IsNew()) {
22544 type ^= Object::Clone(orig: *this, space: Heap::kOld);
22545 } else {
22546 type = this->ptr();
22547 }
22548 ASSERT(type.IsOld());
22549 type.SetCanonical(); // Mark object as being canonical.
22550 bool present = table.Insert(key: type);
22551 ASSERT(!present);
22552 }
22553 object_store->set_canonical_types(table.Release());
22554 }
22555 return type.ptr();
22556}
22557
22558void Type::EnumerateURIs(URIs* uris) const {
22559 if (IsDynamicType() || IsVoidType() || IsNeverType()) {
22560 return;
22561 }
22562 Thread* thread = Thread::Current();
22563 Zone* zone = thread->zone();
22564 const Class& cls = Class::Handle(zone, ptr: type_class());
22565 const String& name = String::Handle(zone, ptr: cls.UserVisibleName());
22566 const Library& library = Library::Handle(zone, ptr: cls.library());
22567 const String& uri = String::Handle(zone, ptr: library.url());
22568 AddURI(uris, name, uri);
22569 const TypeArguments& type_args = TypeArguments::Handle(zone, ptr: arguments());
22570 type_args.EnumerateURIs(uris);
22571}
22572
22573void Type::PrintName(NameVisibility name_visibility,
22574 BaseTextBuffer* printer) const {
22575 Thread* thread = Thread::Current();
22576 Zone* zone = thread->zone();
22577 const Class& cls = Class::Handle(zone, ptr: type_class());
22578 const TypeParameters& params =
22579 TypeParameters::Handle(zone, ptr: cls.type_parameters());
22580 printer->AddString(s: cls.NameCString(name_visibility));
22581 const TypeArguments& args = TypeArguments::Handle(zone, ptr: arguments());
22582 intptr_t num_type_params = 0;
22583 if (cls.is_declaration_loaded()) {
22584 num_type_params = cls.NumTypeParameters(thread);
22585 } else if (!args.IsNull() || args.ptr() != params.defaults()) {
22586 num_type_params = args.Length();
22587 }
22588 if (num_type_params == 0) {
22589 // Do nothing.
22590 } else {
22591 args.PrintSubvectorName(from_index: 0, len: num_type_params, name_visibility, printer);
22592 }
22593 printer->AddString(s: NullabilitySuffix(name_visibility));
22594 // The name is only used for type checking and debugging purposes.
22595 // Unless profiling data shows otherwise, it is not worth caching the name in
22596 // the type.
22597}
22598
22599uword Type::ComputeHash() const {
22600 ASSERT(IsFinalized());
22601 uint32_t result = type_class_id();
22602 // A legacy type should have the same hash as its non-nullable version to be
22603 // consistent with the definition of type equality in Dart code.
22604 Nullability type_nullability = nullability();
22605 if (type_nullability == Nullability::kLegacy) {
22606 type_nullability = Nullability::kNonNullable;
22607 }
22608 result = CombineHashes(hash: result, other_hash: static_cast<uint32_t>(type_nullability));
22609 uint32_t type_args_hash = TypeArguments::kAllDynamicHash;
22610 if (arguments() != TypeArguments::null()) {
22611 const TypeArguments& args = TypeArguments::Handle(ptr: arguments());
22612 type_args_hash = args.Hash();
22613 }
22614 result = CombineHashes(hash: result, other_hash: type_args_hash);
22615 result = FinalizeHash(hash: result, hashbits: kHashBits);
22616 SetHash(result);
22617 return result;
22618}
22619
22620uword FunctionType::ComputeHash() const {
22621 ASSERT(IsFinalized());
22622 uint32_t result =
22623 CombineHashes(hash: packed_parameter_counts(), other_hash: packed_type_parameter_counts());
22624 // A legacy type should have the same hash as its non-nullable version to be
22625 // consistent with the definition of type equality in Dart code.
22626 Nullability type_nullability = nullability();
22627 if (type_nullability == Nullability::kLegacy) {
22628 type_nullability = Nullability::kNonNullable;
22629 }
22630 result = CombineHashes(hash: result, other_hash: static_cast<uint32_t>(type_nullability));
22631 AbstractType& type = AbstractType::Handle();
22632 const intptr_t num_type_params = NumTypeParameters();
22633 if (num_type_params > 0) {
22634 const TypeParameters& type_params =
22635 TypeParameters::Handle(ptr: type_parameters());
22636 const TypeArguments& bounds = TypeArguments::Handle(ptr: type_params.bounds());
22637 result = CombineHashes(hash: result, other_hash: bounds.Hash());
22638 // Since the default arguments are ignored when comparing two generic
22639 // function types for type equality, the hash does not depend on them.
22640 }
22641 type = result_type();
22642 result = CombineHashes(hash: result, other_hash: type.Hash());
22643 const intptr_t num_params = NumParameters();
22644 for (intptr_t i = 0; i < num_params; i++) {
22645 type = ParameterTypeAt(index: i);
22646 result = CombineHashes(hash: result, other_hash: type.Hash());
22647 }
22648 if (HasOptionalNamedParameters()) {
22649 String& param_name = String::Handle();
22650 for (intptr_t i = num_fixed_parameters(); i < num_params; i++) {
22651 param_name = ParameterNameAt(index: i);
22652 result = CombineHashes(hash: result, other_hash: param_name.Hash());
22653 }
22654 // Required flag is not hashed, see comment above about legacy type.
22655 }
22656 result = FinalizeHash(hash: result, hashbits: kHashBits);
22657 SetHash(result);
22658 return result;
22659}
22660
22661void Type::set_type_class(const Class& value) const {
22662 ASSERT(!value.IsNull());
22663 set_type_class_id(value.id());
22664}
22665
22666void Type::set_arguments(const TypeArguments& value) const {
22667 ASSERT(!IsCanonical());
22668 ASSERT(value.IsNull() ||
22669 // Do not attempt to query number of type parameters
22670 // before class declaration is fully loaded.
22671 !Class::Handle(type_class()).is_declaration_loaded() ||
22672 // Relax assertion in order to support invalid generic types
22673 // created in ClosureMirror_function.
22674 (type_class_id() == kInstanceCid) ||
22675 value.Length() == Class::Handle(type_class()).NumTypeParameters());
22676 untag()->set_arguments(value.ptr());
22677}
22678
22679TypeArgumentsPtr Type::GetInstanceTypeArguments(Thread* thread,
22680 bool canonicalize) const {
22681 Zone* zone = thread->zone();
22682 const auto& cls = Class::Handle(zone, ptr: type_class());
22683 const auto& args = TypeArguments::Handle(zone, ptr: arguments());
22684 return cls.GetInstanceTypeArguments(thread, type_arguments: args, canonicalize);
22685}
22686
22687TypePtr Type::New(Heap::Space space) {
22688 return Object::Allocate<Type>(space);
22689}
22690
22691TypePtr Type::New(const Class& clazz,
22692 const TypeArguments& arguments,
22693 Nullability nullability,
22694 Heap::Space space) {
22695 Zone* Z = Thread::Current()->zone();
22696 const Type& result = Type::Handle(zone: Z, ptr: Type::New(space));
22697 result.SetHash(0);
22698 result.set_flags(0);
22699 result.set_nullability(nullability);
22700 result.set_type_state(UntaggedAbstractType::kAllocated);
22701 result.set_type_class(clazz);
22702 result.set_arguments(arguments);
22703
22704 result.InitializeTypeTestingStubNonAtomic(
22705 stub: Code::Handle(zone: Z, ptr: TypeTestingStubGenerator::DefaultCodeForType(type: result)));
22706 return result.ptr();
22707}
22708
22709void Type::set_type_class_id(intptr_t id) const {
22710 ASSERT(Utils::IsUint(UntaggedObject::kClassIdTagSize, id));
22711 // We should never need a Type object for a top-level class.
22712 ASSERT(!ClassTable::IsTopLevelCid(id));
22713 ASSERT(id != kIllegalCid);
22714 ASSERT(!IsInternalOnlyClassId(id));
22715 untag()->set_type_class_id(id);
22716}
22717
22718const char* Type::ToCString() const {
22719 if (IsNull()) {
22720 return "Type: null";
22721 }
22722 Zone* zone = Thread::Current()->zone();
22723 ZoneTextBuffer args(zone);
22724 const TypeArguments& type_args = TypeArguments::Handle(zone, ptr: arguments());
22725 const char* args_cstr = "";
22726 if (!type_args.IsNull()) {
22727 type_args.PrintSubvectorName(from_index: 0, len: type_args.Length(), name_visibility: kInternalName, printer: &args);
22728 args_cstr = args.buffer();
22729 }
22730 const Class& cls = Class::Handle(zone, ptr: type_class());
22731 const char* class_name;
22732 const String& name = String::Handle(zone, ptr: cls.Name());
22733 class_name = name.IsNull() ? "<null>" : name.ToCString();
22734 const char* suffix = NullabilitySuffix(name_visibility: kInternalName);
22735 return OS::SCreate(zone, format: "Type: %s%s%s", class_name, args_cstr, suffix);
22736}
22737
22738bool FunctionType::RequireConstCanonicalTypeErasure(Zone* zone) const {
22739 if (IsNonNullable()) {
22740 return true;
22741 }
22742 if (IsLegacy()) {
22743 // It is not possible for a function type to have a non-nullable type in
22744 // its signature.
22745 return false;
22746 }
22747 const intptr_t num_type_params = NumTypeParameters();
22748 if (num_type_params > 0) {
22749 const TypeParameters& type_params =
22750 TypeParameters::Handle(ptr: type_parameters());
22751 TypeArguments& type_args = TypeArguments::Handle();
22752 type_args = type_params.bounds();
22753 if (type_args.RequireConstCanonicalTypeErasure(zone, from_index: 0, len: num_type_params)) {
22754 return true;
22755 }
22756 type_args = type_params.defaults();
22757 if (type_args.RequireConstCanonicalTypeErasure(zone, from_index: 0, len: num_type_params)) {
22758 return true;
22759 }
22760 }
22761 AbstractType& type = AbstractType::Handle(zone);
22762 type = result_type();
22763 if (type.RequireConstCanonicalTypeErasure(zone)) {
22764 return true;
22765 }
22766 const intptr_t num_params = NumParameters();
22767 for (intptr_t i = 0; i < num_params; i++) {
22768 type = ParameterTypeAt(index: i);
22769 if (type.RequireConstCanonicalTypeErasure(zone)) {
22770 return true;
22771 }
22772 }
22773 return false;
22774}

AbstractTypePtr FunctionType::Canonicalize(Thread* thread) const {
  ASSERT(IsFinalized());
  Zone* zone = thread->zone();
  if (IsCanonical()) {
#ifdef DEBUG
    // Verify that all fields are allocated in old space and are canonical.
    if (IsGeneric()) {
      const TypeParameters& type_params =
          TypeParameters::Handle(zone, type_parameters());
      ASSERT(type_params.IsOld());
      TypeArguments& type_args = TypeArguments::Handle(zone);
      type_args = type_params.bounds();
      ASSERT(type_args.IsOld());
      ASSERT(type_args.IsCanonical());
      type_args = type_params.defaults();
      ASSERT(type_args.IsOld());
      ASSERT(type_args.IsCanonical());
    }
    AbstractType& type = AbstractType::Handle(zone);
    type = result_type();
    ASSERT(type.IsOld());
    ASSERT(type.IsCanonical());
    ASSERT(Array::Handle(zone, parameter_types()).IsOld());
    ASSERT(Array::Handle(zone, named_parameter_names()).IsOld());
    const intptr_t num_params = NumParameters();
    for (intptr_t i = 0; i < num_params; i++) {
      type = ParameterTypeAt(i);
      ASSERT(type.IsOld());
      ASSERT(type.IsCanonical());
    }
#endif
    return ptr();
  }
  auto isolate_group = thread->isolate_group();
  ObjectStore* object_store = isolate_group->object_store();
  FunctionType& sig = FunctionType::Handle(zone);
  {
    SafepointMutexLocker ml(isolate_group->type_canonicalization_mutex());
    CanonicalFunctionTypeSet table(zone,
                                   object_store->canonical_function_types());
    sig ^= table.GetOrNull(CanonicalFunctionTypeKey(*this));
    ASSERT(object_store->canonical_function_types() == table.Release().ptr());
  }
  if (sig.IsNull()) {
    // The function type was not found in the table. It is not canonical yet.
    // Canonicalize its type parameters and types.

    // Clone this function type to the old heap and update
    // owners of type parameters.
    FunctionType& new_sig = FunctionType::Handle(zone);
    if (this->IsNew()) {
      new_sig ^= FunctionType::Clone(*this, Heap::kOld);
    } else {
      new_sig ^= this->ptr();
    }
    ASSERT(new_sig.IsOld());

    if (new_sig.IsGeneric()) {
      const TypeParameters& type_params =
          TypeParameters::Handle(zone, new_sig.type_parameters());
      ASSERT(type_params.IsOld());
      TypeArguments& type_args = TypeArguments::Handle(zone);
      type_args = type_params.bounds();
      if (!type_args.IsCanonical()) {
        type_args = type_args.Canonicalize(thread);
        type_params.set_bounds(type_args);
      }
      type_args = type_params.defaults();
      if (!type_args.IsCanonical()) {
        type_args = type_args.Canonicalize(thread);
        type_params.set_defaults(type_args);
      }
    }
    AbstractType& type = AbstractType::Handle(zone);
    type = new_sig.result_type();
    if (!type.IsCanonical()) {
      type = type.Canonicalize(thread);
      new_sig.set_result_type(type);
    }
    ASSERT(Array::Handle(zone, new_sig.parameter_types()).IsOld());
    ASSERT(Array::Handle(zone, new_sig.named_parameter_names()).IsOld());
    const intptr_t num_params = new_sig.NumParameters();
    for (intptr_t i = 0; i < num_params; i++) {
      type = new_sig.ParameterTypeAt(i);
      if (!type.IsCanonical()) {
        type = type.Canonicalize(thread);
        new_sig.SetParameterTypeAt(i, type);
      }
    }
    // Check to see if the function type got added to the canonical table
    // during canonicalization of its signature types.
    SafepointMutexLocker ml(isolate_group->type_canonicalization_mutex());
    CanonicalFunctionTypeSet table(zone,
                                   object_store->canonical_function_types());
    sig ^= table.GetOrNull(CanonicalFunctionTypeKey(new_sig));
    if (sig.IsNull()) {
      // Add this function type into the canonical table of function types.
      sig = new_sig.ptr();
      ASSERT(sig.IsOld());
      sig.SetCanonical();  // Mark object as being canonical.
      bool present = table.Insert(sig);
      ASSERT(!present);
    }
    object_store->set_canonical_function_types(table.Release());
  }
  return sig.ptr();
}

void FunctionType::EnumerateURIs(URIs* uris) const {
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  AbstractType& type = AbstractType::Handle(zone);
  const intptr_t num_params = NumParameters();
  for (intptr_t i = 0; i < num_params; i++) {
    type = ParameterTypeAt(i);
    type.EnumerateURIs(uris);
  }
  // Handle result type last, since it appears last in the user visible name.
  type = result_type();
  type.EnumerateURIs(uris);
}

void FunctionType::PrintName(NameVisibility name_visibility,
                             BaseTextBuffer* printer) const {
  const char* suffix = NullabilitySuffix(name_visibility);
  if (suffix[0] != '\0') {
    printer->AddString("(");
  }
  FunctionType::Cast(*this).Print(name_visibility, printer);
  if (suffix[0] != '\0') {
    printer->AddString(")");
    printer->AddString(suffix);
  }
}

TypeParameterPtr TypeParameter::ToNullability(Nullability value,
                                              Heap::Space space) const {
  if (nullability() == value) {
    return ptr();
  }
  // Clone type parameter and set new nullability.
  TypeParameter& type_parameter = TypeParameter::Handle();
  type_parameter ^= Object::Clone(*this, space);
  type_parameter.set_nullability(value);
  type_parameter.SetHash(0);
  type_parameter.InitializeTypeTestingStubNonAtomic(Code::Handle(
      TypeTestingStubGenerator::DefaultCodeForType(type_parameter)));
  if (IsCanonical()) {
    // Object::Clone does not clone canonical bit.
    ASSERT(!type_parameter.IsCanonical());
    ASSERT(IsFinalized());
    ASSERT(type_parameter.IsFinalized());
    type_parameter ^= type_parameter.Canonicalize(Thread::Current());
  }
  return type_parameter.ptr();
}

bool TypeParameter::IsInstantiated(Genericity genericity,
                                   intptr_t num_free_fun_type_params) const {
  // Bounds of class type parameters are ignored in the VM.
  if (IsClassTypeParameter()) {
    return genericity == kFunctions;
  }
  ASSERT(IsFunctionTypeParameter());
  return (genericity == kCurrentClass) ||
         (index() >= num_free_fun_type_params);
}

bool TypeParameter::IsEquivalent(
    const Instance& other,
    TypeEquality kind,
    FunctionTypeMapping* function_type_equivalence) const {
  TRACE_TYPE_CHECKS_VERBOSE("  TypeParameter::IsEquivalent(%s, %s, kind %d)\n",
                            ToCString(), other.ToCString(),
                            static_cast<int>(kind));
  if (ptr() == other.ptr()) {
    TRACE_TYPE_CHECKS_VERBOSE("  - result: true (same types)\n");
    return true;
  }
  if (!other.IsTypeParameter()) {
    TRACE_TYPE_CHECKS_VERBOSE(
        "  - result: false (other is not a type parameter)\n");
    return false;
  }
  const TypeParameter& other_type_param = TypeParameter::Cast(other);
  ASSERT(IsFinalized() && other_type_param.IsFinalized());
  // Compare index, base and owner.
  if (IsFunctionTypeParameter()) {
    if (!other_type_param.IsFunctionTypeParameter()) {
      TRACE_TYPE_CHECKS_VERBOSE(
          "  - result: false (other is not a function type parameter)\n");
      return false;
    }
    if ((parameterized_function_type() !=
         other_type_param.parameterized_function_type()) &&
        ((function_type_equivalence == nullptr) ||
         !function_type_equivalence->ContainsOwnersOfTypeParameters(
             *this, other_type_param))) {
      TRACE_TYPE_CHECKS_VERBOSE(
          "  - result: false (owners are not equivalent)\n");
      return false;
    }
  } else {
    if (!other_type_param.IsClassTypeParameter()) {
      TRACE_TYPE_CHECKS_VERBOSE(
          "  - result: false (other is not a class type parameter)\n");
      return false;
    }
    if (parameterized_class_id() !=
        other_type_param.parameterized_class_id()) {
      TRACE_TYPE_CHECKS_VERBOSE(
          "  - result: false (parameterized class id)\n");
      return false;
    }
  }
  if (base() != other_type_param.base() ||
      index() != other_type_param.index()) {
    TRACE_TYPE_CHECKS_VERBOSE("  - result: false (mismatch base/index)\n");
    return false;
  }
  if (!IsNullabilityEquivalent(Thread::Current(), other_type_param, kind)) {
    TRACE_TYPE_CHECKS_VERBOSE("  - result: false (mismatch nullability)\n");
    return false;
  }
  TRACE_TYPE_CHECKS_VERBOSE("  - result: true\n");
  return true;
}

void TypeParameter::set_owner(const Object& value) const {
  ASSERT((IsFunctionTypeParameter() && value.IsFunctionType()) ||
         (IsClassTypeParameter() && value.IsSmi()));
  untag()->set_owner(value.ptr());
}

classid_t TypeParameter::parameterized_class_id() const {
  if (IsClassTypeParameter()) {
    return Smi::Value(Smi::RawCast(untag()->owner()));
  } else {
    return kFunctionCid;
  }
}

void TypeParameter::set_parameterized_class_id(classid_t value) const {
  ASSERT(IsClassTypeParameter());
  untag()->set_owner(Smi::New(value));
}

ClassPtr TypeParameter::parameterized_class() const {
  if (IsClassTypeParameter()) {
    const classid_t cid = parameterized_class_id();
    if (cid != kIllegalCid) {
      return IsolateGroup::Current()->class_table()->At(cid);
    }
  }
  return Class::null();
}

FunctionTypePtr TypeParameter::parameterized_function_type() const {
  ASSERT(IsFunctionTypeParameter());
  return FunctionType::RawCast(untag()->owner());
}

void TypeParameter::set_base(intptr_t value) const {
  ASSERT(value >= 0);
  ASSERT(Utils::IsUint(16, value));
  StoreNonPointer(&untag()->base_, value);
}

void TypeParameter::set_index(intptr_t value) const {
  ASSERT(value >= 0);
  ASSERT(Utils::IsUint(16, value));
  StoreNonPointer(&untag()->index_, value);
}

AbstractTypePtr TypeParameter::bound() const {
  if (IsFunctionTypeParameter()) {
    const auto& owner = FunctionType::Handle(parameterized_function_type());
    const auto& type_parameters =
        TypeParameters::Handle(owner.type_parameters());
    return type_parameters.BoundAt(index() - base());
  } else {
    const auto& owner = Class::Handle(parameterized_class());
    if (owner.IsNull()) {
      return IsolateGroup::Current()->object_store()->nullable_object_type();
    }
    const auto& type_parameters =
        TypeParameters::Handle(owner.type_parameters());
    return type_parameters.BoundAt(index() - base());
  }
}

AbstractTypePtr TypeParameter::GetFromTypeArguments(
    const TypeArguments& instantiator_type_arguments,
    const TypeArguments& function_type_arguments) const {
  ASSERT(IsFinalized());
  const TypeArguments& type_args = IsFunctionTypeParameter()
                                       ? function_type_arguments
                                       : instantiator_type_arguments;
  return type_args.TypeAtNullSafe(index());
}

AbstractTypePtr TypeParameter::InstantiateFrom(
    const TypeArguments& instantiator_type_arguments,
    const TypeArguments& function_type_arguments,
    intptr_t num_free_fun_type_params,
    Heap::Space space,
    FunctionTypeMapping* function_type_mapping,
    intptr_t num_parent_type_args_adjustment) const {
  Zone* zone = Thread::Current()->zone();
  AbstractType& result = AbstractType::Handle(zone);
  bool substituted = false;
  if (IsFunctionTypeParameter()) {
    ASSERT(IsFinalized());
    if (index() >= num_free_fun_type_params) {
      // Do not instantiate the function type parameter.
      // Get a replacement from the updated function type.
      ASSERT(function_type_mapping != nullptr);
      result = function_type_mapping->MapTypeParameter(*this);
      ASSERT(TypeParameter::Cast(result).index() ==
             index() - num_free_fun_type_params);
      ASSERT(TypeParameter::Cast(result).base() ==
             base() - num_free_fun_type_params);
      ASSERT(TypeParameter::Cast(result).nullability() == nullability());
      AbstractType& upper_bound = AbstractType::Handle(zone, bound());
      if (!upper_bound.IsInstantiated()) {
        upper_bound = upper_bound.InstantiateFrom(
            instantiator_type_arguments, function_type_arguments,
            num_free_fun_type_params, space, function_type_mapping,
            num_parent_type_args_adjustment);
      }
      if (upper_bound.ptr() == Type::NeverType()) {
        // Normalize 'X extends Never' to 'Never'.
        result = Type::NeverType();
      }
    } else if (function_type_arguments.IsNull()) {
      return Type::DynamicType();
    } else {
      result = function_type_arguments.TypeAt(index());
      substituted = true;
    }
  } else {
    ASSERT(IsClassTypeParameter());
    ASSERT(IsFinalized());
    if (instantiator_type_arguments.IsNull()) {
      return Type::DynamicType();
    }
    if (instantiator_type_arguments.Length() <= index()) {
      // InstantiateFrom can be invoked from a compilation pipeline with a
      // mismatching type arguments vector. This can only happen for
      // dynamically unreachable code which the compiler is unable to remove
      // statically.
      // To prevent crashes we return AbstractType::null(), which is
      // understood by the caller (see AssertAssignableInstr::Canonicalize).
      return AbstractType::null();
    }
    result = instantiator_type_arguments.TypeAt(index());
    substituted = true;
    // Instantiating a class type parameter cannot result in a
    // function type parameter.
    // Bounds of class type parameters are ignored in the VM.
  }
  result = result.SetInstantiatedNullability(*this, space);
  if (substituted && (num_parent_type_args_adjustment != 0)) {
    // This type parameter is used inside a generic function type.
    // A type being substituted can have nested function types,
    // whose number of parent function type arguments should be adjusted
    // after the substitution.
    result = result.UpdateFunctionTypes(num_parent_type_args_adjustment,
                                        kAllFree, space, function_type_mapping);
  }
  // Canonicalization is not part of instantiation.
  return result.NormalizeFutureOrType(space);
}
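
// Example (illustrative only): for a class type parameter T with index 0,
// instantiating with instantiator type arguments <int, String> substitutes
// the vector entry at index 0, so T becomes int. A null instantiator vector
// stands for a vector of all dynamic, hence the early DynamicType() returns.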

AbstractTypePtr TypeParameter::UpdateFunctionTypes(
    intptr_t num_parent_type_args_adjustment,
    intptr_t num_free_fun_type_params,
    Heap::Space space,
    FunctionTypeMapping* function_type_mapping) const {
  ASSERT(IsFinalized());
  ASSERT(num_parent_type_args_adjustment >= 0);
  if (IsFunctionTypeParameter() && (index() >= num_free_fun_type_params)) {
    Zone* zone = Thread::Current()->zone();
    ASSERT(function_type_mapping != nullptr);
    const auto& new_tp = TypeParameter::Handle(
        zone, function_type_mapping->MapTypeParameter(*this));
    ASSERT(new_tp.base() == base() + num_parent_type_args_adjustment);
    ASSERT(new_tp.index() == index() + num_parent_type_args_adjustment);
    ASSERT(new_tp.nullability() == nullability());
    ASSERT(new_tp.IsFinalized());
    return new_tp.ptr();
  } else {
    return ptr();
  }
}

AbstractTypePtr TypeParameter::Canonicalize(Thread* thread) const {
  ASSERT(IsFinalized());
  Zone* zone = thread->zone();
  if (IsCanonical()) {
#ifdef DEBUG
    if (IsFunctionTypeParameter()) {
      ASSERT(FunctionType::Handle(zone, parameterized_function_type()).IsOld());
    }
#endif
    return this->ptr();
  }
  auto isolate_group = thread->isolate_group();
  ObjectStore* object_store = isolate_group->object_store();
  TypeParameter& type_parameter = TypeParameter::Handle(zone);
  {
    SafepointMutexLocker ml(isolate_group->type_canonicalization_mutex());
    CanonicalTypeParameterSet table(zone,
                                    object_store->canonical_type_parameters());
    type_parameter ^= table.GetOrNull(CanonicalTypeParameterKey(*this));
    if (type_parameter.IsNull()) {
      // Add this type parameter into the canonical table of type parameters.
      if (this->IsNew()) {
        type_parameter ^= Object::Clone(*this, Heap::kOld);
      } else {
        type_parameter = this->ptr();
      }
      ASSERT(type_parameter.IsOld());
      type_parameter.SetCanonical();  // Mark object as being canonical.
      bool present = table.Insert(type_parameter);
      ASSERT(!present);
    }
    object_store->set_canonical_type_parameters(table.Release());
  }
  return type_parameter.ptr();
}

void TypeParameter::PrintName(NameVisibility name_visibility,
                              BaseTextBuffer* printer) const {
  const TypeParameter& type_param = TypeParameter::Cast(*this);
  // Type parameter names are meaningless after canonicalization.
  printer->AddString(type_param.CanonicalNameCString());
  printer->AddString(NullabilitySuffix(name_visibility));
}

uword TypeParameter::ComputeHash() const {
  ASSERT(IsFinalized());
  uint32_t result = parameterized_class_id();
  result = CombineHashes(result, base());
  result = CombineHashes(result, index());
  // A legacy type should have the same hash as its non-nullable version to be
  // consistent with the definition of type equality in Dart code.
  Nullability type_param_nullability = nullability();
  if (type_param_nullability == Nullability::kLegacy) {
    type_param_nullability = Nullability::kNonNullable;
  }
  result = CombineHashes(result, static_cast<uint32_t>(type_param_nullability));
  result = FinalizeHash(result, kHashBits);
  SetHash(result);
  return result;
}

TypeParameterPtr TypeParameter::New() {
  return Object::Allocate<TypeParameter>(Heap::kOld);
}

TypeParameterPtr TypeParameter::New(const Object& owner,
                                    intptr_t base,
                                    intptr_t index,
                                    Nullability nullability) {
  ASSERT(owner.IsNull() || owner.IsClass() || owner.IsFunctionType());
  const bool is_function_type_parameter = owner.IsFunctionType();
  const uint32_t flags = UntaggedTypeParameter::IsFunctionTypeParameter::encode(
      is_function_type_parameter);
  Zone* Z = Thread::Current()->zone();
  const TypeParameter& result = TypeParameter::Handle(Z, TypeParameter::New());
  result.set_flags(flags);
  if (is_function_type_parameter) {
    result.set_owner(owner);
  } else {
    result.set_parameterized_class_id(owner.IsNull() ? kIllegalCid
                                                     : Class::Cast(owner).id());
  }
  result.set_base(base);
  result.set_index(index);
  result.SetHash(0);
  result.set_nullability(nullability);
  result.set_type_state(UntaggedAbstractType::kAllocated);

  result.InitializeTypeTestingStubNonAtomic(
      Code::Handle(Z, TypeTestingStubGenerator::DefaultCodeForType(result)));
  return result.ptr();
}

const char* TypeParameter::CanonicalNameCString(bool is_class_type_parameter,
                                                intptr_t base,
                                                intptr_t index) {
  Thread* thread = Thread::Current();
  ZoneTextBuffer printer(thread->zone());
  const char* base_fmt = is_class_type_parameter ? "C%" Pd : "F%" Pd;
  const char* index_fmt = is_class_type_parameter ? "X%" Pd : "Y%" Pd;
  if (base != 0) {
    printer.Printf(base_fmt, base);
  }
  printer.Printf(index_fmt, index - base);
  return printer.buffer();
}
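
// Example (illustrative): a class type parameter with base 0 and index 1
// prints as "X1" (a zero base is omitted); a function type parameter with
// base 2 and index 3 prints as "F2Y1", where "F2" encodes the base and "Y1"
// the index relative to that base.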

const char* TypeParameter::ToCString() const {
  if (IsNull()) {
    return "TypeParameter: null";
  }
  Thread* thread = Thread::Current();
  ZoneTextBuffer printer(thread->zone());
  printer.Printf("TypeParameter: ");
  printer.AddString(CanonicalNameCString());
  printer.AddString(NullabilitySuffix(kInternalName));
  return printer.buffer();
}

const char* Number::ToCString() const {
  // Number is an interface. No instances of Number should exist.
  UNREACHABLE();
  return "Number";
}

const char* Integer::ToCString() const {
  // Integer is an interface. No instances of Integer should exist except null.
  ASSERT(IsNull());
  return "nullptr Integer";
}

IntegerPtr Integer::New(const String& str, Heap::Space space) {
  // We are not supposed to have integers represented as two byte strings.
  ASSERT(str.IsOneByteString());
  if (str.IsNull() || (str.Length() == 0)) {
    return Integer::null();
  }
  int64_t value = 0;
  const char* cstr = str.ToCString();
  if (!OS::StringToInt64(cstr, &value)) {
    // Out of range.
    return Integer::null();
  }
  return Integer::New(value, space);
}

IntegerPtr Integer::NewCanonical(const String& str) {
  // We are not supposed to have integers represented as two byte strings.
  ASSERT(str.IsOneByteString());
  int64_t value = 0;
  const char* cstr = str.ToCString();
  if (!OS::StringToInt64(cstr, &value)) {
    // Out of range.
    return Integer::null();
  }
  return NewCanonical(value);
}

IntegerPtr Integer::NewCanonical(int64_t value) {
  if (Smi::IsValid(value)) {
    return Smi::New(static_cast<intptr_t>(value));
  }
  return Mint::NewCanonical(value);
}
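
// Note (illustrative): Smis are immediate, tagged values, so a call like
// NewCanonical(5) needs no heap allocation and is trivially canonical. The
// exact Smi range is configuration-dependent (one bit is consumed by the Smi
// tag, and compressed pointers narrow it further); values outside that range
// are boxed as canonical Mints instead.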

IntegerPtr Integer::New(int64_t value, Heap::Space space) {
  const bool is_smi = Smi::IsValid(value);
  if (is_smi) {
    return Smi::New(static_cast<intptr_t>(value));
  }
  return Mint::New(value, space);
}

IntegerPtr Integer::NewFromUint64(uint64_t value, Heap::Space space) {
  return Integer::New(static_cast<int64_t>(value), space);
}

bool Integer::IsValueInRange(uint64_t value) {
  return (value <= static_cast<uint64_t>(Mint::kMaxValue));
}

bool Integer::Equals(const Instance& other) const {
  // Integer is an abstract class.
  UNREACHABLE();
  return false;
}

bool Integer::IsZero() const {
  // Integer is an abstract class.
  UNREACHABLE();
  return false;
}

bool Integer::IsNegative() const {
  // Integer is an abstract class.
  UNREACHABLE();
  return false;
}

double Integer::AsDoubleValue() const {
  // Integer is an abstract class.
  UNREACHABLE();
  return 0.0;
}

int64_t Integer::AsInt64Value() const {
  // Integer is an abstract class.
  UNREACHABLE();
  return 0;
}

uint32_t Integer::AsTruncatedUint32Value() const {
  // Integer is an abstract class.
  UNREACHABLE();
  return 0;
}

bool Integer::FitsIntoSmi() const {
  // Integer is an abstract class.
  UNREACHABLE();
  return false;
}

int Integer::CompareWith(const Integer& other) const {
  // Integer is an abstract class.
  UNREACHABLE();
  return 0;
}

uint32_t Integer::CanonicalizeHash() const {
  return Multiply64Hash(AsInt64Value());
}

IntegerPtr Integer::AsValidInteger() const {
  if (IsSmi()) return ptr();
  if (IsMint()) {
    Mint& mint = Mint::Handle();
    mint ^= ptr();
    if (Smi::IsValid(mint.value())) {
      return Smi::New(static_cast<intptr_t>(mint.value()));
    } else {
      return ptr();
    }
  }
  return ptr();
}

const char* Integer::ToHexCString(Zone* zone) const {
  ASSERT(IsSmi() || IsMint());
  int64_t value = AsInt64Value();
  if (value < 0) {
    return OS::SCreate(zone, "-0x%" PX64, -static_cast<uint64_t>(value));
  } else {
    return OS::SCreate(zone, "0x%" PX64, static_cast<uint64_t>(value));
  }
}
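
// Note (illustrative): the negative branch prints a sign followed by the hex
// magnitude, e.g. -255 becomes "-0xFF" (modulo the exact case and padding of
// the PX64 format macro). Negating after the unsigned cast keeps the negation
// well-defined even for kMinValue, where negating the int64_t would overflow.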

IntegerPtr Integer::ArithmeticOp(Token::Kind operation,
                                 const Integer& other,
                                 Heap::Space space) const {
  // In 32-bit mode, the result of any operation between two Smis will fit in a
  // 32-bit signed result, except the product of two Smis, which will be 64-bit.
  // In 64-bit mode, the result of any operation between two Smis will fit in a
  // 64-bit signed result, except the product of two Smis (see below).
  if (IsSmi() && other.IsSmi()) {
    const intptr_t left_value = Smi::Value(Smi::RawCast(ptr()));
    const intptr_t right_value = Smi::Value(Smi::RawCast(other.ptr()));
    switch (operation) {
      case Token::kADD:
        return Integer::New(left_value + right_value, space);
      case Token::kSUB:
        return Integer::New(left_value - right_value, space);
      case Token::kMUL:
        return Integer::New(
            Utils::MulWithWrapAround(static_cast<int64_t>(left_value),
                                     static_cast<int64_t>(right_value)),
            space);
      case Token::kTRUNCDIV:
        return Integer::New(left_value / right_value, space);
      case Token::kMOD: {
        const intptr_t remainder = left_value % right_value;
        if (remainder < 0) {
          if (right_value < 0) {
            return Integer::New(remainder - right_value, space);
          } else {
            return Integer::New(remainder + right_value, space);
          }
        }
        return Integer::New(remainder, space);
      }
      default:
        UNIMPLEMENTED();
    }
  }
  const int64_t left_value = AsInt64Value();
  const int64_t right_value = other.AsInt64Value();
  switch (operation) {
    case Token::kADD:
      return Integer::New(Utils::AddWithWrapAround(left_value, right_value),
                          space);

    case Token::kSUB:
      return Integer::New(Utils::SubWithWrapAround(left_value, right_value),
                          space);

    case Token::kMUL:
      return Integer::New(Utils::MulWithWrapAround(left_value, right_value),
                          space);

    case Token::kTRUNCDIV:
      if ((left_value == Mint::kMinValue) && (right_value == -1)) {
        // Division special case: overflow in int64_t.
        // MIN_VALUE / -1 = (MAX_VALUE + 1), which wraps around to MIN_VALUE.
        return Integer::New(Mint::kMinValue, space);
      }
      return Integer::New(left_value / right_value, space);

    case Token::kMOD: {
      if ((left_value == Mint::kMinValue) && (right_value == -1)) {
        // Modulo special case: overflow in int64_t.
        // MIN_VALUE % -1 = 0 for the reason given above.
        return Integer::New(0, space);
      }
      const int64_t remainder = left_value % right_value;
      if (remainder < 0) {
        if (right_value < 0) {
          return Integer::New(remainder - right_value, space);
        } else {
          return Integer::New(remainder + right_value, space);
        }
      }
      return Integer::New(remainder, space);
    }
    default:
      UNIMPLEMENTED();
      return Integer::null();
  }
}
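
// Worked example (illustrative): the kMOD branches turn C++'s truncating
// remainder into Dart's %, whose result is always non-negative:
//   -7 % 3  : C++ remainder is -1, right > 0, so -1 + 3    = 2
//   -7 % -3 : C++ remainder is -1, right < 0, so -1 - (-3) = 2
//    7 % -3 : C++ remainder is  1, already >= 0, result    = 1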

IntegerPtr Integer::BitOp(Token::Kind kind,
                          const Integer& other,
                          Heap::Space space) const {
  if (IsSmi() && other.IsSmi()) {
    intptr_t op1_value = Smi::Value(Smi::RawCast(ptr()));
    intptr_t op2_value = Smi::Value(Smi::RawCast(other.ptr()));
    intptr_t result = 0;
    switch (kind) {
      case Token::kBIT_AND:
        result = op1_value & op2_value;
        break;
      case Token::kBIT_OR:
        result = op1_value | op2_value;
        break;
      case Token::kBIT_XOR:
        result = op1_value ^ op2_value;
        break;
      default:
        UNIMPLEMENTED();
    }
    ASSERT(Smi::IsValid(result));
    return Smi::New(result);
  } else {
    int64_t a = AsInt64Value();
    int64_t b = other.AsInt64Value();
    switch (kind) {
      case Token::kBIT_AND:
        return Integer::New(a & b, space);
      case Token::kBIT_OR:
        return Integer::New(a | b, space);
      case Token::kBIT_XOR:
        return Integer::New(a ^ b, space);
      default:
        UNIMPLEMENTED();
        return Integer::null();
    }
  }
}

IntegerPtr Integer::ShiftOp(Token::Kind kind,
                            const Integer& other,
                            Heap::Space space) const {
  int64_t a = AsInt64Value();
  int64_t b = other.AsInt64Value();
  ASSERT(b >= 0);
  switch (kind) {
    case Token::kSHL:
      return Integer::New(Utils::ShiftLeftWithTruncation(a, b), space);
    case Token::kSHR:
      return Integer::New(a >> Utils::Minimum<int64_t>(b, Mint::kBits), space);
    case Token::kUSHR:
      return Integer::New(
          (b >= kBitsPerInt64) ? 0 : static_cast<uint64_t>(a) >> b, space);
    default:
      UNIMPLEMENTED();
      return Integer::null();
  }
}
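
// Example (illustrative): kSHR is an arithmetic shift, so -8 >> 1 == -4; the
// amount is clamped to Mint::kBits so very large shifts behave like shifting
// all the way to sign fill instead of invoking undefined C++ behavior. kUSHR
// treats the operand as unsigned 64-bit: -1 >>> 1 reinterprets -1 as
// 0xFFFFFFFFFFFFFFFF and yields 0x7FFFFFFFFFFFFFFF, and any shift amount of
// 64 or more produces 0.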

bool Smi::Equals(const Instance& other) const {
  if (other.IsNull() || !other.IsSmi()) {
    return false;
  }
  return (this->Value() == Smi::Cast(other).Value());
}

double Smi::AsDoubleValue() const {
  return static_cast<double>(this->Value());
}

int64_t Smi::AsInt64Value() const {
  return this->Value();
}

uint32_t Smi::AsTruncatedUint32Value() const {
  return this->Value() & 0xFFFFFFFF;
}

int Smi::CompareWith(const Integer& other) const {
  if (other.IsSmi()) {
    const Smi& other_smi = Smi::Cast(other);
    if (this->Value() < other_smi.Value()) {
      return -1;
    } else if (this->Value() > other_smi.Value()) {
      return 1;
    } else {
      return 0;
    }
  }
  ASSERT(!other.FitsIntoSmi());
  if (other.IsMint()) {
    if (this->IsNegative() == other.IsNegative()) {
      return this->IsNegative() ? 1 : -1;
    }
    return this->IsNegative() ? -1 : 1;
  }
  UNREACHABLE();
  return 0;
}

const char* Smi::ToCString() const {
  return OS::SCreate(Thread::Current()->zone(), "%" Pd "", Value());
}

ClassPtr Smi::Class() {
  return IsolateGroup::Current()->object_store()->smi_class();
}

void Mint::set_value(int64_t value) const {
  StoreNonPointer(&untag()->value_, value);
}

MintPtr Mint::New(int64_t val, Heap::Space space) {
  // Do not allocate a Mint if Smi would do.
  ASSERT(!Smi::IsValid(val));
  ASSERT(IsolateGroup::Current()->object_store()->mint_class() !=
         Class::null());
  const auto& result = Mint::Handle(Object::Allocate<Mint>(space));
  result.set_value(val);
  return result.ptr();
}

MintPtr Mint::NewCanonical(int64_t value) {
  Thread* thread = Thread::Current();
  Mint& mint = Mint::Handle(thread->zone(), Mint::New(value, Heap::kOld));
  mint ^= mint.Canonicalize(thread);
  return mint.ptr();
}

bool Mint::Equals(const Instance& other) const {
  if (this->ptr() == other.ptr()) {
    // Both handles point to the same raw instance.
    return true;
  }
  if (!other.IsMint() || other.IsNull()) {
    return false;
  }
  return value() == Mint::Cast(other).value();
}

double Mint::AsDoubleValue() const {
  return static_cast<double>(this->value());
}

int64_t Mint::AsInt64Value() const {
  return this->value();
}

uint32_t Mint::AsTruncatedUint32Value() const {
  return this->value() & 0xFFFFFFFF;
}

bool Mint::FitsIntoSmi() const {
  return Smi::IsValid(AsInt64Value());
}

int Mint::CompareWith(const Integer& other) const {
  ASSERT(!FitsIntoSmi());
  ASSERT(other.IsMint() || other.IsSmi());
  int64_t a = AsInt64Value();
  int64_t b = other.AsInt64Value();
  if (a < b) {
    return -1;
  } else if (a > b) {
    return 1;
  } else {
    return 0;
  }
}

const char* Mint::ToCString() const {
  return OS::SCreate(Thread::Current()->zone(), "%" Pd64 "", value());
}

void Double::set_value(double value) const {
  StoreNonPointer(&untag()->value_, value);
}

bool Double::BitwiseEqualsToDouble(double value) const {
  intptr_t value_offset = Double::value_offset();
  void* this_addr = reinterpret_cast<void*>(
      reinterpret_cast<uword>(this->untag()) + value_offset);
  void* other_addr = reinterpret_cast<void*>(&value);
  return (memcmp(this_addr, other_addr, sizeof(value)) == 0);
}

bool Double::OperatorEquals(const Instance& other) const {
  if (this->IsNull() || other.IsNull()) {
    return (this->IsNull() && other.IsNull());
  }
  if (!other.IsDouble()) {
    return false;
  }
  return this->value() == Double::Cast(other).value();
}

bool Double::CanonicalizeEquals(const Instance& other) const {
  if (this->ptr() == other.ptr()) {
    return true;  // "===".
  }
  if (other.IsNull() || !other.IsDouble()) {
    return false;
  }
  return BitwiseEqualsToDouble(Double::Cast(other).value());
}

uint32_t Double::CanonicalizeHash() const {
  return Hash64To32(bit_cast<uint64_t>(value()));
}
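
// Note (illustrative): canonical equality is bitwise, unlike operator==.
// A NaN is canonical-equal to a NaN with the identical bit pattern even
// though NaN == NaN is false, while 0.0 is not canonical-equal to -0.0 even
// though 0.0 == -0.0 is true, so the two zeros canonicalize separately.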

DoublePtr Double::New(double d, Heap::Space space) {
  ASSERT(IsolateGroup::Current()->object_store()->double_class() !=
         Class::null());
  const auto& result = Double::Handle(Object::Allocate<Double>(space));
  result.set_value(d);
  return result.ptr();
}

DoublePtr Double::New(const String& str, Heap::Space space) {
  double double_value;
  if (!CStringToDouble(str.ToCString(), str.Length(), &double_value)) {
    return Double::Handle().ptr();
  }
  return New(double_value, space);
}

DoublePtr Double::NewCanonical(double value) {
  Thread* thread = Thread::Current();
  Double& dbl = Double::Handle(thread->zone(), Double::New(value, Heap::kOld));
  dbl ^= dbl.Canonicalize(thread);
  return dbl.ptr();
}

DoublePtr Double::NewCanonical(const String& str) {
  double double_value;
  if (!CStringToDouble(str.ToCString(), str.Length(), &double_value)) {
    return Double::Handle().ptr();
  }
  return NewCanonical(double_value);
}

StringPtr Number::ToString(Heap::Space space) const {
  // Refactoring can avoid Zone::Alloc and strlen, but gains are insignificant.
  const char* cstr = ToCString();
  intptr_t len = strlen(cstr);
// Resulting string is ASCII ...
#ifdef DEBUG
  for (intptr_t i = 0; i < len; ++i) {
    ASSERT(static_cast<uint8_t>(cstr[i]) < 128);
  }
#endif  // DEBUG
  // ... which is a subset of Latin-1.
  return String::FromLatin1(reinterpret_cast<const uint8_t*>(cstr), len,
                            space);
}

const char* Double::ToCString() const {
  if (isnan(value())) {
    return "NaN";
  }
  if (isinf(value())) {
    return value() < 0 ? "-Infinity" : "Infinity";
  }
  const int kBufferSize = 128;
  char* buffer = Thread::Current()->zone()->Alloc<char>(kBufferSize);
  buffer[kBufferSize - 1] = '\0';
  DoubleToCString(value(), buffer, kBufferSize);
  return buffer;
}

void StringHasher::Add(const String& str, intptr_t begin_index, intptr_t len) {
  ASSERT(begin_index >= 0);
  ASSERT(len >= 0);
  ASSERT((begin_index + len) <= str.Length());
  if (len == 0) {
    return;
  }
  if (str.IsOneByteString()) {
    NoSafepointScope no_safepoint;
    Add(OneByteString::CharAddr(str, begin_index), len);
  } else if (str.IsExternalOneByteString()) {
    NoSafepointScope no_safepoint;
    Add(ExternalOneByteString::CharAddr(str, begin_index), len);
  } else if (str.IsTwoByteString()) {
    NoSafepointScope no_safepoint;
    Add(TwoByteString::CharAddr(str, begin_index), len);
  } else if (str.IsExternalTwoByteString()) {
    NoSafepointScope no_safepoint;
    Add(ExternalTwoByteString::CharAddr(str, begin_index), len);
  } else {
    UNREACHABLE();
  }
}

uword String::Hash(const String& str, intptr_t begin_index, intptr_t len) {
  StringHasher hasher;
  hasher.Add(str, begin_index, len);
  return hasher.Finalize();
}

uword String::HashConcat(const String& str1, const String& str2) {
  StringHasher hasher;
  hasher.Add(str1, 0, str1.Length());
  hasher.Add(str2, 0, str2.Length());
  return hasher.Finalize();
}

uword String::Hash(StringPtr raw) {
  StringHasher hasher;
  uword length = Smi::Value(raw->untag()->length());
  if (raw->IsOneByteString() || raw->IsExternalOneByteString()) {
    const uint8_t* data;
    if (raw->IsOneByteString()) {
      data = static_cast<OneByteStringPtr>(raw)->untag()->data();
    } else {
      ASSERT(raw->IsExternalOneByteString());
      ExternalOneByteStringPtr str = static_cast<ExternalOneByteStringPtr>(raw);
      data = str->untag()->external_data_;
    }
    return String::Hash(data, length);
  } else {
    const uint16_t* data;
    if (raw->IsTwoByteString()) {
      data = static_cast<TwoByteStringPtr>(raw)->untag()->data();
    } else {
      ASSERT(raw->IsExternalTwoByteString());
      ExternalTwoByteStringPtr str = static_cast<ExternalTwoByteStringPtr>(raw);
      data = str->untag()->external_data_;
    }
    return String::Hash(data, length);
  }
}

uword String::Hash(const char* characters, intptr_t len) {
  StringHasher hasher;
  hasher.Add(reinterpret_cast<const uint8_t*>(characters), len);
  return hasher.Finalize();
}

uword String::Hash(const uint8_t* characters, intptr_t len) {
  StringHasher hasher;
  hasher.Add(characters, len);
  return hasher.Finalize();
}

uword String::Hash(const uint16_t* characters, intptr_t len) {
  StringHasher hasher;
  hasher.Add(characters, len);
  return hasher.Finalize();
}

intptr_t String::CharSize() const {
  intptr_t class_id = ptr()->GetClassId();
  if (class_id == kOneByteStringCid || class_id == kExternalOneByteStringCid) {
    return kOneByteChar;
  }
  ASSERT(class_id == kTwoByteStringCid ||
         class_id == kExternalTwoByteStringCid);
  return kTwoByteChar;
}

void* String::GetPeer() const {
  intptr_t class_id = ptr()->GetClassId();
  if (class_id == kExternalOneByteStringCid) {
    return ExternalOneByteString::GetPeer(*this);
  }
  ASSERT(class_id == kExternalTwoByteStringCid);
  return ExternalTwoByteString::GetPeer(*this);
}

bool String::Equals(const Instance& other) const {
  if (this->ptr() == other.ptr()) {
    // Both handles point to the same raw instance.
    return true;
  }

  if (!other.IsString()) {
    return false;
  }

  const String& other_string = String::Cast(other);
  return Equals(other_string);
}

bool String::Equals(const String& str,
                    intptr_t begin_index,
                    intptr_t len) const {
  ASSERT(begin_index >= 0);
  ASSERT((begin_index == 0) || (begin_index < str.Length()));
  ASSERT(len >= 0);
  ASSERT(len <= str.Length());
  if (len != this->Length()) {
    return false;  // Lengths don't match.
  }

  for (intptr_t i = 0; i < len; i++) {
    if (CharAt(i) != str.CharAt(begin_index + i)) {
      return false;
    }
  }

  return true;
}

bool String::Equals(const char* cstr) const {
  ASSERT(cstr != nullptr);
  CodePointIterator it(*this);
  intptr_t len = strlen(cstr);
  while (it.Next()) {
    if (*cstr == '\0') {
      // Lengths don't match.
      return false;
    }
    int32_t ch;
    intptr_t consumed =
        Utf8::Decode(reinterpret_cast<const uint8_t*>(cstr), len, &ch);
    if (consumed == 0 || it.Current() != ch) {
      return false;
    }
    cstr += consumed;
    len -= consumed;
  }
  return *cstr == '\0';
}

bool String::Equals(const uint8_t* latin1_array, intptr_t len) const {
  if (len != this->Length()) {
    // Lengths don't match.
    return false;
  }

  for (intptr_t i = 0; i < len; i++) {
    if (this->CharAt(i) != latin1_array[i]) {
      return false;
    }
  }
  return true;
}

bool String::Equals(const uint16_t* utf16_array, intptr_t len) const {
  if (len != this->Length()) {
    // Lengths don't match.
    return false;
  }

  for (intptr_t i = 0; i < len; i++) {
    if (this->CharAt(i) != LoadUnaligned(&utf16_array[i])) {
      return false;
    }
  }
  return true;
}

bool String::Equals(const int32_t* utf32_array, intptr_t len) const {
  if (len < 0) return false;
  intptr_t j = 0;
  for (intptr_t i = 0; i < len; ++i) {
    if (Utf::IsSupplementary(utf32_array[i])) {
      uint16_t encoded[2];
      Utf16::Encode(utf32_array[i], &encoded[0]);
      if (j + 1 >= Length()) return false;
      if (CharAt(j++) != encoded[0]) return false;
      if (CharAt(j++) != encoded[1]) return false;
    } else {
      if (j >= Length()) return false;
      if (CharAt(j++) != utf32_array[i]) return false;
    }
  }
  return j == Length();
}
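
// Example (illustrative): a supplementary code point occupies two UTF-16 code
// units, so U+1F600 is compared against the surrogate pair 0xD83D 0xDE00:
// high = 0xD800 + ((0x1F600 - 0x10000) >> 10) and
// low  = 0xDC00 + ((0x1F600 - 0x10000) & 0x3FF).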

bool String::EqualsConcat(const String& str1, const String& str2) const {
  return (Length() == str1.Length() + str2.Length()) &&
         str1.Equals(*this, 0, str1.Length()) &&
         str2.Equals(*this, str1.Length(), str2.Length());
}

intptr_t String::CompareTo(const String& other) const {
  const intptr_t this_len = this->Length();
  const intptr_t other_len = other.IsNull() ? 0 : other.Length();
  const intptr_t len = (this_len < other_len) ? this_len : other_len;
  for (intptr_t i = 0; i < len; i++) {
    uint16_t this_code_unit = this->CharAt(i);
    uint16_t other_code_unit = other.CharAt(i);
    if (this_code_unit < other_code_unit) {
      return -1;
    }
    if (this_code_unit > other_code_unit) {
      return 1;
    }
  }
  if (this_len < other_len) return -1;
  if (this_len > other_len) return 1;
  return 0;
}

bool String::StartsWith(StringPtr str, StringPtr prefix) {
  if (prefix == String::null()) return false;

  const intptr_t length = String::LengthOf(str);
  const intptr_t prefix_length = String::LengthOf(prefix);
  if (prefix_length > length) return false;

  for (intptr_t i = 0; i < prefix_length; i++) {
    if (String::CharAt(str, i) != String::CharAt(prefix, i)) {
      return false;
    }
  }
  return true;
}

bool String::EndsWith(const String& other) const {
  if (other.IsNull()) {
    return false;
  }
  const intptr_t len = this->Length();
  const intptr_t other_len = other.Length();
  const intptr_t offset = len - other_len;

  if ((other_len == 0) || (other_len > len)) {
    return false;
  }
  for (int i = offset; i < len; i++) {
    if (this->CharAt(i) != other.CharAt(i - offset)) {
      return false;
    }
  }
  return true;
}

InstancePtr String::CanonicalizeLocked(Thread* thread) const {
  if (IsCanonical()) {
    return this->ptr();
  }
  return Symbols::New(Thread::Current(), *this);
}

StringPtr String::New(const char* cstr, Heap::Space space) {
  ASSERT(cstr != nullptr);
  intptr_t array_len = strlen(cstr);
  const uint8_t* utf8_array = reinterpret_cast<const uint8_t*>(cstr);
  return String::FromUTF8(utf8_array, array_len, space);
}

StringPtr String::FromUTF8(const uint8_t* utf8_array,
                           intptr_t array_len,
                           Heap::Space space) {
  Utf8::Type type;
  intptr_t len = Utf8::CodeUnitCount(utf8_array, array_len, &type);
  if (type == Utf8::kLatin1) {
    const String& strobj = String::Handle(OneByteString::New(len, space));
    if (len > 0) {
      NoSafepointScope no_safepoint;
      if (!Utf8::DecodeToLatin1(utf8_array, array_len,
                                OneByteString::DataStart(strobj), len)) {
        Utf8::ReportInvalidByte(utf8_array, array_len, len);
        return String::null();
      }
    }
    return strobj.ptr();
  }
  ASSERT((type == Utf8::kBMP) || (type == Utf8::kSupplementary));
  const String& strobj = String::Handle(TwoByteString::New(len, space));
  NoSafepointScope no_safepoint;
  if (!Utf8::DecodeToUTF16(utf8_array, array_len,
                           TwoByteString::DataStart(strobj), len)) {
    Utf8::ReportInvalidByte(utf8_array, array_len, len);
    return String::null();
  }
  return strobj.ptr();
}

StringPtr String::FromLatin1(const uint8_t* latin1_array,
                             intptr_t array_len,
                             Heap::Space space) {
  return OneByteString::New(latin1_array, array_len, space);
}

StringPtr String::FromUTF16(const uint16_t* utf16_array,
                            intptr_t array_len,
                            Heap::Space space) {
  bool is_one_byte_string = true;
  for (intptr_t i = 0; i < array_len; ++i) {
    if (!Utf::IsLatin1(LoadUnaligned(&utf16_array[i]))) {
      is_one_byte_string = false;
      break;
    }
  }
  if (is_one_byte_string) {
    return OneByteString::New(utf16_array, array_len, space);
  }
  return TwoByteString::New(utf16_array, array_len, space);
}

StringPtr String::FromUTF32(const int32_t* utf32_array,
                            intptr_t array_len,
                            Heap::Space space) {
  bool is_one_byte_string = true;
  intptr_t utf16_len = array_len;
  for (intptr_t i = 0; i < array_len; ++i) {
    if (!Utf::IsLatin1(utf32_array[i])) {
      is_one_byte_string = false;
      if (Utf::IsSupplementary(utf32_array[i])) {
        utf16_len += 1;
      }
    }
  }
  if (is_one_byte_string) {
    return OneByteString::New(utf32_array, array_len, space);
  }
  return TwoByteString::New(utf16_len, utf32_array, array_len, space);
}
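
// Example (illustrative): each supplementary code point needs a surrogate
// pair, so the UTF-32 input {'a', U+1F600} has array_len 2 but utf16_len 3:
// one code unit for 'a' plus two for U+1F600.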

StringPtr String::New(const String& str, Heap::Space space) {
  // Currently this just creates a copy of the string in the correct space.
  // Once we have external string support, this will also create a heap copy of
  // the string if necessary. Some optimizations are possible, such as not
  // copying internal strings into the same space.
  intptr_t len = str.Length();
  String& result = String::Handle();
  intptr_t char_size = str.CharSize();
  if (char_size == kOneByteChar) {
    result = OneByteString::New(len, space);
  } else {
    ASSERT(char_size == kTwoByteChar);
    result = TwoByteString::New(len, space);
  }
  String::Copy(result, 0, str, 0, len);
  return result.ptr();
}

StringPtr String::NewExternal(const uint8_t* characters,
                              intptr_t len,
                              void* peer,
                              intptr_t external_allocation_size,
                              Dart_HandleFinalizer callback,
                              Heap::Space space) {
  return ExternalOneByteString::New(characters, len, peer,
                                    external_allocation_size, callback, space);
}

StringPtr String::NewExternal(const uint16_t* characters,
                              intptr_t len,
                              void* peer,
                              intptr_t external_allocation_size,
                              Dart_HandleFinalizer callback,
                              Heap::Space space) {
  return ExternalTwoByteString::New(characters, len, peer,
                                    external_allocation_size, callback, space);
}

void String::Copy(const String& dst,
                  intptr_t dst_offset,
                  const uint8_t* characters,
                  intptr_t len) {
  ASSERT(dst_offset >= 0);
  ASSERT(len >= 0);
  ASSERT(len <= (dst.Length() - dst_offset));
  if (dst.IsOneByteString()) {
    NoSafepointScope no_safepoint;
    if (len > 0) {
      memmove(OneByteString::CharAddr(dst, dst_offset), characters, len);
    }
  } else if (dst.IsTwoByteString()) {
    for (intptr_t i = 0; i < len; ++i) {
      *TwoByteString::CharAddr(dst, i + dst_offset) = characters[i];
    }
  }
}

void String::Copy(const String& dst,
                  intptr_t dst_offset,
                  const uint16_t* utf16_array,
                  intptr_t array_len) {
  ASSERT(dst_offset >= 0);
  ASSERT(array_len >= 0);
  ASSERT(array_len <= (dst.Length() - dst_offset));
  if (dst.IsOneByteString()) {
    NoSafepointScope no_safepoint;
    for (intptr_t i = 0; i < array_len; ++i) {
      ASSERT(Utf::IsLatin1(LoadUnaligned(&utf16_array[i])));
      *OneByteString::CharAddr(dst, i + dst_offset) = utf16_array[i];
    }
  } else {
    ASSERT(dst.IsTwoByteString());
    NoSafepointScope no_safepoint;
    if (array_len > 0) {
      memmove(TwoByteString::CharAddr(dst, dst_offset), utf16_array,
              array_len * 2);
    }
  }
}

void String::Copy(const String& dst,
                  intptr_t dst_offset,
                  const String& src,
                  intptr_t src_offset,
                  intptr_t len) {
  ASSERT(dst_offset >= 0);
  ASSERT(src_offset >= 0);
  ASSERT(len >= 0);
  ASSERT(len <= (dst.Length() - dst_offset));
  ASSERT(len <= (src.Length() - src_offset));
  if (len > 0) {
    intptr_t char_size = src.CharSize();
    if (char_size == kOneByteChar) {
      if (src.IsOneByteString()) {
        NoSafepointScope no_safepoint;
        String::Copy(dst, dst_offset, OneByteString::CharAddr(src, src_offset),
                     len);
      } else {
        ASSERT(src.IsExternalOneByteString());
        NoSafepointScope no_safepoint;
        String::Copy(dst, dst_offset,
                     ExternalOneByteString::CharAddr(src, src_offset), len);
      }
    } else {
      ASSERT(char_size == kTwoByteChar);
      if (src.IsTwoByteString()) {
        NoSafepointScope no_safepoint;
        String::Copy(dst, dst_offset, TwoByteString::CharAddr(src, src_offset),
                     len);
      } else {
        ASSERT(src.IsExternalTwoByteString());
        NoSafepointScope no_safepoint;
        String::Copy(dst, dst_offset,
                     ExternalTwoByteString::CharAddr(src, src_offset), len);
      }
    }
  }
}

StringPtr String::EscapeSpecialCharacters(const String& str) {
  if (str.IsOneByteString()) {
    return OneByteString::EscapeSpecialCharacters(str);
  }
  if (str.IsTwoByteString()) {
    return TwoByteString::EscapeSpecialCharacters(str);
  }
  if (str.IsExternalOneByteString()) {
    return ExternalOneByteString::EscapeSpecialCharacters(str);
  }
  ASSERT(str.IsExternalTwoByteString());
  // If EscapeSpecialCharacters is frequently called on external two byte
  // strings, we should implement it directly on ExternalTwoByteString rather
  // than first converting to a TwoByteString.
  return TwoByteString::EscapeSpecialCharacters(
      String::Handle(TwoByteString::New(str, Heap::kNew)));
}

static bool IsPercent(int32_t c) {
  return c == '%';
}

static bool IsHexCharacter(int32_t c) {
  if (c >= '0' && c <= '9') {
    return true;
  }
  if (c >= 'A' && c <= 'F') {
    return true;
  }
  return false;
}

static bool IsURISafeCharacter(int32_t c) {
  if ((c >= '0') && (c <= '9')) {
    return true;
  }
  if ((c >= 'a') && (c <= 'z')) {
    return true;
  }
  if ((c >= 'A') && (c <= 'Z')) {
    return true;
  }
  return (c == '-') || (c == '_') || (c == '.') || (c == '~');
}

static int32_t GetHexCharacter(int32_t c) {
  ASSERT(c >= 0);
  ASSERT(c < 16);
  const char* hex = "0123456789ABCDEF";
  return hex[c];
}

static int32_t GetHexValue(int32_t c) {
  if (c >= '0' && c <= '9') {
    return c - '0';
  }
  if (c >= 'A' && c <= 'F') {
    return c - 'A' + 10;
  }
  UNREACHABLE();
  return 0;
}

static int32_t MergeHexCharacters(int32_t c1, int32_t c2) {
  return GetHexValue(c1) << 4 | GetHexValue(c2);
}
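
// Example (illustrative): MergeHexCharacters('4', '1') reassembles the byte
// 0x41 ('A'): GetHexValue('4') == 4 shifted left by 4 gives 0x40, OR'ed with
// GetHexValue('1') == 1. Only uppercase hex digits are accepted here, which
// matches what GetHexCharacter emits in EncodeIRI below.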

const char* String::EncodeIRI(const String& str) {
  const intptr_t len = Utf8::Length(str);
  Zone* zone = Thread::Current()->zone();
  uint8_t* utf8 = zone->Alloc<uint8_t>(len);
  str.ToUTF8(utf8, len);
  intptr_t num_escapes = 0;
  for (int i = 0; i < len; ++i) {
    uint8_t byte = utf8[i];
    if (!IsURISafeCharacter(byte)) {
      num_escapes += 2;
    }
  }
  intptr_t cstr_len = len + num_escapes + 1;
  char* cstr = zone->Alloc<char>(cstr_len);
  intptr_t index = 0;
  for (int i = 0; i < len; ++i) {
    uint8_t byte = utf8[i];
    if (!IsURISafeCharacter(byte)) {
      cstr[index++] = '%';
      cstr[index++] = GetHexCharacter(byte >> 4);
      cstr[index++] = GetHexCharacter(byte & 0xF);
    } else {
      ASSERT(byte <= 127);
      cstr[index++] = byte;
    }
  }
  cstr[index] = '\0';
  return cstr;
}
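
// Example (illustrative): EncodeIRI("a b") yields "a%20b". The space is not
// URI-safe, so it expands from one byte to three ('%', '2', '0'), which is
// why each escaped byte adds two to num_escapes when sizing the buffer.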

StringPtr String::DecodeIRI(const String& str) {
  intptr_t num_escapes = 0;
  intptr_t len = str.Length();
  {
    CodePointIterator cpi(str);
    while (cpi.Next()) {
      int32_t code_point = cpi.Current();
      if (IsPercent(code_point)) {
        // Verify that the two characters following the % are hex digits.
        if (!cpi.Next()) {
          return String::null();
        }
        code_point = cpi.Current();
        if (!IsHexCharacter(code_point)) {
          return String::null();
        }
        if (!cpi.Next()) {
          return String::null();
        }
        code_point = cpi.Current();
        if (!IsHexCharacter(code_point)) {
          return String::null();
        }
        num_escapes += 2;
      }
    }
  }
  intptr_t utf8_len = len - num_escapes;
  ASSERT(utf8_len >= 0);
  Zone* zone = Thread::Current()->zone();
  uint8_t* utf8 = zone->Alloc<uint8_t>(utf8_len);
  {
    intptr_t index = 0;
    CodePointIterator cpi(str);
    while (cpi.Next()) {
      ASSERT(index < utf8_len);
      int32_t code_point = cpi.Current();
      if (IsPercent(code_point)) {
        cpi.Next();
        int32_t ch1 = cpi.Current();
        cpi.Next();
        int32_t ch2 = cpi.Current();
        int32_t merged = MergeHexCharacters(ch1, ch2);
        ASSERT(merged >= 0 && merged < 256);
        utf8[index] = static_cast<uint8_t>(merged);
      } else {
        ASSERT(code_point >= 0 && code_point < 256);
        utf8[index] = static_cast<uint8_t>(code_point);
      }
      index++;
    }
  }
  return FromUTF8(utf8, utf8_len);
}
24391
24392StringPtr String::NewFormatted(const char* format, ...) {
24393 va_list args;
24394 va_start(args, format);
24395 StringPtr result = NewFormattedV(format, args);
24396 NoSafepointScope no_safepoint;
24397 va_end(args);
24398 return result;
24399}
24400
24401StringPtr String::NewFormatted(Heap::Space space, const char* format, ...) {
24402 va_list args;
24403 va_start(args, format);
24404 StringPtr result = NewFormattedV(format, args, space);
24405 NoSafepointScope no_safepoint;
24406 va_end(args);
24407 return result;
24408}
24409
24410StringPtr String::NewFormattedV(const char* format,
24411 va_list args,
24412 Heap::Space space) {
24413 va_list args_copy;
24414 va_copy(args_copy, args);
  intptr_t len = Utils::VSNPrint(nullptr, 0, format, args_copy);
  va_end(args_copy);

  Zone* zone = Thread::Current()->zone();
  char* buffer = zone->Alloc<char>(len + 1);
  Utils::VSNPrint(buffer, (len + 1), format, args);

  return String::New(buffer, space);
24423}
24424
24425StringPtr String::Concat(const String& str1,
24426 const String& str2,
24427 Heap::Space space) {
24428 ASSERT(!str1.IsNull() && !str2.IsNull());
  intptr_t char_size = Utils::Maximum(str1.CharSize(), str2.CharSize());
24430 if (char_size == kTwoByteChar) {
24431 return TwoByteString::Concat(str1, str2, space);
24432 }
24433 return OneByteString::Concat(str1, str2, space);
24434}
24435
24436StringPtr String::ConcatAll(const Array& strings, Heap::Space space) {
  return ConcatAllRange(strings, 0, strings.Length(), space);
24438}
24439
24440StringPtr String::ConcatAllRange(const Array& strings,
24441 intptr_t start,
24442 intptr_t end,
24443 Heap::Space space) {
24444 ASSERT(!strings.IsNull());
24445 ASSERT(start >= 0);
24446 ASSERT(end <= strings.Length());
24447 intptr_t result_len = 0;
24448 String& str = String::Handle();
24449 intptr_t char_size = kOneByteChar;
24450 // Compute 'char_size' and 'result_len'.
24451 for (intptr_t i = start; i < end; i++) {
    str ^= strings.At(i);
    const intptr_t str_len = str.Length();
    if ((kMaxElements - result_len) < str_len) {
      Exceptions::ThrowOOM();
      UNREACHABLE();
    }
    result_len += str_len;
    char_size = Utils::Maximum(char_size, str.CharSize());
  }
  if (char_size == kOneByteChar) {
    return OneByteString::ConcatAll(strings, start, end, result_len, space);
  }
  ASSERT(char_size == kTwoByteChar);
  return TwoByteString::ConcatAll(strings, start, end, result_len, space);
24466}
24467
24468StringPtr String::SubString(const String& str,
24469 intptr_t begin_index,
24470 Heap::Space space) {
24471 ASSERT(!str.IsNull());
24472 if (begin_index >= str.Length()) {
24473 return String::null();
24474 }
  return String::SubString(str, begin_index, (str.Length() - begin_index),
                           space);
24477}
24478
24479StringPtr String::SubString(Thread* thread,
24480 const String& str,
24481 intptr_t begin_index,
24482 intptr_t length,
24483 Heap::Space space) {
24484 ASSERT(!str.IsNull());
24485 ASSERT(begin_index >= 0);
24486 ASSERT(length >= 0);
24487 if (begin_index <= str.Length() && length == 0) {
24488 return Symbols::Empty().ptr();
24489 }
24490 if (begin_index > str.Length()) {
24491 return String::null();
24492 }
24493 bool is_one_byte_string = true;
24494 intptr_t char_size = str.CharSize();
24495 if (char_size == kTwoByteChar) {
24496 for (intptr_t i = begin_index; i < begin_index + length; ++i) {
      if (!Utf::IsLatin1(str.CharAt(i))) {
        is_one_byte_string = false;
        break;
      }
    }
  }
  REUSABLE_STRING_HANDLESCOPE(thread);
  String& result = thread->StringHandle();
  if (is_one_byte_string) {
    result = OneByteString::New(length, space);
  } else {
    result = TwoByteString::New(length, space);
  }
  String::Copy(result, 0, str, begin_index, length);
24511 return result.ptr();
24512}
24513
24514const char* String::ToCString() const {
24515 if (IsNull()) {
24516 return "String: null";
24517 }
  const intptr_t len = Utf8::Length(*this);
  Zone* zone = Thread::Current()->zone();
  uint8_t* result = zone->Alloc<uint8_t>(len + 1);
  ToUTF8(result, len);
24522 result[len] = 0;
24523 return reinterpret_cast<const char*>(result);
24524}
24525
24526char* String::ToMallocCString() const {
  const intptr_t len = Utf8::Length(*this);
  uint8_t* result = reinterpret_cast<uint8_t*>(malloc(len + 1));
  ToUTF8(result, len);
24530 result[len] = 0;
24531 return reinterpret_cast<char*>(result);
24532}
24533
24534void String::ToUTF8(uint8_t* utf8_array, intptr_t array_len) const {
24535 ASSERT(array_len >= Utf8::Length(*this));
  Utf8::Encode(*this, reinterpret_cast<char*>(utf8_array), array_len);
24537}
24538
24539const char* String::ToCString(Thread* thread, StringPtr ptr) {
24540 if (ptr == nullptr) return nullptr;
24541 REUSABLE_STRING_HANDLESCOPE(thread);
24542 String& str = reused_string_handle.Handle();
24543 str = ptr;
24544 return str.ToCString();
24545}
24546
24547static FinalizablePersistentHandle* AddFinalizer(const Object& referent,
24548 void* peer,
24549 Dart_HandleFinalizer callback,
24550 intptr_t external_size) {
24551 ASSERT(callback != nullptr);
24552 FinalizablePersistentHandle* finalizable_ref =
      FinalizablePersistentHandle::New(IsolateGroup::Current(), referent, peer,
24554 callback, external_size,
24555 /*auto_delete=*/true);
24556 ASSERT(finalizable_ref != nullptr);
24557 return finalizable_ref;
24558}
24559
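// Applies 'mapping' to every code point of 'str'. Returns 'str' itself when
// the mapping leaves every code point unchanged; otherwise allocates the
// narrowest string representation that can hold the mapped code points.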
24560StringPtr String::Transform(int32_t (*mapping)(int32_t ch),
24561 const String& str,
24562 Heap::Space space) {
24563 ASSERT(!str.IsNull());
24564 bool has_mapping = false;
24565 int32_t dst_max = 0;
24566 CodePointIterator it(str);
24567 while (it.Next()) {
24568 int32_t src = it.Current();
24569 int32_t dst = mapping(src);
24570 if (src != dst) {
24571 has_mapping = true;
24572 }
    dst_max = Utils::Maximum(dst_max, dst);
  }
  if (!has_mapping) {
    return str.ptr();
  }
  if (Utf::IsLatin1(dst_max)) {
    return OneByteString::Transform(mapping, str, space);
24580 }
24581 ASSERT(Utf::IsBmp(dst_max) || Utf::IsSupplementary(dst_max));
24582 return TwoByteString::Transform(mapping, str, space);
24583}
24584
24585StringPtr String::ToUpperCase(const String& str, Heap::Space space) {
24586 // TODO(cshapiro): create a fast-path for OneByteString instances.
  return Transform(CaseMapping::ToUpper, str, space);
24588}
24589
24590StringPtr String::ToLowerCase(const String& str, Heap::Space space) {
24591 // TODO(cshapiro): create a fast-path for OneByteString instances.
  return Transform(CaseMapping::ToLower, str, space);
24593}
24594
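// Parses the characters in the range [start, end) of 'str' as a double.
// Returns false if the range does not form a valid double literal.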
24595bool String::ParseDouble(const String& str,
24596 intptr_t start,
24597 intptr_t end,
24598 double* result) {
24599 ASSERT(0 <= start);
24600 ASSERT(start <= end);
24601 ASSERT(end <= str.Length());
24602 intptr_t length = end - start;
24603 NoSafepointScope no_safepoint;
24604 const uint8_t* startChar;
24605 if (str.IsOneByteString()) {
    startChar = OneByteString::CharAddr(str, start);
  } else if (str.IsExternalOneByteString()) {
    startChar = ExternalOneByteString::CharAddr(str, start);
  } else {
    uint8_t* chars = Thread::Current()->zone()->Alloc<uint8_t>(length);
    for (intptr_t i = 0; i < length; i++) {
      int32_t ch = str.CharAt(start + i);
      if (ch < 128) {
        chars[i] = ch;
      } else {
        return false;  // Not ASCII, so definitely not valid double numeral.
      }
    }
    startChar = chars;
  }
  return CStringToDouble(reinterpret_cast<const char*>(startChar), length,
                         result);
24623}
24624
// Check to see if 'str1' matches 'str2' as is or
// once the private key separator is stripped from 'str1'.
24627//
24628// Things are made more complicated by the fact that constructors are
24629// added *after* the private suffix, so "foo@123.named" should match
24630// "foo.named".
24631//
24632// Also, the private suffix can occur more than once in the name, as in:
24633//
24634// _ReceivePortImpl@6be832b._internal@6be832b
24635//
24636template <typename T1, typename T2>
24637static bool EqualsIgnoringPrivateKey(const String& str1, const String& str2) {
24638 intptr_t len = str1.Length();
24639 intptr_t str2_len = str2.Length();
24640 if (len == str2_len) {
24641 for (intptr_t i = 0; i < len; i++) {
24642 if (T1::CharAt(str1, i) != T2::CharAt(str2, i)) {
24643 return false;
24644 }
24645 }
24646 return true;
24647 }
24648 if (len < str2_len) {
24649 return false; // No way they can match.
24650 }
24651 intptr_t pos = 0;
24652 intptr_t str2_pos = 0;
24653 while (pos < len) {
24654 int32_t ch = T1::CharAt(str1, pos);
24655 pos++;
24656
24657 if ((str2_pos < str2_len) && (ch == T2::CharAt(str2, str2_pos))) {
24658 str2_pos++;
24659 continue;
24660 }
24661
24662 if (ch == Library::kPrivateKeySeparator) {
24663 // Consume a private key separator if str1 has it but str2 does not.
24664 while ((pos < len) && (T1::CharAt(str1, pos) != '.') &&
24665 (T1::CharAt(str1, pos) != '&')) {
24666 pos++;
24667 }
24668 // Resume matching characters.
24669 continue;
24670 }
24671
24672 return false;
24673 }
24674
  // We have reached the end of the mangled name in 'str1'.
24676 ASSERT(pos == len);
24677 return (str2_pos == str2_len);
24678}
24679
24680#define EQUALS_IGNORING_PRIVATE_KEY(class_id, type, str1, str2) \
24681 switch (class_id) { \
24682 case kOneByteStringCid: \
24683 return dart::EqualsIgnoringPrivateKey<type, OneByteString>(str1, str2); \
24684 case kTwoByteStringCid: \
24685 return dart::EqualsIgnoringPrivateKey<type, TwoByteString>(str1, str2); \
24686 case kExternalOneByteStringCid: \
24687 return dart::EqualsIgnoringPrivateKey<type, ExternalOneByteString>( \
24688 str1, str2); \
24689 case kExternalTwoByteStringCid: \
24690 return dart::EqualsIgnoringPrivateKey<type, ExternalTwoByteString>( \
24691 str1, str2); \
24692 } \
24693 UNREACHABLE();
24694
24695bool String::EqualsIgnoringPrivateKey(const String& str1, const String& str2) {
24696 if (str1.ptr() == str2.ptr()) {
24697 return true; // Both handles point to the same raw instance.
24698 }
24699 NoSafepointScope no_safepoint;
24700 intptr_t str1_class_id = str1.ptr()->GetClassId();
24701 intptr_t str2_class_id = str2.ptr()->GetClassId();
24702 switch (str1_class_id) {
24703 case kOneByteStringCid:
24704 EQUALS_IGNORING_PRIVATE_KEY(str2_class_id, OneByteString, str1, str2);
24705 break;
24706 case kTwoByteStringCid:
24707 EQUALS_IGNORING_PRIVATE_KEY(str2_class_id, TwoByteString, str1, str2);
24708 break;
24709 case kExternalOneByteStringCid:
24710 EQUALS_IGNORING_PRIVATE_KEY(str2_class_id, ExternalOneByteString, str1,
24711 str2);
24712 break;
24713 case kExternalTwoByteStringCid:
24714 EQUALS_IGNORING_PRIVATE_KEY(str2_class_id, ExternalTwoByteString, str1,
24715 str2);
24716 break;
24717 }
24718 UNREACHABLE();
24719 return false;
24720}
24721
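// Advances to the next code point, combining a lead surrogate with the
// following trail surrogate into a single code point when both are present.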
24722bool String::CodePointIterator::Next() {
24723 ASSERT(index_ >= -1);
  intptr_t length = Utf16::Length(ch_);
  if (index_ < (end_ - length)) {
    index_ += length;
    ch_ = str_.CharAt(index_);
    if (Utf16::IsLeadSurrogate(ch_) && (index_ < (end_ - 1))) {
      int32_t ch2 = str_.CharAt(index_ + 1);
      if (Utf16::IsTrailSurrogate(ch2)) {
        ch_ = Utf16::Decode(ch_, ch2);
      }
24732 }
24733 }
24734 return true;
24735 }
24736 index_ = end_;
24737 return false;
24738}
24739
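// Rewrites special characters as two-character escapes (e.g. a newline
// becomes "\n") and other non-printable ASCII characters as hex escapes
// (e.g. 0x01 becomes "\x01").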
24740OneByteStringPtr OneByteString::EscapeSpecialCharacters(const String& str) {
24741 intptr_t len = str.Length();
24742 if (len > 0) {
24743 intptr_t num_escapes = 0;
24744 for (intptr_t i = 0; i < len; i++) {
      num_escapes += EscapeOverhead(CharAt(str, i));
    }
    const String& dststr =
        String::Handle(OneByteString::New(len + num_escapes, Heap::kNew));
    intptr_t index = 0;
    for (intptr_t i = 0; i < len; i++) {
      uint8_t ch = CharAt(str, i);
      if (IsSpecialCharacter(ch)) {
        SetCharAt(dststr, index, '\\');
        SetCharAt(dststr, index + 1, SpecialCharacter(ch));
        index += 2;
      } else if (IsAsciiNonprintable(ch)) {
        SetCharAt(dststr, index, '\\');
        SetCharAt(dststr, index + 1, 'x');
        SetCharAt(dststr, index + 2, GetHexCharacter(ch >> 4));
        SetCharAt(dststr, index + 3, GetHexCharacter(ch & 0xF));
        index += 4;
      } else {
        SetCharAt(dststr, index, ch);
        index += 1;
      }
    }
    return OneByteString::raw(dststr);
  }
  return OneByteString::raw(Symbols::Empty());
24770}
24771
24772OneByteStringPtr ExternalOneByteString::EscapeSpecialCharacters(
24773 const String& str) {
24774 intptr_t len = str.Length();
24775 if (len > 0) {
24776 intptr_t num_escapes = 0;
24777 for (intptr_t i = 0; i < len; i++) {
      num_escapes += EscapeOverhead(CharAt(str, i));
    }
    const String& dststr =
        String::Handle(OneByteString::New(len + num_escapes, Heap::kNew));
    intptr_t index = 0;
    for (intptr_t i = 0; i < len; i++) {
      uint8_t ch = CharAt(str, i);
      if (IsSpecialCharacter(ch)) {
        OneByteString::SetCharAt(dststr, index, '\\');
        OneByteString::SetCharAt(dststr, index + 1, SpecialCharacter(ch));
        index += 2;
      } else if (IsAsciiNonprintable(ch)) {
        OneByteString::SetCharAt(dststr, index, '\\');
        OneByteString::SetCharAt(dststr, index + 1, 'x');
        OneByteString::SetCharAt(dststr, index + 2, GetHexCharacter(ch >> 4));
        OneByteString::SetCharAt(dststr, index + 3, GetHexCharacter(ch & 0xF));
        index += 4;
      } else {
        OneByteString::SetCharAt(dststr, index, ch);
        index += 1;
      }
    }
    return OneByteString::raw(dststr);
  }
  return OneByteString::raw(Symbols::Empty());
24803}
24804
24805OneByteStringPtr OneByteString::New(intptr_t len, Heap::Space space) {
24806 ASSERT((IsolateGroup::Current() == Dart::vm_isolate_group()) ||
24807 ((IsolateGroup::Current()->object_store() != nullptr) &&
24808 (IsolateGroup::Current()->object_store()->one_byte_string_class() !=
24809 Class::null())));
24810 if (len < 0 || len > kMaxElements) {
24811 // This should be caught before we reach here.
24812 FATAL("Fatal error in OneByteString::New: invalid len %" Pd "\n", len);
24813 }
  auto result = Object::Allocate<OneByteString>(space, len);
  NoSafepointScope no_safepoint;
  result->untag()->set_length(Smi::New(len));
#if !defined(HASH_IN_OBJECT_HEADER)
  result->untag()->set_hash(Smi::New(0));
#endif
  intptr_t size = OneByteString::UnroundedSize(result);
  ASSERT(size <= result->untag()->HeapSize());
  memset(reinterpret_cast<void*>(UntaggedObject::ToAddr(result) + size), 0,
         result->untag()->HeapSize() - size);
24824 return result;
24825}
24826
24827OneByteStringPtr OneByteString::New(const uint8_t* characters,
24828 intptr_t len,
24829 Heap::Space space) {
  const String& result = String::Handle(OneByteString::New(len, space));
  if (len > 0) {
    NoSafepointScope no_safepoint;
    memmove(DataStart(result), characters, len);
  }
  return OneByteString::raw(result);
24836}
24837
24838OneByteStringPtr OneByteString::New(const uint16_t* characters,
24839 intptr_t len,
24840 Heap::Space space) {
  const String& result = String::Handle(OneByteString::New(len, space));
  NoSafepointScope no_safepoint;
  for (intptr_t i = 0; i < len; ++i) {
    ASSERT(Utf::IsLatin1(characters[i]));
    *CharAddr(result, i) = characters[i];
  }
  return OneByteString::raw(result);
24848}
24849
24850OneByteStringPtr OneByteString::New(const int32_t* characters,
24851 intptr_t len,
24852 Heap::Space space) {
  const String& result = String::Handle(OneByteString::New(len, space));
  NoSafepointScope no_safepoint;
  for (intptr_t i = 0; i < len; ++i) {
    ASSERT(Utf::IsLatin1(characters[i]));
    *CharAddr(result, i) = characters[i];
  }
  return OneByteString::raw(result);
24860}
24861
24862OneByteStringPtr OneByteString::New(const String& str, Heap::Space space) {
24863 intptr_t len = str.Length();
  const String& result = String::Handle(OneByteString::New(len, space));
  String::Copy(result, 0, str, 0, len);
  return OneByteString::raw(result);
24867}
24868
24869OneByteStringPtr OneByteString::New(const String& other_one_byte_string,
24870 intptr_t other_start_index,
24871 intptr_t other_len,
24872 Heap::Space space) {
  const String& result = String::Handle(OneByteString::New(other_len, space));
  ASSERT(other_one_byte_string.IsOneByteString());
  if (other_len > 0) {
    NoSafepointScope no_safepoint;
    memmove(OneByteString::DataStart(result),
            OneByteString::CharAddr(other_one_byte_string, other_start_index),
            other_len);
  }
  return OneByteString::raw(result);
24882}
24883
24884OneByteStringPtr OneByteString::New(const TypedDataBase& other_typed_data,
24885 intptr_t other_start_index,
24886 intptr_t other_len,
24887 Heap::Space space) {
  const String& result = String::Handle(OneByteString::New(other_len, space));
  ASSERT(other_typed_data.ElementSizeInBytes() == 1);
  if (other_len > 0) {
    NoSafepointScope no_safepoint;
    memmove(OneByteString::DataStart(result),
            other_typed_data.DataAddr(other_start_index), other_len);
  }
  return OneByteString::raw(result);
24896}
24897
24898OneByteStringPtr OneByteString::Concat(const String& str1,
24899 const String& str2,
24900 Heap::Space space) {
24901 intptr_t len1 = str1.Length();
24902 intptr_t len2 = str2.Length();
24903 intptr_t len = len1 + len2;
  const String& result = String::Handle(OneByteString::New(len, space));
  String::Copy(result, 0, str1, 0, len1);
  String::Copy(result, len1, str2, 0, len2);
  return OneByteString::raw(result);
24908}
24909
24910OneByteStringPtr OneByteString::ConcatAll(const Array& strings,
24911 intptr_t start,
24912 intptr_t end,
24913 intptr_t len,
24914 Heap::Space space) {
24915 ASSERT(!strings.IsNull());
24916 ASSERT(start >= 0);
24917 ASSERT(end <= strings.Length());
  const String& result = String::Handle(OneByteString::New(len, space));
  String& str = String::Handle();
  intptr_t pos = 0;
  for (intptr_t i = start; i < end; i++) {
    str ^= strings.At(i);
    const intptr_t str_len = str.Length();
    String::Copy(result, pos, str, 0, str_len);
    ASSERT((kMaxElements - pos) >= str_len);
    pos += str_len;
  }
  return OneByteString::raw(result);
24929}
24930
24931OneByteStringPtr OneByteString::Transform(int32_t (*mapping)(int32_t ch),
24932 const String& str,
24933 Heap::Space space) {
24934 ASSERT(!str.IsNull());
24935 intptr_t len = str.Length();
  const String& result = String::Handle(OneByteString::New(len, space));
  NoSafepointScope no_safepoint;
  for (intptr_t i = 0; i < len; ++i) {
    int32_t ch = mapping(str.CharAt(i));
    ASSERT(Utf::IsLatin1(ch));
    *CharAddr(result, i) = ch;
  }
  return OneByteString::raw(result);
24944}
24945
24946OneByteStringPtr OneByteString::SubStringUnchecked(const String& str,
24947 intptr_t begin_index,
24948 intptr_t length,
24949 Heap::Space space) {
24950 ASSERT(!str.IsNull() && str.IsOneByteString());
24951 ASSERT(begin_index >= 0);
24952 ASSERT(length >= 0);
24953 if (begin_index <= str.Length() && length == 0) {
    return OneByteString::raw(Symbols::Empty());
  }
  ASSERT(begin_index < str.Length());
  OneByteStringPtr result = OneByteString::New(length, space);
  NoSafepointScope no_safepoint;
  if (length > 0) {
    uint8_t* dest = &result->untag()->data()[0];
    const uint8_t* src = &untag(str)->data()[begin_index];
    memmove(dest, src, length);
24963 }
24964 return result;
24965}
24966
24967TwoByteStringPtr TwoByteString::EscapeSpecialCharacters(const String& str) {
24968 intptr_t len = str.Length();
24969 if (len > 0) {
24970 intptr_t num_escapes = 0;
24971 for (intptr_t i = 0; i < len; i++) {
      num_escapes += EscapeOverhead(CharAt(str, i));
    }
    const String& dststr =
        String::Handle(TwoByteString::New(len + num_escapes, Heap::kNew));
    intptr_t index = 0;
    for (intptr_t i = 0; i < len; i++) {
      uint16_t ch = CharAt(str, i);
      if (IsSpecialCharacter(ch)) {
        SetCharAt(dststr, index, '\\');
        SetCharAt(dststr, index + 1, SpecialCharacter(ch));
        index += 2;
      } else if (IsAsciiNonprintable(ch)) {
        SetCharAt(dststr, index, '\\');
        SetCharAt(dststr, index + 1, 'x');
        SetCharAt(dststr, index + 2, GetHexCharacter(ch >> 4));
        SetCharAt(dststr, index + 3, GetHexCharacter(ch & 0xF));
        index += 4;
      } else {
        SetCharAt(dststr, index, ch);
        index += 1;
      }
    }
    return TwoByteString::raw(dststr);
  }
  return TwoByteString::New(0, Heap::kNew);
24997}
24998
24999TwoByteStringPtr TwoByteString::New(intptr_t len, Heap::Space space) {
25000 ASSERT(IsolateGroup::Current()->object_store()->two_byte_string_class() !=
25001 nullptr);
25002 if (len < 0 || len > kMaxElements) {
25003 // This should be caught before we reach here.
25004 FATAL("Fatal error in TwoByteString::New: invalid len %" Pd "\n", len);
25005 }
  auto s = Object::Allocate<TwoByteString>(space, len);
  NoSafepointScope no_safepoint;
  s->untag()->set_length(Smi::New(len));
#if !defined(HASH_IN_OBJECT_HEADER)
  s->untag()->set_hash(Smi::New(0));
#endif
  intptr_t size = TwoByteString::UnroundedSize(s);
  ASSERT(size <= s->untag()->HeapSize());
  memset(reinterpret_cast<void*>(UntaggedObject::ToAddr(s) + size), 0,
         s->untag()->HeapSize() - size);
  return s;
25017}
25018
25019TwoByteStringPtr TwoByteString::New(const uint16_t* utf16_array,
25020 intptr_t array_len,
25021 Heap::Space space) {
25022 ASSERT(array_len > 0);
  const String& result = String::Handle(TwoByteString::New(array_len, space));
  {
    NoSafepointScope no_safepoint;
    memmove(DataStart(result), utf16_array, (array_len * 2));
  }
  return TwoByteString::raw(result);
25029}
25030
25031TwoByteStringPtr TwoByteString::New(intptr_t utf16_len,
25032 const int32_t* utf32_array,
25033 intptr_t array_len,
25034 Heap::Space space) {
25035 ASSERT((array_len > 0) && (utf16_len >= array_len));
  const String& result = String::Handle(TwoByteString::New(utf16_len, space));
  {
    NoSafepointScope no_safepoint;
    intptr_t j = 0;
    for (intptr_t i = 0; i < array_len; ++i) {
      if (Utf::IsSupplementary(utf32_array[i])) {
        ASSERT(j < (utf16_len - 1));
        Utf16::Encode(utf32_array[i], CharAddr(result, j));
        j += 2;
      } else {
        ASSERT(j < utf16_len);
        *CharAddr(result, j) = utf32_array[i];
        j += 1;
      }
    }
  }
  return TwoByteString::raw(result);
25053}
25054
25055TwoByteStringPtr TwoByteString::New(const String& str, Heap::Space space) {
25056 intptr_t len = str.Length();
  const String& result = String::Handle(TwoByteString::New(len, space));
  String::Copy(result, 0, str, 0, len);
  return TwoByteString::raw(result);
25060}
25061
25062TwoByteStringPtr TwoByteString::New(const TypedDataBase& other_typed_data,
25063 intptr_t other_start_index,
25064 intptr_t other_len,
25065 Heap::Space space) {
  const String& result = String::Handle(TwoByteString::New(other_len, space));
  if (other_len > 0) {
    NoSafepointScope no_safepoint;
    memmove(TwoByteString::DataStart(result),
            other_typed_data.DataAddr(other_start_index),
            other_len * sizeof(uint16_t));
  }
  return TwoByteString::raw(result);
25074}
25075
25076TwoByteStringPtr TwoByteString::Concat(const String& str1,
25077 const String& str2,
25078 Heap::Space space) {
25079 intptr_t len1 = str1.Length();
25080 intptr_t len2 = str2.Length();
25081 intptr_t len = len1 + len2;
  const String& result = String::Handle(TwoByteString::New(len, space));
  String::Copy(result, 0, str1, 0, len1);
  String::Copy(result, len1, str2, 0, len2);
  return TwoByteString::raw(result);
25086}
25087
25088TwoByteStringPtr TwoByteString::ConcatAll(const Array& strings,
25089 intptr_t start,
25090 intptr_t end,
25091 intptr_t len,
25092 Heap::Space space) {
25093 ASSERT(!strings.IsNull());
25094 ASSERT(start >= 0);
25095 ASSERT(end <= strings.Length());
  const String& result = String::Handle(TwoByteString::New(len, space));
  String& str = String::Handle();
  intptr_t pos = 0;
  for (intptr_t i = start; i < end; i++) {
    str ^= strings.At(i);
    const intptr_t str_len = str.Length();
    String::Copy(result, pos, str, 0, str_len);
    ASSERT((kMaxElements - pos) >= str_len);
    pos += str_len;
  }
  return TwoByteString::raw(result);
25107}
25108
25109TwoByteStringPtr TwoByteString::Transform(int32_t (*mapping)(int32_t ch),
25110 const String& str,
25111 Heap::Space space) {
25112 ASSERT(!str.IsNull());
25113 intptr_t len = str.Length();
  const String& result = String::Handle(TwoByteString::New(len, space));
  String::CodePointIterator it(str);
  intptr_t i = 0;
  NoSafepointScope no_safepoint;
  while (it.Next()) {
    int32_t src = it.Current();
    int32_t dst = mapping(src);
    ASSERT(dst >= 0 && dst <= 0x10FFFF);
    intptr_t len = Utf16::Length(dst);
    if (len == 1) {
      *CharAddr(result, i) = dst;
    } else {
      ASSERT(len == 2);
      Utf16::Encode(dst, CharAddr(result, i));
    }
    i += len;
  }
  return TwoByteString::raw(result);
25132}
25133
25134ExternalOneByteStringPtr ExternalOneByteString::New(
25135 const uint8_t* data,
25136 intptr_t len,
25137 void* peer,
25138 intptr_t external_allocation_size,
25139 Dart_HandleFinalizer callback,
25140 Heap::Space space) {
25141 ASSERT(IsolateGroup::Current()
25142 ->object_store()
25143 ->external_one_byte_string_class() != Class::null());
25144 if (len < 0 || len > kMaxElements) {
25145 // This should be caught before we reach here.
25146 FATAL("Fatal error in ExternalOneByteString::New: invalid len %" Pd "\n",
25147 len);
25148 }
  const auto& result =
      String::Handle(Object::Allocate<ExternalOneByteString>(space));
#if !defined(HASH_IN_OBJECT_HEADER)
  result.ptr()->untag()->set_hash(Smi::New(0));
#endif
  result.SetLength(len);
  SetExternalData(result, data, peer);
  AddFinalizer(result, peer, callback, external_allocation_size);
  return ExternalOneByteString::raw(result);
25158}
25159
25160ExternalTwoByteStringPtr ExternalTwoByteString::New(
25161 const uint16_t* data,
25162 intptr_t len,
25163 void* peer,
25164 intptr_t external_allocation_size,
25165 Dart_HandleFinalizer callback,
25166 Heap::Space space) {
25167 ASSERT(IsolateGroup::Current()
25168 ->object_store()
25169 ->external_two_byte_string_class() != Class::null());
25170 if (len < 0 || len > kMaxElements) {
25171 // This should be caught before we reach here.
25172 FATAL("Fatal error in ExternalTwoByteString::New: invalid len %" Pd "\n",
25173 len);
25174 }
  const auto& result =
      String::Handle(Object::Allocate<ExternalTwoByteString>(space));
#if !defined(HASH_IN_OBJECT_HEADER)
  result.ptr()->untag()->set_hash(Smi::New(0));
#endif
  result.SetLength(len);
  SetExternalData(result, data, peer);
  AddFinalizer(result, peer, callback, external_allocation_size);
  return ExternalTwoByteString::raw(result);
25184}
25185
25186const char* Bool::ToCString() const {
25187 return value() ? "true" : "false";
25188}
25189
25190bool Array::CanonicalizeEquals(const Instance& other) const {
25191 if (this->ptr() == other.ptr()) {
25192 // Both handles point to the same raw instance.
25193 return true;
25194 }
25195
25196 // An Array may be compared to an ImmutableArray.
25197 if (!other.IsArray() || other.IsNull()) {
25198 return false;
25199 }
25200
25201 // First check if both arrays have the same length and elements.
  const Array& other_arr = Array::Cast(other);

  intptr_t len = this->Length();
  if (len != other_arr.Length()) {
    return false;
  }

  for (intptr_t i = 0; i < len; i++) {
    if (this->At(i) != other_arr.At(i)) {
      return false;
    }
  }

  // Now check if both arrays have the same type arguments.
  if (GetTypeArguments() == other.GetTypeArguments()) {
    return true;
  }
  const TypeArguments& type_args = TypeArguments::Handle(GetTypeArguments());
  const TypeArguments& other_type_args =
      TypeArguments::Handle(other.GetTypeArguments());
  if (!type_args.Equals(other_type_args)) {
25223 return false;
25224 }
25225 return true;
25226}
25227
25228uint32_t Array::CanonicalizeHash() const {
25229 intptr_t len = Length();
25230 if (len == 0) {
25231 return 1;
25232 }
25233 Thread* thread = Thread::Current();
  uint32_t hash = thread->heap()->GetCanonicalHash(ptr());
  if (hash != 0) {
    return hash;
  }
  hash = len;
  Instance& member = Instance::Handle(GetTypeArguments());
  hash = CombineHashes(hash, member.CanonicalizeHash());
  for (intptr_t i = 0; i < len; i++) {
    member ^= At(i);
    hash = CombineHashes(hash, member.CanonicalizeHash());
  }
  hash = FinalizeHash(hash, kHashBits);
  thread->heap()->SetCanonicalHash(ptr(), hash);
25247 return hash;
25248}
25249
25250ArrayPtr Array::New(intptr_t len,
25251 const AbstractType& element_type,
25252 Heap::Space space) {
  const Array& result = Array::Handle(Array::New(len, space));
  if (!element_type.IsDynamicType()) {
    TypeArguments& type_args = TypeArguments::Handle(TypeArguments::New(1));
    type_args.SetTypeAt(0, element_type);
    type_args = type_args.Canonicalize(Thread::Current());
25258 result.SetTypeArguments(type_args);
25259 }
25260 return result.ptr();
25261}
25262
25263ArrayPtr Array::NewUninitialized(intptr_t class_id,
25264 intptr_t len,
25265 Heap::Space space) {
25266 if (!IsValidLength(len)) {
25267 // This should be caught before we reach here.
25268 FATAL("Fatal error in Array::New: invalid len %" Pd "\n", len);
25269 }
  auto raw = Object::AllocateVariant<Array>(class_id, space, len);
  NoSafepointScope no_safepoint;
  raw->untag()->set_length(Smi::New(len));
  if (UseCardMarkingForAllocation(len)) {
25274 ASSERT(raw->IsOldObject());
25275 raw->untag()->SetCardRememberedBitUnsynchronized();
25276 }
25277 return raw;
25278}
25279
25280ArrayPtr Array::New(intptr_t class_id, intptr_t len, Heap::Space space) {
  if (!UseCardMarkingForAllocation(len)) {
    return NewUninitialized(class_id, len, space);
  }

  Thread* thread = Thread::Current();
  Array& result =
      Array::Handle(thread->zone(), NewUninitialized(class_id, len, space));
  result.SetTypeArguments(Object::null_type_arguments());
  for (intptr_t i = 0; i < len; i++) {
    result.SetAt(i, Object::null_object(), thread);
25291 if (((i + 1) % KB) == 0) {
25292 thread->CheckForSafepoint();
25293 }
25294 }
25295 return result.ptr();
25296}
25297
25298ArrayPtr Array::Slice(intptr_t start,
25299 intptr_t count,
25300 bool with_type_argument) const {
25301 Thread* thread = Thread::Current();
25302 Zone* zone = thread->zone();
  const Array& dest = Array::Handle(zone, Array::NewUninitialized(count));
  if (with_type_argument) {
    dest.SetTypeArguments(TypeArguments::Handle(zone, GetTypeArguments()));
  } else {
    dest.SetTypeArguments(Object::null_type_arguments());
  }
  if (!UseCardMarkingForAllocation(count)) {
    NoSafepointScope no_safepoint(thread);
    for (int i = 0; i < count; i++) {
      dest.untag()->set_element(i, untag()->element(i + start), thread);
    }
  } else {
    for (int i = 0; i < count; i++) {
      dest.untag()->set_element(i, untag()->element(i + start), thread);
25317 if (((i + 1) % KB) == 0) {
25318 thread->CheckForSafepoint();
25319 }
25320 }
25321 }
25322 return dest.ptr();
25323}
25324
25325void Array::MakeImmutable() const {
25326 if (IsImmutable()) return;
25327 ASSERT(!IsCanonical());
25328 untag()->SetClassId(kImmutableArrayCid);
25329}
25330
25331const char* Array::ToCString() const {
25332 if (IsNull()) {
25333 return IsImmutable() ? "_ImmutableList nullptr" : "_List nullptr";
25334 }
25335 Zone* zone = Thread::Current()->zone();
25336 const char* format =
25337 IsImmutable() ? "_ImmutableList len:%" Pd : "_List len:%" Pd;
25338 return zone->PrintToString(format, Length());
25339}
25340
25341ArrayPtr Array::Grow(const Array& source,
25342 intptr_t new_length,
25343 Heap::Space space) {
25344 Thread* thread = Thread::Current();
25345 Zone* zone = thread->zone();
  const Array& result =
      Array::Handle(zone, Array::NewUninitialized(new_length, space));
  intptr_t old_length = 0;
  if (!source.IsNull()) {
    old_length = source.Length();
    result.SetTypeArguments(
        TypeArguments::Handle(zone, source.GetTypeArguments()));
  } else {
    result.SetTypeArguments(Object::null_type_arguments());
  }
  ASSERT(new_length > old_length);  // Unnecessary copying of array.
  if (!UseCardMarkingForAllocation(new_length)) {
    NoSafepointScope no_safepoint(thread);
    for (intptr_t i = 0; i < old_length; i++) {
      result.untag()->set_element(i, source.untag()->element(i), thread);
    }
    for (intptr_t i = old_length; i < new_length; i++) {
      ASSERT(result.untag()->element(i) == Object::null());
    }
  } else {
    for (intptr_t i = 0; i < old_length; i++) {
      result.untag()->set_element(i, source.untag()->element(i), thread);
      if (((i + 1) % KB) == 0) {
        thread->CheckForSafepoint();
      }
    }
    for (intptr_t i = old_length; i < new_length; i++) {
      result.untag()->set_element(i, Object::null(), thread);
      if (((i + 1) % KB) == 0) {
        thread->CheckForSafepoint();
      }
    }
  }
25377 }
25378 }
25379 return result.ptr();
25380}
25381
25382void Array::Truncate(intptr_t new_len) const {
25383 if (IsNull()) {
25384 return;
25385 }
25386 Thread* thread = Thread::Current();
25387 Zone* zone = thread->zone();
  const Array& array = Array::Handle(zone, this->ptr());

  intptr_t old_len = array.Length();
  ASSERT(new_len <= old_len);
  if (old_len == new_len) {
    return;
  }
  intptr_t old_size = Array::InstanceSize(old_len);
  intptr_t new_size = Array::InstanceSize(new_len);

  NoSafepointScope no_safepoint;

  // If there is any left over space fill it with either an Array object or
  // just a plain object (depending on the amount of left over space) so
  // that it can be traversed over successfully during garbage collection.
  Object::MakeUnusedSpaceTraversable(array, old_size, new_size);

  // Update the size in the header field and length of the array object.
  // These release operations are balanced by acquire operations in the
  // concurrent sweeper.
  uword old_tags = array.untag()->tags_;
  uword new_tags;
  ASSERT(kArrayCid == UntaggedObject::ClassIdTag::decode(old_tags));
  do {
    new_tags = UntaggedObject::SizeTag::update(new_size, old_tags);
  } while (!array.untag()->tags_.compare_exchange_weak(
      old_tags, new_tags, std::memory_order_release));
25415
25416 // Between the CAS of the header above and the SetLength below, the array is
25417 // temporarily in an inconsistent state. The header is considered the
25418 // overriding source of object size by UntaggedObject::HeapSize, but the
25419 // ASSERTs in UntaggedObject::HeapSizeFromClass must handle this special case.
25420 array.SetLengthRelease(new_len);
25421}
25422
25423ArrayPtr Array::MakeFixedLength(const GrowableObjectArray& growable_array,
25424 bool unique) {
25425 ASSERT(!growable_array.IsNull());
25426 Thread* thread = Thread::Current();
25427 Zone* zone = thread->zone();
25428 intptr_t used_len = growable_array.Length();
25429 // Get the type arguments and prepare to copy them.
25430 const TypeArguments& type_arguments =
      TypeArguments::Handle(growable_array.GetTypeArguments());
25432 if (used_len == 0) {
25433 if (type_arguments.IsNull() && !unique) {
25434 // This is a raw List (as in no type arguments), so we can return the
25435 // simple empty array.
25436 return Object::empty_array().ptr();
25437 }
25438
25439 // The backing array may be a shared instance, or may not have correct
25440 // type parameters. Create a new empty array.
    Heap::Space space = thread->IsDartMutatorThread() ? Heap::kNew : Heap::kOld;
    Array& array = Array::Handle(zone, Array::New(0, space));
    array.SetTypeArguments(type_arguments);
    return array.ptr();
  }
  const Array& array = Array::Handle(zone, growable_array.data());
  ASSERT(array.IsArray());
  array.SetTypeArguments(type_arguments);
25449
25450 // Null the GrowableObjectArray, we are removing its backing array.
25451 growable_array.SetLength(0);
25452 growable_array.SetData(Object::empty_array());
25453
25454 // Truncate the old backing array and return it.
  array.Truncate(used_len);
25456 return array.ptr();
25457}
25458
25459void Array::CanonicalizeFieldsLocked(Thread* thread) const {
25460 intptr_t len = Length();
25461 if (len > 0) {
25462 Zone* zone = thread->zone();
25463 Instance& obj = Instance::Handle(zone);
25464 for (intptr_t i = 0; i < len; i++) {
      obj ^= At(i);
      obj = obj.CanonicalizeLocked(thread);
      this->SetAt(i, obj);
25468 }
25469 }
25470}
25471
25472ImmutableArrayPtr ImmutableArray::New(intptr_t len, Heap::Space space) {
25473 ASSERT(IsolateGroup::Current()->object_store()->immutable_array_class() !=
25474 Class::null());
  return static_cast<ImmutableArrayPtr>(Array::New(kClassId, len, space));
25476}
25477
25478void GrowableObjectArray::Add(const Object& value, Heap::Space space) const {
25479 ASSERT(!IsNull());
25480 if (Length() == Capacity()) {
25481 // Grow from 0 to 3, and then double + 1.
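    // This yields the capacity sequence 3, 7, 15, 31, ... and keeps the
    // amortized cost of Add linear.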
25482 intptr_t new_capacity = (Capacity() * 2) | 3;
25483 if (new_capacity <= Capacity()) {
25484 Exceptions::ThrowOOM();
25485 UNREACHABLE();
25486 }
25487 Grow(new_capacity, space);
25488 }
25489 ASSERT(Length() < Capacity());
25490 intptr_t index = Length();
25491 SetLength(index + 1);
25492 SetAt(index, value);
25493}
25494
25495void GrowableObjectArray::Grow(intptr_t new_capacity, Heap::Space space) const {
25496 ASSERT(new_capacity > Capacity());
  const Array& contents = Array::Handle(data());
  const Array& new_contents =
      Array::Handle(Array::Grow(contents, new_capacity, space));
  untag()->set_data(new_contents.ptr());
25501}
25502
25503ObjectPtr GrowableObjectArray::RemoveLast() const {
25504 ASSERT(!IsNull());
25505 ASSERT(Length() > 0);
25506 intptr_t index = Length() - 1;
  const Array& contents = Array::Handle(data());
  const PassiveObject& obj = PassiveObject::Handle(contents.At(index));
  contents.SetAt(index, Object::null_object());
25510 SetLength(index);
25511 return obj.ptr();
25512}
25513
25514GrowableObjectArrayPtr GrowableObjectArray::New(intptr_t capacity,
25515 Heap::Space space) {
  ArrayPtr raw_data = (capacity == 0) ? Object::empty_array().ptr()
                                      : Array::New(capacity, space);
  const Array& data = Array::Handle(raw_data);
  return New(data, space);
25520}
25521
25522GrowableObjectArrayPtr GrowableObjectArray::New(const Array& array,
25523 Heap::Space space) {
25524 ASSERT(
25525 IsolateGroup::Current()->object_store()->growable_object_array_class() !=
25526 Class::null());
25527 const auto& result =
      GrowableObjectArray::Handle(Object::Allocate<GrowableObjectArray>(space));
25529 result.SetLength(0);
25530 result.SetData(array);
25531 return result.ptr();
25532}
25533
25534const char* GrowableObjectArray::ToCString() const {
25535 if (IsNull()) {
25536 return "_GrowableList: null";
25537 }
  return OS::SCreate(Thread::Current()->zone(),
                     "Instance(length:%" Pd ") of '_GrowableList'", Length());
25540}
25541
25542// Equivalent to Dart's operator "==" and hashCode.
25543class DefaultHashTraits {
25544 public:
25545 static const char* Name() { return "DefaultHashTraits"; }
25546 static bool ReportStats() { return false; }
25547
25548 static bool IsMatch(const Object& a, const Object& b) {
25549 if (a.IsNull() || b.IsNull()) {
25550 return (a.IsNull() && b.IsNull());
25551 } else {
      return Instance::Cast(a).OperatorEquals(Instance::Cast(b));
25553 }
25554 }
25555 static uword Hash(const Object& obj) {
25556 if (obj.IsNull()) {
25557 return 0;
25558 }
25559 // TODO(koda): Ensure VM classes only produce Smi hash codes, and remove
25560 // non-Smi cases once Dart-side implementation is complete.
25561 Thread* thread = Thread::Current();
25562 REUSABLE_INSTANCE_HANDLESCOPE(thread);
25563 Instance& hash_code = thread->InstanceHandle();
25564 hash_code ^= Instance::Cast(obj).HashCode();
25565 if (hash_code.IsSmi()) {
25566 // May waste some bits on 64-bit, to ensure consistency with non-Smi case.
      return static_cast<uword>(Smi::Cast(hash_code).AsTruncatedUint32Value());
    } else if (hash_code.IsInteger()) {
      return static_cast<uword>(
          Integer::Cast(hash_code).AsTruncatedUint32Value());
25571 } else {
25572 return 0;
25573 }
25574 }
25575};
25576
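// Creates an empty map backed by a fresh data array and index, each with
// kInitialIndexSize elements; the initial hash mask is chosen so that hash
// values fit in a Smi even on 32-bit platforms.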
25577MapPtr Map::NewDefault(intptr_t class_id, Heap::Space space) {
  const Array& data = Array::Handle(Array::New(kInitialIndexSize, space));
  const TypedData& index = TypedData::Handle(
      TypedData::New(kTypedDataUint32ArrayCid, kInitialIndexSize, space));
  // On 32-bit, the top bits are wasted to avoid Mint allocation.
  const intptr_t kAvailableBits = (kSmiBits >= 32) ? 32 : kSmiBits;
  const intptr_t kInitialHashMask =
      (1 << (kAvailableBits - kInitialIndexBits)) - 1;
  return Map::New(class_id, data, index, kInitialHashMask, 0, 0, space);
25586}
25587
25588MapPtr Map::New(intptr_t class_id,
25589 const Array& data,
25590 const TypedData& index,
25591 intptr_t hash_mask,
25592 intptr_t used_data,
25593 intptr_t deleted_keys,
25594 Heap::Space space) {
25595 ASSERT(class_id == kMapCid || class_id == kConstMapCid);
25596 ASSERT(IsolateGroup::Current()->object_store()->map_impl_class() !=
25597 Class::null());
  Map& result = Map::Handle(Map::NewUninitialized(class_id, space));
25599 result.set_data(data);
25600 result.set_index(index);
25601 result.set_hash_mask(hash_mask);
25602 result.set_used_data(used_data);
25603 result.set_deleted_keys(deleted_keys);
25604 return result.ptr();
25605}
25606
25607MapPtr Map::NewUninitialized(intptr_t class_id, Heap::Space space) {
25608 ASSERT(IsolateGroup::Current()->object_store()->map_impl_class() !=
25609 Class::null());
  return Object::AllocateVariant<Map>(class_id, space);
25611}
25612
25613const char* Map::ToCString() const {
25614 Zone* zone = Thread::Current()->zone();
25615 return zone->PrintToString(
      "%s len:%" Pd, GetClassId() == kConstMapCid ? "_ConstMap" : "_Map",
25617 Length());
25618}
25619
25620void LinkedHashBase::ComputeAndSetHashMask() const {
25621 ASSERT(IsImmutable());
25622 ASSERT_EQUAL(Smi::Value(deleted_keys()), 0);
25623 Thread* const thread = Thread::Current();
25624 Zone* const zone = thread->zone();
25625
  const auto& data_array = Array::Handle(zone, data());
  const intptr_t data_length = Utils::RoundUpToPowerOfTwo(data_array.Length());
  const intptr_t index_size_mult = IsMap() ? 1 : 2;
  const intptr_t index_size = Utils::Maximum(LinkedHashBase::kInitialIndexSize,
                                             data_length * index_size_mult);
25631 ASSERT(Utils::IsPowerOfTwo(index_size));
25632
25633 const intptr_t hash_mask = IndexSizeToHashMask(index_size);
25634 set_hash_mask(hash_mask);
25635}
25636
25637bool LinkedHashBase::CanonicalizeEquals(const Instance& other) const {
25638 ASSERT(IsImmutable());
25639
25640 if (this->ptr() == other.ptr()) {
25641 // Both handles point to the same raw instance.
25642 return true;
25643 }
25644 if (other.IsNull()) {
25645 return false;
25646 }
25647 if (GetClassId() != other.GetClassId()) {
25648 return false;
25649 }
25650
25651 Zone* zone = Thread::Current()->zone();
25652
  const LinkedHashBase& other_map = LinkedHashBase::Cast(other);

  if (!Smi::Handle(zone, used_data())
           .Equals(Smi::Handle(zone, other_map.used_data()))) {
    return false;
  }

  // Immutable maps and sets do not have deleted keys.
  ASSERT_EQUAL(RawSmiValue(deleted_keys()), 0);

  if (!Array::Handle(zone, data())
           .CanonicalizeEquals(Array::Handle(zone, other_map.data()))) {
25665 return false;
25666 }
25667
25668 if (GetTypeArguments() == other.GetTypeArguments()) {
25669 return true;
25670 }
  const TypeArguments& type_args =
      TypeArguments::Handle(zone, GetTypeArguments());
  const TypeArguments& other_type_args =
      TypeArguments::Handle(zone, other.GetTypeArguments());
  return type_args.Equals(other_type_args);
25676}
25677
25678uint32_t LinkedHashBase::CanonicalizeHash() const {
25679 ASSERT(IsImmutable());
25680
25681 Thread* thread = Thread::Current();
  uint32_t hash = thread->heap()->GetCanonicalHash(ptr());
25683 if (hash != 0) {
25684 return hash;
25685 }
25686
25687 // Immutable maps and sets do not have deleted keys.
25688 ASSERT_EQUAL(RawSmiValue(deleted_keys()), 0);
25689
25690 Zone* zone = thread->zone();
  auto& member = Instance::Handle(zone, GetTypeArguments());
  hash = member.CanonicalizeHash();
  member = data();
  hash = CombineHashes(hash, member.CanonicalizeHash());
  member = used_data();
  hash = CombineHashes(hash, member.CanonicalizeHash());
  hash = FinalizeHash(hash, kHashBits);
  thread->heap()->SetCanonicalHash(ptr(), hash);
25699 return hash;
25700}
25701
25702void LinkedHashBase::CanonicalizeFieldsLocked(Thread* thread) const {
25703 ASSERT(IsImmutable());
25704
25705 Zone* zone = thread->zone();
25706
  TypeArguments& type_args = TypeArguments::Handle(zone, GetTypeArguments());
  if (!type_args.IsNull()) {
    type_args = type_args.Canonicalize(thread);
    SetTypeArguments(type_args);
  }

  auto& data_array = Array::Handle(zone, data());
  data_array.MakeImmutable();
  data_array ^= data_array.CanonicalizeLocked(thread);
  set_data(data_array);

  // The index should not be set yet. It is populated lazily on first read.
  const auto& index_td = TypedData::Handle(zone, index());
25720 ASSERT(index_td.IsNull());
25721}
25722
25723ConstMapPtr ConstMap::NewDefault(Heap::Space space) {
25724 ASSERT(IsolateGroup::Current()->object_store()->const_map_impl_class() !=
25725 Class::null());
  return static_cast<ConstMapPtr>(Map::NewDefault(kClassId, space));
25727}
25728
25729ConstMapPtr ConstMap::NewUninitialized(Heap::Space space) {
25730 ASSERT(IsolateGroup::Current()->object_store()->const_map_impl_class() !=
25731 Class::null());
  return static_cast<ConstMapPtr>(Map::NewUninitialized(kClassId, space));
25733}
25734
25735SetPtr Set::New(intptr_t class_id,
25736 const Array& data,
25737 const TypedData& index,
25738 intptr_t hash_mask,
25739 intptr_t used_data,
25740 intptr_t deleted_keys,
25741 Heap::Space space) {
25742 ASSERT(class_id == kSetCid || class_id == kConstSetCid);
25743 ASSERT(IsolateGroup::Current()->object_store()->set_impl_class() !=
25744 Class::null());
  Set& result = Set::Handle(Set::NewUninitialized(class_id, space));
25746 result.set_data(data);
25747 result.set_index(index);
25748 result.set_hash_mask(hash_mask);
25749 result.set_used_data(used_data);
25750 result.set_deleted_keys(deleted_keys);
25751 return result.ptr();
25752}
25753
25754SetPtr Set::NewDefault(intptr_t class_id, Heap::Space space) {
  const Array& data = Array::Handle(Array::New(kInitialIndexSize, space));
  const TypedData& index = TypedData::Handle(
      TypedData::New(kTypedDataUint32ArrayCid, kInitialIndexSize, space));
  // On 32-bit, the top bits are wasted to avoid Mint allocation.
  const intptr_t kAvailableBits = (kSmiBits >= 32) ? 32 : kSmiBits;
  const intptr_t kInitialHashMask =
      (1 << (kAvailableBits - kInitialIndexBits)) - 1;
  return Set::New(class_id, data, index, kInitialHashMask, 0, 0, space);
25763}
25764
25765SetPtr Set::NewUninitialized(intptr_t class_id, Heap::Space space) {
25766 ASSERT(IsolateGroup::Current()->object_store()->set_impl_class() !=
25767 Class::null());
  return Object::AllocateVariant<Set>(class_id, space);
25769}
25770
25771ConstSetPtr ConstSet::NewDefault(Heap::Space space) {
25772 ASSERT(IsolateGroup::Current()->object_store()->const_set_impl_class() !=
25773 Class::null());
  return static_cast<ConstSetPtr>(Set::NewDefault(kClassId, space));
25775}
25776
25777ConstSetPtr ConstSet::NewUninitialized(Heap::Space space) {
25778 ASSERT(IsolateGroup::Current()->object_store()->const_set_impl_class() !=
25779 Class::null());
  return static_cast<ConstSetPtr>(Set::NewUninitialized(kClassId, space));
25781}
25782
25783const char* Set::ToCString() const {
25784 Zone* zone = Thread::Current()->zone();
25785 return zone->PrintToString(
      "%s len:%" Pd, GetClassId() == kConstSetCid ? "_ConstSet" : "_Set",
25787 Length());
25788}
25789
25790const char* FutureOr::ToCString() const {
25791 // FutureOr is an abstract class.
25792 UNREACHABLE();
25793}
25794
25795Float32x4Ptr Float32x4::New(float v0,
25796 float v1,
25797 float v2,
25798 float v3,
25799 Heap::Space space) {
25800 ASSERT(IsolateGroup::Current()->object_store()->float32x4_class() !=
25801 Class::null());
  const auto& result = Float32x4::Handle(Object::Allocate<Float32x4>(space));
25803 result.set_x(v0);
25804 result.set_y(v1);
25805 result.set_z(v2);
25806 result.set_w(v3);
25807 return result.ptr();
25808}
25809
25810Float32x4Ptr Float32x4::New(simd128_value_t value, Heap::Space space) {
25811 ASSERT(IsolateGroup::Current()->object_store()->float32x4_class() !=
25812 Class::null());
  const auto& result = Float32x4::Handle(Object::Allocate<Float32x4>(space));
25814 result.set_value(value);
25815 return result.ptr();
25816}
25817
25818simd128_value_t Float32x4::value() const {
25819 return LoadUnaligned(
      reinterpret_cast<const simd128_value_t*>(&untag()->value_));
25821}
25822
25823void Float32x4::set_value(simd128_value_t value) const {
  StoreUnaligned(reinterpret_cast<simd128_value_t*>(&ptr()->untag()->value_),
25825 value);
25826}
25827
25828void Float32x4::set_x(float value) const {
  StoreNonPointer(&untag()->value_[0], value);
}

void Float32x4::set_y(float value) const {
  StoreNonPointer(&untag()->value_[1], value);
}

void Float32x4::set_z(float value) const {
  StoreNonPointer(&untag()->value_[2], value);
}

void Float32x4::set_w(float value) const {
  StoreNonPointer(&untag()->value_[3], value);
25842}
25843
25844float Float32x4::x() const {
25845 return untag()->value_[0];
25846}
25847
25848float Float32x4::y() const {
25849 return untag()->value_[1];
25850}
25851
25852float Float32x4::z() const {
25853 return untag()->value_[2];
25854}
25855
25856float Float32x4::w() const {
25857 return untag()->value_[3];
25858}
25859
25860const char* Float32x4::ToCString() const {
25861 float _x = x();
25862 float _y = y();
25863 float _z = z();
25864 float _w = w();
  return OS::SCreate(Thread::Current()->zone(), "[%f, %f, %f, %f]", _x, _y, _z,
                     _w);
25867}
25868
25869Int32x4Ptr Int32x4::New(int32_t v0,
25870 int32_t v1,
25871 int32_t v2,
25872 int32_t v3,
25873 Heap::Space space) {
25874 ASSERT(IsolateGroup::Current()->object_store()->int32x4_class() !=
25875 Class::null());
  const auto& result = Int32x4::Handle(Object::Allocate<Int32x4>(space));
25877 result.set_x(v0);
25878 result.set_y(v1);
25879 result.set_z(v2);
25880 result.set_w(v3);
25881 return result.ptr();
25882}
25883
25884Int32x4Ptr Int32x4::New(simd128_value_t value, Heap::Space space) {
25885 ASSERT(IsolateGroup::Current()->object_store()->int32x4_class() !=
25886 Class::null());
  const auto& result = Int32x4::Handle(Object::Allocate<Int32x4>(space));
25888 result.set_value(value);
25889 return result.ptr();
25890}
25891
25892void Int32x4::set_x(int32_t value) const {
  StoreNonPointer(&untag()->value_[0], value);
}

void Int32x4::set_y(int32_t value) const {
  StoreNonPointer(&untag()->value_[1], value);
}

void Int32x4::set_z(int32_t value) const {
  StoreNonPointer(&untag()->value_[2], value);
}

void Int32x4::set_w(int32_t value) const {
  StoreNonPointer(&untag()->value_[3], value);
25906}
25907
25908int32_t Int32x4::x() const {
25909 return untag()->value_[0];
25910}
25911
25912int32_t Int32x4::y() const {
25913 return untag()->value_[1];
25914}
25915
25916int32_t Int32x4::z() const {
25917 return untag()->value_[2];
25918}
25919
25920int32_t Int32x4::w() const {
25921 return untag()->value_[3];
25922}
25923
25924simd128_value_t Int32x4::value() const {
25925 return LoadUnaligned(
      reinterpret_cast<const simd128_value_t*>(&untag()->value_));
25927}
25928
25929void Int32x4::set_value(simd128_value_t value) const {
  StoreUnaligned(reinterpret_cast<simd128_value_t*>(&ptr()->untag()->value_),
25931 value);
25932}
25933
25934const char* Int32x4::ToCString() const {
25935 int32_t _x = x();
25936 int32_t _y = y();
25937 int32_t _z = z();
25938 int32_t _w = w();
  return OS::SCreate(Thread::Current()->zone(), "[%08x, %08x, %08x, %08x]", _x,
                     _y, _z, _w);
25941}
25942
25943Float64x2Ptr Float64x2::New(double value0, double value1, Heap::Space space) {
25944 ASSERT(IsolateGroup::Current()->object_store()->float64x2_class() !=
25945 Class::null());
  const auto& result = Float64x2::Handle(Object::Allocate<Float64x2>(space));
25947 result.set_x(value0);
25948 result.set_y(value1);
25949 return result.ptr();
25950}
25951
25952Float64x2Ptr Float64x2::New(simd128_value_t value, Heap::Space space) {
25953 ASSERT(IsolateGroup::Current()->object_store()->float64x2_class() !=
25954 Class::null());
  const auto& result = Float64x2::Handle(Object::Allocate<Float64x2>(space));
25956 result.set_value(value);
25957 return result.ptr();
25958}
25959
25960double Float64x2::x() const {
25961 return untag()->value_[0];
25962}
25963
25964double Float64x2::y() const {
25965 return untag()->value_[1];
25966}
25967
25968void Float64x2::set_x(double x) const {
25969 StoreNonPointer(addr: &untag()->value_[0], value: x);
25970}
25971
25972void Float64x2::set_y(double y) const {
25973 StoreNonPointer(addr: &untag()->value_[1], value: y);
25974}
25975
25976simd128_value_t Float64x2::value() const {
25977 return simd128_value_t().readFrom(v: &untag()->value_[0]);
25978}
25979
25980void Float64x2::set_value(simd128_value_t value) const {
25981 StoreSimd128(addr: &untag()->value_[0], value);
25982}
25983
25984const char* Float64x2::ToCString() const {
25985 double _x = x();
25986 double _y = y();
25987 return OS::SCreate(zone: Thread::Current()->zone(), format: "[%f, %f]", _x, _y);
25988}
25989
25990const intptr_t
25991 TypedDataBase::element_size_table[TypedDataBase::kNumElementSizes] = {
25992 1, // kTypedDataInt8ArrayCid.
25993 1, // kTypedDataUint8ArrayCid.
25994 1, // kTypedDataUint8ClampedArrayCid.
25995 2, // kTypedDataInt16ArrayCid.
25996 2, // kTypedDataUint16ArrayCid.
25997 4, // kTypedDataInt32ArrayCid.
25998 4, // kTypedDataUint32ArrayCid.
25999 8, // kTypedDataInt64ArrayCid.
26000 8, // kTypedDataUint64ArrayCid.
26001 4, // kTypedDataFloat32ArrayCid.
26002 8, // kTypedDataFloat64ArrayCid.
26003 16, // kTypedDataFloat32x4ArrayCid.
26004 16, // kTypedDataInt32x4ArrayCid.
26005        16,  // kTypedDataFloat64x2ArrayCid.
26006};
26007
26008bool TypedData::CanonicalizeEquals(const Instance& other) const {
26009 if (this->ptr() == other.ptr()) {
26010 // Both handles point to the same raw instance.
26011 return true;
26012 }
26013
26014 if (!other.IsTypedData() || other.IsNull()) {
26015 return false;
26016 }
26017
26018 const TypedData& other_typed_data = TypedData::Cast(obj: other);
26019
26020 if (this->ElementType() != other_typed_data.ElementType()) {
26021 return false;
26022 }
26023
26024 const intptr_t len = this->LengthInBytes();
26025 if (len != other_typed_data.LengthInBytes()) {
26026 return false;
26027 }
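  // Compare the payloads through raw pointers below; the NoSafepointScope
  // keeps GC from moving the underlying buffers while memcmp runs.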
26028 NoSafepointScope no_safepoint;
26029 return (len == 0) ||
26030 (memcmp(s1: DataAddr(byte_offset: 0), s2: other_typed_data.DataAddr(byte_offset: 0), n: len) == 0);
26031}
26032
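// Folds every payload byte into a running hash, so buffers that compare
// equal under CanonicalizeEquals above also hash equally.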
26033uint32_t TypedData::CanonicalizeHash() const {
26034 const intptr_t len = this->LengthInBytes();
26035 if (len == 0) {
26036 return 1;
26037 }
26038 uint32_t hash = len;
26039 for (intptr_t i = 0; i < len; i++) {
26040    hash = CombineHashes(hash: hash, other_hash: GetUint8(byte_offset: i));
26041 }
26042 return FinalizeHash(hash, hashbits: kHashBits);
26043}
26044
26045TypedDataPtr TypedData::New(intptr_t class_id,
26046 intptr_t len,
26047 Heap::Space space) {
26048 if (len < 0 || len > TypedData::MaxElements(class_id)) {
26049 FATAL("Fatal error in TypedData::New: invalid len %" Pd "\n", len);
26050 }
26051 auto raw = Object::AllocateVariant<TypedData>(
26052 class_id, space, elements: len * ElementSizeInBytes(cid: class_id));
26053 NoSafepointScope no_safepoint;
26054 raw->untag()->set_length(Smi::New(value: len));
26055 raw->untag()->RecomputeDataField();
26056 return raw;
26057}
26058
26059TypedDataPtr TypedData::Grow(const TypedData& current,
26060 intptr_t len,
26061 Heap::Space space) {
26062 ASSERT(len > current.Length());
26063 const auto& new_td =
26064 TypedData::Handle(ptr: TypedData::New(class_id: current.GetClassId(), len, space));
26065 {
26066 NoSafepointScope no_safepoint_scope;
26067 memcpy(dest: new_td.DataAddr(byte_offset: 0), src: current.DataAddr(byte_offset: 0), n: current.LengthInBytes());
26068 }
26069 return new_td.ptr();
26070}
26071
26072const char* TypedData::ToCString() const {
26073 const Class& cls = Class::Handle(ptr: clazz());
26074 return cls.ScrubbedNameCString();
26075}
26076
26077FinalizablePersistentHandle* ExternalTypedData::AddFinalizer(
26078 void* peer,
26079 Dart_HandleFinalizer callback,
26080 intptr_t external_size) const {
26081 return dart::AddFinalizer(referent: *this, peer, callback, external_size);
26082}
26083
26084ExternalTypedDataPtr ExternalTypedData::New(
26085 intptr_t class_id,
26086 uint8_t* data,
26087 intptr_t len,
26088 Heap::Space space,
26089 bool perform_eager_msan_initialization_check) {
26090 if (len < 0 || len > ExternalTypedData::MaxElements(class_id)) {
26091 FATAL("Fatal error in ExternalTypedData::New: invalid len %" Pd "\n", len);
26092 }
26093
26094 if (perform_eager_msan_initialization_check) {
26095 // Once the TypedData is created, Dart might read this memory. Check for
26096 // initialization at construction to make it easier to track the source.
26097 MSAN_CHECK_INITIALIZED(data, len);
26098 }
26099
26100 const auto& result = ExternalTypedData::Handle(
26101 ptr: Object::AllocateVariant<ExternalTypedData>(class_id, space));
26102 result.SetLength(len);
26103 result.SetData(data);
26104 return result.ptr();
26105}
26106
26107ExternalTypedDataPtr ExternalTypedData::NewFinalizeWithFree(uint8_t* data,
26108 intptr_t len) {
26109 ExternalTypedData& result = ExternalTypedData::Handle(ptr: ExternalTypedData::New(
26110 class_id: kExternalTypedDataUint8ArrayCid, data, len, space: Heap::kOld));
26111 result.AddFinalizer(
26112 peer: data, callback: [](void* isolate_callback_data, void* data) { free(ptr: data); }, external_size: len);
26113 return result.ptr();
26114}
26115
26116TypedDataViewPtr TypedDataView::New(intptr_t class_id, Heap::Space space) {
26117 return Object::AllocateVariant<TypedDataView>(class_id, space);
26118}
26119
26120TypedDataViewPtr TypedDataView::New(intptr_t class_id,
26121 const TypedDataBase& typed_data,
26122 intptr_t offset_in_bytes,
26123 intptr_t length,
26124 Heap::Space space) {
26125 auto& result = TypedDataView::Handle(ptr: TypedDataView::New(class_id, space));
26126 result.InitializeWith(typed_data, offset_in_bytes, length);
26127 return result.ptr();
26128}
26129
26130bool TypedDataBase::IsExternalOrExternalView() const {
26131 if (IsExternalTypedData()) return true;
26132 if (IsTypedDataView()) {
26133 const auto& backing =
26134 TypedDataBase::Handle(ptr: TypedDataView::Cast(obj: *this).typed_data());
26135 return backing.IsExternalTypedData();
26136 }
26137 return false;
26138}
26139
26140TypedDataViewPtr TypedDataBase::ViewFromTo(intptr_t start,
26141 intptr_t end,
26142 Heap::Space space) const {
26143 const intptr_t len = end - start;
26144 ASSERT(0 <= len);
26145 ASSERT(start < Length());
26146 ASSERT((start + len) <= Length());
26147
26148 const intptr_t cid = GetClassId();
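  // For each element type the typed data class ids are laid out contiguously
  // as internal cid, view cid, external cid; the cid +/- 1 arithmetic below
  // relies on that adjacency, as the ASSERTs verify.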
26149
26150 if (IsTypedDataView()) {
26151 const auto& view = TypedDataView::Cast(obj: *this);
26152 const auto& td = TypedDataBase::Handle(ptr: view.typed_data());
26153 const intptr_t view_offset = Smi::Value(raw_smi: view.offset_in_bytes());
26154 ASSERT(IsTypedDataViewClassId(cid));
26155    return TypedDataView::New(class_id: cid, typed_data: td,
26156                              offset_in_bytes: view_offset + start, length: len, space: Heap::kOld);
26157 } else if (IsExternalTypedData()) {
26158 ASSERT(IsExternalTypedDataClassId(cid));
26159 ASSERT(IsTypedDataViewClassId(cid - 1));
26160 return TypedDataView::New(class_id: cid - 1, typed_data: *this, offset_in_bytes: start, length: len, space: Heap::kOld);
26161 }
26162 RELEASE_ASSERT(IsTypedData());
26163  ASSERT(IsTypedDataClassId(cid));
26164 ASSERT(IsTypedDataViewClassId(cid + 1));
26165 return TypedDataView::New(class_id: cid + 1, typed_data: *this, offset_in_bytes: start, length: len, space: Heap::kOld);
26166}
26167
26168const char* TypedDataBase::ToCString() const {
26169 // There are no instances of UntaggedTypedDataBase.
26170 UNREACHABLE();
26171 return nullptr;
26172}
26173
26174const char* TypedDataView::ToCString() const {
26175 const Class& cls = Class::Handle(ptr: clazz());
26176 return cls.ScrubbedNameCString();
26177}
26178
26179const char* ExternalTypedData::ToCString() const {
26180 const Class& cls = Class::Handle(ptr: clazz());
26181 return cls.ScrubbedNameCString();
26182}
26183
26184PointerPtr Pointer::New(uword native_address, Heap::Space space) {
26185 Thread* thread = Thread::Current();
26186 Zone* zone = thread->zone();
26187
26188 TypeArguments& type_args = TypeArguments::Handle(
26189 zone, ptr: IsolateGroup::Current()->object_store()->type_argument_never());
26190
26191 const Class& cls =
26192 Class::Handle(ptr: IsolateGroup::Current()->class_table()->At(cid: kPointerCid));
26193 cls.EnsureIsAllocateFinalized(thread: Thread::Current());
26194
26195 const auto& result = Pointer::Handle(zone, ptr: Object::Allocate<Pointer>(space));
26196 result.SetTypeArguments(type_args);
26197 result.SetNativeAddress(native_address);
26198
26199 return result.ptr();
26200}
26201
26202const char* Pointer::ToCString() const {
26203 return OS::SCreate(zone: Thread::Current()->zone(), format: "Pointer: address=0x%" Px,
26204 NativeAddress());
26205}
26206
26207DynamicLibraryPtr DynamicLibrary::New(void* handle,
26208 bool canBeClosed,
26209 Heap::Space space) {
26210 const auto& result =
26211 DynamicLibrary::Handle(ptr: Object::Allocate<DynamicLibrary>(space));
26212 ASSERT_EQUAL(result.IsClosed(), false);
26213 result.SetHandle(handle);
26214 result.SetCanBeClosed(canBeClosed);
26215 return result.ptr();
26216}
26217
26218bool Pointer::IsPointer(const Instance& obj) {
26219 return IsFfiPointerClassId(index: obj.ptr()->GetClassId());
26220}
26221
26222bool Instance::IsPointer() const {
26223 return Pointer::IsPointer(obj: *this);
26224}
26225
26226const char* DynamicLibrary::ToCString() const {
26227 return OS::SCreate(zone: Thread::Current()->zone(), format: "DynamicLibrary: handle=0x%" Px,
26228 reinterpret_cast<uintptr_t>(GetHandle()));
26229}
26230
26231CapabilityPtr Capability::New(uint64_t id, Heap::Space space) {
26232 const auto& result = Capability::Handle(ptr: Object::Allocate<Capability>(space));
26233 result.StoreNonPointer(addr: &result.untag()->id_, value: id);
26234 return result.ptr();
26235}
26236
26237const char* Capability::ToCString() const {
26238 return "Capability";
26239}
26240
26241ReceivePortPtr ReceivePort::New(Dart_Port id,
26242 const String& debug_name,
26243 bool is_control_port,
26244 Heap::Space space) {
26245 ASSERT(id != ILLEGAL_PORT);
26246 Thread* thread = Thread::Current();
26247 Zone* zone = thread->zone();
26248 const SendPort& send_port =
26249 SendPort::Handle(zone, ptr: SendPort::New(id, origin_id: thread->isolate()->origin_id()));
26250#if !defined(PRODUCT)
26251 const StackTrace& allocation_location_ =
26252 HasStack() ? GetCurrentStackTrace(skip_frames: 0) : StackTrace::Handle();
26253#endif // !defined(PRODUCT)
26254
26255 const auto& result =
26256 ReceivePort::Handle(zone, ptr: Object::Allocate<ReceivePort>(space));
26257 result.untag()->set_send_port(send_port.ptr());
26258#if !defined(PRODUCT)
26259 result.untag()->set_debug_name(debug_name.ptr());
26260 result.untag()->set_allocation_location(allocation_location_.ptr());
26261#endif // !defined(PRODUCT)
26262 PortMap::SetPortState(
26263 id, kind: is_control_port ? PortMap::kControlPort : PortMap::kLivePort);
26264 return result.ptr();
26265}
26266
26267const char* ReceivePort::ToCString() const {
26268 return "ReceivePort";
26269}
26270
26271SendPortPtr SendPort::New(Dart_Port id, Heap::Space space) {
26272 return New(id, ILLEGAL_PORT, space);
26273}
26274
26275SendPortPtr SendPort::New(Dart_Port id,
26276 Dart_Port origin_id,
26277 Heap::Space space) {
26278 ASSERT(id != ILLEGAL_PORT);
26279 const auto& result = SendPort::Handle(ptr: Object::Allocate<SendPort>(space));
26280 result.StoreNonPointer(addr: &result.untag()->id_, value: id);
26281 result.StoreNonPointer(addr: &result.untag()->origin_id_, value: origin_id);
26282 return result.ptr();
26283}
26284
26285const char* SendPort::ToCString() const {
26286 return "SendPort";
26287}
26288
26289static void TransferableTypedDataFinalizer(void* isolate_callback_data,
26290 void* peer) {
26291 delete (reinterpret_cast<TransferableTypedDataPeer*>(peer));
26292}
26293
26294TransferableTypedDataPtr TransferableTypedData::New(uint8_t* data,
26295 intptr_t length) {
26296 auto* const peer = new TransferableTypedDataPeer(data, length);
26297
26298 Thread* thread = Thread::Current();
26299 const auto& result =
26300 TransferableTypedData::Handle(ptr: Object::Allocate<TransferableTypedData>(
26301 space: thread->heap()->SpaceForExternal(size: length)));
26302 thread->heap()->SetPeer(raw_obj: result.ptr(), peer);
26303
26304 // Set up finalizer so it frees allocated memory if handle is
26305 // garbage-collected.
26306 FinalizablePersistentHandle* finalizable_ref =
26307 FinalizablePersistentHandle::New(isolate_group: thread->isolate_group(), object: result, peer,
26308 callback: &TransferableTypedDataFinalizer, external_size: length,
26309 /*auto_delete=*/true);
26310 ASSERT(finalizable_ref != nullptr);
26311 peer->set_handle(finalizable_ref);
26312
26313 return result.ptr();
26314}
26315
26316const char* TransferableTypedData::ToCString() const {
26317 return "TransferableTypedData";
26318}
26319
26320bool Closure::CanonicalizeEquals(const Instance& other) const {
26321 if (!other.IsClosure()) return false;
26322
26323 const Closure& other_closure = Closure::Cast(obj: other);
26324 return (instantiator_type_arguments() ==
26325 other_closure.instantiator_type_arguments()) &&
26326 (function_type_arguments() ==
26327 other_closure.function_type_arguments()) &&
26328 (delayed_type_arguments() == other_closure.delayed_type_arguments()) &&
26329 (function() == other_closure.function()) &&
26330 (context() == other_closure.context());
26331}
26332
26333void Closure::CanonicalizeFieldsLocked(Thread* thread) const {
26334 TypeArguments& type_args = TypeArguments::Handle();
26335 type_args = instantiator_type_arguments();
26336 if (!type_args.IsNull()) {
26337 type_args = type_args.Canonicalize(thread);
26338 set_instantiator_type_arguments(type_args);
26339 }
26340 type_args = function_type_arguments();
26341 if (!type_args.IsNull()) {
26342 type_args = type_args.Canonicalize(thread);
26343 set_function_type_arguments(type_args);
26344 }
26345 type_args = delayed_type_arguments();
26346 if (!type_args.IsNull()) {
26347 type_args = type_args.Canonicalize(thread);
26348 set_delayed_type_arguments(type_args);
26349 }
26350 // Ignore function, context, hash.
26351}
26352
26353const char* Closure::ToCString() const {
26354 auto const thread = Thread::Current();
26355 auto const zone = thread->zone();
26356 ZoneTextBuffer buffer(zone);
26357 buffer.AddString(s: "Closure: ");
26358 const Function& fun = Function::Handle(zone, ptr: function());
26359 const FunctionType& sig =
26360 FunctionType::Handle(zone, ptr: GetInstantiatedSignature(zone));
26361 sig.Print(name_visibility: kUserVisibleName, printer: &buffer);
26362 if (fun.IsImplicitClosureFunction()) {
26363 buffer.Printf(format: " from %s", fun.ToCString());
26364 }
26365 return buffer.buffer();
26366}
26367
26368uword Closure::ComputeHash() const {
26369 Thread* thread = Thread::Current();
26370 DEBUG_ASSERT(thread->TopErrorHandlerIsExitFrame());
26371 Zone* zone = thread->zone();
26372 const Function& func = Function::Handle(zone, ptr: function());
26373 uint32_t result = 0;
26374 if (func.IsImplicitClosureFunction() || func.IsGeneric()) {
26375 // Combine function's hash code, delayed type arguments hash code
26376 // (if generic), and identityHashCode of cached receiver (if implicit
26377 // instance closure).
26378 result = static_cast<uint32_t>(func.Hash());
26379 if (func.IsGeneric()) {
26380 const TypeArguments& delayed_type_args =
26381 TypeArguments::Handle(zone, ptr: delayed_type_arguments());
26382 result = CombineHashes(hash: result, other_hash: delayed_type_args.Hash());
26383 }
26384 if (func.IsImplicitInstanceClosureFunction()) {
26385 const Context& context = Context::Handle(zone, ptr: this->context());
26386 const Instance& receiver =
26387 Instance::Handle(zone, ptr: Instance::RawCast(raw: context.At(context_index: 0)));
26388 const Integer& receiverHash =
26389 Integer::Handle(zone, ptr: receiver.IdentityHashCode(thread));
26390 result = CombineHashes(hash: result, other_hash: receiverHash.AsTruncatedUint32Value());
26391 }
26392 } else {
26393 // Non-implicit closures of non-generic functions are unique,
26394 // so identityHashCode of closure object is good enough.
26395 const Integer& identityHash =
26396 Integer::Handle(zone, ptr: this->IdentityHashCode(thread));
26397 result = identityHash.AsTruncatedUint32Value();
26398 }
26399 return FinalizeHash(hash: result, hashbits: String::kHashBits);
26400}
26401
26402ClosurePtr Closure::New(const TypeArguments& instantiator_type_arguments,
26403 const TypeArguments& function_type_arguments,
26404 const Function& function,
26405 const Context& context,
26406 Heap::Space space) {
26407 // We store null delayed type arguments, not empty ones, in closures with
26408 // non-generic functions a) to make method extraction slightly faster and
26409 // b) to make the Closure::IsGeneric check fast.
26410 // Keep in sync with StubCodeCompiler::GenerateBuildMethodExtractorStub.
26411 return Closure::New(instantiator_type_arguments, function_type_arguments,
26412 delayed_type_arguments: function.IsGeneric() ? Object::empty_type_arguments()
26413 : Object::null_type_arguments(),
26414 function, context, space);
26415}
26416
26417ClosurePtr Closure::New(const TypeArguments& instantiator_type_arguments,
26418 const TypeArguments& function_type_arguments,
26419 const TypeArguments& delayed_type_arguments,
26420 const Function& function,
26421 const Context& context,
26422 Heap::Space space) {
26423 ASSERT(instantiator_type_arguments.IsCanonical());
26424 ASSERT(function_type_arguments.IsCanonical());
26425 ASSERT(delayed_type_arguments.IsCanonical());
26426 ASSERT(FunctionType::Handle(function.signature()).IsCanonical());
26427 const auto& result = Closure::Handle(ptr: Object::Allocate<Closure>(space));
26428 result.untag()->set_instantiator_type_arguments(
26429 instantiator_type_arguments.ptr());
26430 result.untag()->set_function_type_arguments(function_type_arguments.ptr());
26431 result.untag()->set_delayed_type_arguments(delayed_type_arguments.ptr());
26432 result.untag()->set_function(function.ptr());
26433 result.untag()->set_context(context.ptr());
26434#if defined(DART_PRECOMPILED_RUNTIME)
26435 result.set_entry_point(function.entry_point());
26436#endif
26437 return result.ptr();
26438}
26439
26440FunctionTypePtr Closure::GetInstantiatedSignature(Zone* zone) const {
26441 const Function& fun = Function::Handle(zone, ptr: function());
26442 FunctionType& sig = FunctionType::Handle(zone, ptr: fun.signature());
26443 TypeArguments& fn_type_args =
26444 TypeArguments::Handle(zone, ptr: function_type_arguments());
26445 const TypeArguments& delayed_type_args =
26446 TypeArguments::Handle(zone, ptr: delayed_type_arguments());
26447 const TypeArguments& inst_type_args =
26448 TypeArguments::Handle(zone, ptr: instantiator_type_arguments());
26449
26450 // We detect the case of a partial tearoff type application and substitute the
26451 // type arguments for the type parameters of the function.
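  // For example, a partial instantiation `f<int>` of a generic function `f`
  // yields a closure that is itself no longer generic, but whose signature
  // still refers to f's type parameters; those get substituted with the
  // delayed type arguments (<int>) below.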
26452 intptr_t num_free_params;
26453 if (!IsGeneric() && fun.IsGeneric()) {
26454 num_free_params = kCurrentAndEnclosingFree;
26455 fn_type_args = delayed_type_args.Prepend(
26456 zone, other: fn_type_args, other_length: sig.NumParentTypeArguments(),
26457 total_length: sig.NumTypeParameters() + sig.NumParentTypeArguments());
26458 } else {
26459 num_free_params = kAllFree;
26460 }
26461 if (num_free_params == kCurrentAndEnclosingFree || !sig.IsInstantiated()) {
26462 sig ^= sig.InstantiateFrom(instantiator_type_arguments: inst_type_args, function_type_arguments: fn_type_args, num_free_fun_type_params: num_free_params,
26463 space: Heap::kOld);
26464 }
26465 return sig.ptr();
26466}
26467
26468bool StackTrace::skip_sync_start_in_parent_stack() const {
26469 return untag()->skip_sync_start_in_parent_stack;
26470}
26471
26472void StackTrace::set_skip_sync_start_in_parent_stack(bool value) const {
26473 StoreNonPointer(addr: &untag()->skip_sync_start_in_parent_stack, value);
26474}
26475
26476intptr_t StackTrace::Length() const {
26477 const Array& code_array = Array::Handle(ptr: untag()->code_array());
26478 return code_array.Length();
26479}
26480
26481ObjectPtr StackTrace::CodeAtFrame(intptr_t frame_index) const {
26482 const Array& code_array = Array::Handle(ptr: untag()->code_array());
26483 return code_array.At(index: frame_index);
26484}
26485
26486void StackTrace::SetCodeAtFrame(intptr_t frame_index,
26487 const Object& code) const {
26488 const Array& code_array = Array::Handle(ptr: untag()->code_array());
26489 code_array.SetAt(index: frame_index, value: code);
26490}
26491
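// The pc_offset_array is word-element typed data holding one offset per
// frame, hence the byte offsets below are scaled by kWordSize.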
26492uword StackTrace::PcOffsetAtFrame(intptr_t frame_index) const {
26493 const TypedData& pc_offset_array =
26494 TypedData::Handle(ptr: untag()->pc_offset_array());
26495 return pc_offset_array.GetUintPtr(byte_offset: frame_index * kWordSize);
26496}
26497
26498void StackTrace::SetPcOffsetAtFrame(intptr_t frame_index,
26499 uword pc_offset) const {
26500 const TypedData& pc_offset_array =
26501 TypedData::Handle(ptr: untag()->pc_offset_array());
26502 pc_offset_array.SetUintPtr(byte_offset: frame_index * kWordSize, value: pc_offset);
26503}
26504
26505void StackTrace::set_async_link(const StackTrace& async_link) const {
26506 untag()->set_async_link(async_link.ptr());
26507}
26508
26509void StackTrace::set_code_array(const Array& code_array) const {
26510 untag()->set_code_array(code_array.ptr());
26511}
26512
26513void StackTrace::set_pc_offset_array(const TypedData& pc_offset_array) const {
26514 untag()->set_pc_offset_array(pc_offset_array.ptr());
26515}
26516
26517void StackTrace::set_expand_inlined(bool value) const {
26518 StoreNonPointer(addr: &untag()->expand_inlined_, value);
26519}
26520
26521bool StackTrace::expand_inlined() const {
26522 return untag()->expand_inlined_;
26523}
26524
26525StackTracePtr StackTrace::New(const Array& code_array,
26526 const TypedData& pc_offset_array,
26527 Heap::Space space) {
26528 const auto& result = StackTrace::Handle(ptr: Object::Allocate<StackTrace>(space));
26529 result.set_code_array(code_array);
26530 result.set_pc_offset_array(pc_offset_array);
26531 result.set_expand_inlined(true); // default.
26532 ASSERT_EQUAL(result.skip_sync_start_in_parent_stack(), false);
26533 return result.ptr();
26534}
26535
26536StackTracePtr StackTrace::New(const Array& code_array,
26537 const TypedData& pc_offset_array,
26538 const StackTrace& async_link,
26539 bool skip_sync_start_in_parent_stack,
26540 Heap::Space space) {
26541 const auto& result = StackTrace::Handle(ptr: Object::Allocate<StackTrace>(space));
26542 result.set_async_link(async_link);
26543 result.set_code_array(code_array);
26544 result.set_pc_offset_array(pc_offset_array);
26545 result.set_expand_inlined(true); // default.
26546 result.set_skip_sync_start_in_parent_stack(skip_sync_start_in_parent_stack);
26547 return result.ptr();
26548}
26549
26550#if defined(DART_PRECOMPILED_RUNTIME)
26551// Prints the best representation(s) for the call address.
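// For an address inside the isolate image this produces output like
// " virt 2a373 _kDartIsolateSnapshotInstructions+0x1f73" (hypothetical
// values; the "virt" entry is only emitted for images compiled to ELF).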
26552static void PrintNonSymbolicStackFrameBody(BaseTextBuffer* buffer,
26553 uword call_addr,
26554 uword isolate_instructions,
26555 uword vm_instructions) {
26556 const Image vm_image(reinterpret_cast<const void*>(vm_instructions));
26557 const Image isolate_image(
26558 reinterpret_cast<const void*>(isolate_instructions));
26559
26560 if (isolate_image.contains(call_addr)) {
26561 auto const symbol_name = kIsolateSnapshotInstructionsAsmSymbol;
26562 auto const offset = call_addr - isolate_instructions;
26563 // Only print the relocated address of the call when we know the saved
26564 // debugging information (if any) will have the same relocated address.
26565 if (isolate_image.compiled_to_elf()) {
26566 const uword relocated_section_start =
26567 isolate_image.instructions_relocated_address();
26568 buffer->Printf(" virt %" Pp "", relocated_section_start + offset);
26569 }
26570 buffer->Printf(" %s+0x%" Px "", symbol_name, offset);
26571 } else if (vm_image.contains(call_addr)) {
26572 auto const offset = call_addr - vm_instructions;
26573 // We currently don't print 'virt' entries for vm addresses, even if
26574 // they were compiled to ELF, as we should never encounter these in
26575 // non-symbolic stack traces (since stub addresses are stripped).
26576 //
26577 // In case they leak due to code issues elsewhere, we still print them as
26578 // <vm symbol>+<offset>, just to distinguish from other cases.
26579 buffer->Printf(" %s+0x%" Px "", kVmSnapshotInstructionsAsmSymbol, offset);
26580 } else {
26581 // This case should never happen, since these are not addresses within the
26582 // VM or app isolate instructions sections, so make it easy to notice.
26583 buffer->Printf(" <invalid Dart instruction address>");
26584 }
26585 buffer->Printf("\n");
26586}
26587#endif
26588
26589static void PrintSymbolicStackFrameIndex(BaseTextBuffer* buffer,
26590 intptr_t frame_index) {
26591 buffer->Printf(format: "#%-6" Pd "", frame_index);
26592}
26593
26594static void PrintSymbolicStackFrameBody(BaseTextBuffer* buffer,
26595 const char* function_name,
26596 const char* url,
26597 intptr_t line = -1,
26598 intptr_t column = -1) {
26599 buffer->Printf(format: " %s (%s", function_name, url);
26600 if (line >= 0) {
26601 buffer->Printf(format: ":%" Pd "", line);
26602 if (column >= 0) {
26603 buffer->Printf(format: ":%" Pd "", column);
26604 }
26605 }
26606 buffer->Printf(format: ")\n");
26607}
26608
26609static void PrintSymbolicStackFrame(Zone* zone,
26610 BaseTextBuffer* buffer,
26611 const Function& function,
26612 TokenPosition token_pos_or_line,
26613 intptr_t frame_index,
26614 bool is_line = false) {
26615 ASSERT(!function.IsNull());
26616 const auto& script = Script::Handle(zone, ptr: function.script());
26617 const char* function_name = function.QualifiedUserVisibleNameCString();
26618 const char* url = script.IsNull()
26619 ? "Kernel"
26620 : String::Handle(zone, ptr: script.url()).ToCString();
26621
26622 // If the URI starts with "data:application/dart;" this is a URI encoded
26623 // script so we shouldn't print the entire URI because it could be very long.
26624 if (strstr(s1: url, s2: "data:application/dart;") == url) {
26625 url = "<data:application/dart>";
26626 }
26627
26628 intptr_t line = -1;
26629 intptr_t column = -1;
26630 if (is_line) {
26631 ASSERT(token_pos_or_line.IsNoSource() || token_pos_or_line.IsReal());
26632 if (token_pos_or_line.IsReal()) {
26633 line = token_pos_or_line.Pos();
26634 }
26635 } else {
26636 ASSERT(!script.IsNull());
26637 script.GetTokenLocation(token_pos: token_pos_or_line, line: &line, column: &column);
26638 }
26639 PrintSymbolicStackFrameIndex(buffer, frame_index);
26640 PrintSymbolicStackFrameBody(buffer, function_name, url, line, column);
26641}
26642
26643static bool IsVisibleAsFutureListener(const Function& function) {
26644 if (function.is_visible()) {
26645 return true;
26646 }
26647
26648 if (function.IsImplicitClosureFunction()) {
26649 return function.parent_function() == Function::null() ||
26650 Function::is_visible(f: function.parent_function());
26651 }
26652
26653 return false;
26654}
26655
26656const char* StackTrace::ToCString() const {
26657 auto const T = Thread::Current();
26658 auto const zone = T->zone();
26659 auto& stack_trace = StackTrace::Handle(zone, ptr: this->ptr());
26660 auto& owner = Object::Handle(zone);
26661 auto& function = Function::Handle(zone);
26662 auto& code_object = Object::Handle(zone);
26663 auto& code = Code::Handle(zone);
26664
26665 NoSafepointScope no_allocation;
26666 GrowableArray<const Function*> inlined_functions;
26667 GrowableArray<TokenPosition> inlined_token_positions;
26668
26669#if defined(DART_PRECOMPILED_RUNTIME)
26670 GrowableArray<void*> addresses(10);
26671 const bool have_footnote_callback =
26672 FLAG_dwarf_stack_traces_mode &&
26673 Dart::dwarf_stacktrace_footnote_callback() != nullptr;
26674#endif
26675
26676 ZoneTextBuffer buffer(zone, 1024);
26677
26678#if defined(DART_PRECOMPILED_RUNTIME)
26679 auto const isolate_instructions = reinterpret_cast<uword>(
26680 T->isolate_group()->source()->snapshot_instructions);
26681 auto const vm_instructions = reinterpret_cast<uword>(
26682 Dart::vm_isolate_group()->source()->snapshot_instructions);
26683 if (FLAG_dwarf_stack_traces_mode) {
26684 const Image isolate_instructions_image(
26685 reinterpret_cast<const void*>(isolate_instructions));
26686 const Image vm_instructions_image(
26687 reinterpret_cast<const void*>(vm_instructions));
26688 auto const isolate_relocated_address =
26689 isolate_instructions_image.instructions_relocated_address();
26690 auto const vm_relocated_address =
26691 vm_instructions_image.instructions_relocated_address();
26692 // This prologue imitates Android's debuggerd to make it possible to paste
26693 // the stack trace into ndk-stack.
26694 buffer.Printf(
26695 "*** *** *** *** *** *** *** *** *** *** *** *** *** *** *** ***\n");
26696 OSThread* thread = OSThread::Current();
26697 buffer.Printf("pid: %" Pd ", tid: %" Pd ", name %s\n", OS::ProcessId(),
26698 OSThread::ThreadIdToIntPtr(thread->id()), thread->name());
26699#if defined(DART_COMPRESSED_POINTERS)
26700 const char kCompressedPointers[] = "yes";
26701#else
26702 const char kCompressedPointers[] = "no";
26703#endif
26704#if defined(USING_SIMULATOR)
26705 const char kUsingSimulator[] = "yes";
26706#else
26707 const char kUsingSimulator[] = "no";
26708#endif
26709 buffer.Printf("os: %s arch: %s comp: %s sim: %s\n",
26710 kHostOperatingSystemName, kTargetArchitectureName,
26711 kCompressedPointers, kUsingSimulator);
26712 const OS::BuildId& build_id =
26713 OS::GetAppBuildId(T->isolate_group()->source()->snapshot_instructions);
26714 if (build_id.data != nullptr) {
26715 ASSERT(build_id.len > 0);
26716 buffer.Printf("build_id: '");
26717 for (intptr_t i = 0; i < build_id.len; i++) {
26718 buffer.Printf("%2.2x", build_id.data[i]);
26719 }
26720 buffer.Printf("'\n");
26721 }
26722 // Print the dso_base of the VM and isolate_instructions. We print both here
26723 // as the VM and isolate may be loaded from different snapshot images.
26724 buffer.Printf("isolate_dso_base: %" Px "",
26725 isolate_instructions - isolate_relocated_address);
26726 buffer.Printf(", vm_dso_base: %" Px "\n",
26727 vm_instructions - vm_relocated_address);
26728 buffer.Printf("isolate_instructions: %" Px "", isolate_instructions);
26729 buffer.Printf(", vm_instructions: %" Px "\n", vm_instructions);
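    // With hypothetical values, the prologue printed above reads:
    //   pid: 12345, tid: 67890, name main.dart
    //   os: linux arch: x64 comp: no sim: no
    //   build_id: 'f9a2b1c4'
    //   isolate_dso_base: 7f4e20000000, vm_dso_base: 7f4e1fe00000
    //   isolate_instructions: 7f4e20040000, vm_instructions: 7f4e1fe40000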
26730 }
26731#endif
26732
26733 // Iterate through the stack frames and create C string description
26734 // for each frame.
26735 intptr_t frame_index = 0;
26736 uint32_t frame_skip = 0;
26737 // If we're already in a gap, don't print multiple gap markers.
26738 bool in_gap = false;
26739 do {
26740 for (intptr_t i = frame_skip; i < stack_trace.Length(); i++) {
26741 code_object = stack_trace.CodeAtFrame(frame_index: i);
26742 if (code_object.IsNull()) {
26743 // Check for a null function, which indicates a gap in a StackOverflow
26744 // or OutOfMemory trace.
26745 if ((i < (stack_trace.Length() - 1)) &&
26746 (stack_trace.CodeAtFrame(frame_index: i + 1) != Code::null())) {
26747 buffer.AddString(s: "...\n...\n");
26748 // To account for gap frames.
26749 frame_index += stack_trace.PcOffsetAtFrame(frame_index: i);
26750 }
26751 continue;
26752 }
26753
26754 if (code_object.ptr() == StubCode::AsynchronousGapMarker().ptr()) {
26755 if (!in_gap) {
26756 buffer.AddString(s: "<asynchronous suspension>\n");
26757 }
26758 in_gap = true;
26759 continue;
26760 }
26761
26762 const uword pc_offset = stack_trace.PcOffsetAtFrame(frame_index: i);
26763 ASSERT(code_object.IsCode());
26764 code ^= code_object.ptr();
26765 ASSERT(code.IsFunctionCode());
26766 owner = code.owner();
26767 if (owner.IsFunction()) {
26768 function ^= owner.ptr();
26769 } else {
26770 function = Function::null();
26771 }
26772 const uword pc = code.PayloadStart() + pc_offset;
26773
26774 const bool is_future_listener =
26775 pc_offset == StackTraceUtils::kFutureListenerPcOffset;
26776
26777 // A visible frame ends any gap we might be in.
26778 in_gap = false;
26779
26780#if defined(DART_PRECOMPILED_RUNTIME)
26781 // When printing non-symbolic frames, we normally print call
26782 // addresses, not return addresses, by subtracting one from the PC to
26783 // get an address within the preceding instruction.
26784 //
26785 // The one exception is a normal closure registered as a listener on a
26786 // future. In this case, the returned pc_offset will be pointing to the
26787      // entry point of the function, which will be invoked when the future
26788      // completes. To make things more uniform, the stack unwinding code
26789      // offsets pc_offset by 1 in such cases.
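      // (For example, a return address of 0x1004 is reported as call address
      // 0x1003, which still lies inside the preceding call instruction.)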
26790 const uword call_addr = pc - 1;
26791
26792 if (FLAG_dwarf_stack_traces_mode) {
26793 if (have_footnote_callback) {
26794 addresses.Add(reinterpret_cast<void*>(call_addr));
26795 }
26796
26797 // This output is formatted like Android's debuggerd. Note debuggerd
26798 // prints call addresses instead of return addresses.
26799 buffer.Printf(" #%02" Pd " abs %" Pp "", frame_index, call_addr);
26800 PrintNonSymbolicStackFrameBody(&buffer, call_addr, isolate_instructions,
26801 vm_instructions);
26802 frame_index++;
26803 continue;
26804 }
26805
26806 if (function.IsNull()) {
26807 in_gap = false;
26808 // We can't print the symbolic information since the owner was not
26809 // retained, so instead print the static symbol + offset like the
26810 // non-symbolic stack traces.
26811 PrintSymbolicStackFrameIndex(&buffer, frame_index);
26812 PrintNonSymbolicStackFrameBody(&buffer, call_addr, isolate_instructions,
26813 vm_instructions);
26814 frame_index++;
26815 continue;
26816 }
26817#endif
26818
26819 if (code.is_optimized() && stack_trace.expand_inlined() &&
26820 (FLAG_precompiled_mode || !is_future_listener)) {
26821 // Note: In AOT mode EmitFunctionEntrySourcePositionDescriptorIfNeeded
26822 // will take care of emitting a descriptor that would allow us to
26823 // symbolize stack frame with 0 offset.
26824 code.GetInlinedFunctionsAtReturnAddress(
26825 pc_offset: is_future_listener ? 0 : pc_offset, functions: &inlined_functions,
26826 token_positions: &inlined_token_positions);
26827 ASSERT(inlined_functions.length() >= 1);
26828 for (intptr_t j = inlined_functions.length() - 1; j >= 0; j--) {
26829 function = inlined_functions[j]->ptr();
26830 auto const pos = inlined_token_positions[j];
26831 if (is_future_listener && function.IsImplicitClosureFunction()) {
26832 function = function.parent_function();
26833 }
26834 if (FLAG_show_invisible_frames || function.is_visible()) {
26835 PrintSymbolicStackFrame(zone, buffer: &buffer, function, token_pos_or_line: pos, frame_index,
26836 /*is_line=*/FLAG_precompiled_mode);
26837 frame_index++;
26838 }
26839 }
26840 continue;
26841 }
26842
26843 if (FLAG_show_invisible_frames || function.is_visible() ||
26844 (is_future_listener && IsVisibleAsFutureListener(function))) {
26845 auto const pos = is_future_listener ? function.token_pos()
26846 : code.GetTokenIndexOfPC(pc);
26847 PrintSymbolicStackFrame(zone, buffer: &buffer, function, token_pos_or_line: pos, frame_index);
26848 frame_index++;
26849 }
26850 }
26851
26852 // Follow the link.
26853 frame_skip = stack_trace.skip_sync_start_in_parent_stack()
26854 ? StackTrace::kSyncAsyncCroppedFrames
26855 : 0;
26856 stack_trace = stack_trace.async_link();
26857 } while (!stack_trace.IsNull());
26858
26859#if defined(DART_PRECOMPILED_RUNTIME)
26860 if (have_footnote_callback) {
26861 char* footnote = Dart::dwarf_stacktrace_footnote_callback()(
26862 &addresses[0], addresses.length());
26863 if (footnote != nullptr) {
26864 buffer.AddString(footnote);
26865 free(footnote);
26866 }
26867 }
26868#endif
26869
26870 return buffer.buffer();
26871}
26872
26873static void DwarfStackTracesHandler(bool value) {
26874 FLAG_dwarf_stack_traces_mode = value;
26875
26876#if defined(PRODUCT)
26877 // We can safely remove function objects in precompiled snapshots if the
26878 // runtime will generate DWARF stack traces and we don't have runtime
26879 // debugging options like the observatory available.
26880 if (value) {
26881 FLAG_retain_function_objects = false;
26882 FLAG_retain_code_objects = false;
26883 }
26884#endif
26885}
26886
26887DEFINE_FLAG_HANDLER(DwarfStackTracesHandler,
26888 dwarf_stack_traces,
26889 "Omit CodeSourceMaps in precompiled snapshots and don't "
26890 "symbolize stack traces in the precompiled runtime.");
26891
26892SuspendStatePtr SuspendState::New(intptr_t frame_size,
26893 const Instance& function_data,
26894 Heap::Space space) {
26895 ASSERT(frame_size >= 0);
26896 const intptr_t num_elements = frame_size + SuspendState::FrameSizeGrowthGap();
26897#if !defined(DART_PRECOMPILED_RUNTIME)
26898 // Include heap object alignment overhead into the frame capacity.
26899 const intptr_t instance_size = SuspendState::InstanceSize(frame_capacity: num_elements);
26900 const intptr_t frame_capacity =
26901 instance_size - SuspendState::payload_offset();
26902 ASSERT(SuspendState::InstanceSize(frame_capacity) == instance_size);
26903 ASSERT(frame_size <= frame_capacity);
26904#endif
26905 auto raw = Object::Allocate<SuspendState>(space, elements: num_elements);
26906 NoSafepointScope no_safepoint;
26907 ASSERT_EQUAL(raw->untag()->pc_, 0);
26908#if !defined(DART_PRECOMPILED_RUNTIME)
26909 raw->untag()->frame_capacity_ = frame_capacity;
26910#endif
26911 raw->untag()->frame_size_ = frame_size;
26912 raw->untag()->set_function_data(function_data.ptr());
26913 return raw;
26914}
26915
26916SuspendStatePtr SuspendState::Clone(Thread* thread,
26917 const SuspendState& src,
26918 Heap::Space space) {
26919 ASSERT(src.pc() != 0);
26920 Zone* zone = thread->zone();
26921 const intptr_t frame_size = src.frame_size();
26922 const SuspendState& dst = SuspendState::Handle(
26923 zone,
26924 ptr: SuspendState::New(frame_size, function_data: Instance::Handle(zone, ptr: src.function_data()),
26925 space));
26926 dst.set_then_callback(Closure::Handle(zone, ptr: src.then_callback()));
26927 dst.set_error_callback(Closure::Handle(zone, ptr: src.error_callback()));
26928 {
26929 NoSafepointScope no_safepoint;
26930 memmove(dest: dst.payload(), src: src.payload(), n: frame_size);
26931 // Update value of :suspend_state variable in the copied frame.
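    // The memmove above copied the slot verbatim, so it still refers to the
    // source SuspendState; patch it to point at the new copy.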
26932 const uword fp = reinterpret_cast<uword>(dst.payload() + frame_size);
26933 *reinterpret_cast<ObjectPtr*>(
26934 LocalVarAddress(fp, index: runtime_frame_layout.FrameSlotForVariableIndex(
26935 index: kSuspendStateVarIndex))) = dst.ptr();
26936 dst.set_pc(src.pc());
26937 // Trigger write barrier if needed.
26938 if (dst.ptr()->IsOldObject()) {
26939 if (!dst.untag()->IsRemembered()) {
26940 dst.untag()->EnsureInRememberedSet(thread);
26941 }
26942 if (thread->is_marking()) {
26943 thread->DeferredMarkingStackAddObject(obj: dst.ptr());
26944 }
26945 }
26946 }
26947 return dst.ptr();
26948}
26949
26950#if !defined(DART_PRECOMPILED_RUNTIME)
26951void SuspendState::set_frame_capacity(intptr_t frame_capacity) const {
26952  ASSERT(frame_capacity >= 0);
26953  StoreNonPointer(addr: &untag()->frame_capacity_, value: frame_capacity);
26954}
26955#endif
26956
26957void SuspendState::set_frame_size(intptr_t frame_size) const {
26958 ASSERT(frame_size >= 0);
26959 StoreNonPointer(addr: &untag()->frame_size_, value: frame_size);
26960}
26961
26962void SuspendState::set_pc(uword pc) const {
26963 StoreNonPointer(addr: &untag()->pc_, value: pc);
26964}
26965
26966void SuspendState::set_function_data(const Instance& function_data) const {
26967 untag()->set_function_data(function_data.ptr());
26968}
26969
26970void SuspendState::set_then_callback(const Closure& then_callback) const {
26971 untag()->set_then_callback(then_callback.ptr());
26972}
26973
26974void SuspendState::set_error_callback(const Closure& error_callback) const {
26975 untag()->set_error_callback(error_callback.ptr());
26976}
26977
26978const char* SuspendState::ToCString() const {
26979 return "SuspendState";
26980}
26981
26982CodePtr SuspendState::GetCodeObject() const {
26983 ASSERT(pc() != 0);
26984#if defined(DART_PRECOMPILED_RUNTIME)
26985 NoSafepointScope no_safepoint;
26986 CodePtr code = ReversePc::Lookup(IsolateGroup::Current(), pc(),
26987 /*is_return_address=*/true);
26988 ASSERT(code != Code::null());
26989 return code;
26990#else
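  // In JIT mode, read the Code object directly out of the copied frame: it
  // lives in the frame slot (code_from_fp) where the function's entry
  // sequence saved it.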
26991 ObjectPtr code = *(reinterpret_cast<ObjectPtr*>(
26992 untag()->payload() + untag()->frame_size_ +
26993 runtime_frame_layout.code_from_fp * kWordSize));
26994 return Code::RawCast(raw: code);
26995#endif // defined(DART_PRECOMPILED_RUNTIME)
26996}
26997
26998void RegExp::set_pattern(const String& pattern) const {
26999 untag()->set_pattern(pattern.ptr());
27000}
27001
27002void RegExp::set_function(intptr_t cid,
27003 bool sticky,
27004 const Function& value) const {
27005 if (sticky) {
27006 switch (cid) {
27007 case kOneByteStringCid:
27008 return untag()->set_one_byte_sticky(value.ptr());
27009 case kTwoByteStringCid:
27010 return untag()->set_two_byte_sticky(value.ptr());
27011 case kExternalOneByteStringCid:
27012 return untag()->set_external_one_byte_sticky(value.ptr());
27013 case kExternalTwoByteStringCid:
27014 return untag()->set_external_two_byte_sticky(value.ptr());
27015 }
27016 } else {
27017 switch (cid) {
27018 case kOneByteStringCid:
27019 return untag()->set_one_byte(value.ptr());
27020 case kTwoByteStringCid:
27021 return untag()->set_two_byte(value.ptr());
27022 case kExternalOneByteStringCid:
27023 return untag()->set_external_one_byte(value.ptr());
27024 case kExternalTwoByteStringCid:
27025 return untag()->set_external_two_byte(value.ptr());
27026 }
27027 }
27028}
27029
27030void RegExp::set_bytecode(bool is_one_byte,
27031 bool sticky,
27032 const TypedData& bytecode) const {
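  // Stores use release ordering so that a reader that loads the bytecode
  // field observes a fully initialized TypedData.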
27033 if (sticky) {
27034 if (is_one_byte) {
27035 untag()->set_one_byte_sticky<std::memory_order_release>(bytecode.ptr());
27036 } else {
27037 untag()->set_two_byte_sticky<std::memory_order_release>(bytecode.ptr());
27038 }
27039 } else {
27040 if (is_one_byte) {
27041 untag()->set_one_byte<std::memory_order_release>(bytecode.ptr());
27042 } else {
27043 untag()->set_two_byte<std::memory_order_release>(bytecode.ptr());
27044 }
27045 }
27046}
27047
27048void RegExp::set_num_bracket_expressions(intptr_t value) const {
27049 untag()->num_bracket_expressions_ = value;
27050}
27051
27052void RegExp::set_capture_name_map(const Array& array) const {
27053 untag()->set_capture_name_map(array.ptr());
27054}
27055
27056RegExpPtr RegExp::New(Zone* zone, Heap::Space space) {
27057 const auto& result = RegExp::Handle(ptr: Object::Allocate<RegExp>(space));
27058 ASSERT_EQUAL(result.type(), kUninitialized);
27059 ASSERT(result.flags() == RegExpFlags());
27060 result.set_num_bracket_expressions(-1);
27061 result.set_num_registers(/*is_one_byte=*/false, value: -1);
27062 result.set_num_registers(/*is_one_byte=*/true, value: -1);
27063
27064 if (!FLAG_interpret_irregexp) {
27065 auto thread = Thread::Current();
27066 const Library& lib = Library::Handle(zone, ptr: Library::CoreLibrary());
27067 const Class& owner =
27068 Class::Handle(zone, ptr: lib.LookupClass(name: Symbols::RegExp()));
27069
27070 for (intptr_t cid = kOneByteStringCid; cid <= kExternalTwoByteStringCid;
27071 cid++) {
27072 CreateSpecializedFunction(thread, zone, regexp: result, specialization_cid: cid, /*sticky=*/false,
27073 owner);
27074 CreateSpecializedFunction(thread, zone, regexp: result, specialization_cid: cid, /*sticky=*/true,
27075 owner);
27076 }
27077 }
27078 return result.ptr();
27079}
27080
27081const char* RegExpFlags::ToCString() const {
27082 switch (value_ & ~kGlobal) {
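    // Flags are rendered in the fixed order i, m, s, u; the global flag is
    // excluded by the mask above.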
27083 case kIgnoreCase | kMultiLine | kDotAll | kUnicode:
27084 return "imsu";
27085 case kIgnoreCase | kMultiLine | kDotAll:
27086 return "ims";
27087 case kIgnoreCase | kMultiLine | kUnicode:
27088 return "imu";
27089    case kIgnoreCase | kDotAll | kUnicode:
27090      return "isu";
27091 case kMultiLine | kDotAll | kUnicode:
27092 return "msu";
27093 case kIgnoreCase | kMultiLine:
27094 return "im";
27095 case kIgnoreCase | kDotAll:
27096 return "is";
27097 case kIgnoreCase | kUnicode:
27098 return "iu";
27099 case kMultiLine | kDotAll:
27100 return "ms";
27101 case kMultiLine | kUnicode:
27102 return "mu";
27103 case kDotAll | kUnicode:
27104 return "su";
27105 case kIgnoreCase:
27106 return "i";
27107 case kMultiLine:
27108 return "m";
27109 case kDotAll:
27110 return "s";
27111 case kUnicode:
27112 return "u";
27113 default:
27114 break;
27115 }
27116 return "";
27117}
27118
27119bool RegExp::CanonicalizeEquals(const Instance& other) const {
27120 if (this->ptr() == other.ptr()) {
27121 return true; // "===".
27122 }
27123 if (other.IsNull() || !other.IsRegExp()) {
27124 return false;
27125 }
27126 const RegExp& other_js = RegExp::Cast(obj: other);
27127 // Match the pattern.
27128 const String& str1 = String::Handle(ptr: pattern());
27129 const String& str2 = String::Handle(ptr: other_js.pattern());
27130 if (!str1.Equals(str: str2)) {
27131 return false;
27132 }
27133 // Match the flags.
27134 if (flags() != other_js.flags()) {
27135 return false;
27136 }
27137 return true;
27138}
27139
27140uint32_t RegExp::CanonicalizeHash() const {
27141 // Must agree with RegExpKey::Hash.
27142 return CombineHashes(hash: String::Hash(raw: pattern()), other_hash: flags().value());
27143}
27144
27145const char* RegExp::ToCString() const {
27146 const String& str = String::Handle(ptr: pattern());
27147 return OS::SCreate(zone: Thread::Current()->zone(), format: "RegExp: pattern=%s flags=%s",
27148 str.ToCString(), flags().ToCString());
27149}
27150
27151WeakPropertyPtr WeakProperty::New(Heap::Space space) {
27152 ASSERT(IsolateGroup::Current()->object_store()->weak_property_class() !=
27153 Class::null());
27154 return Object::Allocate<WeakProperty>(space);
27155}
27156
27157const char* WeakProperty::ToCString() const {
27158 return "_WeakProperty";
27159}
27160
27161WeakReferencePtr WeakReference::New(Heap::Space space) {
27162 ASSERT(IsolateGroup::Current()->object_store()->weak_reference_class() !=
27163 Class::null());
27164 return Object::Allocate<WeakReference>(space);
27165}

27166const char* WeakReference::ToCString() const {
27167 TypeArguments& type_args = TypeArguments::Handle(ptr: GetTypeArguments());
27168 String& type_args_name = String::Handle(ptr: type_args.UserVisibleName());
27169 return OS::SCreate(zone: Thread::Current()->zone(), format: "_WeakReference%s",
27170 type_args_name.ToCString());
27171}
27172
27173const char* FinalizerBase::ToCString() const {
27174 return "FinalizerBase";
27175}
27176
27177FinalizerPtr Finalizer::New(Heap::Space space) {
27178 ASSERT(IsolateGroup::Current()->object_store()->finalizer_class() !=
27179 Class::null());
27180 ASSERT(
27181 Class::Handle(IsolateGroup::Current()->object_store()->finalizer_class())
27182 .EnsureIsAllocateFinalized(Thread::Current()) == Error::null());
27183 return Object::Allocate<Finalizer>(space);
27184}
27185
27186const char* Finalizer::ToCString() const {
27187 TypeArguments& type_args = TypeArguments::Handle(ptr: GetTypeArguments());
27188 String& type_args_name = String::Handle(ptr: type_args.UserVisibleName());
27189 return OS::SCreate(zone: Thread::Current()->zone(), format: "_FinalizerImpl%s",
27190 type_args_name.ToCString());
27191}
27192
27193NativeFinalizerPtr NativeFinalizer::New(Heap::Space space) {
27194 ASSERT(IsolateGroup::Current()->object_store()->native_finalizer_class() !=
27195 Class::null());
27196 ASSERT(Class::Handle(
27197 IsolateGroup::Current()->object_store()->native_finalizer_class())
27198 .EnsureIsAllocateFinalized(Thread::Current()) == Error::null());
27199 return Object::Allocate<NativeFinalizer>(space);
27200}
27201
27202// Runs the finalizer callback if the entry has not been detached, then
27203// detaches the value and sets the external size to 0.
27204// TODO(http://dartbug.com/47777): Can this be merged with
27205// RunNativeFinalizerCallback?
27206void NativeFinalizer::RunCallback(const FinalizerEntry& entry,
27207 const char* trace_context) const {
27208 Thread* const thread = Thread::Current();
27209 Zone* const zone = thread->zone();
27210 IsolateGroup* const group = thread->isolate_group();
27211 const intptr_t external_size = entry.external_size();
27212 const auto& token_object = Object::Handle(zone, ptr: entry.token());
27213 const auto& callback_pointer = Pointer::Handle(zone, ptr: this->callback());
27214 const auto callback = reinterpret_cast<NativeFinalizer::Callback>(
27215 callback_pointer.NativeAddress());
27216 if (token_object.IsFinalizerEntry()) {
27217 // Detached from Dart code.
27218 ASSERT(token_object.ptr() == entry.ptr());
27219 ASSERT(external_size == 0);
27220 if (FLAG_trace_finalizers) {
27221 THR_Print(
27222 "%s: Not running native finalizer %p callback %p, "
27223 "detached\n",
27224 trace_context, ptr()->untag(), callback);
27225 }
27226 } else {
27227 const auto& token = Pointer::Cast(obj: token_object);
27228 void* peer = reinterpret_cast<void*>(token.NativeAddress());
27229 if (FLAG_trace_finalizers) {
27230 THR_Print(
27231 "%s: Running native finalizer %p callback %p "
27232 "with token %p\n",
27233 trace_context, ptr()->untag(), callback, peer);
27234 }
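    // Detach by pointing the token at the entry itself; this is exactly the
    // marker the IsFinalizerEntry branch above tests for.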
27235 entry.set_token(entry);
27236 callback(peer);
27237 if (external_size > 0) {
27238 ASSERT(!entry.value()->IsSmi());
27239 Heap::Space space =
27240 entry.value()->IsOldObject() ? Heap::kOld : Heap::kNew;
27241 if (FLAG_trace_finalizers) {
27242 THR_Print("%s: Clearing external size %" Pd " bytes in %s space\n",
27243                  trace_context, external_size,
27244                  space == Heap::kNew ? "new" : "old");
27244 }
27245 group->heap()->FreedExternal(size: external_size, space);
27246 entry.set_external_size(0);
27247 }
27248 }
27249}
27250
27251const char* NativeFinalizer::ToCString() const {
27252 const auto& pointer = Pointer::Handle(ptr: callback());
27253 return OS::SCreate(zone: Thread::Current()->zone(), format: "_NativeFinalizer %s",
27254 pointer.ToCString());
27255}
27256
27257FinalizerEntryPtr FinalizerEntry::New(const FinalizerBase& finalizer,
27258 Heap::Space space) {
27259 ASSERT(IsolateGroup::Current()->object_store()->finalizer_entry_class() !=
27260 Class::null());
27261 const auto& entry =
27262 FinalizerEntry::Handle(ptr: Object::Allocate<FinalizerEntry>(space));
27263 ASSERT_EQUAL(entry.external_size(), 0);
27264 entry.set_finalizer(finalizer);
27265 return entry.ptr();
27266}
27267
27268void FinalizerEntry::set_finalizer(const FinalizerBase& value) const {
27269 untag()->set_finalizer(value.ptr());
27270}
27271
27272const char* FinalizerEntry::ToCString() const {
27273 return "FinalizerEntry";
27274}
27275
27276AbstractTypePtr MirrorReference::GetAbstractTypeReferent() const {
27277 ASSERT(Object::Handle(referent()).IsAbstractType());
27278 return AbstractType::Cast(obj: Object::Handle(ptr: referent())).ptr();
27279}
27280
27281ClassPtr MirrorReference::GetClassReferent() const {
27282 ASSERT(Object::Handle(referent()).IsClass());
27283 return Class::Cast(obj: Object::Handle(ptr: referent())).ptr();
27284}
27285
27286FieldPtr MirrorReference::GetFieldReferent() const {
27287 ASSERT(Object::Handle(referent()).IsField());
27288 return Field::Cast(obj: Object::Handle(ptr: referent())).ptr();
27289}
27290
27291FunctionPtr MirrorReference::GetFunctionReferent() const {
27292 ASSERT(Object::Handle(referent()).IsFunction());
27293 return Function::Cast(obj: Object::Handle(ptr: referent())).ptr();
27294}
27295
27296FunctionTypePtr MirrorReference::GetFunctionTypeReferent() const {
27297 ASSERT(Object::Handle(referent()).IsFunctionType());
27298 return FunctionType::Cast(obj: Object::Handle(ptr: referent())).ptr();
27299}
27300
27301LibraryPtr MirrorReference::GetLibraryReferent() const {
27302 ASSERT(Object::Handle(referent()).IsLibrary());
27303 return Library::Cast(obj: Object::Handle(ptr: referent())).ptr();
27304}
27305
27306TypeParameterPtr MirrorReference::GetTypeParameterReferent() const {
27307 ASSERT(Object::Handle(referent()).IsTypeParameter());
27308 return TypeParameter::Cast(obj: Object::Handle(ptr: referent())).ptr();
27309}
27310
27311MirrorReferencePtr MirrorReference::New(const Object& referent,
27312 Heap::Space space) {
27313 const auto& result =
27314 MirrorReference::Handle(ptr: Object::Allocate<MirrorReference>(space));
27315 result.set_referent(referent);
27316 return result.ptr();
27317}
27318
27319const char* MirrorReference::ToCString() const {
27320 return "_MirrorReference";
27321}
27322
27323UserTagPtr UserTag::MakeActive() const {
27324 Isolate* isolate = Isolate::Current();
27325 ASSERT(isolate != nullptr);
27326 UserTag& old = UserTag::Handle(ptr: isolate->current_tag());
27327 isolate->set_current_tag(*this);
27328
27329#if !defined(PRODUCT)
27330 // Notify VM service clients that the current UserTag has changed.
27331 if (Service::profiler_stream.enabled()) {
27332 ServiceEvent event(isolate, ServiceEvent::kUserTagChanged);
27333 String& name = String::Handle(ptr: old.label());
27334 event.set_previous_tag(name.ToCString());
27335 name ^= label();
27336 event.set_updated_tag(name.ToCString());
27337 Service::HandleEvent(event: &event);
27338 }
27339#endif // !defined(PRODUCT)
27340
27341 return old.ptr();
27342}
27343
27344UserTagPtr UserTag::New(const String& label, Heap::Space space) {
27345 Thread* thread = Thread::Current();
27346 Isolate* isolate = thread->isolate();
27347 ASSERT(isolate->tag_table() != GrowableObjectArray::null());
27348 // Canonicalize by name.
27349 UserTag& result = UserTag::Handle(ptr: FindTagInIsolate(thread, label));
27350 if (!result.IsNull()) {
27351 // Tag already exists, return existing instance.
27352 return result.ptr();
27353 }
27354 if (TagTableIsFull(thread)) {
27355 const String& error = String::Handle(ptr: String::NewFormatted(
27356 format: "UserTag instance limit (%" Pd ") reached.", UserTags::kMaxUserTags));
27357 const Array& args = Array::Handle(ptr: Array::New(len: 1));
27358 args.SetAt(index: 0, value: error);
27359 Exceptions::ThrowByType(type: Exceptions::kUnsupported, arguments: args);
27360 }
27361 // No tag with label exists, create and register with isolate tag table.
27362 result = Object::Allocate<UserTag>(space);
27363 result.set_label(label);
27364 result.set_streamable(UserTags::IsTagNameStreamable(tag: label.ToCString()));
27365 AddTagToIsolate(thread, tag: result);
27366 return result.ptr();
27367}
27368
27369UserTagPtr UserTag::DefaultTag() {
27370 Thread* thread = Thread::Current();
27371 Zone* zone = thread->zone();
27372 Isolate* isolate = thread->isolate();
27373 ASSERT(isolate != nullptr);
27374 if (isolate->default_tag() != UserTag::null()) {
27375 // Already created.
27376 return isolate->default_tag();
27377 }
27378 // Create default tag.
27379 const UserTag& result =
27380 UserTag::Handle(zone, ptr: UserTag::New(label: Symbols::Default()));
27381 ASSERT(result.tag() == UserTags::kDefaultUserTag);
27382 isolate->set_default_tag(result);
27383 return result.ptr();
27384}
27385
27386UserTagPtr UserTag::FindTagInIsolate(Isolate* isolate,
27387 Thread* thread,
27388 const String& label) {
27389 Zone* zone = thread->zone();
27390 if (isolate->tag_table() == GrowableObjectArray::null()) {
27391 return UserTag::null();
27392 }
27393 const GrowableObjectArray& tag_table =
27394 GrowableObjectArray::Handle(zone, ptr: isolate->tag_table());
27395 UserTag& other = UserTag::Handle(zone);
27396 String& tag_label = String::Handle(zone);
27397 for (intptr_t i = 0; i < tag_table.Length(); i++) {
27398 other ^= tag_table.At(index: i);
27399 ASSERT(!other.IsNull());
27400 tag_label = other.label();
27401 ASSERT(!tag_label.IsNull());
27402 if (tag_label.Equals(str: label)) {
27403 return other.ptr();
27404 }
27405 }
27406 return UserTag::null();
27407}
27408
27409UserTagPtr UserTag::FindTagInIsolate(Thread* thread, const String& label) {
27410 Isolate* isolate = thread->isolate();
27411 return FindTagInIsolate(isolate, thread, label);
27412}
27413
void UserTag::AddTagToIsolate(Thread* thread, const UserTag& tag) {
  Isolate* isolate = thread->isolate();
  Zone* zone = thread->zone();
  ASSERT(isolate->tag_table() != GrowableObjectArray::null());
  const GrowableObjectArray& tag_table =
      GrowableObjectArray::Handle(zone, isolate->tag_table());
  ASSERT(!TagTableIsFull(thread));
#if defined(DEBUG)
  // Verify that no existing tag has the same tag id.
  UserTag& other = UserTag::Handle(thread->zone());
  for (intptr_t i = 0; i < tag_table.Length(); i++) {
    other ^= tag_table.At(i);
    ASSERT(!other.IsNull());
    ASSERT(tag.tag() != other.tag());
  }
#endif
  // Generate the tag id by adding kUserTagIdOffset to the current length of
  // the isolate's tag table.
  uword tag_id = tag_table.Length() + UserTags::kUserTagIdOffset;
  ASSERT(tag_id >= UserTags::kUserTagIdOffset);
  ASSERT(tag_id < (UserTags::kUserTagIdOffset + UserTags::kMaxUserTags));
  tag.set_tag(tag_id);
  tag_table.Add(tag);
}

bool UserTag::TagTableIsFull(Thread* thread) {
  Isolate* isolate = thread->isolate();
  ASSERT(isolate->tag_table() != GrowableObjectArray::null());
  const GrowableObjectArray& tag_table =
      GrowableObjectArray::Handle(thread->zone(), isolate->tag_table());
  ASSERT(tag_table.Length() <= UserTags::kMaxUserTags);
  return tag_table.Length() == UserTags::kMaxUserTags;
}

UserTagPtr UserTag::FindTagById(uword tag_id) {
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  Isolate* isolate = thread->isolate();
  ASSERT(isolate->tag_table() != GrowableObjectArray::null());
  const GrowableObjectArray& tag_table =
      GrowableObjectArray::Handle(zone, isolate->tag_table());
  UserTag& tag = UserTag::Handle(zone);
  for (intptr_t i = 0; i < tag_table.Length(); i++) {
    tag ^= tag_table.At(i);
    if (tag.tag() == tag_id) {
      return tag.ptr();
    }
  }
  return UserTag::null();
}

const char* UserTag::ToCString() const {
  const String& tag_label = String::Handle(label());
  return tag_label.ToCString();
}

void DumpTypeTable(Isolate* isolate) {
  OS::PrintErr("canonical types:\n");
  CanonicalTypeSet table(isolate->group()->object_store()->canonical_types());
  table.Dump();
  table.Release();
}

void DumpFunctionTypeTable(Isolate* isolate) {
  OS::PrintErr("canonical function types:\n");
  CanonicalFunctionTypeSet table(
      isolate->group()->object_store()->canonical_function_types());
  table.Dump();
  table.Release();
}

void DumpRecordTypeTable(Isolate* isolate) {
  OS::PrintErr("canonical record types:\n");
  CanonicalRecordTypeSet table(
      isolate->group()->object_store()->canonical_record_types());
  table.Dump();
  table.Release();
}

void DumpTypeParameterTable(Isolate* isolate) {
  OS::PrintErr("canonical type parameters (cloned from declarations):\n");
  CanonicalTypeParameterSet table(
      isolate->group()->object_store()->canonical_type_parameters());
  table.Dump();
  table.Release();
}

void DumpTypeArgumentsTable(Isolate* isolate) {
  OS::PrintErr("canonical type arguments:\n");
  CanonicalTypeArgumentsSet table(
      isolate->group()->object_store()->canonical_type_arguments());
  table.Dump();
  table.Release();
}

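// Scans a declaration's metadata array for pragma instances whose name is
// 'vm:entry-point' and interprets the options value: null or true means the
// member is always an entry point, while the symbols 'get', 'set' and 'call'
// restrict it to getter, setter or call access respectively. Other option
// values are ignored; if no matching pragma is found, kNever is returned.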
EntryPointPragma FindEntryPointPragma(IsolateGroup* IG,
                                      const Array& metadata,
                                      Field* reusable_field_handle,
                                      Object* pragma) {
  for (intptr_t i = 0; i < metadata.Length(); i++) {
    *pragma = metadata.At(i);
    if (pragma->clazz() != IG->object_store()->pragma_class()) {
      continue;
    }
    *reusable_field_handle = IG->object_store()->pragma_name();
    if (Instance::Cast(*pragma).GetField(*reusable_field_handle) !=
        Symbols::vm_entry_point().ptr()) {
      continue;
    }
    *reusable_field_handle = IG->object_store()->pragma_options();
    *pragma = Instance::Cast(*pragma).GetField(*reusable_field_handle);
    if (pragma->ptr() == Bool::null() || pragma->ptr() == Bool::True().ptr()) {
      return EntryPointPragma::kAlways;
    }
    if (pragma->ptr() == Symbols::get().ptr()) {
      return EntryPointPragma::kGetterOnly;
    }
    if (pragma->ptr() == Symbols::set().ptr()) {
      return EntryPointPragma::kSetterOnly;
    }
    if (pragma->ptr() == Symbols::call().ptr()) {
      return EntryPointPragma::kCallOnly;
    }
  }
  return EntryPointPragma::kNever;
}

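// Checks that `member` may be accessed through the Dart C API. In the
// precompiled runtime, annotations have been discarded, so has_pragma() on
// the annotated declaration is used as an approximation; otherwise the
// metadata is inspected for a 'vm:entry-point' pragma of one of the allowed
// kinds (or kAlways).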
DART_WARN_UNUSED_RESULT
ErrorPtr VerifyEntryPoint(
    const Library& lib,
    const Object& member,
    const Object& annotated,
    std::initializer_list<EntryPointPragma> allowed_kinds) {
#if defined(DART_PRECOMPILED_RUNTIME)
  // Annotations are discarded in the AOT snapshot, so we can't determine
  // precisely whether this member was marked as an entry point. Instead, we
  // use has_pragma() as a proxy, since that bit is usually retained.
  bool is_marked_entrypoint = true;
  if (annotated.IsClass() && !Class::Cast(annotated).has_pragma()) {
    is_marked_entrypoint = false;
  } else if (annotated.IsField() && !Field::Cast(annotated).has_pragma()) {
    is_marked_entrypoint = false;
  } else if (annotated.IsFunction() &&
             !Function::Cast(annotated).has_pragma()) {
    is_marked_entrypoint = false;
  }
#else
  Object& metadata = Object::Handle(Object::empty_array().ptr());
  if (!annotated.IsNull()) {
    metadata = lib.GetMetadata(annotated);
  }
  if (metadata.IsError()) return Error::RawCast(metadata.ptr());
  ASSERT(!metadata.IsNull() && metadata.IsArray());
  EntryPointPragma pragma =
      FindEntryPointPragma(IsolateGroup::Current(), Array::Cast(metadata),
                           &Field::Handle(), &Object::Handle());
  bool is_marked_entrypoint = pragma == EntryPointPragma::kAlways;
  if (!is_marked_entrypoint) {
    for (const auto allowed_kind : allowed_kinds) {
      if (pragma == allowed_kind) {
        is_marked_entrypoint = true;
        break;
      }
    }
  }
#endif
  if (!is_marked_entrypoint) {
    return EntryPointMemberInvocationError(member);
  }
  return Error::null();
}

DART_WARN_UNUSED_RESULT
ErrorPtr EntryPointFieldInvocationError(const String& getter_name) {
  if (!FLAG_verify_entry_points) return Error::null();

  char const* error = OS::SCreate(
      Thread::Current()->zone(),
      "ERROR: Entry-points do not allow invoking fields "
      "(failure to resolve '%s')\n"
      "ERROR: See "
      "https://github.com/dart-lang/sdk/blob/master/runtime/docs/compiler/"
      "aot/entry_point_pragma.md\n",
      getter_name.ToCString());
  OS::PrintErr("%s", error);
  return ApiError::New(String::Handle(String::New(error)));
}

DART_WARN_UNUSED_RESULT
ErrorPtr EntryPointMemberInvocationError(const Object& member) {
  const char* member_cstring =
      member.IsFunction()
          ? OS::SCreate(
                Thread::Current()->zone(), "%s (kind %s)",
                Function::Cast(member).ToLibNamePrefixedQualifiedCString(),
                Function::KindToCString(Function::Cast(member).kind()))
          : member.ToCString();
  if (!FLAG_verify_entry_points) {
    // Print a warning, but do not return an error.
    char const* warning = OS::SCreate(
        Thread::Current()->zone(),
        "WARNING: '%s' is accessed through Dart C API without being marked as "
        "an entry point; its tree-shaken signature cannot be verified.\n"
        "WARNING: See "
        "https://github.com/dart-lang/sdk/blob/master/runtime/docs/compiler/"
        "aot/entry_point_pragma.md\n",
        member_cstring);
    OS::PrintErr("%s", warning);
    return Error::null();
  }
  char const* error = OS::SCreate(
      Thread::Current()->zone(),
      "ERROR: It is illegal to access '%s' through Dart C API.\n"
      "ERROR: See "
      "https://github.com/dart-lang/sdk/blob/master/runtime/docs/compiler/"
      "aot/entry_point_pragma.md\n",
      member_cstring);
  OS::PrintErr("%s", error);
  return ApiError::New(String::Handle(String::New(error)));
}

ErrorPtr Function::VerifyCallEntryPoint() const {
  if (!FLAG_verify_entry_points) return Error::null();

  const Class& cls = Class::Handle(Owner());
  const Library& lib = Library::Handle(cls.library());
  switch (kind()) {
    case UntaggedFunction::kRegularFunction:
    case UntaggedFunction::kSetterFunction:
    case UntaggedFunction::kConstructor:
      return dart::VerifyEntryPoint(lib, *this, *this,
                                    {EntryPointPragma::kCallOnly});
    case UntaggedFunction::kGetterFunction:
      return dart::VerifyEntryPoint(
          lib, *this, *this,
          {EntryPointPragma::kCallOnly, EntryPointPragma::kGetterOnly});
    case UntaggedFunction::kImplicitGetter:
      return dart::VerifyEntryPoint(lib, *this,
                                    Field::Handle(accessor_field()),
                                    {EntryPointPragma::kGetterOnly});
    case UntaggedFunction::kImplicitSetter:
      return dart::VerifyEntryPoint(lib, *this,
                                    Field::Handle(accessor_field()),
                                    {EntryPointPragma::kSetterOnly});
    case UntaggedFunction::kMethodExtractor:
      return Function::Handle(extracted_method_closure())
          .VerifyClosurizedEntryPoint();
    default:
      return dart::VerifyEntryPoint(lib, *this, Object::Handle(), {});
  }
}

ErrorPtr Function::VerifyClosurizedEntryPoint() const {
  if (!FLAG_verify_entry_points) return Error::null();

  const Class& cls = Class::Handle(Owner());
  const Library& lib = Library::Handle(cls.library());
  switch (kind()) {
    case UntaggedFunction::kRegularFunction:
      return dart::VerifyEntryPoint(lib, *this, *this,
                                    {EntryPointPragma::kGetterOnly});
    case UntaggedFunction::kImplicitClosureFunction: {
      const Function& parent = Function::Handle(parent_function());
      return dart::VerifyEntryPoint(lib, parent, parent,
                                    {EntryPointPragma::kGetterOnly});
    }
    default:
      UNREACHABLE();
  }
}

ErrorPtr Field::VerifyEntryPoint(EntryPointPragma pragma) const {
  if (!FLAG_verify_entry_points) return Error::null();
  const Class& cls = Class::Handle(Owner());
  const Library& lib = Library::Handle(cls.library());
  return dart::VerifyEntryPoint(lib, *this, *this, {pragma});
}

ErrorPtr Class::VerifyEntryPoint() const {
  if (!FLAG_verify_entry_points) return Error::null();
  const Library& lib = Library::Handle(library());
  if (!lib.IsNull()) {
    return dart::VerifyEntryPoint(lib, *this, *this, {});
  } else {
    return Error::null();
  }
}

AbstractTypePtr RecordType::FieldTypeAt(intptr_t index) const {
  const Array& field_types = Array::Handle(untag()->field_types());
  return AbstractType::RawCast(field_types.At(index));
}

void RecordType::SetFieldTypeAt(intptr_t index,
                                const AbstractType& value) const {
  ASSERT(!value.IsNull());
  const Array& field_types = Array::Handle(untag()->field_types());
  field_types.SetAt(index, value);
}

void RecordType::set_field_types(const Array& value) const {
  ASSERT(!value.IsNull());
  untag()->set_field_types(value.ptr());
}

void RecordType::set_shape(RecordShape shape) const {
  untag()->set_shape(shape.AsSmi());
}

ArrayPtr RecordType::GetFieldNames(Thread* thread) const {
  return shape().GetFieldNames(thread);
}

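// Prints this record type using Dart-like syntax: positional field types
// first, then named fields as "Type name" inside braces, followed by the
// nullability suffix. For example, a record type with a positional int field
// and a String field named 'name' prints as "(int, {String name})".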
void RecordType::Print(NameVisibility name_visibility,
                       BaseTextBuffer* printer) const {
  if (IsNull()) {
    printer->AddString("null");
    return;
  }
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  AbstractType& type = AbstractType::Handle(zone);
  String& name = String::Handle(zone);
  const intptr_t num_fields = NumFields();
  const Array& field_names = Array::Handle(zone, GetFieldNames(thread));
  const intptr_t num_positional_fields = num_fields - field_names.Length();
  printer->AddString("(");
  for (intptr_t i = 0; i < num_fields; ++i) {
    if (i != 0) {
      printer->AddString(", ");
    }
    if (i == num_positional_fields) {
      printer->AddString("{");
    }
    type = FieldTypeAt(i);
    type.PrintName(name_visibility, printer);
    if (i >= num_positional_fields) {
      printer->AddString(" ");
      name ^= field_names.At(i - num_positional_fields);
      printer->AddString(name.ToCString());
    }
  }
  if (num_positional_fields < num_fields) {
    printer->AddString("}");
  }
  printer->AddString(")");
  printer->AddString(NullabilitySuffix(name_visibility));
}

const char* RecordType::ToCString() const {
  Zone* zone = Thread::Current()->zone();
  ZoneTextBuffer printer(zone);
  Print(kInternalName, &printer);
  return printer.buffer();
}

bool RecordType::IsInstantiated(Genericity genericity,
                                intptr_t num_free_fun_type_params) const {
  AbstractType& type = AbstractType::Handle();
  const intptr_t num_fields = NumFields();
  for (intptr_t i = 0; i < num_fields; ++i) {
    type = FieldTypeAt(i);
    if (!type.IsInstantiated(genericity, num_free_fun_type_params)) {
      return false;
    }
  }
  return true;
}

RecordTypePtr RecordType::New(Heap::Space space) {
  return Object::Allocate<RecordType>(space);
}

RecordTypePtr RecordType::New(RecordShape shape,
                              const Array& field_types,
                              Nullability nullability,
                              Heap::Space space) {
  Zone* Z = Thread::Current()->zone();
  const RecordType& result = RecordType::Handle(Z, RecordType::New(space));
  result.set_shape(shape);
  result.set_field_types(field_types);
  result.SetHash(0);
  result.set_flags(0);
  result.set_nullability(nullability);
  result.set_type_state(UntaggedAbstractType::kAllocated);
  result.InitializeTypeTestingStubNonAtomic(
      Code::Handle(Z, TypeTestingStubGenerator::DefaultCodeForType(result)));
  return result.ptr();
}

RecordTypePtr RecordType::ToNullability(Nullability value,
                                        Heap::Space space) const {
  if (nullability() == value) {
    return ptr();
  }
  // Clone the record type and set the new nullability.
  RecordType& type = RecordType::Handle();
  // Always cloning in old space (and dropping the space parameter) would not
  // satisfy the existing requests to instantiate types in new space.
  type ^= Object::Clone(*this, space);
  type.set_nullability(value);
  type.SetHash(0);
  type.InitializeTypeTestingStubNonAtomic(
      Code::Handle(TypeTestingStubGenerator::DefaultCodeForType(type)));
  if (IsCanonical()) {
    // Object::Clone does not clone the canonical bit.
    ASSERT(!type.IsCanonical());
    type ^= type.Canonicalize(Thread::Current());
  }
  return type.ptr();
}

bool RecordType::IsEquivalent(
    const Instance& other,
    TypeEquality kind,
    FunctionTypeMapping* function_type_equivalence) const {
  ASSERT(!IsNull());
  if (ptr() == other.ptr()) {
    return true;
  }
  if (!other.IsRecordType()) {
    return false;
  }
  const RecordType& other_type = RecordType::Cast(other);
  // Equal record types must have the same shape
  // (the same number of fields and the same named fields).
  if (shape() != other_type.shape()) {
    return false;
  }
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  if (!IsNullabilityEquivalent(thread, other_type, kind)) {
    return false;
  }
  // Equal record types must have equal field types.
  AbstractType& field_type = Type::Handle(zone);
  AbstractType& other_field_type = Type::Handle(zone);
  const intptr_t num_fields = NumFields();
  for (intptr_t i = 0; i < num_fields; ++i) {
    field_type = FieldTypeAt(i);
    other_field_type = other_type.FieldTypeAt(i);
    if (!field_type.IsEquivalent(other_field_type, kind,
                                 function_type_equivalence)) {
      return false;
    }
  }
  return true;
}

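// Computes and caches the hash of this record type by combining its
// nullability (with legacy normalized to non-nullable), its shape, and the
// hashes of all field types.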
uword RecordType::ComputeHash() const {
  ASSERT(IsFinalized());
  uint32_t result = 0;
  // A legacy type should have the same hash as its non-nullable version to be
  // consistent with the definition of type equality in Dart code.
  Nullability type_nullability = nullability();
  if (type_nullability == Nullability::kLegacy) {
    type_nullability = Nullability::kNonNullable;
  }
  result = CombineHashes(result, static_cast<uint32_t>(type_nullability));
  result = CombineHashes(result, static_cast<uint32_t>(shape().AsInt()));
  AbstractType& type = AbstractType::Handle();
  const intptr_t num_fields = NumFields();
  for (intptr_t i = 0; i < num_fields; ++i) {
    type = FieldTypeAt(i);
    result = CombineHashes(result, type.Hash());
  }
  result = FinalizeHash(result, kHashBits);
  SetHash(result);
  return result;
}

bool RecordType::RequireConstCanonicalTypeErasure(Zone* zone) const {
  if (IsNonNullable()) {
    return true;
  }
  if (IsLegacy()) {
    return false;
  }
  AbstractType& type = AbstractType::Handle();
  const intptr_t num_fields = NumFields();
  for (intptr_t i = 0; i < num_fields; ++i) {
    type = FieldTypeAt(i);
    if (type.RequireConstCanonicalTypeErasure(zone)) {
      return true;
    }
  }
  return false;
}

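// Returns the canonical version of this record type. The canonical table is
// consulted twice under the type canonicalization mutex: once up front, and
// again after canonicalizing the field types, since that step may itself
// have inserted an equivalent record type. New-space instances are cloned
// into old space before being marked canonical.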
AbstractTypePtr RecordType::Canonicalize(Thread* thread) const {
  ASSERT(IsFinalized());
  Zone* zone = thread->zone();
  AbstractType& type = AbstractType::Handle(zone);
  if (IsCanonical()) {
#ifdef DEBUG
    // Verify that all fields are allocated in old space and are canonical.
    ASSERT(Array::Handle(zone, field_types()).IsOld());
    const intptr_t num_fields = NumFields();
    for (intptr_t i = 0; i < num_fields; ++i) {
      type = FieldTypeAt(i);
      ASSERT(type.IsOld());
      ASSERT(type.IsCanonical());
    }
#endif
    return ptr();
  }
  auto isolate_group = thread->isolate_group();
  ObjectStore* object_store = isolate_group->object_store();
  RecordType& rec = RecordType::Handle(zone);
  {
    SafepointMutexLocker ml(isolate_group->type_canonicalization_mutex());
    CanonicalRecordTypeSet table(zone, object_store->canonical_record_types());
    rec ^= table.GetOrNull(CanonicalRecordTypeKey(*this));
    ASSERT(object_store->canonical_record_types() == table.Release().ptr());
  }
  if (rec.IsNull()) {
    ASSERT(Array::Handle(zone, field_types()).IsOld());
    const intptr_t num_fields = NumFields();
    for (intptr_t i = 0; i < num_fields; ++i) {
      type = FieldTypeAt(i);
      if (!type.IsCanonical()) {
        type = type.Canonicalize(thread);
        SetFieldTypeAt(i, type);
      }
    }
    // Check whether the record type got added to the canonical table as part
    // of the canonicalization of its field types.
    SafepointMutexLocker ml(isolate_group->type_canonicalization_mutex());
    CanonicalRecordTypeSet table(zone, object_store->canonical_record_types());
    rec ^= table.GetOrNull(CanonicalRecordTypeKey(*this));
    if (rec.IsNull()) {
      // Add this record type to the canonical table of record types.
      if (this->IsNew()) {
        rec ^= Object::Clone(*this, Heap::kOld);
      } else {
        rec = this->ptr();
      }
      ASSERT(rec.IsOld());
      rec.SetCanonical();  // Mark object as being canonical.
      bool present = table.Insert(rec);
      ASSERT(!present);
    }
    object_store->set_canonical_record_types(table.Release());
  }
  return rec.ptr();
}

void RecordType::EnumerateURIs(URIs* uris) const {
  AbstractType& type = AbstractType::Handle();
  const intptr_t num_fields = NumFields();
  for (intptr_t i = 0; i < num_fields; ++i) {
    type = FieldTypeAt(i);
    type.EnumerateURIs(uris);
  }
}

void RecordType::PrintName(NameVisibility name_visibility,
                           BaseTextBuffer* printer) const {
  RecordType::Cast(*this).Print(name_visibility, printer);
}

AbstractTypePtr RecordType::InstantiateFrom(
    const TypeArguments& instantiator_type_arguments,
    const TypeArguments& function_type_arguments,
    intptr_t num_free_fun_type_params,
    Heap::Space space,
    FunctionTypeMapping* function_type_mapping,
    intptr_t num_parent_type_args_adjustment) const {
  ASSERT(IsFinalized());
  Zone* zone = Thread::Current()->zone();

  const intptr_t num_fields = NumFields();
  const Array& old_field_types = Array::Handle(zone, field_types());
  const Array& new_field_types =
      Array::Handle(zone, Array::New(num_fields, space));
  AbstractType& type = AbstractType::Handle(zone);
  for (intptr_t i = 0; i < num_fields; ++i) {
    type ^= old_field_types.At(i);
    if (!type.IsInstantiated()) {
      type = type.InstantiateFrom(
          instantiator_type_arguments, function_type_arguments,
          num_free_fun_type_params, space, function_type_mapping,
          num_parent_type_args_adjustment);
      // A returned null type indicates a failed instantiation in dead code
      // that must be propagated up to the caller, the optimizing compiler.
      if (type.IsNull()) {
        return RecordType::null();
      }
    }
    new_field_types.SetAt(i, type);
  }

  const auto& rec = RecordType::Handle(
      zone, RecordType::New(shape(), new_field_types, nullability(), space));

  rec.SetIsFinalized();

  // Canonicalization is not part of instantiation.
  return rec.ptr();
}

AbstractTypePtr RecordType::UpdateFunctionTypes(
    intptr_t num_parent_type_args_adjustment,
    intptr_t num_free_fun_type_params,
    Heap::Space space,
    FunctionTypeMapping* function_type_mapping) const {
  ASSERT(IsFinalized());
  ASSERT(num_parent_type_args_adjustment >= 0);
  Zone* zone = Thread::Current()->zone();
  const auto& types = Array::Handle(zone, field_types());
  Array* updated_types = nullptr;
  auto& type = AbstractType::Handle(zone);
  auto& updated = AbstractType::Handle(zone);
  for (intptr_t i = 0, n = NumFields(); i < n; ++i) {
    type ^= types.At(i);
    updated = type.UpdateFunctionTypes(num_parent_type_args_adjustment,
                                       num_free_fun_type_params, space,
                                       function_type_mapping);
    if (type.ptr() != updated.ptr()) {
      if (updated_types == nullptr) {
        // Lazily copy the fields visited so far on the first change.
        updated_types = &Array::Handle(zone, Array::New(n, space));
        for (intptr_t j = 0; j < i; ++j) {
          type ^= types.At(j);
          updated_types->SetAt(j, type);
        }
      }
    }
    if (updated_types != nullptr) {
      updated_types->SetAt(i, updated);
    }
  }
  if (updated_types == nullptr) {
    return ptr();
  }
  const auto& new_rt = RecordType::Handle(
      zone, RecordType::New(shape(), *updated_types, nullability(), space));
  new_rt.SetIsFinalized();
  return new_rt.ptr();
}

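// Record types are subtype-compatible only when they have the exact same
// shape; subtyping is then checked field-wise, i.e. record types are
// covariant in their field types. For example, (int, {String name}) is a
// subtype of (num, {Object name}), but not of (int, int), whose shape
// differs.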
bool RecordType::IsSubtypeOf(
    const RecordType& other,
    Heap::Space space,
    FunctionTypeMapping* function_type_equivalence) const {
  if (ptr() == other.ptr()) {
    return true;
  }
  ASSERT(IsFinalized());
  ASSERT(other.IsFinalized());
  const intptr_t num_fields = NumFields();
  if (shape() != other.shape()) {
    // Different number of fields or different named fields.
    return false;
  }
  Thread* const thread = Thread::Current();
  if (!IsNullabilityEquivalent(thread, other, TypeEquality::kInSubtypeTest)) {
    return false;
  }
  // Check subtyping of record field types.
  Zone* const zone = thread->zone();
  AbstractType& field_type = Type::Handle(zone);
  AbstractType& other_field_type = Type::Handle(zone);
  for (intptr_t i = 0; i < num_fields; ++i) {
    field_type = FieldTypeAt(i);
    other_field_type = other.FieldTypeAt(i);
    if (!field_type.IsSubtypeOf(other_field_type, space,
                                function_type_equivalence)) {
      return false;
    }
  }
  return true;
}

RecordPtr Record::New(RecordShape shape, Heap::Space space) {
  const intptr_t num_fields = shape.num_fields();
  ASSERT(num_fields >= 0);
  auto raw = Object::Allocate<Record>(space, num_fields);
  NoSafepointScope no_safepoint;
  raw->untag()->set_shape(shape.AsSmi());
  return raw;
}

const char* Record::ToCString() const {
  if (IsNull()) {
    return "Record: null";
  }
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  ZoneTextBuffer printer(zone);
  const intptr_t num_fields = this->num_fields();
  const Array& field_names = Array::Handle(zone, GetFieldNames(thread));
  const intptr_t num_positional_fields = num_fields - field_names.Length();
  Object& obj = Object::Handle(zone);
  printer.AddString("Record (");
  for (intptr_t i = 0; i < num_fields; ++i) {
    if (i != 0) {
      printer.AddString(", ");
    }
    if (i >= num_positional_fields) {
      obj = field_names.At(i - num_positional_fields);
      printer.AddString(obj.ToCString());
      printer.AddString(": ");
    }
    obj = FieldAt(i);
    printer.AddString(obj.ToCString());
  }
  printer.AddString(")");
  return printer.buffer();
}

bool Record::CanonicalizeEquals(const Instance& other) const {
  if (this->ptr() == other.ptr()) {
    return true;
  }

  if (!other.IsRecord() || other.IsNull()) {
    return false;
  }

  const Record& other_rec = Record::Cast(other);
  if (shape() != other_rec.shape()) {
    return false;
  }

  const intptr_t num_fields = this->num_fields();
  for (intptr_t i = 0; i < num_fields; ++i) {
    if (this->FieldAt(i) != other_rec.FieldAt(i)) {
      return false;
    }
  }
  return true;
}

uint32_t Record::CanonicalizeHash() const {
  Thread* thread = Thread::Current();
  uint32_t hash = thread->heap()->GetCanonicalHash(ptr());
  if (hash != 0) {
    return hash;
  }
  hash = shape().AsInt();
  Instance& element = Instance::Handle();
  const intptr_t num_fields = this->num_fields();
  for (intptr_t i = 0; i < num_fields; ++i) {
    element ^= FieldAt(i);
    hash = CombineHashes(hash, element.CanonicalizeHash());
  }
  hash = FinalizeHash(hash, kHashBits);
  thread->heap()->SetCanonicalHash(ptr(), hash);
  return hash;
}

void Record::CanonicalizeFieldsLocked(Thread* thread) const {
  Zone* zone = thread->zone();
  Instance& obj = Instance::Handle(zone);
  const intptr_t num_fields = this->num_fields();
  for (intptr_t i = 0; i < num_fields; ++i) {
    obj ^= FieldAt(i);
    obj = obj.CanonicalizeLocked(thread);
    SetFieldAt(i, obj);
  }
}

RecordTypePtr Record::GetRecordType() const {
  Zone* const zone = Thread::Current()->zone();
  const intptr_t num_fields = this->num_fields();
  const Array& field_types =
      Array::Handle(zone, Array::New(num_fields, Heap::kOld));
  Instance& obj = Instance::Handle(zone);
  AbstractType& type = AbstractType::Handle(zone);
  for (intptr_t i = 0; i < num_fields; ++i) {
    obj ^= FieldAt(i);
    type = obj.GetType(Heap::kNew);
    field_types.SetAt(i, type);
  }
  type = RecordType::New(shape(), field_types, Nullability::kNonNullable);
  type = ClassFinalizer::FinalizeType(type);
  return RecordType::Cast(type).ptr();
}

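// Maps synthetic positional field names of the form '$1', '$2', ... to
// zero-based field indices (e.g. "$1" -> 0, "$2" -> 1). Returns -1 for
// names that are not of this form.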
intptr_t Record::GetPositionalFieldIndexFromFieldName(
    const String& field_name) {
  if (field_name.IsOneByteString() && field_name.Length() >= 1 &&
      field_name.CharAt(0) == '$') {
    int64_t value = 0;
    const char* cstr = field_name.ToCString();
    if (OS::StringToInt64(cstr + 1 /* skip '$' */, &value)) {
      if (value >= 1 && value < kMaxElements) {
        return static_cast<intptr_t>(value - 1);
      }
    }
  }
  return -1;
}

intptr_t Record::GetFieldIndexByName(Thread* thread,
                                     const String& field_name) const {
  ASSERT(field_name.IsSymbol());
  const intptr_t field_index =
      Record::GetPositionalFieldIndexFromFieldName(field_name);
  const Array& field_names = Array::Handle(GetFieldNames(thread));
  const intptr_t num_positional_fields = num_fields() - field_names.Length();
  if ((field_index >= 0) && (field_index < num_positional_fields)) {
    return field_index;
  } else {
    for (intptr_t i = 0, n = field_names.Length(); i < n; ++i) {
      if (field_names.At(i) == field_name.ptr()) {
        return num_positional_fields + i;
      }
    }
  }
  return -1;
}

class RecordFieldNamesMapTraits {
 public:
  static const char* Name() { return "RecordFieldNamesMapTraits"; }
  static bool ReportStats() { return false; }

  static bool IsMatch(const Object& a, const Object& b) {
    return Array::Cast(a).CanonicalizeEquals(Array::Cast(b));
  }

  static uword Hash(const Object& key) {
    return Array::Cast(key).CanonicalizeHash();
  }

  static ObjectPtr NewKey(const Array& arr) { return arr.ptr(); }
};
typedef UnorderedHashMap<RecordFieldNamesMapTraits> RecordFieldNamesMap;

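// Interns a list of record field names and returns the corresponding shape.
// Field-name arrays are deduplicated through a hash map in the object store,
// and the resulting index into the shared record_field_names table is packed
// into the shape together with the total number of fields. Index 0 is
// reserved for records without named fields. A read lock is tried first; the
// write lock is only taken when a new shape must be registered.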
RecordShape RecordShape::Register(Thread* thread,
                                  intptr_t num_fields,
                                  const Array& field_names) {
  ASSERT(!field_names.IsNull());
  ASSERT(field_names.IsImmutable());
  ASSERT(field_names.ptr() == Object::empty_array().ptr() ||
         field_names.Length() > 0);

  Zone* zone = thread->zone();
  IsolateGroup* isolate_group = thread->isolate_group();
  ObjectStore* object_store = isolate_group->object_store();

  if (object_store->record_field_names<std::memory_order_acquire>() ==
      Array::null()) {
    // First-time initialization.
    SafepointWriteRwLocker ml(thread, isolate_group->program_lock());
    if (object_store->record_field_names() == Array::null()) {
      // Reserve record field names index 0 for records without named fields.
      RecordFieldNamesMap map(
          HashTables::New<RecordFieldNamesMap>(16, Heap::kOld));
      map.InsertOrGetValue(Object::empty_array(),
                           Smi::Handle(zone, Smi::New(0)));
      ASSERT(map.NumOccupied() == 1);
      object_store->set_record_field_names_map(map.Release());
      const auto& table = Array::Handle(zone, Array::New(16));
      table.SetAt(0, Object::empty_array());
      object_store->set_record_field_names<std::memory_order_release>(table);
    }
  }

#if defined(DART_PRECOMPILER)
  const intptr_t kMaxNumFields = compiler::target::RecordShape::kMaxNumFields;
  const intptr_t kMaxFieldNamesIndex =
      compiler::target::RecordShape::kMaxFieldNamesIndex;
#else
  const intptr_t kMaxNumFields = RecordShape::kMaxNumFields;
  const intptr_t kMaxFieldNamesIndex = RecordShape::kMaxFieldNamesIndex;
#endif

  if (num_fields > kMaxNumFields) {
    FATAL("Too many record fields");
  }
  if (field_names.ptr() == Object::empty_array().ptr()) {
    return RecordShape::ForUnnamed(num_fields);
  }

  {
    SafepointReadRwLocker ml(thread, isolate_group->program_lock());
    RecordFieldNamesMap map(object_store->record_field_names_map());
    Smi& index = Smi::Handle(zone);
    index ^= map.GetOrNull(field_names);
    ASSERT(map.Release().ptr() == object_store->record_field_names_map());
    if (!index.IsNull()) {
      return RecordShape(num_fields, index.Value());
    }
  }

  SafepointWriteRwLocker ml(thread, isolate_group->program_lock());
  RecordFieldNamesMap map(object_store->record_field_names_map());
  const intptr_t new_index = map.NumOccupied();
  if (new_index > kMaxFieldNamesIndex) {
    FATAL("Too many record shapes");
  }

  const intptr_t index = Smi::Value(Smi::RawCast(map.InsertOrGetValue(
      field_names, Smi::Handle(zone, Smi::New(new_index)))));
  ASSERT(index > 0);

  if (index == new_index) {
    ASSERT(map.NumOccupied() == (new_index + 1));
    Array& table = Array::Handle(zone, object_store->record_field_names());
    intptr_t capacity = table.Length();
    if (index >= table.Length()) {
      // Grow the table by 25% when the new index does not fit.
      capacity = capacity + (capacity >> 2);
      table = Array::Grow(table, capacity);
      object_store->set_record_field_names(table);
    }
    table.SetAt(index, field_names);
  } else {
    ASSERT(index < new_index);
  }
  object_store->set_record_field_names_map(map.Release());

  const RecordShape shape(num_fields, index);
  ASSERT(shape.GetFieldNames(thread) == field_names.ptr());
  ASSERT(shape.num_fields() == num_fields);
  return shape;
}

ArrayPtr RecordShape::GetFieldNames(Thread* thread) const {
  ObjectStore* object_store = thread->isolate_group()->object_store();
  Array& table =
      Array::Handle(thread->zone(), object_store->record_field_names());
  ASSERT(!table.IsNull());
  return Array::RawCast(table.At(field_names_index()));
}

}  // namespace dart