1//===-- lib/runtime/assign.cpp ----------------------------------*- C++ -*-===//
2//
3// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4// See https://llvm.org/LICENSE.txt for license information.
5// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6//
7//===----------------------------------------------------------------------===//
8
9#include "flang/Runtime/assign.h"
10#include "flang-rt/runtime/assign-impl.h"
11#include "flang-rt/runtime/derived.h"
12#include "flang-rt/runtime/descriptor.h"
13#include "flang-rt/runtime/stat.h"
14#include "flang-rt/runtime/terminator.h"
15#include "flang-rt/runtime/tools.h"
16#include "flang-rt/runtime/type-info.h"
17#include "flang-rt/runtime/work-queue.h"
18
19namespace Fortran::runtime {
20
// Predicate: is the left-hand side of an assignment an allocated allocatable
// that must be deallocated?
// Returns true when F'2018 10.2.1.3(3) requires deallocation of the LHS
// before assignment: a mismatch in type, character length, dynamic derived
// type or its LEN parameters, or shape (or lower bounds, when requested).
// Side effect: when returning false with UpdateLHSBounds set, the LHS
// lower bounds are updated in place to match the RHS.
// (The "terminator" parameter is currently unused.)
static inline RT_API_ATTRS bool MustDeallocateLHS(
    Descriptor &to, const Descriptor &from, Terminator &terminator, int flags) {
  // Top-level assignments to allocatable variables (*not* components)
  // may first deallocate existing content if there's about to be a
  // change in type or shape; see F'2018 10.2.1.3(3).
  if (!(flags & MaybeReallocate)) {
    return false; // reallocation not allowed for this assignment
  }
  if (!to.IsAllocatable() || !to.IsAllocated()) {
    return false; // nothing allocated to deallocate
  }
  if (to.type() != from.type()) {
    return true; // type mismatch forces deallocation
  }
  // Character length mismatch forces deallocation unless the LHS has an
  // explicit length to preserve (padding/truncation will be used instead).
  if (!(flags & ExplicitLengthCharacterLHS) && to.type().IsCharacter() &&
      to.ElementBytes() != from.ElementBytes()) {
    return true;
  }
  if (flags & PolymorphicLHS) {
    // Compare the dynamic derived types and their LEN parameter values.
    DescriptorAddendum *toAddendum{to.Addendum()};
    const typeInfo::DerivedType *toDerived{
        toAddendum ? toAddendum->derivedType() : nullptr};
    const DescriptorAddendum *fromAddendum{from.Addendum()};
    const typeInfo::DerivedType *fromDerived{
        fromAddendum ? fromAddendum->derivedType() : nullptr};
    if (toDerived != fromDerived) {
      return true;
    }
    if (fromDerived) {
      // Distinct LEN parameters? Deallocate
      std::size_t lenParms{fromDerived->LenParameters()};
      for (std::size_t j{0}; j < lenParms; ++j) {
        if (toAddendum->LenParameterValue(j) !=
            fromAddendum->LenParameterValue(j)) {
          return true;
        }
      }
    }
  }
  if (from.rank() > 0) {
    // Distinct shape? Deallocate
    int rank{to.rank()};
    for (int j{0}; j < rank; ++j) {
      const auto &toDim{to.GetDimension(j)};
      const auto &fromDim{from.GetDimension(j)};
      if (toDim.Extent() != fromDim.Extent()) {
        return true;
      }
      // Differing lower bounds also force reallocation when the caller
      // asked for the LHS bounds to track the RHS.
      if ((flags & UpdateLHSBounds) &&
          toDim.LowerBound() != fromDim.LowerBound()) {
        return true;
      }
    }
  }
  // Not reallocating; may have to update bounds
  if (flags & UpdateLHSBounds) {
    int rank{to.rank()};
    for (int j{0}; j < rank; ++j) {
      to.GetDimension(j).SetLowerBound(from.GetDimension(j).LowerBound());
    }
  }
  return false;
}
86
// Utility: allocate the allocatable left-hand side, either because it was
// originally deallocated or because it required reallocation.
// Copies the RHS's type code, element length (unless the LHS has an
// explicit character length), derived type, and LEN parameter values into
// the LHS descriptor, establishes contiguous bounds/byte strides from the
// RHS when it is an array, then allocates.  Returns a Stat* code via
// ReturnError().
static RT_API_ATTRS int AllocateAssignmentLHS(
    Descriptor &to, const Descriptor &from, Terminator &terminator, int flags) {
  to.raw().type = from.raw().type;
  if (!(flags & ExplicitLengthCharacterLHS)) {
    // Adopt the RHS element length (e.g., deferred character length).
    to.raw().elem_len = from.ElementBytes();
  }
  const typeInfo::DerivedType *derived{nullptr};
  DescriptorAddendum *toAddendum{to.Addendum()};
  if (const DescriptorAddendum * fromAddendum{from.Addendum()}) {
    derived = fromAddendum->derivedType();
    if (toAddendum) {
      toAddendum->set_derivedType(derived);
      // Propagate the RHS's LEN type parameter values.
      std::size_t lenParms{derived ? derived->LenParameters() : 0};
      for (std::size_t j{0}; j < lenParms; ++j) {
        toAddendum->SetLenParameterValue(j, fromAddendum->LenParameterValue(j));
      }
    }
  } else if (toAddendum) {
    toAddendum->set_derivedType(nullptr);
  }
  // subtle: leave bounds in place when "from" is scalar (10.2.1.3(3))
  int rank{from.rank()};
  auto stride{static_cast<SubscriptValue>(to.ElementBytes())};
  for (int j{0}; j < rank; ++j) {
    auto &toDim{to.GetDimension(j)};
    const auto &fromDim{from.GetDimension(j)};
    toDim.SetBounds(fromDim.LowerBound(), fromDim.UpperBound());
    toDim.SetByteStride(stride);
    stride *= toDim.Extent(); // column-major contiguous layout
  }
  return ReturnError(terminator, to.Allocate(kNoAsyncObject));
}
121
122// least <= 0, most >= 0
123static RT_API_ATTRS void MaximalByteOffsetRange(
124 const Descriptor &desc, std::int64_t &least, std::int64_t &most) {
125 least = most = 0;
126 if (desc.ElementBytes() == 0) {
127 return;
128 }
129 int n{desc.raw().rank};
130 for (int j{0}; j < n; ++j) {
131 const auto &dim{desc.GetDimension(j)};
132 auto extent{dim.Extent()};
133 if (extent > 0) {
134 auto sm{dim.ByteStride()};
135 if (sm < 0) {
136 least += (extent - 1) * sm;
137 } else {
138 most += (extent - 1) * sm;
139 }
140 }
141 }
142 most += desc.ElementBytes() - 1;
143}
144
145static inline RT_API_ATTRS bool RangesOverlap(const char *aStart,
146 const char *aEnd, const char *bStart, const char *bEnd) {
147 return aEnd >= bStart && bEnd >= aStart;
148}
149
150// Predicate: could the left-hand and right-hand sides of the assignment
151// possibly overlap in memory? Note that the descriptors themeselves
152// are included in the test.
153static RT_API_ATTRS bool MayAlias(const Descriptor &x, const Descriptor &y) {
154 const char *xBase{x.OffsetElement()};
155 const char *yBase{y.OffsetElement()};
156 if (!xBase || !yBase) {
157 return false; // not both allocated
158 }
159 const char *xDesc{reinterpret_cast<const char *>(&x)};
160 const char *xDescLast{xDesc + x.SizeInBytes() - 1};
161 const char *yDesc{reinterpret_cast<const char *>(&y)};
162 const char *yDescLast{yDesc + y.SizeInBytes() - 1};
163 std::int64_t xLeast, xMost, yLeast, yMost;
164 MaximalByteOffsetRange(x, xLeast, xMost);
165 MaximalByteOffsetRange(y, yLeast, yMost);
166 if (RangesOverlap(xDesc, xDescLast, yBase + yLeast, yBase + yMost) ||
167 RangesOverlap(yDesc, yDescLast, xBase + xLeast, xBase + xMost)) {
168 // A descriptor overlaps with the storage described by the other;
169 // this can arise when an allocatable or pointer component is
170 // being assigned to/from.
171 return true;
172 }
173 if (!RangesOverlap(
174 xBase + xLeast, xBase + xMost, yBase + yLeast, yBase + yMost)) {
175 return false; // no storage overlap
176 }
177 // TODO: check dimensions: if any is independent, return false
178 return true;
179}
180
181static RT_API_ATTRS void DoScalarDefinedAssignment(const Descriptor &to,
182 const Descriptor &from, const typeInfo::DerivedType &derived,
183 const typeInfo::SpecialBinding &special) {
184 bool toIsDesc{special.IsArgDescriptor(0)};
185 bool fromIsDesc{special.IsArgDescriptor(1)};
186 const auto *bindings{
187 derived.binding().OffsetElement<const typeInfo::Binding>()};
188 if (toIsDesc) {
189 if (fromIsDesc) {
190 auto *p{special.GetProc<void (*)(const Descriptor &, const Descriptor &)>(
191 bindings)};
192 p(to, from);
193 } else {
194 auto *p{special.GetProc<void (*)(const Descriptor &, void *)>(bindings)};
195 p(to, from.raw().base_addr);
196 }
197 } else {
198 if (fromIsDesc) {
199 auto *p{special.GetProc<void (*)(void *, const Descriptor &)>(bindings)};
200 p(to.raw().base_addr, from);
201 } else {
202 auto *p{special.GetProc<void (*)(void *, void *)>(bindings)};
203 p(to.raw().base_addr, from.raw().base_addr);
204 }
205 }
206}
207
// Invokes an elemental defined ASSIGNMENT(=) binding once per element
// pair of the LHS/RHS arrays, in element sequence order.
static RT_API_ATTRS void DoElementalDefinedAssignment(const Descriptor &to,
    const Descriptor &from, const typeInfo::DerivedType &derived,
    const typeInfo::SpecialBinding &special) {
  SubscriptValue toAt[maxRank], fromAt[maxRank];
  to.GetLowerBounds(toAt);
  from.GetLowerBounds(fromAt);
  // Two static scalar pointer descriptors are re-aimed at each element
  // pair in turn so that DoScalarDefinedAssignment() can make the calls.
  StaticDescriptor<maxRank, true, 8 /*?*/> statDesc[2];
  Descriptor &toElementDesc{statDesc[0].descriptor()};
  Descriptor &fromElementDesc{statDesc[1].descriptor()};
  toElementDesc.Establish(derived, nullptr, 0, nullptr, CFI_attribute_pointer);
  fromElementDesc.Establish(
      derived, nullptr, 0, nullptr, CFI_attribute_pointer);
  for (std::size_t toElements{to.InlineElements()}; toElements-- > 0;
      to.IncrementSubscripts(toAt), from.IncrementSubscripts(fromAt)) {
    toElementDesc.set_base_addr(to.Element<char>(toAt));
    fromElementDesc.set_base_addr(from.Element<char>(fromAt));
    DoScalarDefinedAssignment(toElementDesc, fromElementDesc, derived, special);
  }
}
227
228template <typename CHAR>
229static RT_API_ATTRS void BlankPadCharacterAssignment(Descriptor &to,
230 const Descriptor &from, SubscriptValue toAt[], SubscriptValue fromAt[],
231 std::size_t elements, std::size_t toElementBytes,
232 std::size_t fromElementBytes) {
233 std::size_t padding{(toElementBytes - fromElementBytes) / sizeof(CHAR)};
234 std::size_t copiedCharacters{fromElementBytes / sizeof(CHAR)};
235 for (; elements-- > 0;
236 to.IncrementSubscripts(toAt), from.IncrementSubscripts(fromAt)) {
237 CHAR *p{to.Element<CHAR>(toAt)};
238 Fortran::runtime::memmove(
239 p, from.Element<std::add_const_t<CHAR>>(fromAt), fromElementBytes);
240 p += copiedCharacters;
241 for (auto n{padding}; n-- > 0;) {
242 *p++ = CHAR{' '};
243 }
244 }
245}
246
247RT_OFFLOAD_API_GROUP_BEGIN
248
249// Common implementation of assignments, both intrinsic assignments and
250// those cases of polymorphic user-defined ASSIGNMENT(=) TBPs that could not
251// be resolved in semantics. Most assignment statements do not need any
252// of the capabilities of this function -- but when the LHS is allocatable,
253// the type might have a user-defined ASSIGNMENT(=), or the type might be
254// finalizable, this function should be used.
255// When "to" is not a whole allocatable, "from" is an array, and defined
256// assignments are not used, "to" and "from" only need to have the same number
257// of elements, but their shape need not to conform (the assignment is done in
258// element sequence order). This facilitates some internal usages, like when
259// dealing with array constructors.
260RT_API_ATTRS void Assign(Descriptor &to, const Descriptor &from,
261 Terminator &terminator, int flags, MemmoveFct memmoveFct) {
262 WorkQueue workQueue{terminator};
263 if (workQueue.BeginAssign(to, from, flags, memmoveFct, nullptr) ==
264 StatContinue) {
265 workQueue.Run();
266 }
267}
268
// Phase 1 of an assignment ticket: decides whether the LHS must be
// deallocated, handles LHS/RHS aliasing (possibly by copying the RHS into
// a temporary), and enqueues any finalization/destruction work that must
// precede the data transfer.  Returns StatContinue so that Continue()
// runs after any child tickets complete, or an error status.
RT_API_ATTRS int AssignTicket::Begin(WorkQueue &workQueue) {
  bool mustDeallocateLHS{(flags_ & DeallocateLHS) ||
      MustDeallocateLHS(to_, *from_, workQueue.terminator(), flags_)};
  DescriptorAddendum *toAddendum{to_.Addendum()};
  toDerived_ = toAddendum ? toAddendum->derivedType() : nullptr;
  if (toDerived_ && (flags_ & NeedFinalization) &&
      toDerived_->noFinalizationNeeded()) {
    flags_ &= ~NeedFinalization; // the type has nothing to finalize
  }
  if (MayAlias(to_, *from_)) {
    if (mustDeallocateLHS) {
      // Convert the LHS into a temporary, then make it look deallocated.
      toDeallocate_ = &tempDescriptor_.descriptor();
      persist_ = true; // tempDescriptor_ state must outlive child tickets
      std::memcpy(
          reinterpret_cast<void *>(toDeallocate_), &to_, to_.SizeInBytes());
      to_.set_base_addr(nullptr);
      if (toDerived_ && (flags_ & NeedFinalization)) {
        // Finalize the old LHS value, now owned by the temporary.
        if (int status{workQueue.BeginFinalize(*toDeallocate_, *toDerived_)};
            status != StatOk && status != StatContinue) {
          return status;
        }
        flags_ &= ~NeedFinalization;
      }
    } else if (!IsSimpleMemmove()) {
      // Handle LHS/RHS aliasing by copying RHS into a temp, then
      // recursively assigning from that temp.
      auto descBytes{from_->SizeInBytes()};
      Descriptor &newFrom{tempDescriptor_.descriptor()};
      persist_ = true; // tempDescriptor_ state must outlive child tickets
      std::memcpy(reinterpret_cast<void *>(&newFrom), from_, descBytes);
      // Pretend the temporary descriptor is for an ALLOCATABLE
      // entity, otherwise, the Deallocate() below will not
      // free the descriptor memory.
      newFrom.raw().attribute = CFI_attribute_allocatable;
      if (int stat{ReturnError(
              workQueue.terminator(), newFrom.Allocate(kNoAsyncObject))};
          stat != StatOk) {
        return stat;
      }
      if (HasDynamicComponent(*from_)) {
        // If 'from' has allocatable/automatic component, we cannot
        // just make a shallow copy of the descriptor member.
        // This will still leave data overlap in 'to' and 'newFrom'.
        // For example:
        //   type t
        //     character, allocatable :: c(:)
        //   end type t
        //   type(t) :: x(3)
        //   x(2:3) = x(1:2)
        // We have to make a deep copy into 'newFrom' in this case.
        if (const DescriptorAddendum *addendum{newFrom.Addendum()}) {
          if (const auto *derived{addendum->derivedType()}) {
            if (!derived->noInitializationNeeded()) {
              // Default-initialize the temporary before deep-copying
              // into it.
              if (int status{workQueue.BeginInitialize(newFrom, *derived)};
                  status != StatOk && status != StatContinue) {
                return status;
              }
            }
          }
        }
        static constexpr int nestedFlags{MaybeReallocate | PolymorphicLHS};
        if (int status{workQueue.BeginAssign(
                newFrom, *from_, nestedFlags, memmoveFct_, nullptr)};
            status != StatOk && status != StatContinue) {
          return status;
        }
      } else {
        ShallowCopy(newFrom, *from_, true, from_->IsContiguous());
      }
      from_ = &newFrom; // this is why from_ has to be a pointer
      // Keep only the flags that remain relevant when assigning from the
      // non-aliasing temporary copy.
      flags_ &= NeedFinalization | ComponentCanBeDefinedAssignment |
          ExplicitLengthCharacterLHS | CanBeDefinedAssignment;
      toDeallocate_ = &newFrom;
    }
  }
  if (to_.IsAllocatable()) {
    if (mustDeallocateLHS) {
      if (!toDeallocate_ && to_.IsAllocated()) {
        // Defer the actual deallocation to Continue(), which reallocates.
        toDeallocate_ = &to_;
      }
    } else if (to_.rank() != from_->rank() && !to_.IsAllocated()) {
      workQueue.terminator().Crash("Assign: mismatched ranks (%d != %d) in "
                                   "assignment to unallocated allocatable",
          to_.rank(), from_->rank());
    }
  } else if (!to_.IsAllocated()) {
    workQueue.terminator().Crash(
        "Assign: left-hand side variable is neither allocated nor allocatable");
  }
  if (toDerived_ && to_.IsAllocated()) {
    // Schedule finalization or destruction of the LHS.
    if (flags_ & NeedFinalization) {
      if (int status{workQueue.BeginFinalize(to_, *toDerived_)};
          status != StatOk && status != StatContinue) {
        return status;
      }
    } else if (!toDerived_->noDestructionNeeded()) {
      if (int status{
              workQueue.BeginDestroy(to_, *toDerived_, /*finalize=*/false)};
          status != StatOk && status != StatContinue) {
        return status;
      }
    }
  }
  return StatContinue;
}
376
// Later phases of an assignment ticket: runs after all child tickets
// created by Begin() (or by a previous Continue()) have completed.
// Performs pending LHS deallocation and (re)allocation, dispatches
// defined ASSIGNMENT(=) when applicable, and otherwise performs the
// intrinsic assignment (possibly via a derived-type child ticket).
RT_API_ATTRS int AssignTicket::Continue(WorkQueue &workQueue) {
  if (done_) {
    // All child tickets are complete; can release this ticket's state.
    if (toDeallocate_) {
      toDeallocate_->Deallocate();
    }
    return StatOk;
  }
  // All necessary finalization or destruction that was initiated by Begin()
  // has been completed. Deallocation may be pending, and if it's for the LHS,
  // do it now so that the LHS gets reallocated.
  if (toDeallocate_ == &to_) {
    toDeallocate_ = nullptr;
    to_.Deallocate();
  }
  // Allocate the LHS if needed
  if (!to_.IsAllocated()) {
    if (int stat{
            AllocateAssignmentLHS(to_, *from_, workQueue.terminator(), flags_)};
        stat != StatOk) {
      return stat;
    }
    // The LHS's dynamic type may have changed with the (re)allocation.
    const auto *addendum{to_.Addendum()};
    toDerived_ = addendum ? addendum->derivedType() : nullptr;
    if (toDerived_) {
      if (!toDerived_->noInitializationNeeded()) {
        // Schedule default initialization of the newly allocated LHS.
        if (int status{workQueue.BeginInitialize(to_, *toDerived_)};
            status != StatOk) {
          return status;
        }
      }
    }
  }
  // Check for a user-defined assignment type-bound procedure;
  // see 10.2.1.4-5.
  // Note that the aliasing and LHS (re)allocation handling above
  // needs to run even with CanBeDefinedAssignment flag, since
  // Assign() can be invoked recursively for component-wise assignments.
  // The declared type (if known) must be used for generic resolution
  // of ASSIGNMENT(=) to a binding, but that binding can be overridden.
  if (declaredType_ && (flags_ & CanBeDefinedAssignment)) {
    if (to_.rank() == 0) {
      if (const auto *special{declaredType_->FindSpecialBinding(
              typeInfo::SpecialBinding::Which::ScalarAssignment)}) {
        DoScalarDefinedAssignment(to_, *from_, *toDerived_, *special);
        done_ = true;
        return StatContinue; // cleanup happens in the done_ case above
      }
    }
    if (const auto *special{declaredType_->FindSpecialBinding(
            typeInfo::SpecialBinding::Which::ElementalAssignment)}) {
      DoElementalDefinedAssignment(to_, *from_, *toDerived_, *special);
      done_ = true;
      return StatContinue; // cleanup happens in the done_ case above
    }
  }
  // Intrinsic assignment
  std::size_t toElements{to_.InlineElements()};
  if (from_->rank() > 0) {
    std::size_t fromElements{from_->InlineElements()};
    if (toElements != fromElements) {
      workQueue.terminator().Crash("Assign: mismatching element counts in "
                                   "array assignment (to %zd, from %zd)",
          toElements, fromElements);
    }
  }
  if (to_.type() != from_->type()) {
    workQueue.terminator().Crash(
        "Assign: mismatching types (to code %d != from code %d)",
        to_.type().raw(), from_->type().raw());
  }
  std::size_t toElementBytes{to_.ElementBytes()};
  std::size_t fromElementBytes{from_->ElementBytes()};
  // Only character assignments may legitimately have differing element
  // sizes (handled below by padding or truncation).
  if (toElementBytes > fromElementBytes && !to_.type().IsCharacter()) {
    workQueue.terminator().Crash("Assign: mismatching non-character element "
                                 "sizes (to %zd bytes != from %zd bytes)",
        toElementBytes, fromElementBytes);
  }
  if (toDerived_) {
    // Derived-type intrinsic assignment is delegated to a child ticket,
    // which also takes over any pending deallocation (toDeallocate_).
    if (toDerived_->noDefinedAssignment()) { // componentwise
      if (int status{workQueue.BeginDerivedAssign<true>(
              to_, *from_, *toDerived_, flags_, memmoveFct_, toDeallocate_)};
          status != StatOk && status != StatContinue) {
        return status;
      }
    } else { // elementwise
      if (int status{workQueue.BeginDerivedAssign<false>(
              to_, *from_, *toDerived_, flags_, memmoveFct_, toDeallocate_)};
          status != StatOk && status != StatContinue) {
        return status;
      }
    }
    toDeallocate_ = nullptr;
  } else if (IsSimpleMemmove()) {
    memmoveFct_(to_.raw().base_addr, from_->raw().base_addr,
        toElements * toElementBytes);
  } else {
    // Scalar expansion of the RHS is implied by using the same empty
    // subscript values on each (seemingly) elemental reference into
    // "from".
    SubscriptValue toAt[maxRank];
    to_.GetLowerBounds(toAt);
    SubscriptValue fromAt[maxRank];
    from_->GetLowerBounds(fromAt);
    if (toElementBytes > fromElementBytes) { // blank padding
      switch (to_.type().raw()) {
      case CFI_type_signed_char:
      case CFI_type_char:
        BlankPadCharacterAssignment<char>(to_, *from_, toAt, fromAt, toElements,
            toElementBytes, fromElementBytes);
        break;
      case CFI_type_char16_t:
        BlankPadCharacterAssignment<char16_t>(to_, *from_, toAt, fromAt,
            toElements, toElementBytes, fromElementBytes);
        break;
      case CFI_type_char32_t:
        BlankPadCharacterAssignment<char32_t>(to_, *from_, toAt, fromAt,
            toElements, toElementBytes, fromElementBytes);
        break;
      default:
        workQueue.terminator().Crash(
            "unexpected type code %d in blank padded Assign()",
            to_.type().raw());
      }
    } else { // elemental copies, possibly with character truncation
      for (std::size_t n{toElements}; n-- > 0;
          to_.IncrementSubscripts(toAt), from_->IncrementSubscripts(fromAt)) {
        memmoveFct_(to_.Element<char>(toAt), from_->Element<const char>(fromAt),
            toElementBytes);
      }
    }
  }
  if (persist_) {
    // The temporary set up in Begin() must outlive child tickets; the
    // deferred deallocation runs in the done_ case above.
    done_ = true;
    return StatContinue;
  } else {
    if (toDeallocate_) {
      toDeallocate_->Deallocate();
      toDeallocate_ = nullptr;
    }
    return StatOk;
  }
}
520
// Begins a derived-type assignment: attempts a single-memmove fast path,
// otherwise establishes the flags for nested component assignments and
// copies any procedure pointer components before Continue() walks the
// data components.
template <bool IS_COMPONENTWISE>
RT_API_ATTRS int DerivedAssignTicket<IS_COMPONENTWISE>::Begin(
    WorkQueue &workQueue) {
  if (toIsContiguous_ && fromIsContiguous_ &&
      this->derived_.noDestructionNeeded() &&
      this->derived_.noDefinedAssignment() &&
      this->instance_.rank() == this->from_->rank()) {
    if (std::size_t elementBytes{this->instance_.ElementBytes()};
        elementBytes == this->from_->ElementBytes()) {
      // Fastest path. Both LHS and RHS are contiguous, RHS is not a scalar
      // to be expanded, the types have the same size, and there are no
      // allocatable components or defined ASSIGNMENT(=) at any level.
      // NOTE(review): OffsetElement<const char *> yields a const char **;
      // the pointer value is the same as OffsetElement<const char> would
      // give, so the copy is unaffected, but <const char> was presumably
      // intended — confirm.
      memmoveFct_(this->instance_.template OffsetElement<char>(),
          this->from_->template OffsetElement<const char *>(),
          this->instance_.InlineElements() * elementBytes);
      return StatOk;
    }
  }
  // Use PolymorphicLHS for components so that the right things happen
  // when the components are polymorphic; when they're not, they're both
  // not, and their declared types will match.
  int nestedFlags{MaybeReallocate | PolymorphicLHS};
  if (flags_ & ComponentCanBeDefinedAssignment) {
    nestedFlags |= CanBeDefinedAssignment | ComponentCanBeDefinedAssignment;
  }
  flags_ = nestedFlags;
  // Copy procedure pointer components
  const Descriptor &procPtrDesc{this->derived_.procPtr()};
  bool noDataComponents{this->IsComplete()};
  if (std::size_t numProcPtrs{procPtrDesc.InlineElements()}) {
    for (std::size_t k{0}; k < numProcPtrs; ++k) {
      const auto &procPtr{
          *procPtrDesc.ZeroBasedIndexedElement<typeInfo::ProcPtrComponent>(k)};
      // Loop only over elements
      if (k > 0) {
        Elementwise::Reset();
      }
      for (; !Elementwise::IsComplete(); Elementwise::Advance()) {
        memmoveFct_(this->instance_.template ElementComponent<char>(
                        this->subscripts_, procPtr.offset),
            this->from_->template ElementComponent<const char>(
                this->fromSubscripts_, procPtr.offset),
            sizeof(typeInfo::ProcedurePointer));
      }
    }
    if (noDataComponents) {
      return StatOk;
    }
    Elementwise::Reset(); // restart the element traversal for Continue()
  }
  if (noDataComponents) {
    return StatOk;
  }
  return StatContinue;
}
template RT_API_ATTRS int DerivedAssignTicket<false>::Begin(WorkQueue &);
template RT_API_ATTRS int DerivedAssignTicket<true>::Begin(WorkQueue &);
578
// Walks the derived type's components, assigning each; resumable, since
// derived-type and allocatable components spawn nested tickets (Advance()
// is called *before* returning a child's status so that resumption moves
// on to the next component/element).  Finally performs the deallocation
// deferred from the parent AssignTicket, if any.
template <bool IS_COMPONENTWISE>
RT_API_ATTRS int DerivedAssignTicket<IS_COMPONENTWISE>::Continue(
    WorkQueue &workQueue) {
  while (!this->IsComplete()) {
    // Copy the data components (incl. the parent) first.
    switch (this->component_->genre()) {
    case typeInfo::Component::Genre::Data:
      if (this->component_->category() == TypeCategory::Derived) {
        // Derived-type component: assign via a nested Assign ticket.
        Descriptor &toCompDesc{this->componentDescriptor_.descriptor()};
        Descriptor &fromCompDesc{this->fromComponentDescriptor_.descriptor()};
        this->component_->CreatePointerDescriptor(toCompDesc, this->instance_,
            workQueue.terminator(), this->subscripts_);
        this->component_->CreatePointerDescriptor(fromCompDesc, *this->from_,
            workQueue.terminator(), this->fromSubscripts_);
        const auto *componentDerived{this->component_->derivedType()};
        this->Advance();
        if (int status{workQueue.BeginAssign(toCompDesc, fromCompDesc, flags_,
                memmoveFct_, componentDerived)};
            status != StatOk) {
          return status;
        }
      } else { // Component has intrinsic type; simply copy raw bytes
        std::size_t componentByteSize{
            this->component_->SizeInBytes(this->instance_)};
        if (IS_COMPONENTWISE && toIsContiguous_ && fromIsContiguous_) {
          // Contiguous componentwise traversal: copy this component for
          // all elements in one pass (single or strided memmove).
          std::size_t offset{
              static_cast<std::size_t>(this->component_->offset())};
          char *to{this->instance_.template OffsetElement<char>(offset)};
          const char *from{
              this->from_->template OffsetElement<const char>(offset)};
          std::size_t toElementStride{this->instance_.ElementBytes()};
          // A scalar RHS is expanded by using a zero source stride.
          std::size_t fromElementStride{
              this->from_->rank() == 0 ? 0 : this->from_->ElementBytes()};
          if (toElementStride == fromElementStride &&
              toElementStride == componentByteSize) {
            memmoveFct_(to, from, this->elements_ * componentByteSize);
          } else {
            for (std::size_t n{this->elements_}; n--;
                to += toElementStride, from += fromElementStride) {
              memmoveFct_(to, from, componentByteSize);
            }
          }
          this->SkipToNextComponent();
        } else {
          memmoveFct_(
              this->instance_.template Element<char>(this->subscripts_) +
                  this->component_->offset(),
              this->from_->template Element<const char>(this->fromSubscripts_) +
                  this->component_->offset(),
              componentByteSize);
          this->Advance();
        }
      }
      break;
    case typeInfo::Component::Genre::Pointer: {
      // Pointer components: copy the descriptor bytes (shallow copy of
      // the pointer association).
      std::size_t componentByteSize{
          this->component_->SizeInBytes(this->instance_)};
      if (IS_COMPONENTWISE && toIsContiguous_ && fromIsContiguous_) {
        std::size_t offset{
            static_cast<std::size_t>(this->component_->offset())};
        char *to{this->instance_.template OffsetElement<char>(offset)};
        const char *from{
            this->from_->template OffsetElement<const char>(offset)};
        std::size_t toElementStride{this->instance_.ElementBytes()};
        std::size_t fromElementStride{
            this->from_->rank() == 0 ? 0 : this->from_->ElementBytes()};
        if (toElementStride == fromElementStride &&
            toElementStride == componentByteSize) {
          memmoveFct_(to, from, this->elements_ * componentByteSize);
        } else {
          for (std::size_t n{this->elements_}; n--;
              to += toElementStride, from += fromElementStride) {
            memmoveFct_(to, from, componentByteSize);
          }
        }
        this->SkipToNextComponent();
      } else {
        memmoveFct_(this->instance_.template Element<char>(this->subscripts_) +
                this->component_->offset(),
            this->from_->template Element<const char>(this->fromSubscripts_) +
                this->component_->offset(),
            componentByteSize);
        this->Advance();
      }
    } break;
    case typeInfo::Component::Genre::Allocatable:
    case typeInfo::Component::Genre::Automatic: {
      auto *toDesc{reinterpret_cast<Descriptor *>(
          this->instance_.template Element<char>(this->subscripts_) +
          this->component_->offset())};
      const auto *fromDesc{reinterpret_cast<const Descriptor *>(
          this->from_->template Element<char>(this->fromSubscripts_) +
          this->component_->offset())};
      const auto *componentDerived{this->component_->derivedType()};
      if (toDesc->IsAllocatable() && !fromDesc->IsAllocated()) {
        // RHS component is unallocated: deallocate the LHS component,
        // destroying it first when needed.
        if (toDesc->IsAllocated()) {
          if (this->phase_ == 0) {
            if (componentDerived && !componentDerived->noDestructionNeeded()) {
              if (int status{workQueue.BeginDestroy(
                      *toDesc, *componentDerived, /*finalize=*/false)};
                  status != StatOk) {
                this->phase_++; // resume after destruction at phase 1
                return status;
              }
            }
          }
          toDesc->Deallocate();
        }
        this->Advance();
      } else {
        // Allocatable components of the LHS are unconditionally
        // deallocated before assignment (F'2018 10.2.1.3(13)(1)),
        // unlike a "top-level" assignment to a variable, where
        // deallocation is optional.
        int nestedFlags{flags_};
        if (!componentDerived ||
            (componentDerived->noFinalizationNeeded() &&
                componentDerived->noInitializationNeeded() &&
                componentDerived->noDestructionNeeded())) {
          // The actual deallocation might be avoidable when the existing
          // location can be reoccupied.
          nestedFlags |= MaybeReallocate | UpdateLHSBounds;
        } else {
          // Force LHS deallocation with DeallocateLHS flag.
          nestedFlags |= DeallocateLHS;
        }
        this->Advance();
        if (int status{workQueue.BeginAssign(*toDesc, *fromDesc, nestedFlags,
                memmoveFct_, componentDerived)};
            status != StatOk) {
          return status;
        }
      }
    } break;
    }
  }
  // All components processed; perform the deallocation deferred from the
  // parent AssignTicket, if any.
  if (deallocateAfter_) {
    deallocateAfter_->Deallocate();
  }
  return StatOk;
}
template RT_API_ATTRS int DerivedAssignTicket<false>::Continue(WorkQueue &);
template RT_API_ATTRS int DerivedAssignTicket<true>::Continue(WorkQueue &);
722
// Implements assignment of SOURCE= to an allocated object.  When the
// allocated object is an array but SOURCE= is scalar, the scalar value is
// assigned to every element; otherwise this is an ordinary Assign() with
// no special flags.
RT_API_ATTRS void DoFromSourceAssign(Descriptor &alloc,
    const Descriptor &source, Terminator &terminator, MemmoveFct memmoveFct) {
  if (alloc.rank() > 0 && source.rank() == 0) {
    // The value of each element of allocate object becomes the value of source.
    DescriptorAddendum *allocAddendum{alloc.Addendum()};
    const typeInfo::DerivedType *allocDerived{
        allocAddendum ? allocAddendum->derivedType() : nullptr};
    SubscriptValue allocAt[maxRank];
    alloc.GetLowerBounds(allocAt);
    if (allocDerived) {
      // Derived type: run a full Assign() per element via a scalar
      // descriptor aimed at that element.
      for (std::size_t n{alloc.InlineElements()}; n-- > 0;
          alloc.IncrementSubscripts(allocAt)) {
        Descriptor allocElement{*Descriptor::Create(*allocDerived,
            reinterpret_cast<void *>(alloc.Element<char>(allocAt)), 0)};
        Assign(allocElement, source, terminator, NoAssignFlags, memmoveFct);
      }
    } else { // intrinsic type
      for (std::size_t n{alloc.InlineElements()}; n-- > 0;
          alloc.IncrementSubscripts(allocAt)) {
        memmoveFct(alloc.Element<char>(allocAt), source.raw().base_addr,
            alloc.ElementBytes());
      }
    }
  } else {
    Assign(alloc, source, terminator, NoAssignFlags, memmoveFct);
  }
}
750
751RT_OFFLOAD_API_GROUP_END
752
753extern "C" {
754RT_EXT_API_GROUP_BEGIN
755
756void RTDEF(Assign)(Descriptor &to, const Descriptor &from,
757 const char *sourceFile, int sourceLine) {
758 Terminator terminator{sourceFile, sourceLine};
759 // All top-level defined assignments can be recognized in semantics and
760 // will have been already been converted to calls, so don't check for
761 // defined assignment apart from components.
762 Assign(to, from, terminator,
763 MaybeReallocate | NeedFinalization | ComponentCanBeDefinedAssignment);
764}
765
766void RTDEF(AssignTemporary)(Descriptor &to, const Descriptor &from,
767 const char *sourceFile, int sourceLine) {
768 Terminator terminator{sourceFile, sourceLine};
769 // Initialize the "to" if it is of derived type that needs initialization.
770 if (const DescriptorAddendum * addendum{to.Addendum()}) {
771 if (const auto *derived{addendum->derivedType()}) {
772 // Do not invoke the initialization, if the descriptor is unallocated.
773 // AssignTemporary() is used for component-by-component assignments,
774 // for example, for structure constructors. This means that the LHS
775 // may be an allocatable component with unallocated status.
776 // The initialization will just fail in this case. By skipping
777 // the initialization we let Assign() automatically allocate
778 // and initialize the component according to the RHS.
779 // So we only need to initialize the LHS here if it is allocated.
780 // Note that initializing already initialized entity has no visible
781 // effect, though, it is assumed that the compiler does not initialize
782 // the temporary and leaves the initialization to this runtime code.
783 if (!derived->noInitializationNeeded() && to.IsAllocated()) {
784 if (ReturnError(terminator, Initialize(to, *derived, terminator)) !=
785 StatOk) {
786 return;
787 }
788 }
789 }
790 }
791 Assign(to, from, terminator, MaybeReallocate | PolymorphicLHS);
792}
793
// Copy-in for argument association: makes "temp" an allocatable clone of
// "var" holding a shallow copy of its data.
void RTDEF(CopyInAssign)(Descriptor &temp, const Descriptor &var,
    const char *sourceFile, int sourceLine) {
  Terminator terminator{sourceFile, sourceLine};
  temp = var; // copy descriptor contents (type, shape, &c.)
  temp.set_base_addr(nullptr);
  temp.raw().attribute = CFI_attribute_allocatable;
  // NOTE(review): the Allocate() status is not checked here; an allocation
  // failure would be detected only in the ShallowCopy below — confirm that
  // this is intended.
  temp.Allocate(kNoAsyncObject);
  ShallowCopy(temp, var);
}
803
804void RTDEF(CopyOutAssign)(
805 Descriptor *var, Descriptor &temp, const char *sourceFile, int sourceLine) {
806 Terminator terminator{sourceFile, sourceLine};
807 // Copyout from the temporary must not cause any finalizations
808 // for LHS. The variable must be properly initialized already.
809 if (var) {
810 ShallowCopy(*var, temp);
811 }
812 temp.Deallocate();
813}
814
815void RTDEF(AssignExplicitLengthCharacter)(Descriptor &to,
816 const Descriptor &from, const char *sourceFile, int sourceLine) {
817 Terminator terminator{sourceFile, sourceLine};
818 Assign(to, from, terminator,
819 MaybeReallocate | NeedFinalization | ComponentCanBeDefinedAssignment |
820 ExplicitLengthCharacterLHS);
821}
822
823void RTDEF(AssignPolymorphic)(Descriptor &to, const Descriptor &from,
824 const char *sourceFile, int sourceLine) {
825 Terminator terminator{sourceFile, sourceLine};
826 Assign(to, from, terminator,
827 MaybeReallocate | NeedFinalization | ComponentCanBeDefinedAssignment |
828 PolymorphicLHS);
829}
830
831RT_EXT_API_GROUP_END
832} // extern "C"
833} // namespace Fortran::runtime
834

source code of flang-rt/lib/runtime/assign.cpp