| 1 | //===-- lib/runtime/derived.cpp ---------------------------------*- C++ -*-===// |
| 2 | // |
| 3 | // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. |
| 4 | // See https://llvm.org/LICENSE.txt for license information. |
| 5 | // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception |
| 6 | // |
| 7 | //===----------------------------------------------------------------------===// |
| 8 | |
| 9 | #include "flang-rt/runtime/derived.h" |
| 10 | #include "flang-rt/runtime/descriptor.h" |
| 11 | #include "flang-rt/runtime/stat.h" |
| 12 | #include "flang-rt/runtime/terminator.h" |
| 13 | #include "flang-rt/runtime/tools.h" |
| 14 | #include "flang-rt/runtime/type-info.h" |
| 15 | #include "flang-rt/runtime/work-queue.h" |
| 16 | |
| 17 | namespace Fortran::runtime { |
| 18 | |
| 19 | RT_OFFLOAD_API_GROUP_BEGIN |
| 20 | |
| 21 | // Fill "extents" array with the extents of component "comp" from derived type |
| 22 | // instance "derivedInstance". |
| 23 | static RT_API_ATTRS void GetComponentExtents(SubscriptValue (&extents)[maxRank], |
| 24 | const typeInfo::Component &comp, const Descriptor &derivedInstance) { |
| 25 | const typeInfo::Value *bounds{comp.bounds()}; |
| 26 | for (int dim{0}; dim < comp.rank(); ++dim) { |
| 27 | auto lb{bounds[2 * dim].GetValue(&derivedInstance).value_or(0)}; |
| 28 | auto ub{bounds[2 * dim + 1].GetValue(&derivedInstance).value_or(0)}; |
| 29 | extents[dim] = ub >= lb ? static_cast<SubscriptValue>(ub - lb + 1) : 0; |
| 30 | } |
| 31 | } |
| 32 | |
| 33 | RT_API_ATTRS int Initialize(const Descriptor &instance, |
| 34 | const typeInfo::DerivedType &derived, Terminator &terminator, bool, |
| 35 | const Descriptor *) { |
| 36 | WorkQueue workQueue{terminator}; |
| 37 | int status{workQueue.BeginInitialize(instance, derived)}; |
| 38 | return status == StatContinue ? workQueue.Run() : status; |
| 39 | } |
| 40 | |
| 41 | RT_API_ATTRS int InitializeTicket::Begin(WorkQueue &) { |
| 42 | if (elements_ == 0) { |
| 43 | return StatOk; |
| 44 | } else { |
| 45 | // Initialize procedure pointer components in the first element, |
| 46 | // whence they will be copied later into all others. |
| 47 | const Descriptor &procPtrDesc{derived_.procPtr()}; |
| 48 | std::size_t numProcPtrs{procPtrDesc.InlineElements()}; |
| 49 | char *raw{instance_.OffsetElement<char>()}; |
| 50 | const auto *ppComponent{ |
| 51 | procPtrDesc.OffsetElement<typeInfo::ProcPtrComponent>()}; |
| 52 | for (std::size_t k{0}; k < numProcPtrs; ++k, ++ppComponent) { |
| 53 | auto &pptr{*reinterpret_cast<typeInfo::ProcedurePointer *>( |
| 54 | raw + ppComponent->offset)}; |
| 55 | pptr = ppComponent->procInitialization; |
| 56 | } |
| 57 | return StatContinue; |
| 58 | } |
| 59 | } |
| 60 | |
| 61 | RT_API_ATTRS int InitializeTicket::Continue(WorkQueue &workQueue) { |
| 62 | // Initialize the data components of the first element. |
| 63 | char *rawInstance{instance_.OffsetElement<char>()}; |
| 64 | for (; !Componentwise::IsComplete(); SkipToNextComponent()) { |
| 65 | char *rawComponent{rawInstance + component_->offset()}; |
| 66 | if (component_->genre() == typeInfo::Component::Genre::Allocatable) { |
| 67 | Descriptor &allocDesc{*reinterpret_cast<Descriptor *>(rawComponent)}; |
| 68 | component_->EstablishDescriptor( |
| 69 | allocDesc, instance_, workQueue.terminator()); |
| 70 | } else if (const void *init{component_->initialization()}) { |
| 71 | // Explicit initialization of data pointers and |
| 72 | // non-allocatable non-automatic components |
| 73 | std::size_t bytes{component_->SizeInBytes(instance_)}; |
| 74 | std::memcpy(rawComponent, init, bytes); |
| 75 | } else if (component_->genre() == typeInfo::Component::Genre::Pointer) { |
| 76 | // Data pointers without explicit initialization are established |
| 77 | // so that they are valid right-hand side targets of pointer |
| 78 | // assignment statements. |
| 79 | Descriptor &ptrDesc{*reinterpret_cast<Descriptor *>(rawComponent)}; |
| 80 | component_->EstablishDescriptor( |
| 81 | ptrDesc, instance_, workQueue.terminator()); |
| 82 | } else if (component_->genre() == typeInfo::Component::Genre::Data && |
| 83 | component_->derivedType() && |
| 84 | !component_->derivedType()->noInitializationNeeded()) { |
| 85 | // Default initialization of non-pointer non-allocatable/automatic |
| 86 | // data component. Handles parent component's elements. |
| 87 | SubscriptValue extents[maxRank]; |
| 88 | GetComponentExtents(extents, *component_, instance_); |
| 89 | Descriptor &compDesc{componentDescriptor_.descriptor()}; |
| 90 | const typeInfo::DerivedType &compType{*component_->derivedType()}; |
| 91 | compDesc.Establish(compType, rawComponent, component_->rank(), extents); |
| 92 | if (int status{workQueue.BeginInitialize(compDesc, compType)}; |
| 93 | status != StatOk) { |
| 94 | SkipToNextComponent(); |
| 95 | return status; |
| 96 | } |
| 97 | } |
| 98 | } |
| 99 | // The first element is now complete. Copy it into the others. |
| 100 | if (elements_ < 2) { |
| 101 | } else { |
| 102 | auto elementBytes{static_cast<SubscriptValue>(instance_.ElementBytes())}; |
| 103 | if (auto stride{instance_.FixedStride()}) { |
| 104 | if (*stride == elementBytes) { // contiguous |
| 105 | for (std::size_t done{1}; done < elements_;) { |
| 106 | std::size_t chunk{elements_ - done}; |
| 107 | if (chunk > done) { |
| 108 | chunk = done; |
| 109 | } |
| 110 | char *uninitialized{rawInstance + done * *stride}; |
| 111 | std::memcpy(uninitialized, rawInstance, chunk * *stride); |
| 112 | done += chunk; |
| 113 | } |
| 114 | } else { |
| 115 | for (std::size_t done{1}; done < elements_; ++done) { |
| 116 | char *uninitialized{rawInstance + done * *stride}; |
| 117 | std::memcpy(uninitialized, rawInstance, elementBytes); |
| 118 | } |
| 119 | } |
| 120 | } else { // one at a time with subscription |
| 121 | for (Elementwise::Advance(); !Elementwise::IsComplete(); |
| 122 | Elementwise::Advance()) { |
| 123 | char *element{instance_.Element<char>(subscripts_)}; |
| 124 | std::memcpy(element, rawInstance, elementBytes); |
| 125 | } |
| 126 | } |
| 127 | } |
| 128 | return StatOk; |
| 129 | } |
| 130 | |
| 131 | RT_API_ATTRS int InitializeClone(const Descriptor &clone, |
| 132 | const Descriptor &original, const typeInfo::DerivedType &derived, |
| 133 | Terminator &terminator, bool hasStat, const Descriptor *errMsg) { |
| 134 | if (original.IsPointer() || !original.IsAllocated()) { |
| 135 | return StatOk; // nothing to do |
| 136 | } else { |
| 137 | WorkQueue workQueue{terminator}; |
| 138 | int status{workQueue.BeginInitializeClone( |
| 139 | clone, original, derived, hasStat, errMsg)}; |
| 140 | return status == StatContinue ? workQueue.Run() : status; |
| 141 | } |
| 142 | } |
| 143 | |
// Walks each element/component pair of the original instance, allocating
// matching allocatable components in the clone and recursing (via the work
// queue) into nested derived types.  May return early with a non-StatOk
// status when a nested ticket has been queued; phase_ records how far this
// component's processing got so that resumption does not repeat a step.
RT_API_ATTRS int InitializeCloneTicket::Continue(WorkQueue &workQueue) {
  while (!IsComplete()) {
    if (component_->genre() == typeInfo::Component::Genre::Allocatable) {
      Descriptor &origDesc{*instance_.ElementComponent<Descriptor>(
          subscripts_, component_->offset())};
      if (origDesc.IsAllocated()) {
        Descriptor &cloneDesc{*clone_.ElementComponent<Descriptor>(
            subscripts_, component_->offset())};
        if (phase_ == 0) {
          ++phase_;
          // Step 1: allocate the clone's component to match the original
          // (mold gives it the original's type/shape information).
          cloneDesc.ApplyMold(origDesc, origDesc.rank());
          if (int stat{ReturnError(workQueue.terminator(),
                  cloneDesc.Allocate(kNoAsyncObject), errMsg_, hasStat_)};
              stat != StatOk) {
            return stat;
          }
          if (const DescriptorAddendum *addendum{cloneDesc.Addendum()}) {
            if (const typeInfo::DerivedType *derived{addendum->derivedType()}) {
              if (!derived->noInitializationNeeded()) {
                // Perform default initialization for the allocated element.
                if (int status{workQueue.BeginInitialize(cloneDesc, *derived)};
                    status != StatOk) {
                  return status;
                }
              }
            }
          }
        }
        if (phase_ == 1) {
          ++phase_;
          // Step 2: recursively clone-initialize the component's own
          // allocatable components.
          if (const DescriptorAddendum *addendum{cloneDesc.Addendum()}) {
            if (const typeInfo::DerivedType *derived{addendum->derivedType()}) {
              // Initialize derived type's allocatables.
              if (int status{workQueue.BeginInitializeClone(
                      cloneDesc, origDesc, *derived, hasStat_, errMsg_)};
                  status != StatOk) {
                return status;
              }
            }
          }
        }
      }
      Advance();
    } else if (component_->genre() == typeInfo::Component::Genre::Data) {
      if (component_->derivedType()) {
        // Handle nested derived types.
        const typeInfo::DerivedType &compType{*component_->derivedType()};
        SubscriptValue extents[maxRank];
        GetComponentExtents(extents, *component_, instance_);
        // Build temporary descriptors addressing this component in both
        // the original and the clone, then recurse via the work queue.
        Descriptor &origDesc{componentDescriptor_.descriptor()};
        Descriptor &cloneDesc{cloneComponentDescriptor_.descriptor()};
        origDesc.Establish(compType,
            instance_.ElementComponent<char>(subscripts_, component_->offset()),
            component_->rank(), extents);
        cloneDesc.Establish(compType,
            clone_.ElementComponent<char>(subscripts_, component_->offset()),
            component_->rank(), extents);
        Advance();
        if (int status{workQueue.BeginInitializeClone(
                cloneDesc, origDesc, compType, hasStat_, errMsg_)};
            status != StatOk) {
          return status;
        }
      } else {
        // Intrinsic-typed data component: nothing to clone here.
        SkipToNextComponent();
      }
    } else {
      // Pointer, procedure pointer, &c.: no deep cloning required.
      SkipToNextComponent();
    }
  }
  return StatOk;
}
| 216 | |
| 217 | // Fortran 2018 subclause 7.5.6.2 |
| 218 | RT_API_ATTRS void Finalize(const Descriptor &descriptor, |
| 219 | const typeInfo::DerivedType &derived, Terminator *terminator) { |
| 220 | if (!derived.noFinalizationNeeded() && descriptor.IsAllocated()) { |
| 221 | Terminator stubTerminator{"Finalize() in Fortran runtime" , 0}; |
| 222 | WorkQueue workQueue{terminator ? *terminator : stubTerminator}; |
| 223 | if (workQueue.BeginFinalize(descriptor, derived) == StatContinue) { |
| 224 | workQueue.Run(); |
| 225 | } |
| 226 | } |
| 227 | } |
| 228 | |
| 229 | static RT_API_ATTRS const typeInfo::SpecialBinding *FindFinal( |
| 230 | const typeInfo::DerivedType &derived, int rank) { |
| 231 | if (const auto *ranked{derived.FindSpecialBinding( |
| 232 | typeInfo::SpecialBinding::RankFinal(rank))}) { |
| 233 | return ranked; |
| 234 | } else if (const auto *assumed{derived.FindSpecialBinding( |
| 235 | typeInfo::SpecialBinding::Which::AssumedRankFinal)}) { |
| 236 | return assumed; |
| 237 | } else { |
| 238 | return derived.FindSpecialBinding( |
| 239 | typeInfo::SpecialBinding::Which::ElementalFinal); |
| 240 | } |
| 241 | } |
| 242 | |
// Invokes the most applicable FINAL subroutine of "derived", if any, on
// "descriptor".  Handles elemental FINALs (called once per element),
// descriptor vs. bare-address argument passing, and FINALs that require a
// contiguous argument.
static RT_API_ATTRS void CallFinalSubroutine(const Descriptor &descriptor,
    const typeInfo::DerivedType &derived, Terminator &terminator) {
  if (const auto *special{FindFinal(derived, descriptor.rank())}) {
    if (special->which() == typeInfo::SpecialBinding::Which::ElementalFinal) {
      std::size_t elements{descriptor.InlineElements()};
      SubscriptValue at[maxRank];
      descriptor.GetLowerBounds(at);
      if (special->IsArgDescriptor(0)) {
        // The elemental FINAL expects a descriptor argument: reuse one
        // scalar pointer descriptor, repointing it at each element in turn.
        StaticDescriptor<maxRank, true, 8 /*?*/> statDesc;
        Descriptor &elemDesc{statDesc.descriptor()};
        elemDesc = descriptor;
        elemDesc.raw().attribute = CFI_attribute_pointer;
        elemDesc.raw().rank = 0;
        auto *p{special->GetProc<void (*)(const Descriptor &)>()};
        for (std::size_t j{0}; j++ < elements;
            descriptor.IncrementSubscripts(at)) {
          elemDesc.set_base_addr(descriptor.Element<char>(at));
          p(elemDesc);
        }
      } else {
        // The elemental FINAL takes a bare data address per element.
        auto *p{special->GetProc<void (*)(char *)>()};
        for (std::size_t j{0}; j++ < elements;
            descriptor.IncrementSubscripts(at)) {
          p(descriptor.Element<char>(at));
        }
      }
    } else {
      // Non-elemental FINAL: a single call covering the whole object.
      StaticDescriptor<maxRank, true, 10> statDesc;
      Descriptor &copy{statDesc.descriptor()};
      const Descriptor *argDescriptor{&descriptor};
      if (descriptor.rank() > 0 && special->IsArgContiguous(0) &&
          !descriptor.IsContiguous()) {
        // The FINAL subroutine demands a contiguous array argument, but
        // this INTENT(OUT) or intrinsic assignment LHS isn't contiguous.
        // Finalize a shallow copy of the data.
        copy = descriptor;
        copy.set_base_addr(nullptr);
        copy.raw().attribute = CFI_attribute_allocatable;
        RUNTIME_CHECK(terminator, copy.Allocate(kNoAsyncObject) == CFI_SUCCESS);
        ShallowCopyDiscontiguousToContiguous(copy, descriptor);
        argDescriptor = &copy;
      }
      if (special->IsArgDescriptor(0)) {
        // Pass a pointer descriptor whose addendum names "derived"
        // explicitly as the type seen by the FINAL subroutine.
        StaticDescriptor<maxRank, true, 8 /*?*/> statDesc;
        Descriptor &tmpDesc{statDesc.descriptor()};
        tmpDesc = *argDescriptor;
        tmpDesc.raw().attribute = CFI_attribute_pointer;
        tmpDesc.Addendum()->set_derivedType(&derived);
        auto *p{special->GetProc<void (*)(const Descriptor &)>()};
        p(tmpDesc);
      } else {
        auto *p{special->GetProc<void (*)(char *)>()};
        p(argDescriptor->OffsetElement<char>());
      }
      if (argDescriptor == &copy) {
        // Copy the finalized data back into the discontiguous original
        // and release the temporary.
        ShallowCopyContiguousToDiscontiguous(descriptor, copy);
        copy.Deallocate();
      }
    }
  }
}
| 304 | |
| 305 | RT_API_ATTRS int FinalizeTicket::Begin(WorkQueue &workQueue) { |
| 306 | CallFinalSubroutine(instance_, derived_, workQueue.terminator()); |
| 307 | // If there's a finalizable parent component, handle it last, as required |
| 308 | // by the Fortran standard (7.5.6.2), and do so recursively with the same |
| 309 | // descriptor so that the rank is preserved. |
| 310 | finalizableParentType_ = derived_.GetParentType(); |
| 311 | if (finalizableParentType_) { |
| 312 | if (finalizableParentType_->noFinalizationNeeded()) { |
| 313 | finalizableParentType_ = nullptr; |
| 314 | } else { |
| 315 | SkipToNextComponent(); |
| 316 | } |
| 317 | } |
| 318 | return StatContinue; |
| 319 | } |
| 320 | |
// Finalizes all non-parent components elementwise, then the parent
// component last (Fortran 2018 7.5.6.2).  May return early with a
// non-StatOk status when a nested finalization ticket has been queued;
// the element/component iteration state allows resumption.
RT_API_ATTRS int FinalizeTicket::Continue(WorkQueue &workQueue) {
  while (!IsComplete()) {
    if (component_->genre() == typeInfo::Component::Genre::Allocatable &&
        component_->category() == TypeCategory::Derived) {
      // Component may be polymorphic or unlimited polymorphic. Need to use the
      // dynamic type to check whether finalization is needed.
      const Descriptor &compDesc{*instance_.ElementComponent<Descriptor>(
          subscripts_, component_->offset())};
      Advance();
      if (compDesc.IsAllocated()) {
        if (const DescriptorAddendum *addendum{compDesc.Addendum()}) {
          if (const typeInfo::DerivedType *compDynamicType{
                  addendum->derivedType()}) {
            if (!compDynamicType->noFinalizationNeeded()) {
              if (int status{
                      workQueue.BeginFinalize(compDesc, *compDynamicType)};
                  status != StatOk) {
                return status;
              }
            }
          }
        }
      }
    } else if (component_->genre() == typeInfo::Component::Genre::Allocatable ||
        component_->genre() == typeInfo::Component::Genre::Automatic) {
      // Non-polymorphic allocatable/automatic component: the declared
      // derived type, if any, decides whether finalization is needed.
      if (const typeInfo::DerivedType *compType{component_->derivedType()};
          compType && !compType->noFinalizationNeeded()) {
        const Descriptor &compDesc{*instance_.ElementComponent<Descriptor>(
            subscripts_, component_->offset())};
        Advance();
        if (compDesc.IsAllocated()) {
          if (int status{workQueue.BeginFinalize(compDesc, *compType)};
              status != StatOk) {
            return status;
          }
        }
      } else {
        SkipToNextComponent();
      }
    } else if (component_->genre() == typeInfo::Component::Genre::Data &&
        component_->derivedType() &&
        !component_->derivedType()->noFinalizationNeeded()) {
      // In-place derived-type data component: describe it with a temporary
      // descriptor and queue its finalization.
      SubscriptValue extents[maxRank];
      GetComponentExtents(extents, *component_, instance_);
      Descriptor &compDesc{componentDescriptor_.descriptor()};
      const typeInfo::DerivedType &compType{*component_->derivedType()};
      compDesc.Establish(compType,
          instance_.ElementComponent<char>(subscripts_, component_->offset()),
          component_->rank(), extents);
      Advance();
      if (int status{workQueue.BeginFinalize(compDesc, compType)};
          status != StatOk) {
        return status;
      }
    } else {
      SkipToNextComponent();
    }
  }
  // Last, do the parent component, if any and finalizable.
  if (finalizableParentType_) {
    // Reuse this instance's descriptor, retyped and resized as the parent.
    Descriptor &tmpDesc{componentDescriptor_.descriptor()};
    tmpDesc = instance_;
    tmpDesc.raw().attribute = CFI_attribute_pointer;
    tmpDesc.Addendum()->set_derivedType(finalizableParentType_);
    tmpDesc.raw().elem_len = finalizableParentType_->sizeInBytes();
    const auto &parentType{*finalizableParentType_};
    finalizableParentType_ = nullptr;
    // Don't return StatOk here if the nested Finalize is still running;
    // it needs this->componentDescriptor_.
    return workQueue.BeginFinalize(tmpDesc, parentType);
  }
  return StatOk;
}
| 394 | |
| 395 | // The order of finalization follows Fortran 2018 7.5.6.2, with |
| 396 | // elementwise finalization of non-parent components taking place |
| 397 | // before parent component finalization, and with all finalization |
| 398 | // preceding any deallocation. |
| 399 | RT_API_ATTRS void Destroy(const Descriptor &descriptor, bool finalize, |
| 400 | const typeInfo::DerivedType &derived, Terminator *terminator) { |
| 401 | if (descriptor.IsAllocated() && !derived.noDestructionNeeded()) { |
| 402 | Terminator stubTerminator{"Destroy() in Fortran runtime" , 0}; |
| 403 | WorkQueue workQueue{terminator ? *terminator : stubTerminator}; |
| 404 | if (workQueue.BeginDestroy(descriptor, derived, finalize) == StatContinue) { |
| 405 | workQueue.Run(); |
| 406 | } |
| 407 | } |
| 408 | } |
| 409 | |
| 410 | RT_API_ATTRS int DestroyTicket::Begin(WorkQueue &workQueue) { |
| 411 | if (finalize_ && !derived_.noFinalizationNeeded()) { |
| 412 | if (int status{workQueue.BeginFinalize(instance_, derived_)}; |
| 413 | status != StatOk && status != StatContinue) { |
| 414 | return status; |
| 415 | } |
| 416 | } |
| 417 | return StatContinue; |
| 418 | } |
| 419 | |
// Deallocates every direct and indirect allocatable/automatic component.
// May return early with a non-StatOk status when a nested destruction
// ticket has been queued; phase_ prevents requeueing that nested destroy
// for the same element after resumption.
RT_API_ATTRS int DestroyTicket::Continue(WorkQueue &workQueue) {
  // Deallocate all direct and indirect allocatable and automatic components.
  // Contrary to finalization, the order of deallocation does not matter.
  while (!IsComplete()) {
    const auto *componentDerived{component_->derivedType()};
    if (component_->genre() == typeInfo::Component::Genre::Allocatable) {
      if (fixedStride_ &&
          (!componentDerived || componentDerived->noDestructionNeeded())) {
        // common fast path, just deallocate in every element
        char *p{instance_.OffsetElement<char>(component_->offset())};
        for (std::size_t j{0}; j < elements_; ++j, p += *fixedStride_) {
          Descriptor &d{*reinterpret_cast<Descriptor *>(p)};
          d.Deallocate();
        }
        SkipToNextComponent();
      } else {
        // Slow path: the component's contents need destruction first,
        // or elements must be addressed by subscripts.
        Descriptor &d{*instance_.ElementComponent<Descriptor>(
            subscripts_, component_->offset())};
        if (d.IsAllocated()) {
          if (componentDerived && !componentDerived->noDestructionNeeded() &&
              phase_ == 0) {
            // Bump phase_ so that, when this ticket resumes for the same
            // element, the nested destroy is not queued a second time.
            if (int status{workQueue.BeginDestroy(
                    d, *componentDerived, /*finalize=*/false)};
                status != StatOk) {
              ++phase_;
              return status;
            }
          }
          d.Deallocate();
        }
        Advance();
      }
    } else if (component_->genre() == typeInfo::Component::Genre::Data) {
      if (!componentDerived || componentDerived->noDestructionNeeded()) {
        SkipToNextComponent();
      } else {
        // In-place derived-type data component: describe it with a
        // temporary descriptor and queue its (non-finalizing) destruction.
        SubscriptValue extents[maxRank];
        GetComponentExtents(extents, *component_, instance_);
        Descriptor &compDesc{componentDescriptor_.descriptor()};
        const typeInfo::DerivedType &compType{*componentDerived};
        compDesc.Establish(compType,
            instance_.ElementComponent<char>(subscripts_, component_->offset()),
            component_->rank(), extents);
        Advance();
        if (int status{workQueue.BeginDestroy(
                compDesc, *componentDerived, /*finalize=*/false)};
            status != StatOk) {
          return status;
        }
      }
    } else {
      // Pointers &c. are not deallocated by destruction.
      SkipToNextComponent();
    }
  }
  return StatOk;
}
| 476 | |
| 477 | RT_API_ATTRS bool HasDynamicComponent(const Descriptor &descriptor) { |
| 478 | if (const DescriptorAddendum * addendum{descriptor.Addendum()}) { |
| 479 | if (const auto *derived = addendum->derivedType()) { |
| 480 | // Destruction is needed if and only if there are direct or indirect |
| 481 | // allocatable or automatic components. |
| 482 | return !derived->noDestructionNeeded(); |
| 483 | } |
| 484 | } |
| 485 | return false; |
| 486 | } |
| 487 | |
| 488 | RT_OFFLOAD_API_GROUP_END |
| 489 | } // namespace Fortran::runtime |
| 490 | |