//===--- CGBlocks.cpp - Emit LLVM Code for blocks --------------*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This contains code to emit blocks.
//
//===----------------------------------------------------------------------===//

#include "CGBlocks.h"
#include "CGCXXABI.h"
#include "CGDebugInfo.h"
#include "CGObjCRuntime.h"
#include "CGOpenCLRuntime.h"
#include "CodeGenFunction.h"
#include "CodeGenModule.h"
#include "ConstantEmitter.h"
#include "TargetInfo.h"
#include "clang/AST/Attr.h"
#include "clang/AST/DeclObjC.h"
#include "clang/CodeGen/ConstantInitBuilder.h"
#include "llvm/ADT/SmallSet.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/Module.h"
#include "llvm/Support/ScopedPrinter.h"
#include <algorithm>
#include <cstdio>

using namespace clang;
using namespace CodeGen;
34
CGBlockInfo::CGBlockInfo(const BlockDecl *block, StringRef name)
    : Name(name), CXXThisIndex(0), CanBeGlobal(false), NeedsCopyDispose(false),
      NoEscape(false), HasCXXObject(false), UsesStret(false),
      HasCapturedVariableLayout(false), CapturesNonExternalType(false),
      LocalAddress(RawAddress::invalid()), StructureType(nullptr),
      Block(block) {

  // Skip asm prefix, if any. 'name' is usually taken directly from
  // the mangled name of the enclosing function.
  if (!name.empty() && name[0] == '\01')
    name = name.substr(1);
}
47
48// Anchor the vtable to this translation unit.
49BlockByrefHelpers::~BlockByrefHelpers() {}
50
51/// Build the given block as a global block.
52static llvm::Constant *buildGlobalBlock(CodeGenModule &CGM,
53 const CGBlockInfo &blockInfo,
54 llvm::Constant *blockFn);
55
56/// Build the helper function to copy a block.
57static llvm::Constant *buildCopyHelper(CodeGenModule &CGM,
58 const CGBlockInfo &blockInfo) {
59 return CodeGenFunction(CGM).GenerateCopyHelperFunction(blockInfo);
60}
61
62/// Build the helper function to dispose of a block.
63static llvm::Constant *buildDisposeHelper(CodeGenModule &CGM,
64 const CGBlockInfo &blockInfo) {
65 return CodeGenFunction(CGM).GenerateDestroyHelperFunction(blockInfo);
66}
67
68namespace {
69
70enum class CaptureStrKind {
71 // String for the copy helper.
72 CopyHelper,
73 // String for the dispose helper.
74 DisposeHelper,
75 // Merge the strings for the copy helper and dispose helper.
76 Merged
77};
78
79} // end anonymous namespace
80
81static std::string getBlockCaptureStr(const CGBlockInfo::Capture &Cap,
82 CaptureStrKind StrKind,
83 CharUnits BlockAlignment,
84 CodeGenModule &CGM);
85
static std::string getBlockDescriptorName(const CGBlockInfo &BlockInfo,
                                          CodeGenModule &CGM) {
  std::string Name = "__block_descriptor_";
  Name += llvm::to_string(BlockInfo.BlockSize.getQuantity()) + "_";

  if (BlockInfo.NeedsCopyDispose) {
    if (CGM.getLangOpts().Exceptions)
      Name += "e";
    if (CGM.getCodeGenOpts().ObjCAutoRefCountExceptions)
      Name += "a";
    Name += llvm::to_string(BlockInfo.BlockAlign.getQuantity()) + "_";

    for (auto &Cap : BlockInfo.SortedCaptures) {
      if (Cap.isConstantOrTrivial())
        continue;

      Name += llvm::to_string(Cap.getOffset().getQuantity());

      if (Cap.CopyKind == Cap.DisposeKind) {
        // If CopyKind and DisposeKind are the same, merge the capture
        // information.
        assert(Cap.CopyKind != BlockCaptureEntityKind::None &&
               "shouldn't see BlockCaptureManagedEntity that is None");
        Name += getBlockCaptureStr(Cap, CaptureStrKind::Merged,
                                   BlockInfo.BlockAlign, CGM);
      } else {
        // If CopyKind and DisposeKind are not the same, which can happen when
        // either Kind is None or the captured object is a __strong block,
        // concatenate the copy and dispose strings.
        Name += getBlockCaptureStr(Cap, CaptureStrKind::CopyHelper,
                                   BlockInfo.BlockAlign, CGM);
        Name += getBlockCaptureStr(Cap, CaptureStrKind::DisposeHelper,
                                   BlockInfo.BlockAlign, CGM);
      }
    }
    Name += "_";
  }

  std::string TypeAtEncoding =
      CGM.getContext().getObjCEncodingForBlock(BlockInfo.getBlockExpr());
  /// Replace occurrences of '@' with '\1'. '@' is reserved on ELF platforms as
  /// a separator between symbol name and symbol version.
  std::replace(TypeAtEncoding.begin(), TypeAtEncoding.end(), '@', '\1');
  Name += "e" + llvm::to_string(TypeAtEncoding.size()) + "_" + TypeAtEncoding;
  Name += "l" + CGM.getObjCRuntime().getRCBlockLayoutStr(CGM, BlockInfo);
  return Name;
}
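
// Illustrative note (not from the original source): the names produced above
// concatenate the block size, an optional copy/dispose description (the
// "e"/"a" markers, the alignment, and per-capture strings), the
// length-prefixed @encode signature (with '@' rewritten to '\1'), and the
// runtime layout string, so equivalent descriptors emitted from different
// translation units can share a single linkonce_odr definition.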
133
/// buildBlockDescriptor - Build the block descriptor meta-data for a block.
/// buildBlockDescriptor is accessed from the 5th field of the Block_literal
/// meta-data and contains static information about the block literal.
/// Its definition will have 4 (or optionally 6) words.
/// \code
/// struct Block_descriptor {
///   unsigned long reserved;
///   unsigned long size;  // size of Block_literal metadata in bytes.
///   void *copy_func_helper_decl;  // optional copy helper.
///   void *destroy_func_decl;      // optional destructor helper.
///   void *block_method_encoding_address;  // @encode for block literal signature.
///   void *block_layout_info;      // encoding of captured block variables.
/// };
/// \endcode
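/// (Illustrative note: the 4-word form omits the two helper pointers; the
/// 6-word form, used when the block needs copy/dispose helpers, includes
/// them.)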
148static llvm::Constant *buildBlockDescriptor(CodeGenModule &CGM,
149 const CGBlockInfo &blockInfo) {
150 ASTContext &C = CGM.getContext();
151
152 llvm::IntegerType *ulong =
153 cast<llvm::IntegerType>(CGM.getTypes().ConvertType(T: C.UnsignedLongTy));
154 llvm::PointerType *i8p = nullptr;
155 if (CGM.getLangOpts().OpenCL)
156 i8p = llvm::PointerType::get(
157 C&: CGM.getLLVMContext(), AddressSpace: C.getTargetAddressSpace(AS: LangAS::opencl_constant));
158 else
159 i8p = CGM.VoidPtrTy;
160
161 std::string descName;
162
163 // If an equivalent block descriptor global variable exists, return it.
164 if (C.getLangOpts().ObjC &&
165 CGM.getLangOpts().getGC() == LangOptions::NonGC) {
166 descName = getBlockDescriptorName(BlockInfo: blockInfo, CGM);
167 if (llvm::GlobalValue *desc = CGM.getModule().getNamedValue(Name: descName))
168 return desc;
169 }
170
171 // If there isn't an equivalent block descriptor global variable, create a new
172 // one.
173 ConstantInitBuilder builder(CGM);
174 auto elements = builder.beginStruct();
175
176 // reserved
177 elements.addInt(intTy: ulong, value: 0);
178
  // Size
  // FIXME: What is the right way to say this doesn't fit? We should give
  // a user diagnostic in that case. A better fix would be to change the
  // API to size_t.
  elements.addInt(ulong, blockInfo.BlockSize.getQuantity());
184
185 // Optional copy/dispose helpers.
186 bool hasInternalHelper = false;
187 if (blockInfo.NeedsCopyDispose) {
188 // copy_func_helper_decl
189 llvm::Constant *copyHelper = buildCopyHelper(CGM, blockInfo);
190 elements.add(value: copyHelper);
191
192 // destroy_func_decl
193 llvm::Constant *disposeHelper = buildDisposeHelper(CGM, blockInfo);
194 elements.add(value: disposeHelper);
195
196 if (cast<llvm::Function>(Val: copyHelper->stripPointerCasts())
197 ->hasInternalLinkage() ||
198 cast<llvm::Function>(Val: disposeHelper->stripPointerCasts())
199 ->hasInternalLinkage())
200 hasInternalHelper = true;
201 }
202
203 // Signature. Mandatory ObjC-style method descriptor @encode sequence.
204 std::string typeAtEncoding =
205 CGM.getContext().getObjCEncodingForBlock(blockExpr: blockInfo.getBlockExpr());
206 elements.add(value: CGM.GetAddrOfConstantCString(Str: typeAtEncoding).getPointer());
207
208 // GC layout.
209 if (C.getLangOpts().ObjC) {
210 if (CGM.getLangOpts().getGC() != LangOptions::NonGC)
211 elements.add(value: CGM.getObjCRuntime().BuildGCBlockLayout(CGM, blockInfo));
212 else
213 elements.add(value: CGM.getObjCRuntime().BuildRCBlockLayout(CGM, blockInfo));
214 }
215 else
216 elements.addNullPointer(ptrTy: i8p);
217
218 unsigned AddrSpace = 0;
219 if (C.getLangOpts().OpenCL)
220 AddrSpace = C.getTargetAddressSpace(AS: LangAS::opencl_constant);
221
222 llvm::GlobalValue::LinkageTypes linkage;
223 if (descName.empty()) {
224 linkage = llvm::GlobalValue::InternalLinkage;
225 descName = "__block_descriptor_tmp";
226 } else if (hasInternalHelper) {
227 // If either the copy helper or the dispose helper has internal linkage,
228 // the block descriptor must have internal linkage too.
229 linkage = llvm::GlobalValue::InternalLinkage;
230 } else {
231 linkage = llvm::GlobalValue::LinkOnceODRLinkage;
232 }
233
234 llvm::GlobalVariable *global =
235 elements.finishAndCreateGlobal(descName, CGM.getPointerAlign(),
236 /*constant*/ true, linkage, AddrSpace);
237
238 if (linkage == llvm::GlobalValue::LinkOnceODRLinkage) {
239 if (CGM.supportsCOMDAT())
240 global->setComdat(CGM.getModule().getOrInsertComdat(Name: descName));
241 global->setVisibility(llvm::GlobalValue::HiddenVisibility);
242 global->setUnnamedAddr(llvm::GlobalValue::UnnamedAddr::Global);
243 }
244
245 return global;
246}
247
/*
  Purely notional variadic template describing the layout of a block.

  template <class _ResultType, class... _ParamTypes, class... _CaptureTypes>
  struct Block_literal {
    /// Initialized to one of:
    ///   extern void *_NSConcreteStackBlock[];
    ///   extern void *_NSConcreteGlobalBlock[];
    ///
    /// In theory, we could start one off malloc'ed by setting
    /// BLOCK_NEEDS_FREE, giving it a refcount of 1, and using
    /// this isa:
    ///   extern void *_NSConcreteMallocBlock[];
    struct objc_class *isa;

    /// These are the flags (with corresponding bit number) that the
    /// compiler is actually supposed to know about.
    ///  23. BLOCK_IS_NOESCAPE - indicates that the block is non-escaping
    ///  25. BLOCK_HAS_COPY_DISPOSE - indicates that the block
    ///   descriptor provides copy and dispose helper functions
    ///  26. BLOCK_HAS_CXX_OBJ - indicates that there's a captured
    ///   object with a nontrivial destructor or copy constructor
    ///  28. BLOCK_IS_GLOBAL - indicates that the block is allocated
    ///   as global memory
    ///  29. BLOCK_USE_STRET - indicates that the block function
    ///   uses stret, which objc_msgSend needs to know about
    ///  30. BLOCK_HAS_SIGNATURE - indicates that the block has an
    ///   @encoded signature string
    /// And we're not supposed to manipulate these:
    ///  24. BLOCK_NEEDS_FREE - indicates that the block has been moved
    ///   to malloc'ed memory
    ///  27. BLOCK_IS_GC - indicates that the block has been moved to
    ///   GC-allocated memory
    /// Additionally, the bottom 16 bits are a reference count which
    /// should be zero on the stack.
    int flags;

    /// Reserved; should be zero-initialized.
    int reserved;

    /// Function pointer generated from block literal.
    _ResultType (*invoke)(Block_literal *, _ParamTypes...);

    /// Block description metadata generated from block literal.
    struct Block_descriptor *block_descriptor;

    /// Captured values follow.
    _CaptureTypes captures...;
  };
 */
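
// Illustrative example (an assumption for exposition, not part of the comment
// above): for a simple escaping block such as
//
//   int x = 42;
//   void (^b)(void) = ^{ use(x); };
//
// the on-stack Block_literal is roughly
//   { isa = _NSConcreteStackBlock, flags = BLOCK_HAS_SIGNATURE (plus any
//     copy/dispose or extended-layout bits), reserved = 0,
//     invoke = <block invoke function>, block_descriptor = <descriptor>,
//     int x = 42 }
// with 'x' copied by value into the capture area laid out below.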
298
299namespace {
300 /// A chunk of data that we actually have to capture in the block.
301 struct BlockLayoutChunk {
302 CharUnits Alignment;
303 CharUnits Size;
304 const BlockDecl::Capture *Capture; // null for 'this'
305 llvm::Type *Type;
306 QualType FieldType;
307 BlockCaptureEntityKind CopyKind, DisposeKind;
308 BlockFieldFlags CopyFlags, DisposeFlags;
309
310 BlockLayoutChunk(CharUnits align, CharUnits size,
311 const BlockDecl::Capture *capture, llvm::Type *type,
312 QualType fieldType, BlockCaptureEntityKind CopyKind,
313 BlockFieldFlags CopyFlags,
314 BlockCaptureEntityKind DisposeKind,
315 BlockFieldFlags DisposeFlags)
316 : Alignment(align), Size(size), Capture(capture), Type(type),
317 FieldType(fieldType), CopyKind(CopyKind), DisposeKind(DisposeKind),
318 CopyFlags(CopyFlags), DisposeFlags(DisposeFlags) {}
319
320 /// Tell the block info that this chunk has the given field index.
321 void setIndex(CGBlockInfo &info, unsigned index, CharUnits offset) {
322 if (!Capture) {
323 info.CXXThisIndex = index;
324 info.CXXThisOffset = offset;
325 } else {
326 info.SortedCaptures.push_back(CGBlockInfo::Capture::makeIndex(
327 index, offset, FieldType, CopyKind, CopyFlags, DisposeKind,
328 DisposeFlags, Capture));
329 }
330 }
331
332 bool isTrivial() const {
333 return CopyKind == BlockCaptureEntityKind::None &&
334 DisposeKind == BlockCaptureEntityKind::None;
335 }
336 };
337
338 /// Order by 1) all __strong together 2) next, all block together 3) next,
339 /// all byref together 4) next, all __weak together. Preserve descending
340 /// alignment in all situations.
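  ///
  /// Illustrative example (an assumption for exposition, not from the original
  /// comment): given captures {__strong id s, __weak id w, int i} on a typical
  /// 64-bit target, both object pointers have alignment 8 and the int has
  /// alignment 4, so the resulting order is s (__strong, preference 0), then
  /// w (__weak, preference 3), then i (trivial, preference 4).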
341 bool operator<(const BlockLayoutChunk &left, const BlockLayoutChunk &right) {
342 if (left.Alignment != right.Alignment)
343 return left.Alignment > right.Alignment;
344
345 auto getPrefOrder = [](const BlockLayoutChunk &chunk) {
346 switch (chunk.CopyKind) {
347 case BlockCaptureEntityKind::ARCStrong:
348 return 0;
349 case BlockCaptureEntityKind::BlockObject:
350 switch (chunk.CopyFlags.getBitMask()) {
351 case BLOCK_FIELD_IS_OBJECT:
352 return 0;
353 case BLOCK_FIELD_IS_BLOCK:
354 return 1;
355 case BLOCK_FIELD_IS_BYREF:
356 return 2;
357 default:
358 break;
359 }
360 break;
361 case BlockCaptureEntityKind::ARCWeak:
362 return 3;
363 default:
364 break;
365 }
366 return 4;
367 };
368
369 return getPrefOrder(left) < getPrefOrder(right);
370 }
371} // end anonymous namespace
372
373static std::pair<BlockCaptureEntityKind, BlockFieldFlags>
374computeCopyInfoForBlockCapture(const BlockDecl::Capture &CI, QualType T,
375 const LangOptions &LangOpts);
376
377static std::pair<BlockCaptureEntityKind, BlockFieldFlags>
378computeDestroyInfoForBlockCapture(const BlockDecl::Capture &CI, QualType T,
379 const LangOptions &LangOpts);
380
381static void addBlockLayout(CharUnits align, CharUnits size,
382 const BlockDecl::Capture *capture, llvm::Type *type,
383 QualType fieldType,
384 SmallVectorImpl<BlockLayoutChunk> &Layout,
385 CGBlockInfo &Info, CodeGenModule &CGM) {
386 if (!capture) {
387 // 'this' capture.
388 Layout.push_back(Elt: BlockLayoutChunk(
389 align, size, capture, type, fieldType, BlockCaptureEntityKind::None,
390 BlockFieldFlags(), BlockCaptureEntityKind::None, BlockFieldFlags()));
391 return;
392 }
393
394 const LangOptions &LangOpts = CGM.getLangOpts();
395 BlockCaptureEntityKind CopyKind, DisposeKind;
396 BlockFieldFlags CopyFlags, DisposeFlags;
397
398 std::tie(args&: CopyKind, args&: CopyFlags) =
399 computeCopyInfoForBlockCapture(CI: *capture, T: fieldType, LangOpts);
400 std::tie(args&: DisposeKind, args&: DisposeFlags) =
401 computeDestroyInfoForBlockCapture(CI: *capture, T: fieldType, LangOpts);
402 Layout.push_back(Elt: BlockLayoutChunk(align, size, capture, type, fieldType,
403 CopyKind, CopyFlags, DisposeKind,
404 DisposeFlags));
405
406 if (Info.NoEscape)
407 return;
408
409 if (!Layout.back().isTrivial())
410 Info.NeedsCopyDispose = true;
411}
412
413/// Determines if the given type is safe for constant capture in C++.
414static bool isSafeForCXXConstantCapture(QualType type) {
415 const RecordType *recordType =
416 type->getBaseElementTypeUnsafe()->getAs<RecordType>();
417
418 // Only records can be unsafe.
419 if (!recordType) return true;
420
421 const auto *record = cast<CXXRecordDecl>(Val: recordType->getDecl());
422
423 // Maintain semantics for classes with non-trivial dtors or copy ctors.
424 if (!record->hasTrivialDestructor()) return false;
425 if (record->hasNonTrivialCopyConstructor()) return false;
426
427 // Otherwise, we just have to make sure there aren't any mutable
428 // fields that might have changed since initialization.
429 return !record->hasMutableFields();
430}
431
432/// It is illegal to modify a const object after initialization.
433/// Therefore, if a const object has a constant initializer, we don't
434/// actually need to keep storage for it in the block; we'll just
435/// rematerialize it at the start of the block function. This is
436/// acceptable because we make no promises about address stability of
437/// captured variables.
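///
/// Illustrative example (an assumption, not part of the original comment):
/// \code
///   const int x = 42;
///   int (^b)(void) = ^{ return x; };
/// \endcode
/// Here 'x' does not need a slot in the block; it can be rematerialized as
/// the constant 42 at the start of the block function.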
438static llvm::Constant *tryCaptureAsConstant(CodeGenModule &CGM,
439 CodeGenFunction *CGF,
440 const VarDecl *var) {
441 // Return if this is a function parameter. We shouldn't try to
442 // rematerialize default arguments of function parameters.
443 if (isa<ParmVarDecl>(Val: var))
444 return nullptr;
445
446 QualType type = var->getType();
447
448 // We can only do this if the variable is const.
449 if (!type.isConstQualified()) return nullptr;
450
451 // Furthermore, in C++ we have to worry about mutable fields:
452 // C++ [dcl.type.cv]p4:
453 // Except that any class member declared mutable can be
454 // modified, any attempt to modify a const object during its
455 // lifetime results in undefined behavior.
456 if (CGM.getLangOpts().CPlusPlus && !isSafeForCXXConstantCapture(type))
457 return nullptr;
458
459 // If the variable doesn't have any initializer (shouldn't this be
460 // invalid?), it's not clear what we should do. Maybe capture as
461 // zero?
462 const Expr *init = var->getInit();
463 if (!init) return nullptr;
464
465 return ConstantEmitter(CGM, CGF).tryEmitAbstractForInitializer(D: *var);
466}
467
468/// Get the low bit of a nonzero character count. This is the
469/// alignment of the nth byte if the 0th byte is universally aligned.
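///
/// For example (illustrative): for v = 12 (0b1100), v & -v == 4 (0b0100), so
/// the 12th byte after a universally aligned byte is guaranteed at least
/// 4-byte alignment.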
470static CharUnits getLowBit(CharUnits v) {
471 return CharUnits::fromQuantity(Quantity: v.getQuantity() & (~v.getQuantity() + 1));
472}
473
474static void initializeForBlockHeader(CodeGenModule &CGM, CGBlockInfo &info,
475 SmallVectorImpl<llvm::Type*> &elementTypes) {
476
477 assert(elementTypes.empty());
478 if (CGM.getLangOpts().OpenCL) {
479 // The header is basically 'struct { int; int; generic void *;
480 // custom_fields; }'. Assert that struct is packed.
481 auto GenPtrAlign = CharUnits::fromQuantity(
482 Quantity: CGM.getTarget().getPointerAlign(AddrSpace: LangAS::opencl_generic) / 8);
483 auto GenPtrSize = CharUnits::fromQuantity(
484 Quantity: CGM.getTarget().getPointerWidth(AddrSpace: LangAS::opencl_generic) / 8);
485 assert(CGM.getIntSize() <= GenPtrSize);
486 assert(CGM.getIntAlign() <= GenPtrAlign);
487 assert((2 * CGM.getIntSize()).isMultipleOf(GenPtrAlign));
488 elementTypes.push_back(Elt: CGM.IntTy); /* total size */
489 elementTypes.push_back(Elt: CGM.IntTy); /* align */
490 elementTypes.push_back(
491 Elt: CGM.getOpenCLRuntime()
492 .getGenericVoidPointerType()); /* invoke function */
493 unsigned Offset =
494 2 * CGM.getIntSize().getQuantity() + GenPtrSize.getQuantity();
495 unsigned BlockAlign = GenPtrAlign.getQuantity();
    if (auto *Helper =
            CGM.getTargetCodeGenInfo().getTargetOpenCLBlockHelper()) {
      for (auto *I : Helper->getCustomFieldTypes()) /* custom fields */ {
        // TargetOpenCLBlockHelper needs to make sure the struct is packed.
        // If necessary, add padding fields to the custom fields.
        unsigned Align = CGM.getDataLayout().getABITypeAlign(I).value();
        if (BlockAlign < Align)
          BlockAlign = Align;
        assert(Offset % Align == 0);
        Offset += CGM.getDataLayout().getTypeAllocSize(I);
        elementTypes.push_back(I);
      }
    }
    info.BlockAlign = CharUnits::fromQuantity(BlockAlign);
    info.BlockSize = CharUnits::fromQuantity(Offset);
511 } else {
512 // The header is basically 'struct { void *; int; int; void *; void *; }'.
513 // Assert that the struct is packed.
514 assert(CGM.getIntSize() <= CGM.getPointerSize());
515 assert(CGM.getIntAlign() <= CGM.getPointerAlign());
516 assert((2 * CGM.getIntSize()).isMultipleOf(CGM.getPointerAlign()));
517 info.BlockAlign = CGM.getPointerAlign();
518 info.BlockSize = 3 * CGM.getPointerSize() + 2 * CGM.getIntSize();
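    // Illustrative arithmetic (assuming a typical 64-bit target): the header
    // is then 3*8 + 2*4 = 32 bytes, with pointer alignment of 8.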
519 elementTypes.push_back(Elt: CGM.VoidPtrTy);
520 elementTypes.push_back(Elt: CGM.IntTy);
521 elementTypes.push_back(Elt: CGM.IntTy);
522 elementTypes.push_back(Elt: CGM.VoidPtrTy);
523 elementTypes.push_back(Elt: CGM.getBlockDescriptorType());
524 }
525}
526
527static QualType getCaptureFieldType(const CodeGenFunction &CGF,
528 const BlockDecl::Capture &CI) {
529 const VarDecl *VD = CI.getVariable();
530
531 // If the variable is captured by an enclosing block or lambda expression,
532 // use the type of the capture field.
533 if (CGF.BlockInfo && CI.isNested())
534 return CGF.BlockInfo->getCapture(var: VD).fieldType();
535 if (auto *FD = CGF.LambdaCaptureFields.lookup(VD))
536 return FD->getType();
537 // If the captured variable is a non-escaping __block variable, the field
538 // type is the reference type. If the variable is a __block variable that
539 // already has a reference type, the field type is the variable's type.
540 return VD->isNonEscapingByref() ?
541 CGF.getContext().getLValueReferenceType(T: VD->getType()) : VD->getType();
542}
543
544/// Compute the layout of the given block. Attempts to lay the block
545/// out with minimal space requirements.
546static void computeBlockInfo(CodeGenModule &CGM, CodeGenFunction *CGF,
547 CGBlockInfo &info) {
548 ASTContext &C = CGM.getContext();
549 const BlockDecl *block = info.getBlockDecl();
550
551 SmallVector<llvm::Type*, 8> elementTypes;
552 initializeForBlockHeader(CGM, info, elementTypes);
553 bool hasNonConstantCustomFields = false;
554 if (auto *OpenCLHelper =
555 CGM.getTargetCodeGenInfo().getTargetOpenCLBlockHelper())
556 hasNonConstantCustomFields =
557 !OpenCLHelper->areAllCustomFieldValuesConstant(Info: info);
558 if (!block->hasCaptures() && !hasNonConstantCustomFields) {
559 info.StructureType =
560 llvm::StructType::get(Context&: CGM.getLLVMContext(), Elements: elementTypes, isPacked: true);
561 info.CanBeGlobal = true;
562 return;
563 }
564 else if (C.getLangOpts().ObjC &&
565 CGM.getLangOpts().getGC() == LangOptions::NonGC)
566 info.HasCapturedVariableLayout = true;
567
568 if (block->doesNotEscape())
569 info.NoEscape = true;
570
571 // Collect the layout chunks.
572 SmallVector<BlockLayoutChunk, 16> layout;
573 layout.reserve(N: block->capturesCXXThis() +
574 (block->capture_end() - block->capture_begin()));
575
576 CharUnits maxFieldAlign;
577
578 // First, 'this'.
579 if (block->capturesCXXThis()) {
580 assert(CGF && CGF->CurFuncDecl && isa<CXXMethodDecl>(CGF->CurFuncDecl) &&
581 "Can't capture 'this' outside a method");
582 QualType thisType = cast<CXXMethodDecl>(Val: CGF->CurFuncDecl)->getThisType();
583
584 // Theoretically, this could be in a different address space, so
585 // don't assume standard pointer size/align.
586 llvm::Type *llvmType = CGM.getTypes().ConvertType(T: thisType);
587 auto TInfo = CGM.getContext().getTypeInfoInChars(T: thisType);
588 maxFieldAlign = std::max(a: maxFieldAlign, b: TInfo.Align);
589
590 addBlockLayout(align: TInfo.Align, size: TInfo.Width, capture: nullptr, type: llvmType, fieldType: thisType,
591 Layout&: layout, Info&: info, CGM);
592 }
593
594 // Next, all the block captures.
595 for (const auto &CI : block->captures()) {
596 const VarDecl *variable = CI.getVariable();
597
598 if (CI.isEscapingByref()) {
599 // Just use void* instead of a pointer to the byref type.
600 CharUnits align = CGM.getPointerAlign();
601 maxFieldAlign = std::max(a: maxFieldAlign, b: align);
602
603 // Since a __block variable cannot be captured by lambdas, its type and
604 // the capture field type should always match.
605 assert(CGF && getCaptureFieldType(*CGF, CI) == variable->getType() &&
606 "capture type differs from the variable type");
607 addBlockLayout(align, CGM.getPointerSize(), &CI, CGM.VoidPtrTy,
608 variable->getType(), layout, info, CGM);
609 continue;
610 }
611
612 // Otherwise, build a layout chunk with the size and alignment of
613 // the declaration.
614 if (llvm::Constant *constant = tryCaptureAsConstant(CGM, CGF, var: variable)) {
615 info.SortedCaptures.push_back(
616 Elt: CGBlockInfo::Capture::makeConstant(value: constant, Cap: &CI));
617 continue;
618 }
619
620 QualType VT = getCaptureFieldType(CGF: *CGF, CI);
621
622 if (CGM.getLangOpts().CPlusPlus)
623 if (const CXXRecordDecl *record = VT->getAsCXXRecordDecl())
624 if (CI.hasCopyExpr() || !record->hasTrivialDestructor()) {
625 info.HasCXXObject = true;
626 if (!record->isExternallyVisible())
627 info.CapturesNonExternalType = true;
628 }
629
630 CharUnits size = C.getTypeSizeInChars(T: VT);
631 CharUnits align = C.getDeclAlign(variable);
632
633 maxFieldAlign = std::max(a: maxFieldAlign, b: align);
634
635 llvm::Type *llvmType =
636 CGM.getTypes().ConvertTypeForMem(T: VT);
637
638 addBlockLayout(align, size, capture: &CI, type: llvmType, fieldType: VT, Layout&: layout, Info&: info, CGM);
639 }
640
641 // If that was everything, we're done here.
642 if (layout.empty()) {
643 info.StructureType =
644 llvm::StructType::get(Context&: CGM.getLLVMContext(), Elements: elementTypes, isPacked: true);
645 info.CanBeGlobal = true;
646 info.buildCaptureMap();
647 return;
648 }
649
650 // Sort the layout by alignment. We have to use a stable sort here
651 // to get reproducible results. There should probably be an
652 // llvm::array_pod_stable_sort.
653 llvm::stable_sort(Range&: layout);
654
655 // Needed for blocks layout info.
656 info.BlockHeaderForcedGapOffset = info.BlockSize;
657 info.BlockHeaderForcedGapSize = CharUnits::Zero();
658
659 CharUnits &blockSize = info.BlockSize;
660 info.BlockAlign = std::max(a: maxFieldAlign, b: info.BlockAlign);
661
662 // Assuming that the first byte in the header is maximally aligned,
663 // get the alignment of the first byte following the header.
664 CharUnits endAlign = getLowBit(v: blockSize);
665
666 // If the end of the header isn't satisfactorily aligned for the
667 // maximum thing, look for things that are okay with the header-end
668 // alignment, and keep appending them until we get something that's
669 // aligned right. This algorithm is only guaranteed optimal if
670 // that condition is satisfied at some point; otherwise we can get
671 // things like:
672 // header // next byte has alignment 4
673 // something_with_size_5; // next byte has alignment 1
674 // something_with_alignment_8;
675 // which has 7 bytes of padding, as opposed to the naive solution
676 // which might have less (?).
677 if (endAlign < maxFieldAlign) {
678 SmallVectorImpl<BlockLayoutChunk>::iterator
679 li = layout.begin() + 1, le = layout.end();
680
681 // Look for something that the header end is already
682 // satisfactorily aligned for.
683 for (; li != le && endAlign < li->Alignment; ++li)
684 ;
685
686 // If we found something that's naturally aligned for the end of
687 // the header, keep adding things...
688 if (li != le) {
689 SmallVectorImpl<BlockLayoutChunk>::iterator first = li;
690 for (; li != le; ++li) {
691 assert(endAlign >= li->Alignment);
692
693 li->setIndex(info, index: elementTypes.size(), offset: blockSize);
694 elementTypes.push_back(Elt: li->Type);
695 blockSize += li->Size;
696 endAlign = getLowBit(v: blockSize);
697
698 // ...until we get to the alignment of the maximum field.
699 if (endAlign >= maxFieldAlign) {
700 ++li;
701 break;
702 }
703 }
704 // Don't re-append everything we just appended.
705 layout.erase(CS: first, CE: li);
706 }
707 }
708
709 assert(endAlign == getLowBit(blockSize));
710
  // At this point, we just have to add padding if the end align still
  // isn't aligned right.
  if (endAlign < maxFieldAlign) {
    CharUnits newBlockSize = blockSize.alignTo(maxFieldAlign);
    CharUnits padding = newBlockSize - blockSize;

    // If we haven't yet added any fields, remember that there was an
    // initial gap; this needs to go into the block layout bitmap.
    if (blockSize == info.BlockHeaderForcedGapOffset) {
      info.BlockHeaderForcedGapSize = padding;
    }

    elementTypes.push_back(llvm::ArrayType::get(CGM.Int8Ty,
                                                padding.getQuantity()));
    blockSize = newBlockSize;
    endAlign = getLowBit(blockSize); // might be > maxFieldAlign
  }
728
729 assert(endAlign >= maxFieldAlign);
730 assert(endAlign == getLowBit(blockSize));
731 // Slam everything else on now. This works because they have
732 // strictly decreasing alignment and we expect that size is always a
733 // multiple of alignment.
  for (SmallVectorImpl<BlockLayoutChunk>::iterator
         li = layout.begin(), le = layout.end(); li != le; ++li) {
    if (endAlign < li->Alignment) {
      // size may not be a multiple of alignment. This can only happen with
      // an over-aligned variable. We will be adding a padding field to
      // make the size a multiple of alignment.
      CharUnits padding = li->Alignment - endAlign;
      elementTypes.push_back(llvm::ArrayType::get(CGM.Int8Ty,
                                                  padding.getQuantity()));
      blockSize += padding;
      endAlign = getLowBit(blockSize);
    }
    assert(endAlign >= li->Alignment);
    li->setIndex(info, elementTypes.size(), blockSize);
    elementTypes.push_back(li->Type);
    blockSize += li->Size;
    endAlign = getLowBit(blockSize);
  }
752
753 info.buildCaptureMap();
754 info.StructureType =
755 llvm::StructType::get(Context&: CGM.getLLVMContext(), Elements: elementTypes, isPacked: true);
756}
757
758/// Emit a block literal expression in the current function.
759llvm::Value *CodeGenFunction::EmitBlockLiteral(const BlockExpr *blockExpr) {
760 // If the block has no captures, we won't have a pre-computed
761 // layout for it.
762 if (!blockExpr->getBlockDecl()->hasCaptures())
763 // The block literal is emitted as a global variable, and the block invoke
764 // function has to be extracted from its initializer.
765 if (llvm::Constant *Block = CGM.getAddrOfGlobalBlockIfEmitted(BE: blockExpr))
766 return Block;
767
768 CGBlockInfo blockInfo(blockExpr->getBlockDecl(), CurFn->getName());
769 computeBlockInfo(CGM, CGF: this, info&: blockInfo);
770 blockInfo.BlockExpression = blockExpr;
771 if (!blockInfo.CanBeGlobal)
772 blockInfo.LocalAddress = CreateTempAlloca(Ty: blockInfo.StructureType,
773 align: blockInfo.BlockAlign, Name: "block");
774 return EmitBlockLiteral(Info: blockInfo);
775}
776
777llvm::Value *CodeGenFunction::EmitBlockLiteral(const CGBlockInfo &blockInfo) {
778 bool IsOpenCL = CGM.getContext().getLangOpts().OpenCL;
779 auto GenVoidPtrTy =
780 IsOpenCL ? CGM.getOpenCLRuntime().getGenericVoidPointerType() : VoidPtrTy;
781 LangAS GenVoidPtrAddr = IsOpenCL ? LangAS::opencl_generic : LangAS::Default;
782 auto GenVoidPtrSize = CharUnits::fromQuantity(
783 Quantity: CGM.getTarget().getPointerWidth(AddrSpace: GenVoidPtrAddr) / 8);
784 // Using the computed layout, generate the actual block function.
785 bool isLambdaConv = blockInfo.getBlockDecl()->isConversionFromLambda();
786 CodeGenFunction BlockCGF{CGM, true};
787 BlockCGF.SanOpts = SanOpts;
788 auto *InvokeFn = BlockCGF.GenerateBlockFunction(
789 GD: CurGD, Info: blockInfo, ldm: LocalDeclMap, IsLambdaConversionToBlock: isLambdaConv, BuildGlobalBlock: blockInfo.CanBeGlobal);
790 auto *blockFn = llvm::ConstantExpr::getPointerCast(C: InvokeFn, Ty: GenVoidPtrTy);
791
792 // If there is nothing to capture, we can emit this as a global block.
793 if (blockInfo.CanBeGlobal)
794 return CGM.getAddrOfGlobalBlockIfEmitted(BE: blockInfo.BlockExpression);
795
796 // Otherwise, we have to emit this as a local block.
797
798 RawAddress blockAddr = blockInfo.LocalAddress;
799 assert(blockAddr.isValid() && "block has no address!");
800
801 llvm::Constant *isa;
802 llvm::Constant *descriptor;
803 BlockFlags flags;
804 if (!IsOpenCL) {
    // If the block is non-escaping, set field 'isa' to NSConcreteGlobalBlock
    // and set the BLOCK_IS_GLOBAL bit of field 'flags'. Copying a non-escaping
    // block just returns the original block and releasing it is a no-op.
808 llvm::Constant *blockISA = blockInfo.NoEscape
809 ? CGM.getNSConcreteGlobalBlock()
810 : CGM.getNSConcreteStackBlock();
811 isa = blockISA;
812
813 // Build the block descriptor.
814 descriptor = buildBlockDescriptor(CGM, blockInfo);
815
816 // Compute the initial on-stack block flags.
817 flags = BLOCK_HAS_SIGNATURE;
818 if (blockInfo.HasCapturedVariableLayout)
819 flags |= BLOCK_HAS_EXTENDED_LAYOUT;
820 if (blockInfo.NeedsCopyDispose)
821 flags |= BLOCK_HAS_COPY_DISPOSE;
822 if (blockInfo.HasCXXObject)
823 flags |= BLOCK_HAS_CXX_OBJ;
824 if (blockInfo.UsesStret)
825 flags |= BLOCK_USE_STRET;
826 if (blockInfo.NoEscape)
827 flags |= BLOCK_IS_NOESCAPE | BLOCK_IS_GLOBAL;
828 }
829
830 auto projectField = [&](unsigned index, const Twine &name) -> Address {
831 return Builder.CreateStructGEP(Addr: blockAddr, Index: index, Name: name);
832 };
833 auto storeField = [&](llvm::Value *value, unsigned index, const Twine &name) {
834 Builder.CreateStore(Val: value, Addr: projectField(index, name));
835 };
836
837 // Initialize the block header.
838 {
839 // We assume all the header fields are densely packed.
840 unsigned index = 0;
841 CharUnits offset;
842 auto addHeaderField = [&](llvm::Value *value, CharUnits size,
843 const Twine &name) {
844 storeField(value, index, name);
845 offset += size;
846 index++;
847 };
848
849 if (!IsOpenCL) {
850 addHeaderField(isa, getPointerSize(), "block.isa");
851 addHeaderField(llvm::ConstantInt::get(Ty: IntTy, V: flags.getBitMask()),
852 getIntSize(), "block.flags");
853 addHeaderField(llvm::ConstantInt::get(Ty: IntTy, V: 0), getIntSize(),
854 "block.reserved");
855 } else {
856 addHeaderField(
857 llvm::ConstantInt::get(Ty: IntTy, V: blockInfo.BlockSize.getQuantity()),
858 getIntSize(), "block.size");
859 addHeaderField(
860 llvm::ConstantInt::get(Ty: IntTy, V: blockInfo.BlockAlign.getQuantity()),
861 getIntSize(), "block.align");
862 }
863 addHeaderField(blockFn, GenVoidPtrSize, "block.invoke");
864 if (!IsOpenCL)
865 addHeaderField(descriptor, getPointerSize(), "block.descriptor");
866 else if (auto *Helper =
867 CGM.getTargetCodeGenInfo().getTargetOpenCLBlockHelper()) {
868 for (auto I : Helper->getCustomFieldValues(CGF&: *this, Info: blockInfo)) {
869 addHeaderField(
870 I.first,
871 CharUnits::fromQuantity(
872 Quantity: CGM.getDataLayout().getTypeAllocSize(Ty: I.first->getType())),
873 I.second);
874 }
875 }
876 }
877
878 // Finally, capture all the values into the block.
879 const BlockDecl *blockDecl = blockInfo.getBlockDecl();
880
881 // First, 'this'.
882 if (blockDecl->capturesCXXThis()) {
883 Address addr =
884 projectField(blockInfo.CXXThisIndex, "block.captured-this.addr");
885 Builder.CreateStore(Val: LoadCXXThis(), Addr: addr);
886 }
887
888 // Next, captured variables.
889 for (const auto &CI : blockDecl->captures()) {
890 const VarDecl *variable = CI.getVariable();
891 const CGBlockInfo::Capture &capture = blockInfo.getCapture(var: variable);
892
893 // Ignore constant captures.
894 if (capture.isConstant()) continue;
895
896 QualType type = capture.fieldType();
897
898 // This will be a [[type]]*, except that a byref entry will just be
899 // an i8**.
900 Address blockField = projectField(capture.getIndex(), "block.captured");
901
902 // Compute the address of the thing we're going to move into the
903 // block literal.
904 Address src = Address::invalid();
905
906 if (blockDecl->isConversionFromLambda()) {
907 // The lambda capture in a lambda's conversion-to-block-pointer is
908 // special; we'll simply emit it directly.
909 src = Address::invalid();
910 } else if (CI.isEscapingByref()) {
911 if (BlockInfo && CI.isNested()) {
912 // We need to use the capture from the enclosing block.
913 const CGBlockInfo::Capture &enclosingCapture =
914 BlockInfo->getCapture(var: variable);
915
916 // This is a [[type]]*, except that a byref entry will just be an i8**.
917 src = Builder.CreateStructGEP(Addr: LoadBlockStruct(),
918 Index: enclosingCapture.getIndex(),
919 Name: "block.capture.addr");
920 } else {
921 auto I = LocalDeclMap.find(variable);
922 assert(I != LocalDeclMap.end());
923 src = I->second;
924 }
    } else {
      DeclRefExpr declRef(getContext(), const_cast<VarDecl *>(variable),
                          /*RefersToEnclosingVariableOrCapture*/ CI.isNested(),
                          type.getNonReferenceType(), VK_LValue,
                          SourceLocation());
      src = EmitDeclRefLValue(&declRef).getAddress(*this);
    }
932
933 // For byrefs, we just write the pointer to the byref struct into
934 // the block field. There's no need to chase the forwarding
935 // pointer at this point, since we're building something that will
936 // live a shorter life than the stack byref anyway.
937 if (CI.isEscapingByref()) {
938 // Get a void* that points to the byref struct.
939 llvm::Value *byrefPointer;
940 if (CI.isNested())
941 byrefPointer = Builder.CreateLoad(Addr: src, Name: "byref.capture");
942 else
943 byrefPointer = src.emitRawPointer(CGF&: *this);
944
945 // Write that void* into the capture field.
946 Builder.CreateStore(Val: byrefPointer, Addr: blockField);
947
948 // If we have a copy constructor, evaluate that into the block field.
949 } else if (const Expr *copyExpr = CI.getCopyExpr()) {
950 if (blockDecl->isConversionFromLambda()) {
951 // If we have a lambda conversion, emit the expression
952 // directly into the block instead.
953 AggValueSlot Slot =
954 AggValueSlot::forAddr(addr: blockField, quals: Qualifiers(),
955 isDestructed: AggValueSlot::IsDestructed,
956 needsGC: AggValueSlot::DoesNotNeedGCBarriers,
957 isAliased: AggValueSlot::IsNotAliased,
958 mayOverlap: AggValueSlot::DoesNotOverlap);
959 EmitAggExpr(E: copyExpr, AS: Slot);
960 } else {
961 EmitSynthesizedCXXCopyCtor(Dest: blockField, Src: src, Exp: copyExpr);
962 }
963
964 // If it's a reference variable, copy the reference into the block field.
965 } else if (type->getAs<ReferenceType>()) {
966 Builder.CreateStore(Val: src.emitRawPointer(CGF&: *this), Addr: blockField);
967
968 // If type is const-qualified, copy the value into the block field.
969 } else if (type.isConstQualified() &&
970 type.getObjCLifetime() == Qualifiers::OCL_Strong &&
971 CGM.getCodeGenOpts().OptimizationLevel != 0) {
972 llvm::Value *value = Builder.CreateLoad(Addr: src, Name: "captured");
973 Builder.CreateStore(Val: value, Addr: blockField);
974
975 // If this is an ARC __strong block-pointer variable, don't do a
976 // block copy.
977 //
978 // TODO: this can be generalized into the normal initialization logic:
979 // we should never need to do a block-copy when initializing a local
980 // variable, because the local variable's lifetime should be strictly
981 // contained within the stack block's.
982 } else if (type.getObjCLifetime() == Qualifiers::OCL_Strong &&
983 type->isBlockPointerType()) {
984 // Load the block and do a simple retain.
985 llvm::Value *value = Builder.CreateLoad(Addr: src, Name: "block.captured_block");
986 value = EmitARCRetainNonBlock(value);
987
988 // Do a primitive store to the block field.
989 Builder.CreateStore(Val: value, Addr: blockField);
990
991 // Otherwise, fake up a POD copy into the block field.
992 } else {
993 // Fake up a new variable so that EmitScalarInit doesn't think
994 // we're referring to the variable in its own initializer.
995 ImplicitParamDecl BlockFieldPseudoVar(getContext(), type,
996 ImplicitParamKind::Other);
997
998 // We use one of these or the other depending on whether the
999 // reference is nested.
1000 DeclRefExpr declRef(getContext(), const_cast<VarDecl *>(variable),
1001 /*RefersToEnclosingVariableOrCapture*/ CI.isNested(),
1002 type, VK_LValue, SourceLocation());
1003
1004 ImplicitCastExpr l2r(ImplicitCastExpr::OnStack, type, CK_LValueToRValue,
1005 &declRef, VK_PRValue, FPOptionsOverride());
1006 // FIXME: Pass a specific location for the expr init so that the store is
1007 // attributed to a reasonable location - otherwise it may be attributed to
1008 // locations of subexpressions in the initialization.
1009 EmitExprAsInit(&l2r, &BlockFieldPseudoVar,
1010 MakeAddrLValue(Addr: blockField, T: type, Source: AlignmentSource::Decl),
1011 /*captured by init*/ false);
1012 }
1013
1014 // Push a cleanup for the capture if necessary.
1015 if (!blockInfo.NoEscape && !blockInfo.NeedsCopyDispose)
1016 continue;
1017
1018 // Ignore __block captures; there's nothing special in the on-stack block
1019 // that we need to do for them.
1020 if (CI.isByRef())
1021 continue;
1022
1023 // Ignore objects that aren't destructed.
1024 QualType::DestructionKind dtorKind = type.isDestructedType();
1025 if (dtorKind == QualType::DK_none)
1026 continue;
1027
1028 CodeGenFunction::Destroyer *destroyer;
1029
    // Block captures count as local values and have imprecise semantics.
    // They also can't be arrays, so we don't need to worry about that.
1032 //
1033 // For const-qualified captures, emit clang.arc.use to ensure the captured
1034 // object doesn't get released while we are still depending on its validity
1035 // within the block.
1036 if (type.isConstQualified() &&
1037 type.getObjCLifetime() == Qualifiers::OCL_Strong &&
1038 CGM.getCodeGenOpts().OptimizationLevel != 0) {
1039 assert(CGM.getLangOpts().ObjCAutoRefCount &&
1040 "expected ObjC ARC to be enabled");
1041 destroyer = emitARCIntrinsicUse;
1042 } else if (dtorKind == QualType::DK_objc_strong_lifetime) {
1043 destroyer = destroyARCStrongImprecise;
1044 } else {
1045 destroyer = getDestroyer(destructionKind: dtorKind);
1046 }
1047
1048 CleanupKind cleanupKind = NormalCleanup;
1049 bool useArrayEHCleanup = needsEHCleanup(kind: dtorKind);
1050 if (useArrayEHCleanup)
1051 cleanupKind = NormalAndEHCleanup;
1052
1053 // Extend the lifetime of the capture to the end of the scope enclosing the
1054 // block expression except when the block decl is in the list of RetExpr's
1055 // cleanup objects, in which case its lifetime ends after the full
1056 // expression.
1057 auto IsBlockDeclInRetExpr = [&]() {
1058 auto *EWC = llvm::dyn_cast_or_null<ExprWithCleanups>(Val: RetExpr);
1059 if (EWC)
1060 for (auto &C : EWC->getObjects())
1061 if (auto *BD = C.dyn_cast<BlockDecl *>())
1062 if (BD == blockDecl)
1063 return true;
1064 return false;
1065 };
1066
1067 if (IsBlockDeclInRetExpr())
1068 pushDestroy(kind: cleanupKind, addr: blockField, type, destroyer, useEHCleanupForArray: useArrayEHCleanup);
1069 else
1070 pushLifetimeExtendedDestroy(kind: cleanupKind, addr: blockField, type, destroyer,
1071 useEHCleanupForArray: useArrayEHCleanup);
1072 }
1073
1074 // Cast to the converted block-pointer type, which happens (somewhat
1075 // unfortunately) to be a pointer to function type.
1076 llvm::Value *result = Builder.CreatePointerCast(
1077 V: blockAddr.getPointer(), DestTy: ConvertType(blockInfo.getBlockExpr()->getType()));
1078
1079 if (IsOpenCL) {
1080 CGM.getOpenCLRuntime().recordBlockInfo(E: blockInfo.BlockExpression, InvokeF: InvokeFn,
1081 Block: result, BlockTy: blockInfo.StructureType);
1082 }
1083
1084 return result;
1085}
1086
1087
1088llvm::Type *CodeGenModule::getBlockDescriptorType() {
1089 if (BlockDescriptorType)
1090 return BlockDescriptorType;
1091
1092 llvm::Type *UnsignedLongTy =
1093 getTypes().ConvertType(T: getContext().UnsignedLongTy);
1094
  // struct __block_descriptor {
  //   unsigned long reserved;
  //   unsigned long block_size;
  //
  //   // later, the following will be added
  //
  //   struct {
  //     void (*copyHelper)();
  //     void (*disposeHelper)();
  //   } helpers;                // !!! optional
  //
  //   const char *signature;   // the block signature
  //   const char *layout;      // reserved
  // };
  BlockDescriptorType = llvm::StructType::create(
      "struct.__block_descriptor", UnsignedLongTy, UnsignedLongTy);
1111
1112 // Now form a pointer to that.
1113 unsigned AddrSpace = 0;
1114 if (getLangOpts().OpenCL)
1115 AddrSpace = getContext().getTargetAddressSpace(AS: LangAS::opencl_constant);
1116 BlockDescriptorType = llvm::PointerType::get(ElementType: BlockDescriptorType, AddressSpace: AddrSpace);
1117 return BlockDescriptorType;
1118}
1119
1120llvm::Type *CodeGenModule::getGenericBlockLiteralType() {
1121 if (GenericBlockLiteralType)
1122 return GenericBlockLiteralType;
1123
1124 llvm::Type *BlockDescPtrTy = getBlockDescriptorType();
1125
1126 if (getLangOpts().OpenCL) {
1127 // struct __opencl_block_literal_generic {
1128 // int __size;
1129 // int __align;
1130 // __generic void *__invoke;
1131 // /* custom fields */
1132 // };
1133 SmallVector<llvm::Type *, 8> StructFields(
1134 {IntTy, IntTy, getOpenCLRuntime().getGenericVoidPointerType()});
1135 if (auto *Helper = getTargetCodeGenInfo().getTargetOpenCLBlockHelper()) {
1136 llvm::append_range(C&: StructFields, R: Helper->getCustomFieldTypes());
1137 }
1138 GenericBlockLiteralType = llvm::StructType::create(
1139 Elements: StructFields, Name: "struct.__opencl_block_literal_generic");
1140 } else {
1141 // struct __block_literal_generic {
1142 // void *__isa;
1143 // int __flags;
1144 // int __reserved;
1145 // void (*__invoke)(void *);
1146 // struct __block_descriptor *__descriptor;
1147 // };
1148 GenericBlockLiteralType =
1149 llvm::StructType::create(Name: "struct.__block_literal_generic", elt1: VoidPtrTy,
1150 elts: IntTy, elts: IntTy, elts: VoidPtrTy, elts: BlockDescPtrTy);
1151 }
1152
1153 return GenericBlockLiteralType;
1154}
1155
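// Illustrative sketch (an assumption for exposition, not a comment from the
// original source): a call such as 'b(1, 2)' on a block pointer 'b' is
// lowered by loading the 'invoke' field out of the generic block literal
// (field 2 for OpenCL, field 3 otherwise) and calling it with the block
// pointer itself prepended as the first argument.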
1156RValue CodeGenFunction::EmitBlockCallExpr(const CallExpr *E,
1157 ReturnValueSlot ReturnValue) {
1158 const auto *BPT = E->getCallee()->getType()->castAs<BlockPointerType>();
1159 llvm::Value *BlockPtr = EmitScalarExpr(E: E->getCallee());
1160 llvm::Type *GenBlockTy = CGM.getGenericBlockLiteralType();
1161 llvm::Value *Func = nullptr;
1162 QualType FnType = BPT->getPointeeType();
1163 ASTContext &Ctx = getContext();
1164 CallArgList Args;
1165
1166 if (getLangOpts().OpenCL) {
1167 // For OpenCL, BlockPtr is already casted to generic block literal.
1168
1169 // First argument of a block call is a generic block literal casted to
1170 // generic void pointer, i.e. i8 addrspace(4)*
1171 llvm::Type *GenericVoidPtrTy =
1172 CGM.getOpenCLRuntime().getGenericVoidPointerType();
1173 llvm::Value *BlockDescriptor = Builder.CreatePointerCast(
1174 V: BlockPtr, DestTy: GenericVoidPtrTy);
1175 QualType VoidPtrQualTy = Ctx.getPointerType(
1176 Ctx.getAddrSpaceQualType(T: Ctx.VoidTy, AddressSpace: LangAS::opencl_generic));
1177 Args.add(rvalue: RValue::get(V: BlockDescriptor), type: VoidPtrQualTy);
1178 // And the rest of the arguments.
1179 EmitCallArgs(Args, Prototype: FnType->getAs<FunctionProtoType>(), ArgRange: E->arguments());
1180
1181 // We *can* call the block directly unless it is a function argument.
1182 if (!isa<ParmVarDecl>(Val: E->getCalleeDecl()))
1183 Func = CGM.getOpenCLRuntime().getInvokeFunction(E: E->getCallee());
1184 else {
1185 llvm::Value *FuncPtr = Builder.CreateStructGEP(Ty: GenBlockTy, Ptr: BlockPtr, Idx: 2);
1186 Func = Builder.CreateAlignedLoad(GenericVoidPtrTy, FuncPtr,
1187 getPointerAlign());
1188 }
1189 } else {
1190 // Bitcast the block literal to a generic block literal.
1191 BlockPtr =
1192 Builder.CreatePointerCast(V: BlockPtr, DestTy: UnqualPtrTy, Name: "block.literal");
1193 // Get pointer to the block invoke function
1194 llvm::Value *FuncPtr = Builder.CreateStructGEP(Ty: GenBlockTy, Ptr: BlockPtr, Idx: 3);
1195
1196 // First argument is a block literal casted to a void pointer
1197 BlockPtr = Builder.CreatePointerCast(V: BlockPtr, DestTy: VoidPtrTy);
1198 Args.add(rvalue: RValue::get(V: BlockPtr), type: Ctx.VoidPtrTy);
1199 // And the rest of the arguments.
1200 EmitCallArgs(Args, Prototype: FnType->getAs<FunctionProtoType>(), ArgRange: E->arguments());
1201
1202 // Load the function.
1203 Func = Builder.CreateAlignedLoad(VoidPtrTy, FuncPtr, getPointerAlign());
1204 }
1205
1206 const FunctionType *FuncTy = FnType->castAs<FunctionType>();
1207 const CGFunctionInfo &FnInfo =
1208 CGM.getTypes().arrangeBlockFunctionCall(args: Args, type: FuncTy);
1209
1210 // Prepare the callee.
1211 CGCallee Callee(CGCalleeInfo(), Func);
1212
1213 // And call the block.
1214 return EmitCall(CallInfo: FnInfo, Callee, ReturnValue, Args);
1215}
1216
1217Address CodeGenFunction::GetAddrOfBlockDecl(const VarDecl *variable) {
1218 assert(BlockInfo && "evaluating block ref without block information?");
1219 const CGBlockInfo::Capture &capture = BlockInfo->getCapture(var: variable);
1220
1221 // Handle constant captures.
1222 if (capture.isConstant()) return LocalDeclMap.find(variable)->second;
1223
1224 Address addr = Builder.CreateStructGEP(Addr: LoadBlockStruct(), Index: capture.getIndex(),
1225 Name: "block.capture.addr");
1226
1227 if (variable->isEscapingByref()) {
1228 // addr should be a void** right now. Load, then cast the result
1229 // to byref*.
1230
1231 auto &byrefInfo = getBlockByrefInfo(var: variable);
1232 addr = Address(Builder.CreateLoad(Addr: addr), byrefInfo.Type,
1233 byrefInfo.ByrefAlignment);
1234
1235 addr = emitBlockByrefAddress(addr, byrefInfo, /*follow*/ true,
1236 variable->getName());
1237 }
1238
1239 assert((!variable->isNonEscapingByref() ||
1240 capture.fieldType()->isReferenceType()) &&
1241 "the capture field of a non-escaping variable should have a "
1242 "reference type");
1243 if (capture.fieldType()->isReferenceType())
1244 addr = EmitLoadOfReference(RefLVal: MakeAddrLValue(Addr: addr, T: capture.fieldType()));
1245
1246 return addr;
1247}
1248
1249void CodeGenModule::setAddrOfGlobalBlock(const BlockExpr *BE,
1250 llvm::Constant *Addr) {
1251 bool Ok = EmittedGlobalBlocks.insert(KV: std::make_pair(x&: BE, y&: Addr)).second;
1252 (void)Ok;
1253 assert(Ok && "Trying to replace an already-existing global block!");
1254}
1255
1256llvm::Constant *
1257CodeGenModule::GetAddrOfGlobalBlock(const BlockExpr *BE,
1258 StringRef Name) {
1259 if (llvm::Constant *Block = getAddrOfGlobalBlockIfEmitted(BE))
1260 return Block;
1261
1262 CGBlockInfo blockInfo(BE->getBlockDecl(), Name);
1263 blockInfo.BlockExpression = BE;
1264
1265 // Compute information about the layout, etc., of this block.
1266 computeBlockInfo(CGM&: *this, CGF: nullptr, info&: blockInfo);
1267
1268 // Using that metadata, generate the actual block function.
1269 {
1270 CodeGenFunction::DeclMapTy LocalDeclMap;
1271 CodeGenFunction(*this).GenerateBlockFunction(
1272 GD: GlobalDecl(), Info: blockInfo, ldm: LocalDeclMap,
1273 /*IsLambdaConversionToBlock*/ false, /*BuildGlobalBlock*/ true);
1274 }
1275
1276 return getAddrOfGlobalBlockIfEmitted(BE);
1277}
1278
1279static llvm::Constant *buildGlobalBlock(CodeGenModule &CGM,
1280 const CGBlockInfo &blockInfo,
1281 llvm::Constant *blockFn) {
1282 assert(blockInfo.CanBeGlobal);
1283 // Callers should detect this case on their own: calling this function
1284 // generally requires computing layout information, which is a waste of time
1285 // if we've already emitted this block.
1286 assert(!CGM.getAddrOfGlobalBlockIfEmitted(blockInfo.BlockExpression) &&
1287 "Refusing to re-emit a global block.");
1288
1289 // Generate the constants for the block literal initializer.
1290 ConstantInitBuilder builder(CGM);
1291 auto fields = builder.beginStruct();
1292
1293 bool IsOpenCL = CGM.getLangOpts().OpenCL;
1294 bool IsWindows = CGM.getTarget().getTriple().isOSWindows();
1295 if (!IsOpenCL) {
1296 // isa
1297 if (IsWindows)
1298 fields.addNullPointer(ptrTy: CGM.Int8PtrPtrTy);
1299 else
1300 fields.add(value: CGM.getNSConcreteGlobalBlock());
1301
1302 // __flags
1303 BlockFlags flags = BLOCK_IS_GLOBAL | BLOCK_HAS_SIGNATURE;
1304 if (blockInfo.UsesStret)
1305 flags |= BLOCK_USE_STRET;
1306
1307 fields.addInt(intTy: CGM.IntTy, value: flags.getBitMask());
1308
1309 // Reserved
1310 fields.addInt(intTy: CGM.IntTy, value: 0);
1311 } else {
1312 fields.addInt(intTy: CGM.IntTy, value: blockInfo.BlockSize.getQuantity());
1313 fields.addInt(intTy: CGM.IntTy, value: blockInfo.BlockAlign.getQuantity());
1314 }
1315
1316 // Function
1317 fields.add(value: blockFn);
1318
1319 if (!IsOpenCL) {
1320 // Descriptor
1321 fields.add(value: buildBlockDescriptor(CGM, blockInfo));
1322 } else if (auto *Helper =
1323 CGM.getTargetCodeGenInfo().getTargetOpenCLBlockHelper()) {
1324 for (auto *I : Helper->getCustomFieldValues(CGM, Info: blockInfo)) {
1325 fields.add(value: I);
1326 }
1327 }
1328
1329 unsigned AddrSpace = 0;
1330 if (CGM.getContext().getLangOpts().OpenCL)
1331 AddrSpace = CGM.getContext().getTargetAddressSpace(AS: LangAS::opencl_global);
1332
1333 llvm::GlobalVariable *literal = fields.finishAndCreateGlobal(
1334 args: "__block_literal_global", args: blockInfo.BlockAlign,
1335 /*constant*/ args: !IsWindows, args: llvm::GlobalVariable::InternalLinkage, args&: AddrSpace);
1336
1337 literal->addAttribute(Kind: "objc_arc_inert");
1338
1339 // Windows does not allow globals to be initialised to point to globals in
1340 // different DLLs. Any such variables must run code to initialise them.
1341 if (IsWindows) {
1342 auto *Init = llvm::Function::Create(Ty: llvm::FunctionType::get(Result: CGM.VoidTy,
1343 isVarArg: {}), Linkage: llvm::GlobalValue::InternalLinkage, N: ".block_isa_init",
1344 M: &CGM.getModule());
1345 llvm::IRBuilder<> b(llvm::BasicBlock::Create(Context&: CGM.getLLVMContext(), Name: "entry",
1346 Parent: Init));
1347 b.CreateAlignedStore(Val: CGM.getNSConcreteGlobalBlock(),
1348 Ptr: b.CreateStructGEP(Ty: literal->getValueType(), Ptr: literal, Idx: 0),
1349 Align: CGM.getPointerAlign().getAsAlign());
1350 b.CreateRetVoid();
1351 // We can't use the normal LLVM global initialisation array, because we
1352 // need to specify that this runs early in library initialisation.
1353 auto *InitVar = new llvm::GlobalVariable(CGM.getModule(), Init->getType(),
1354 /*isConstant*/true, llvm::GlobalValue::InternalLinkage,
1355 Init, ".block_isa_init_ptr");
1356 InitVar->setSection(".CRT$XCLa");
1357 CGM.addUsedGlobal(GV: InitVar);
1358 }
1359
1360 // Return a constant of the appropriately-casted type.
1361 llvm::Type *RequiredType =
1362 CGM.getTypes().ConvertType(T: blockInfo.getBlockExpr()->getType());
1363 llvm::Constant *Result =
1364 llvm::ConstantExpr::getPointerCast(C: literal, Ty: RequiredType);
1365 CGM.setAddrOfGlobalBlock(BE: blockInfo.BlockExpression, Addr: Result);
1366 if (CGM.getContext().getLangOpts().OpenCL)
1367 CGM.getOpenCLRuntime().recordBlockInfo(
1368 E: blockInfo.BlockExpression,
1369 InvokeF: cast<llvm::Function>(Val: blockFn->stripPointerCasts()), Block: Result,
1370 BlockTy: literal->getValueType());
1371 return Result;
1372}
1373
1374void CodeGenFunction::setBlockContextParameter(const ImplicitParamDecl *D,
1375 unsigned argNum,
1376 llvm::Value *arg) {
1377 assert(BlockInfo && "not emitting prologue of block invocation function?!");
1378
1379 // Allocate a stack slot like for any local variable to guarantee optimal
1380 // debug info at -O0. The mem2reg pass will eliminate it when optimizing.
1381 RawAddress alloc = CreateMemTemp(D->getType(), D->getName() + ".addr");
1382 Builder.CreateStore(Val: arg, Addr: alloc);
1383 if (CGDebugInfo *DI = getDebugInfo()) {
1384 if (CGM.getCodeGenOpts().hasReducedDebugInfo()) {
1385 DI->setLocation(D->getLocation());
1386 DI->EmitDeclareOfBlockLiteralArgVariable(
1387 block: *BlockInfo, Name: D->getName(), ArgNo: argNum,
1388 LocalAddr: cast<llvm::AllocaInst>(Val: alloc.getPointer()), Builder);
1389 }
1390 }
1391
1392 SourceLocation StartLoc = BlockInfo->getBlockExpr()->getBody()->getBeginLoc();
1393 ApplyDebugLocation Scope(*this, StartLoc);
1394
1395 // Instead of messing around with LocalDeclMap, just set the value
1396 // directly as BlockPointer.
1397 BlockPointer = Builder.CreatePointerCast(
1398 V: arg,
1399 DestTy: llvm::PointerType::get(
1400 C&: getLLVMContext(),
1401 AddressSpace: getContext().getLangOpts().OpenCL
1402 ? getContext().getTargetAddressSpace(AS: LangAS::opencl_generic)
1403 : 0),
1404 Name: "block");
1405}
1406
1407Address CodeGenFunction::LoadBlockStruct() {
1408 assert(BlockInfo && "not in a block invocation function!");
1409 assert(BlockPointer && "no block pointer set!");
1410 return Address(BlockPointer, BlockInfo->StructureType, BlockInfo->BlockAlign);
1411}
1412
1413llvm::Function *CodeGenFunction::GenerateBlockFunction(
1414 GlobalDecl GD, const CGBlockInfo &blockInfo, const DeclMapTy &ldm,
1415 bool IsLambdaConversionToBlock, bool BuildGlobalBlock) {
1416 const BlockDecl *blockDecl = blockInfo.getBlockDecl();
1417
1418 CurGD = GD;
1419
1420 CurEHLocation = blockInfo.getBlockExpr()->getEndLoc();
1421
1422 BlockInfo = &blockInfo;
1423
1424 // Arrange for local static and local extern declarations to appear
1425 // to be local to this function as well, in case they're directly
1426 // referenced in a block.
1427 for (DeclMapTy::const_iterator i = ldm.begin(), e = ldm.end(); i != e; ++i) {
1428 const auto *var = dyn_cast<VarDecl>(Val: i->first);
1429 if (var && !var->hasLocalStorage())
1430 setAddrOfLocalVar(VD: var, Addr: i->second);
1431 }
1432
1433 // Begin building the function declaration.
1434
1435 // Build the argument list.
1436 FunctionArgList args;
1437
1438 // The first argument is the block pointer. Just take it as a void*
1439 // and cast it later.
1440 QualType selfTy = getContext().VoidPtrTy;
1441
1442  // For OpenCL, the passed block pointer can be a private-AS local variable
1443  // or a global-AS program-scope variable (the cases with and without
1444  // captures, respectively). The generic AS is therefore used so that one
1445  // implementation can accommodate both.
1446 if (getLangOpts().OpenCL)
1447 selfTy = getContext().getPointerType(getContext().getAddrSpaceQualType(
1448 T: getContext().VoidTy, AddressSpace: LangAS::opencl_generic));
1449
1450 const IdentifierInfo *II = &CGM.getContext().Idents.get(Name: ".block_descriptor");
1451
1452 ImplicitParamDecl SelfDecl(getContext(), const_cast<BlockDecl *>(blockDecl),
1453 SourceLocation(), II, selfTy,
1454 ImplicitParamKind::ObjCSelf);
1455 args.push_back(&SelfDecl);
1456
1457 // Now add the rest of the parameters.
1458 args.append(in_start: blockDecl->param_begin(), in_end: blockDecl->param_end());
1459
1460 // Create the function declaration.
1461 const FunctionProtoType *fnType = blockInfo.getBlockExpr()->getFunctionType();
1462 const CGFunctionInfo &fnInfo =
1463 CGM.getTypes().arrangeBlockFunctionDeclaration(type: fnType, args);
1464 if (CGM.ReturnSlotInterferesWithArgs(FI: fnInfo))
1465 blockInfo.UsesStret = true;
1466
1467 llvm::FunctionType *fnLLVMType = CGM.getTypes().GetFunctionType(Info: fnInfo);
1468
1469 StringRef name = CGM.getBlockMangledName(GD, BD: blockDecl);
1470 llvm::Function *fn = llvm::Function::Create(
1471 Ty: fnLLVMType, Linkage: llvm::GlobalValue::InternalLinkage, N: name, M: &CGM.getModule());
1472 CGM.SetInternalFunctionAttributes(GD: blockDecl, F: fn, FI: fnInfo);
1473
1474 if (BuildGlobalBlock) {
1475 auto GenVoidPtrTy = getContext().getLangOpts().OpenCL
1476 ? CGM.getOpenCLRuntime().getGenericVoidPointerType()
1477 : VoidPtrTy;
1478 buildGlobalBlock(CGM, blockInfo,
1479 blockFn: llvm::ConstantExpr::getPointerCast(C: fn, Ty: GenVoidPtrTy));
1480 }
1481
1482 // Begin generating the function.
1483 StartFunction(GD: blockDecl, RetTy: fnType->getReturnType(), Fn: fn, FnInfo: fnInfo, Args: args,
1484 Loc: blockDecl->getLocation(),
1485 StartLoc: blockInfo.getBlockExpr()->getBody()->getBeginLoc());
1486
1487 // Okay. Undo some of what StartFunction did.
1488
1489 // At -O0 we generate an explicit alloca for the BlockPointer, so the RA
1490 // won't delete the dbg.declare intrinsics for captured variables.
1491 llvm::Value *BlockPointerDbgLoc = BlockPointer;
1492 if (CGM.getCodeGenOpts().OptimizationLevel == 0) {
1493 // Allocate a stack slot for it, so we can point the debugger to it
1494 Address Alloca = CreateTempAlloca(BlockPointer->getType(),
1495 getPointerAlign(),
1496 "block.addr");
1497 // Set the DebugLocation to empty, so the store is recognized as a
1498 // frame setup instruction by llvm::DwarfDebug::beginFunction().
1499 auto NL = ApplyDebugLocation::CreateEmpty(CGF&: *this);
1500 Builder.CreateStore(Val: BlockPointer, Addr: Alloca);
1501 BlockPointerDbgLoc = Alloca.emitRawPointer(CGF&: *this);
1502 }
1503
1504 // If we have a C++ 'this' reference, go ahead and force it into
1505 // existence now.
1506 if (blockDecl->capturesCXXThis()) {
1507 Address addr = Builder.CreateStructGEP(
1508 Addr: LoadBlockStruct(), Index: blockInfo.CXXThisIndex, Name: "block.captured-this");
1509 CXXThisValue = Builder.CreateLoad(Addr: addr, Name: "this");
1510 }
1511
1512 // Also force all the constant captures.
1513 for (const auto &CI : blockDecl->captures()) {
1514 const VarDecl *variable = CI.getVariable();
1515 const CGBlockInfo::Capture &capture = blockInfo.getCapture(var: variable);
1516 if (!capture.isConstant()) continue;
1517
1518 CharUnits align = getContext().getDeclAlign(variable);
1519 Address alloca =
1520 CreateMemTemp(variable->getType(), align, "block.captured-const");
1521
1522 Builder.CreateStore(Val: capture.getConstant(), Addr: alloca);
1523
1524 setAddrOfLocalVar(VD: variable, Addr: alloca);
1525 }
1526
1527 // Save a spot to insert the debug information for all the DeclRefExprs.
1528 llvm::BasicBlock *entry = Builder.GetInsertBlock();
1529 llvm::BasicBlock::iterator entry_ptr = Builder.GetInsertPoint();
1530 --entry_ptr;
1531
1532 if (IsLambdaConversionToBlock)
1533 EmitLambdaBlockInvokeBody();
1534 else {
1535 PGO.assignRegionCounters(GD: GlobalDecl(blockDecl), Fn: fn);
1536 incrementProfileCounter(S: blockDecl->getBody());
1537 EmitStmt(S: blockDecl->getBody());
1538 }
1539
1540 // Remember where we were...
1541 llvm::BasicBlock *resume = Builder.GetInsertBlock();
1542
1543 // Go back to the entry.
1544 if (entry_ptr->getNextNonDebugInstruction())
1545 entry_ptr = entry_ptr->getNextNonDebugInstruction()->getIterator();
1546 else
1547 entry_ptr = entry->end();
1548 Builder.SetInsertPoint(TheBB: entry, IP: entry_ptr);
1549
1550 // Emit debug information for all the DeclRefExprs.
1551 // FIXME: also for 'this'
1552 if (CGDebugInfo *DI = getDebugInfo()) {
1553 for (const auto &CI : blockDecl->captures()) {
1554 const VarDecl *variable = CI.getVariable();
1555 DI->EmitLocation(Builder, Loc: variable->getLocation());
1556
1557 if (CGM.getCodeGenOpts().hasReducedDebugInfo()) {
1558 const CGBlockInfo::Capture &capture = blockInfo.getCapture(var: variable);
1559 if (capture.isConstant()) {
1560 auto addr = LocalDeclMap.find(variable)->second;
1561 (void)DI->EmitDeclareOfAutoVariable(
1562 Decl: variable, AI: addr.emitRawPointer(*this), Builder);
1563 continue;
1564 }
1565
1566 DI->EmitDeclareOfBlockDeclRefVariable(
1567 variable, storage: BlockPointerDbgLoc, Builder, blockInfo,
1568 InsertPoint: entry_ptr == entry->end() ? nullptr : &*entry_ptr);
1569 }
1570 }
1571 // Recover location if it was changed in the above loop.
1572 DI->EmitLocation(Builder,
1573 Loc: cast<CompoundStmt>(Val: blockDecl->getBody())->getRBracLoc());
1574 }
1575
1576 // And resume where we left off.
1577 if (resume == nullptr)
1578 Builder.ClearInsertionPoint();
1579 else
1580 Builder.SetInsertPoint(resume);
1581
1582 FinishFunction(EndLoc: cast<CompoundStmt>(Val: blockDecl->getBody())->getRBracLoc());
1583
1584 return fn;
1585}
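// Illustrative note (informal, not something this function emits verbatim):
// for a block literal appearing inside a function 'foo', the invoke function
// built above typically has internal linkage and a mangled name along the
// lines of "__foo_block_invoke"; its implicit first parameter is the
// ".block_descriptor" (the block literal pointer), followed by the block's
// own parameters.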
1586
1587static std::pair<BlockCaptureEntityKind, BlockFieldFlags>
1588computeCopyInfoForBlockCapture(const BlockDecl::Capture &CI, QualType T,
1589 const LangOptions &LangOpts) {
1590 if (CI.getCopyExpr()) {
1591 assert(!CI.isByRef());
1592 // don't bother computing flags
1593 return std::make_pair(x: BlockCaptureEntityKind::CXXRecord, y: BlockFieldFlags());
1594 }
1595 BlockFieldFlags Flags;
1596 if (CI.isEscapingByref()) {
1597 Flags = BLOCK_FIELD_IS_BYREF;
1598 if (T.isObjCGCWeak())
1599 Flags |= BLOCK_FIELD_IS_WEAK;
1600 return std::make_pair(x: BlockCaptureEntityKind::BlockObject, y&: Flags);
1601 }
1602
1603 Flags = BLOCK_FIELD_IS_OBJECT;
1604 bool isBlockPointer = T->isBlockPointerType();
1605 if (isBlockPointer)
1606 Flags = BLOCK_FIELD_IS_BLOCK;
1607
1608 switch (T.isNonTrivialToPrimitiveCopy()) {
1609 case QualType::PCK_Struct:
1610 return std::make_pair(x: BlockCaptureEntityKind::NonTrivialCStruct,
1611 y: BlockFieldFlags());
1612 case QualType::PCK_ARCWeak:
1613 // We need to register __weak direct captures with the runtime.
1614 return std::make_pair(x: BlockCaptureEntityKind::ARCWeak, y&: Flags);
1615 case QualType::PCK_ARCStrong:
1616 // We need to retain the copied value for __strong direct captures.
1617 // If it's a block pointer, we have to copy the block and assign that to
1618 // the destination pointer, so we might as well use _Block_object_assign.
1619 // Otherwise we can avoid that.
1620 return std::make_pair(x: !isBlockPointer ? BlockCaptureEntityKind::ARCStrong
1621 : BlockCaptureEntityKind::BlockObject,
1622 y&: Flags);
1623 case QualType::PCK_Trivial:
1624 case QualType::PCK_VolatileTrivial: {
1625 if (!T->isObjCRetainableType())
1626 // For all other types, the memcpy is fine.
1627 return std::make_pair(x: BlockCaptureEntityKind::None, y: BlockFieldFlags());
1628
1629 // Honor the inert __unsafe_unretained qualifier, which doesn't actually
1630 // make it into the type system.
1631 if (T->isObjCInertUnsafeUnretainedType())
1632 return std::make_pair(x: BlockCaptureEntityKind::None, y: BlockFieldFlags());
1633
1634 // Special rules for ARC captures:
1635 Qualifiers QS = T.getQualifiers();
1636
1637 // Non-ARC captures of retainable pointers are strong and
1638 // therefore require a call to _Block_object_assign.
1639 if (!QS.getObjCLifetime() && !LangOpts.ObjCAutoRefCount)
1640 return std::make_pair(x: BlockCaptureEntityKind::BlockObject, y&: Flags);
1641
1642 // Otherwise the memcpy is fine.
1643 return std::make_pair(x: BlockCaptureEntityKind::None, y: BlockFieldFlags());
1644 }
1645 }
1646 llvm_unreachable("after exhaustive PrimitiveCopyKind switch");
1647}
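// Informal illustration of the mapping above: a __weak object pointer capture
// maps to ARCWeak, a captured block pointer maps to BlockObject with
// BLOCK_FIELD_IS_BLOCK, a non-trivial C struct maps to NonTrivialCStruct, and
// a plain scalar such as 'int' maps to None (the runtime's memcpy suffices).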
1648
1649namespace {
1650/// Release a __block variable.
1651struct CallBlockRelease final : EHScopeStack::Cleanup {
1652 Address Addr;
1653 BlockFieldFlags FieldFlags;
1654 bool LoadBlockVarAddr, CanThrow;
1655
1656 CallBlockRelease(Address Addr, BlockFieldFlags Flags, bool LoadValue,
1657 bool CT)
1658 : Addr(Addr), FieldFlags(Flags), LoadBlockVarAddr(LoadValue),
1659 CanThrow(CT) {}
1660
1661 void Emit(CodeGenFunction &CGF, Flags flags) override {
1662 llvm::Value *BlockVarAddr;
1663 if (LoadBlockVarAddr) {
1664 BlockVarAddr = CGF.Builder.CreateLoad(Addr);
1665 } else {
1666 BlockVarAddr = Addr.emitRawPointer(CGF);
1667 }
1668
1669 CGF.BuildBlockRelease(DeclPtr: BlockVarAddr, flags: FieldFlags, CanThrow);
1670 }
1671};
1672} // end anonymous namespace
1673
1674/// Check if \p T is a C++ class that has a destructor that can throw.
1675bool CodeGenFunction::cxxDestructorCanThrow(QualType T) {
1676 if (const auto *RD = T->getAsCXXRecordDecl())
1677 if (const CXXDestructorDecl *DD = RD->getDestructor())
1678 return DD->getType()->castAs<FunctionProtoType>()->canThrow();
1679 return false;
1680}
1681
1682// Return a string that encodes the information about a capture.
1683static std::string getBlockCaptureStr(const CGBlockInfo::Capture &Cap,
1684 CaptureStrKind StrKind,
1685 CharUnits BlockAlignment,
1686 CodeGenModule &CGM) {
1687 std::string Str;
1688 ASTContext &Ctx = CGM.getContext();
1689 const BlockDecl::Capture &CI = *Cap.Cap;
1690 QualType CaptureTy = CI.getVariable()->getType();
1691
1692 BlockCaptureEntityKind Kind;
1693 BlockFieldFlags Flags;
1694
1695 // CaptureStrKind::Merged should be passed only when the operations and the
1696 // flags are the same for copy and dispose.
1697 assert((StrKind != CaptureStrKind::Merged ||
1698 (Cap.CopyKind == Cap.DisposeKind &&
1699 Cap.CopyFlags == Cap.DisposeFlags)) &&
1700 "different operations and flags");
1701
1702 if (StrKind == CaptureStrKind::DisposeHelper) {
1703 Kind = Cap.DisposeKind;
1704 Flags = Cap.DisposeFlags;
1705 } else {
1706 Kind = Cap.CopyKind;
1707 Flags = Cap.CopyFlags;
1708 }
1709
1710 switch (Kind) {
1711 case BlockCaptureEntityKind::CXXRecord: {
1712 Str += "c";
1713 SmallString<256> TyStr;
1714 llvm::raw_svector_ostream Out(TyStr);
1715 CGM.getCXXABI().getMangleContext().mangleCanonicalTypeName(T: CaptureTy, Out);
1716 Str += llvm::to_string(Value: TyStr.size()) + TyStr.c_str();
1717 break;
1718 }
1719 case BlockCaptureEntityKind::ARCWeak:
1720 Str += "w";
1721 break;
1722 case BlockCaptureEntityKind::ARCStrong:
1723 Str += "s";
1724 break;
1725 case BlockCaptureEntityKind::BlockObject: {
1726 const VarDecl *Var = CI.getVariable();
1727 unsigned F = Flags.getBitMask();
1728 if (F & BLOCK_FIELD_IS_BYREF) {
1729 Str += "r";
1730 if (F & BLOCK_FIELD_IS_WEAK)
1731 Str += "w";
1732 else {
1733 // If CaptureStrKind::Merged is passed, check both the copy expression
1734 // and the destructor.
1735 if (StrKind != CaptureStrKind::DisposeHelper) {
1736 if (Ctx.getBlockVarCopyInit(VD: Var).canThrow())
1737 Str += "c";
1738 }
1739 if (StrKind != CaptureStrKind::CopyHelper) {
1740 if (CodeGenFunction::cxxDestructorCanThrow(T: CaptureTy))
1741 Str += "d";
1742 }
1743 }
1744 } else {
1745 assert((F & BLOCK_FIELD_IS_OBJECT) && "unexpected flag value");
1746 if (F == BLOCK_FIELD_IS_BLOCK)
1747 Str += "b";
1748 else
1749 Str += "o";
1750 }
1751 break;
1752 }
1753 case BlockCaptureEntityKind::NonTrivialCStruct: {
1754 bool IsVolatile = CaptureTy.isVolatileQualified();
1755 CharUnits Alignment = BlockAlignment.alignmentAtOffset(offset: Cap.getOffset());
1756
1757 Str += "n";
1758 std::string FuncStr;
1759 if (StrKind == CaptureStrKind::DisposeHelper)
1760 FuncStr = CodeGenFunction::getNonTrivialDestructorStr(
1761 QT: CaptureTy, Alignment, IsVolatile, Ctx);
1762 else
1763 // If CaptureStrKind::Merged is passed, use the copy constructor string.
1764 // It has all the information that the destructor string has.
1765 FuncStr = CodeGenFunction::getNonTrivialCopyConstructorStr(
1766 QT: CaptureTy, Alignment, IsVolatile, Ctx);
1767 // The underscore is necessary here because non-trivial copy constructor
1768 // and destructor strings can start with a number.
1769 Str += llvm::to_string(Value: FuncStr.size()) + "_" + FuncStr;
1770 break;
1771 }
1772 case BlockCaptureEntityKind::None:
1773 break;
1774 }
1775
1776 return Str;
1777}
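// Informal legend for the encoding above: "s" marks a __strong object capture,
// "w" a __weak capture, "b" a captured block pointer, "o" another retainable
// object pointer, "r" an escaping __block capture (optionally followed by "w",
// or by "c"/"d" when its copy-init or destructor can throw), "c" plus a
// length-prefixed mangled type a C++ record, and "n" a non-trivial C struct.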
1778
1779static std::string getCopyDestroyHelperFuncName(
1780 const SmallVectorImpl<CGBlockInfo::Capture> &Captures,
1781 CharUnits BlockAlignment, CaptureStrKind StrKind, CodeGenModule &CGM) {
1782 assert((StrKind == CaptureStrKind::CopyHelper ||
1783 StrKind == CaptureStrKind::DisposeHelper) &&
1784 "unexpected CaptureStrKind");
1785 std::string Name = StrKind == CaptureStrKind::CopyHelper
1786 ? "__copy_helper_block_"
1787 : "__destroy_helper_block_";
1788 if (CGM.getLangOpts().Exceptions)
1789 Name += "e";
1790 if (CGM.getCodeGenOpts().ObjCAutoRefCountExceptions)
1791 Name += "a";
1792 Name += llvm::to_string(Value: BlockAlignment.getQuantity()) + "_";
1793
1794 for (auto &Cap : Captures) {
1795 if (Cap.isConstantOrTrivial())
1796 continue;
1797 Name += llvm::to_string(Value: Cap.getOffset().getQuantity());
1798 Name += getBlockCaptureStr(Cap, StrKind, BlockAlignment, CGM);
1799 }
1800
1801 return Name;
1802}
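// Example (a sketch under assumed conditions, not a guaranteed spelling): a
// block aligned to 8 bytes whose only non-trivial capture is a __strong object
// pointer at offset 32 gets a copy helper named roughly
// "__copy_helper_block_8_32s"; the "e"/"a" markers are inserted after the
// prefix when exceptions / ObjC ARC exceptions are enabled.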
1803
1804static void pushCaptureCleanup(BlockCaptureEntityKind CaptureKind,
1805 Address Field, QualType CaptureType,
1806 BlockFieldFlags Flags, bool ForCopyHelper,
1807 VarDecl *Var, CodeGenFunction &CGF) {
1808 bool EHOnly = ForCopyHelper;
1809
1810 switch (CaptureKind) {
1811 case BlockCaptureEntityKind::CXXRecord:
1812 case BlockCaptureEntityKind::ARCWeak:
1813 case BlockCaptureEntityKind::NonTrivialCStruct:
1814 case BlockCaptureEntityKind::ARCStrong: {
1815 if (CaptureType.isDestructedType() &&
1816 (!EHOnly || CGF.needsEHCleanup(kind: CaptureType.isDestructedType()))) {
1817 CodeGenFunction::Destroyer *Destroyer =
1818 CaptureKind == BlockCaptureEntityKind::ARCStrong
1819 ? CodeGenFunction::destroyARCStrongImprecise
1820 : CGF.getDestroyer(destructionKind: CaptureType.isDestructedType());
1821 CleanupKind Kind =
1822 EHOnly ? EHCleanup
1823 : CGF.getCleanupKind(kind: CaptureType.isDestructedType());
1824 CGF.pushDestroy(kind: Kind, addr: Field, type: CaptureType, destroyer: Destroyer, useEHCleanupForArray: Kind & EHCleanup);
1825 }
1826 break;
1827 }
1828 case BlockCaptureEntityKind::BlockObject: {
1829 if (!EHOnly || CGF.getLangOpts().Exceptions) {
1830 CleanupKind Kind = EHOnly ? EHCleanup : NormalAndEHCleanup;
1831 // Calls to _Block_object_dispose along the EH path in the copy helper
1832 // function don't throw as newly-copied __block variables always have a
1833 // reference count of 2.
1834 bool CanThrow =
1835 !ForCopyHelper && CGF.cxxDestructorCanThrow(T: CaptureType);
1836 CGF.enterByrefCleanup(Kind, Addr: Field, Flags, /*LoadBlockVarAddr*/ true,
1837 CanThrow);
1838 }
1839 break;
1840 }
1841 case BlockCaptureEntityKind::None:
1842 break;
1843 }
1844}
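// Note (informal): when ForCopyHelper is true, the cleanups pushed above are
// EH-only. They exist solely to destroy captures that were already copied if a
// later capture's copy throws; on the normal path the destination block keeps
// its copied captures.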
1845
1846static void setBlockHelperAttributesVisibility(bool CapturesNonExternalType,
1847 llvm::Function *Fn,
1848 const CGFunctionInfo &FI,
1849 CodeGenModule &CGM) {
1850 if (CapturesNonExternalType) {
1851 CGM.SetInternalFunctionAttributes(GD: GlobalDecl(), F: Fn, FI);
1852 } else {
1853 Fn->setVisibility(llvm::GlobalValue::HiddenVisibility);
1854 Fn->setUnnamedAddr(llvm::GlobalValue::UnnamedAddr::Global);
1855 CGM.SetLLVMFunctionAttributes(GD: GlobalDecl(), Info: FI, F: Fn, /*IsThunk=*/false);
1856 CGM.SetLLVMFunctionAttributesForDefinition(D: nullptr, F: Fn);
1857 }
1858}
1859/// Generate the copy-helper function for a block closure object:
1860/// static void block_copy_helper(block_t *dst, block_t *src);
1861/// The runtime will have previously initialized 'dst' by doing a
1862/// bit-copy of 'src'.
1863///
1864/// Note that this copies an entire block closure object to the heap;
1865/// it should not be confused with a 'byref copy helper', which moves
1866/// the contents of an individual __block variable to the heap.
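/// For example, if a block's only non-trivial capture is a __strong object
/// pointer, the helper's work reduces to retaining the field the runtime has
/// already bit-copied (or, at -O0, storeStrong-ing it into the destination).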
1867llvm::Constant *
1868CodeGenFunction::GenerateCopyHelperFunction(const CGBlockInfo &blockInfo) {
1869 std::string FuncName = getCopyDestroyHelperFuncName(
1870 Captures: blockInfo.SortedCaptures, BlockAlignment: blockInfo.BlockAlign,
1871 StrKind: CaptureStrKind::CopyHelper, CGM);
1872
1873 if (llvm::GlobalValue *Func = CGM.getModule().getNamedValue(Name: FuncName))
1874 return Func;
1875
1876 ASTContext &C = getContext();
1877
1878 QualType ReturnTy = C.VoidTy;
1879
1880 FunctionArgList args;
1881 ImplicitParamDecl DstDecl(C, C.VoidPtrTy, ImplicitParamKind::Other);
1882 args.push_back(&DstDecl);
1883 ImplicitParamDecl SrcDecl(C, C.VoidPtrTy, ImplicitParamKind::Other);
1884 args.push_back(&SrcDecl);
1885
1886 const CGFunctionInfo &FI =
1887 CGM.getTypes().arrangeBuiltinFunctionDeclaration(resultType: ReturnTy, args);
1888
1889  // FIXME: it would be nice if these were mergeable with functions that
1890  // have identical semantics.
1891 llvm::FunctionType *LTy = CGM.getTypes().GetFunctionType(Info: FI);
1892
1893 llvm::Function *Fn =
1894 llvm::Function::Create(Ty: LTy, Linkage: llvm::GlobalValue::LinkOnceODRLinkage,
1895 N: FuncName, M: &CGM.getModule());
1896 if (CGM.supportsCOMDAT())
1897 Fn->setComdat(CGM.getModule().getOrInsertComdat(Name: FuncName));
1898
1899 SmallVector<QualType, 2> ArgTys;
1900 ArgTys.push_back(Elt: C.VoidPtrTy);
1901 ArgTys.push_back(Elt: C.VoidPtrTy);
1902
1903 setBlockHelperAttributesVisibility(CapturesNonExternalType: blockInfo.CapturesNonExternalType, Fn, FI,
1904 CGM);
1905 StartFunction(GD: GlobalDecl(), RetTy: ReturnTy, Fn, FnInfo: FI, Args: args);
1906 auto AL = ApplyDebugLocation::CreateArtificial(CGF&: *this);
1907
1908 Address src = GetAddrOfLocalVar(&SrcDecl);
1909 src = Address(Builder.CreateLoad(Addr: src), blockInfo.StructureType,
1910 blockInfo.BlockAlign);
1911
1912 Address dst = GetAddrOfLocalVar(&DstDecl);
1913 dst = Address(Builder.CreateLoad(Addr: dst), blockInfo.StructureType,
1914 blockInfo.BlockAlign);
1915
1916 for (auto &capture : blockInfo.SortedCaptures) {
1917 if (capture.isConstantOrTrivial())
1918 continue;
1919
1920 const BlockDecl::Capture &CI = *capture.Cap;
1921 QualType captureType = CI.getVariable()->getType();
1922 BlockFieldFlags flags = capture.CopyFlags;
1923
1924 unsigned index = capture.getIndex();
1925 Address srcField = Builder.CreateStructGEP(Addr: src, Index: index);
1926 Address dstField = Builder.CreateStructGEP(Addr: dst, Index: index);
1927
1928 switch (capture.CopyKind) {
1929 case BlockCaptureEntityKind::CXXRecord:
1930 // If there's an explicit copy expression, we do that.
1931 assert(CI.getCopyExpr() && "copy expression for variable is missing");
1932 EmitSynthesizedCXXCopyCtor(Dest: dstField, Src: srcField, Exp: CI.getCopyExpr());
1933 break;
1934 case BlockCaptureEntityKind::ARCWeak:
1935 EmitARCCopyWeak(dst: dstField, src: srcField);
1936 break;
1937 case BlockCaptureEntityKind::NonTrivialCStruct: {
1938 // If this is a C struct that requires non-trivial copy construction,
1939 // emit a call to its copy constructor.
1940 QualType varType = CI.getVariable()->getType();
1941 callCStructCopyConstructor(Dst: MakeAddrLValue(Addr: dstField, T: varType),
1942 Src: MakeAddrLValue(Addr: srcField, T: varType));
1943 break;
1944 }
1945 case BlockCaptureEntityKind::ARCStrong: {
1946 llvm::Value *srcValue = Builder.CreateLoad(Addr: srcField, Name: "blockcopy.src");
1947 // At -O0, store null into the destination field (so that the
1948 // storeStrong doesn't over-release) and then call storeStrong.
1949 // This is a workaround to not having an initStrong call.
1950 if (CGM.getCodeGenOpts().OptimizationLevel == 0) {
1951 auto *ty = cast<llvm::PointerType>(Val: srcValue->getType());
1952 llvm::Value *null = llvm::ConstantPointerNull::get(T: ty);
1953 Builder.CreateStore(Val: null, Addr: dstField);
1954 EmitARCStoreStrongCall(addr: dstField, value: srcValue, resultIgnored: true);
1955
1956 // With optimization enabled, take advantage of the fact that
1957 // the blocks runtime guarantees a memcpy of the block data, and
1958 // just emit a retain of the src field.
1959 } else {
1960 EmitARCRetainNonBlock(value: srcValue);
1961
1962 // Unless EH cleanup is required, we don't need this anymore, so kill
1963 // it. It's not quite worth the annoyance to avoid creating it in the
1964 // first place.
1965 if (!needsEHCleanup(kind: captureType.isDestructedType()))
1966 if (auto *I =
1967 cast_or_null<llvm::Instruction>(dstField.getBasePointer()))
1968 I->eraseFromParent();
1969 }
1970 break;
1971 }
1972 case BlockCaptureEntityKind::BlockObject: {
1973 llvm::Value *srcValue = Builder.CreateLoad(Addr: srcField, Name: "blockcopy.src");
1974 llvm::Value *dstAddr = dstField.emitRawPointer(CGF&: *this);
1975 llvm::Value *args[] = {
1976 dstAddr, srcValue, llvm::ConstantInt::get(Ty: Int32Ty, V: flags.getBitMask())
1977 };
1978
1979 if (CI.isByRef() && C.getBlockVarCopyInit(VD: CI.getVariable()).canThrow())
1980 EmitRuntimeCallOrInvoke(callee: CGM.getBlockObjectAssign(), args);
1981 else
1982 EmitNounwindRuntimeCall(callee: CGM.getBlockObjectAssign(), args);
1983 break;
1984 }
1985 case BlockCaptureEntityKind::None:
1986 continue;
1987 }
1988
1989 // Ensure that we destroy the copied object if an exception is thrown later
1990 // in the helper function.
1991 pushCaptureCleanup(CaptureKind: capture.CopyKind, Field: dstField, CaptureType: captureType, Flags: flags,
1992 /*ForCopyHelper*/ true, Var: CI.getVariable(), CGF&: *this);
1993 }
1994
1995 FinishFunction();
1996
1997 return Fn;
1998}
1999
2000static BlockFieldFlags
2001getBlockFieldFlagsForObjCObjectPointer(const BlockDecl::Capture &CI,
2002 QualType T) {
2003 BlockFieldFlags Flags = BLOCK_FIELD_IS_OBJECT;
2004 if (T->isBlockPointerType())
2005 Flags = BLOCK_FIELD_IS_BLOCK;
2006 return Flags;
2007}
2008
2009static std::pair<BlockCaptureEntityKind, BlockFieldFlags>
2010computeDestroyInfoForBlockCapture(const BlockDecl::Capture &CI, QualType T,
2011 const LangOptions &LangOpts) {
2012 if (CI.isEscapingByref()) {
2013 BlockFieldFlags Flags = BLOCK_FIELD_IS_BYREF;
2014 if (T.isObjCGCWeak())
2015 Flags |= BLOCK_FIELD_IS_WEAK;
2016 return std::make_pair(x: BlockCaptureEntityKind::BlockObject, y&: Flags);
2017 }
2018
2019 switch (T.isDestructedType()) {
2020 case QualType::DK_cxx_destructor:
2021 return std::make_pair(x: BlockCaptureEntityKind::CXXRecord, y: BlockFieldFlags());
2022 case QualType::DK_objc_strong_lifetime:
2023 // Use objc_storeStrong for __strong direct captures; the
2024 // dynamic tools really like it when we do this.
2025 return std::make_pair(x: BlockCaptureEntityKind::ARCStrong,
2026 y: getBlockFieldFlagsForObjCObjectPointer(CI, T));
2027 case QualType::DK_objc_weak_lifetime:
2028 // Support __weak direct captures.
2029 return std::make_pair(x: BlockCaptureEntityKind::ARCWeak,
2030 y: getBlockFieldFlagsForObjCObjectPointer(CI, T));
2031 case QualType::DK_nontrivial_c_struct:
2032 return std::make_pair(x: BlockCaptureEntityKind::NonTrivialCStruct,
2033 y: BlockFieldFlags());
2034 case QualType::DK_none: {
2035 // Non-ARC captures are strong, and we need to use _Block_object_dispose.
2036 // But honor the inert __unsafe_unretained qualifier, which doesn't actually
2037 // make it into the type system.
2038 if (T->isObjCRetainableType() && !T.getQualifiers().hasObjCLifetime() &&
2039 !LangOpts.ObjCAutoRefCount && !T->isObjCInertUnsafeUnretainedType())
2040 return std::make_pair(x: BlockCaptureEntityKind::BlockObject,
2041 y: getBlockFieldFlagsForObjCObjectPointer(CI, T));
2042 // Otherwise, we have nothing to do.
2043 return std::make_pair(x: BlockCaptureEntityKind::None, y: BlockFieldFlags());
2044 }
2045 }
2046 llvm_unreachable("after exhaustive DestructionKind switch");
2047}
2048
2049/// Generate the destroy-helper function for a block closure object:
2050/// static void block_destroy_helper(block_t *theBlock);
2051///
2052/// Note that this destroys a heap-allocated block closure object;
2053/// it should not be confused with a 'byref destroy helper', which
2054/// destroys the heap-allocated contents of an individual __block
2055/// variable.
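/// For example, a __strong object capture is simply released here (with
/// imprecise lifetime), while an escaping __block capture is handed to
/// _Block_object_dispose through the byref cleanup machinery.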
2056llvm::Constant *
2057CodeGenFunction::GenerateDestroyHelperFunction(const CGBlockInfo &blockInfo) {
2058 std::string FuncName = getCopyDestroyHelperFuncName(
2059 Captures: blockInfo.SortedCaptures, BlockAlignment: blockInfo.BlockAlign,
2060 StrKind: CaptureStrKind::DisposeHelper, CGM);
2061
2062 if (llvm::GlobalValue *Func = CGM.getModule().getNamedValue(Name: FuncName))
2063 return Func;
2064
2065 ASTContext &C = getContext();
2066
2067 QualType ReturnTy = C.VoidTy;
2068
2069 FunctionArgList args;
2070 ImplicitParamDecl SrcDecl(C, C.VoidPtrTy, ImplicitParamKind::Other);
2071 args.push_back(&SrcDecl);
2072
2073 const CGFunctionInfo &FI =
2074 CGM.getTypes().arrangeBuiltinFunctionDeclaration(resultType: ReturnTy, args);
2075
2076  // FIXME: We'd like to make these mergeable by content, with
2077  // internal linkage.
2078 llvm::FunctionType *LTy = CGM.getTypes().GetFunctionType(Info: FI);
2079
2080 llvm::Function *Fn =
2081 llvm::Function::Create(Ty: LTy, Linkage: llvm::GlobalValue::LinkOnceODRLinkage,
2082 N: FuncName, M: &CGM.getModule());
2083 if (CGM.supportsCOMDAT())
2084 Fn->setComdat(CGM.getModule().getOrInsertComdat(Name: FuncName));
2085
2086 SmallVector<QualType, 1> ArgTys;
2087 ArgTys.push_back(Elt: C.VoidPtrTy);
2088
2089 setBlockHelperAttributesVisibility(CapturesNonExternalType: blockInfo.CapturesNonExternalType, Fn, FI,
2090 CGM);
2091 StartFunction(GD: GlobalDecl(), RetTy: ReturnTy, Fn, FnInfo: FI, Args: args);
2092 markAsIgnoreThreadCheckingAtRuntime(Fn);
2093
2094 auto AL = ApplyDebugLocation::CreateArtificial(CGF&: *this);
2095
2096 Address src = GetAddrOfLocalVar(&SrcDecl);
2097 src = Address(Builder.CreateLoad(Addr: src), blockInfo.StructureType,
2098 blockInfo.BlockAlign);
2099
2100 CodeGenFunction::RunCleanupsScope cleanups(*this);
2101
2102 for (auto &capture : blockInfo.SortedCaptures) {
2103 if (capture.isConstantOrTrivial())
2104 continue;
2105
2106 const BlockDecl::Capture &CI = *capture.Cap;
2107 BlockFieldFlags flags = capture.DisposeFlags;
2108
2109 Address srcField = Builder.CreateStructGEP(Addr: src, Index: capture.getIndex());
2110
2111 pushCaptureCleanup(capture.DisposeKind, srcField,
2112 CI.getVariable()->getType(), flags,
2113 /*ForCopyHelper*/ false, CI.getVariable(), *this);
2114 }
2115
2116 cleanups.ForceCleanup();
2117
2118 FinishFunction();
2119
2120 return Fn;
2121}
2122
2123namespace {
2124
2125/// Emits the copy/dispose helper functions for a __block object of id type.
2126class ObjectByrefHelpers final : public BlockByrefHelpers {
2127 BlockFieldFlags Flags;
2128
2129public:
2130 ObjectByrefHelpers(CharUnits alignment, BlockFieldFlags flags)
2131 : BlockByrefHelpers(alignment), Flags(flags) {}
2132
2133 void emitCopy(CodeGenFunction &CGF, Address destField,
2134 Address srcField) override {
2135 destField = destField.withElementType(ElemTy: CGF.Int8Ty);
2136
2137 srcField = srcField.withElementType(ElemTy: CGF.Int8PtrTy);
2138 llvm::Value *srcValue = CGF.Builder.CreateLoad(Addr: srcField);
2139
2140 unsigned flags = (Flags | BLOCK_BYREF_CALLER).getBitMask();
2141
2142 llvm::Value *flagsVal = llvm::ConstantInt::get(Ty: CGF.Int32Ty, V: flags);
2143 llvm::FunctionCallee fn = CGF.CGM.getBlockObjectAssign();
2144
2145 llvm::Value *args[] = {destField.emitRawPointer(CGF), srcValue, flagsVal};
2146 CGF.EmitNounwindRuntimeCall(callee: fn, args);
2147 }
2148
2149 void emitDispose(CodeGenFunction &CGF, Address field) override {
2150 field = field.withElementType(ElemTy: CGF.Int8PtrTy);
2151 llvm::Value *value = CGF.Builder.CreateLoad(Addr: field);
2152
2153 CGF.BuildBlockRelease(DeclPtr: value, flags: Flags | BLOCK_BYREF_CALLER, CanThrow: false);
2154 }
2155
2156 void profileImpl(llvm::FoldingSetNodeID &id) const override {
2157 id.AddInteger(I: Flags.getBitMask());
2158 }
2159};
2160
2161/// Emits the copy/dispose helpers for an ARC __block __weak variable.
2162class ARCWeakByrefHelpers final : public BlockByrefHelpers {
2163public:
2164 ARCWeakByrefHelpers(CharUnits alignment) : BlockByrefHelpers(alignment) {}
2165
2166 void emitCopy(CodeGenFunction &CGF, Address destField,
2167 Address srcField) override {
2168 CGF.EmitARCMoveWeak(dst: destField, src: srcField);
2169 }
2170
2171 void emitDispose(CodeGenFunction &CGF, Address field) override {
2172 CGF.EmitARCDestroyWeak(addr: field);
2173 }
2174
2175 void profileImpl(llvm::FoldingSetNodeID &id) const override {
2176 // 0 is distinguishable from all pointers and byref flags
2177 id.AddInteger(I: 0);
2178 }
2179};
2180
2181/// Emits the copy/dispose helpers for an ARC __block __strong variable
2182/// that's not of block-pointer type.
2183class ARCStrongByrefHelpers final : public BlockByrefHelpers {
2184public:
2185 ARCStrongByrefHelpers(CharUnits alignment) : BlockByrefHelpers(alignment) {}
2186
2187 void emitCopy(CodeGenFunction &CGF, Address destField,
2188 Address srcField) override {
2189 // Do a "move" by copying the value and then zeroing out the old
2190 // variable.
2191
2192 llvm::Value *value = CGF.Builder.CreateLoad(Addr: srcField);
2193
2194 llvm::Value *null =
2195 llvm::ConstantPointerNull::get(T: cast<llvm::PointerType>(Val: value->getType()));
2196
2197 if (CGF.CGM.getCodeGenOpts().OptimizationLevel == 0) {
2198 CGF.Builder.CreateStore(Val: null, Addr: destField);
2199 CGF.EmitARCStoreStrongCall(addr: destField, value, /*ignored*/ resultIgnored: true);
2200 CGF.EmitARCStoreStrongCall(addr: srcField, value: null, /*ignored*/ resultIgnored: true);
2201 return;
2202 }
2203 CGF.Builder.CreateStore(Val: value, Addr: destField);
2204 CGF.Builder.CreateStore(Val: null, Addr: srcField);
2205 }
2206
2207 void emitDispose(CodeGenFunction &CGF, Address field) override {
2208 CGF.EmitARCDestroyStrong(addr: field, precise: ARCImpreciseLifetime);
2209 }
2210
2211 void profileImpl(llvm::FoldingSetNodeID &id) const override {
2212 // 1 is distinguishable from all pointers and byref flags
2213 id.AddInteger(I: 1);
2214 }
2215};
2216
2217/// Emits the copy/dispose helpers for an ARC __block __strong
2218/// variable that's of block-pointer type.
2219class ARCStrongBlockByrefHelpers final : public BlockByrefHelpers {
2220public:
2221 ARCStrongBlockByrefHelpers(CharUnits alignment)
2222 : BlockByrefHelpers(alignment) {}
2223
2224 void emitCopy(CodeGenFunction &CGF, Address destField,
2225 Address srcField) override {
2226 // Do the copy with objc_retainBlock; that's all that
2227 // _Block_object_assign would do anyway, and we'd have to pass the
2228 // right arguments to make sure it doesn't get no-op'ed.
2229 llvm::Value *oldValue = CGF.Builder.CreateLoad(Addr: srcField);
2230 llvm::Value *copy = CGF.EmitARCRetainBlock(value: oldValue, /*mandatory*/ true);
2231 CGF.Builder.CreateStore(Val: copy, Addr: destField);
2232 }
2233
2234 void emitDispose(CodeGenFunction &CGF, Address field) override {
2235 CGF.EmitARCDestroyStrong(addr: field, precise: ARCImpreciseLifetime);
2236 }
2237
2238 void profileImpl(llvm::FoldingSetNodeID &id) const override {
2239 // 2 is distinguishable from all pointers and byref flags
2240 id.AddInteger(I: 2);
2241 }
2242};
2243
2244/// Emits the copy/dispose helpers for a __block variable with a
2245/// nontrivial copy constructor or destructor.
2246class CXXByrefHelpers final : public BlockByrefHelpers {
2247 QualType VarType;
2248 const Expr *CopyExpr;
2249
2250public:
2251 CXXByrefHelpers(CharUnits alignment, QualType type,
2252 const Expr *copyExpr)
2253 : BlockByrefHelpers(alignment), VarType(type), CopyExpr(copyExpr) {}
2254
2255 bool needsCopy() const override { return CopyExpr != nullptr; }
2256 void emitCopy(CodeGenFunction &CGF, Address destField,
2257 Address srcField) override {
2258 if (!CopyExpr) return;
2259 CGF.EmitSynthesizedCXXCopyCtor(Dest: destField, Src: srcField, Exp: CopyExpr);
2260 }
2261
2262 void emitDispose(CodeGenFunction &CGF, Address field) override {
2263 EHScopeStack::stable_iterator cleanupDepth = CGF.EHStack.stable_begin();
2264 CGF.PushDestructorCleanup(VarType, field);
2265 CGF.PopCleanupBlocks(OldCleanupStackSize: cleanupDepth);
2266 }
2267
2268 void profileImpl(llvm::FoldingSetNodeID &id) const override {
2269 id.AddPointer(VarType.getCanonicalType().getAsOpaquePtr());
2270 }
2271};
2272
2273/// Emits the copy/dispose helpers for a __block variable that is a non-trivial
2274/// C struct.
2275class NonTrivialCStructByrefHelpers final : public BlockByrefHelpers {
2276 QualType VarType;
2277
2278public:
2279 NonTrivialCStructByrefHelpers(CharUnits alignment, QualType type)
2280 : BlockByrefHelpers(alignment), VarType(type) {}
2281
2282 void emitCopy(CodeGenFunction &CGF, Address destField,
2283 Address srcField) override {
2284 CGF.callCStructMoveConstructor(CGF.MakeAddrLValue(destField, VarType),
2285 CGF.MakeAddrLValue(srcField, VarType));
2286 }
2287
2288 bool needsDispose() const override {
2289 return VarType.isDestructedType();
2290 }
2291
2292 void emitDispose(CodeGenFunction &CGF, Address field) override {
2293 EHScopeStack::stable_iterator cleanupDepth = CGF.EHStack.stable_begin();
2294 CGF.pushDestroy(VarType.isDestructedType(), field, VarType);
2295 CGF.PopCleanupBlocks(OldCleanupStackSize: cleanupDepth);
2296 }
2297
2298 void profileImpl(llvm::FoldingSetNodeID &id) const override {
2299 id.AddPointer(VarType.getCanonicalType().getAsOpaquePtr());
2300 }
2301};
2302} // end anonymous namespace
2303
2304static llvm::Constant *
2305generateByrefCopyHelper(CodeGenFunction &CGF, const BlockByrefInfo &byrefInfo,
2306 BlockByrefHelpers &generator) {
2307 ASTContext &Context = CGF.getContext();
2308
2309 QualType ReturnTy = Context.VoidTy;
2310
2311 FunctionArgList args;
2312 ImplicitParamDecl Dst(Context, Context.VoidPtrTy, ImplicitParamKind::Other);
2313 args.push_back(&Dst);
2314
2315 ImplicitParamDecl Src(Context, Context.VoidPtrTy, ImplicitParamKind::Other);
2316 args.push_back(&Src);
2317
2318 const CGFunctionInfo &FI =
2319 CGF.CGM.getTypes().arrangeBuiltinFunctionDeclaration(resultType: ReturnTy, args);
2320
2321 llvm::FunctionType *LTy = CGF.CGM.getTypes().GetFunctionType(Info: FI);
2322
2323  // FIXME: We'd like to make these mergeable by content, with
2324  // internal linkage.
2325 llvm::Function *Fn =
2326 llvm::Function::Create(Ty: LTy, Linkage: llvm::GlobalValue::InternalLinkage,
2327 N: "__Block_byref_object_copy_", M: &CGF.CGM.getModule());
2328
2329 SmallVector<QualType, 2> ArgTys;
2330 ArgTys.push_back(Elt: Context.VoidPtrTy);
2331 ArgTys.push_back(Elt: Context.VoidPtrTy);
2332
2333 CGF.CGM.SetInternalFunctionAttributes(GD: GlobalDecl(), F: Fn, FI);
2334
2335 CGF.StartFunction(GD: GlobalDecl(), RetTy: ReturnTy, Fn, FnInfo: FI, Args: args);
2336 // Create a scope with an artificial location for the body of this function.
2337 auto AL = ApplyDebugLocation::CreateArtificial(CGF);
2338
2339 if (generator.needsCopy()) {
2340 // dst->x
2341 Address destField = CGF.GetAddrOfLocalVar(&Dst);
2342 destField = Address(CGF.Builder.CreateLoad(Addr: destField), byrefInfo.Type,
2343 byrefInfo.ByrefAlignment);
2344 destField =
2345 CGF.emitBlockByrefAddress(baseAddr: destField, info: byrefInfo, followForward: false, name: "dest-object");
2346
2347 // src->x
2348 Address srcField = CGF.GetAddrOfLocalVar(&Src);
2349 srcField = Address(CGF.Builder.CreateLoad(Addr: srcField), byrefInfo.Type,
2350 byrefInfo.ByrefAlignment);
2351 srcField =
2352 CGF.emitBlockByrefAddress(baseAddr: srcField, info: byrefInfo, followForward: false, name: "src-object");
2353
2354 generator.emitCopy(CGF, dest: destField, src: srcField);
2355 }
2356
2357 CGF.FinishFunction();
2358
2359 return Fn;
2360}
2361
2362/// Build the copy helper for a __block variable.
2363static llvm::Constant *buildByrefCopyHelper(CodeGenModule &CGM,
2364 const BlockByrefInfo &byrefInfo,
2365 BlockByrefHelpers &generator) {
2366 CodeGenFunction CGF(CGM);
2367 return generateByrefCopyHelper(CGF, byrefInfo, generator);
2368}
2369
2370/// Generate code for a __block variable's dispose helper.
2371static llvm::Constant *
2372generateByrefDisposeHelper(CodeGenFunction &CGF,
2373 const BlockByrefInfo &byrefInfo,
2374 BlockByrefHelpers &generator) {
2375 ASTContext &Context = CGF.getContext();
2376 QualType R = Context.VoidTy;
2377
2378 FunctionArgList args;
2379 ImplicitParamDecl Src(CGF.getContext(), Context.VoidPtrTy,
2380 ImplicitParamKind::Other);
2381 args.push_back(&Src);
2382
2383 const CGFunctionInfo &FI =
2384 CGF.CGM.getTypes().arrangeBuiltinFunctionDeclaration(resultType: R, args);
2385
2386 llvm::FunctionType *LTy = CGF.CGM.getTypes().GetFunctionType(Info: FI);
2387
2388  // FIXME: We'd like to make these mergeable by content, with
2389  // internal linkage.
2390 llvm::Function *Fn =
2391 llvm::Function::Create(Ty: LTy, Linkage: llvm::GlobalValue::InternalLinkage,
2392 N: "__Block_byref_object_dispose_",
2393 M: &CGF.CGM.getModule());
2394
2395 SmallVector<QualType, 1> ArgTys;
2396 ArgTys.push_back(Elt: Context.VoidPtrTy);
2397
2398 CGF.CGM.SetInternalFunctionAttributes(GD: GlobalDecl(), F: Fn, FI);
2399
2400 CGF.StartFunction(GD: GlobalDecl(), RetTy: R, Fn, FnInfo: FI, Args: args);
2401 // Create a scope with an artificial location for the body of this function.
2402 auto AL = ApplyDebugLocation::CreateArtificial(CGF);
2403
2404 if (generator.needsDispose()) {
2405 Address addr = CGF.GetAddrOfLocalVar(&Src);
2406 addr = Address(CGF.Builder.CreateLoad(Addr: addr), byrefInfo.Type,
2407 byrefInfo.ByrefAlignment);
2408 addr = CGF.emitBlockByrefAddress(baseAddr: addr, info: byrefInfo, followForward: false, name: "object");
2409
2410 generator.emitDispose(CGF, field: addr);
2411 }
2412
2413 CGF.FinishFunction();
2414
2415 return Fn;
2416}
2417
2418/// Build the dispose helper for a __block variable.
2419static llvm::Constant *buildByrefDisposeHelper(CodeGenModule &CGM,
2420 const BlockByrefInfo &byrefInfo,
2421 BlockByrefHelpers &generator) {
2422 CodeGenFunction CGF(CGM);
2423 return generateByrefDisposeHelper(CGF, byrefInfo, generator);
2424}
2425
2426/// Lazily build the copy and dispose helpers for a __block variable
2427/// with the given information.
2428template <class T>
2429static T *buildByrefHelpers(CodeGenModule &CGM, const BlockByrefInfo &byrefInfo,
2430 T &&generator) {
2431 llvm::FoldingSetNodeID id;
2432 generator.Profile(id);
2433
2434 void *insertPos;
2435 BlockByrefHelpers *node
2436 = CGM.ByrefHelpersCache.FindNodeOrInsertPos(ID: id, InsertPos&: insertPos);
2437 if (node) return static_cast<T*>(node);
2438
2439 generator.CopyHelper = buildByrefCopyHelper(CGM, byrefInfo, generator);
2440 generator.DisposeHelper = buildByrefDisposeHelper(CGM, byrefInfo, generator);
2441
2442 T *copy = new (CGM.getContext()) T(std::forward<T>(generator));
2443 CGM.ByrefHelpersCache.InsertNode(copy, insertPos);
2444 return copy;
2445}
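// Illustrative note: because the cache key is the FoldingSetNodeID built by
// Profile() (the value alignment plus the generator-specific data), two
// __block variables with the same value alignment and the same kind of
// helpers, e.g. two ARC __strong object pointers, end up sharing one cached
// copy/dispose helper pair.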
2446
2447/// Build the copy and dispose helpers for the given __block variable
2448/// emission. Places the helpers in the global cache. Returns null
2449/// if no helpers are required.
2450BlockByrefHelpers *
2451CodeGenFunction::buildByrefHelpers(llvm::StructType &byrefType,
2452 const AutoVarEmission &emission) {
2453 const VarDecl &var = *emission.Variable;
2454 assert(var.isEscapingByref() &&
2455 "only escaping __block variables need byref helpers");
2456
2457 QualType type = var.getType();
2458
2459 auto &byrefInfo = getBlockByrefInfo(var: &var);
2460
2461 // The alignment we care about for the purposes of uniquing byref
2462 // helpers is the alignment of the actual byref value field.
2463 CharUnits valueAlignment =
2464 byrefInfo.ByrefAlignment.alignmentAtOffset(offset: byrefInfo.FieldOffset);
2465
2466 if (const CXXRecordDecl *record = type->getAsCXXRecordDecl()) {
2467 const Expr *copyExpr =
2468 CGM.getContext().getBlockVarCopyInit(VD: &var).getCopyExpr();
2469 if (!copyExpr && record->hasTrivialDestructor()) return nullptr;
2470
2471 return ::buildByrefHelpers(
2472 CGM, byrefInfo, generator: CXXByrefHelpers(valueAlignment, type, copyExpr));
2473 }
2474
2475  // If type is a non-trivial C struct type that is non-trivial to
2476  // destructively move or destroy, build the copy and dispose helpers.
2477 if (type.isNonTrivialToPrimitiveDestructiveMove() == QualType::PCK_Struct ||
2478 type.isDestructedType() == QualType::DK_nontrivial_c_struct)
2479 return ::buildByrefHelpers(
2480 CGM, byrefInfo, generator: NonTrivialCStructByrefHelpers(valueAlignment, type));
2481
2482  // Otherwise, if we don't have a retainable type, there's nothing to do;
2483  // the runtime's default bitwise copy of the byref structure suffices.
2484 if (!type->isObjCRetainableType()) return nullptr;
2485
2486 Qualifiers qs = type.getQualifiers();
2487
2488 // If we have lifetime, that dominates.
2489 if (Qualifiers::ObjCLifetime lifetime = qs.getObjCLifetime()) {
2490 switch (lifetime) {
2491 case Qualifiers::OCL_None: llvm_unreachable("impossible");
2492
2493 // These are just bits as far as the runtime is concerned.
2494 case Qualifiers::OCL_ExplicitNone:
2495 case Qualifiers::OCL_Autoreleasing:
2496 return nullptr;
2497
2498 // Tell the runtime that this is ARC __weak, called by the
2499 // byref routines.
2500 case Qualifiers::OCL_Weak:
2501 return ::buildByrefHelpers(CGM, byrefInfo,
2502 generator: ARCWeakByrefHelpers(valueAlignment));
2503
2504 // ARC __strong __block variables need to be retained.
2505 case Qualifiers::OCL_Strong:
2506 // Block pointers need to be copied, and there's no direct
2507 // transfer possible.
2508 if (type->isBlockPointerType()) {
2509 return ::buildByrefHelpers(CGM, byrefInfo,
2510 generator: ARCStrongBlockByrefHelpers(valueAlignment));
2511
2512 // Otherwise, we transfer ownership of the retain from the stack
2513 // to the heap.
2514 } else {
2515 return ::buildByrefHelpers(CGM, byrefInfo,
2516 generator: ARCStrongByrefHelpers(valueAlignment));
2517 }
2518 }
2519 llvm_unreachable("fell out of lifetime switch!");
2520 }
2521
2522 BlockFieldFlags flags;
2523 if (type->isBlockPointerType()) {
2524 flags |= BLOCK_FIELD_IS_BLOCK;
2525 } else if (CGM.getContext().isObjCNSObjectType(Ty: type) ||
2526 type->isObjCObjectPointerType()) {
2527 flags |= BLOCK_FIELD_IS_OBJECT;
2528 } else {
2529 return nullptr;
2530 }
2531
2532 if (type.isObjCGCWeak())
2533 flags |= BLOCK_FIELD_IS_WEAK;
2534
2535 return ::buildByrefHelpers(CGM, byrefInfo,
2536 generator: ObjectByrefHelpers(valueAlignment, flags));
2537}
2538
2539Address CodeGenFunction::emitBlockByrefAddress(Address baseAddr,
2540 const VarDecl *var,
2541 bool followForward) {
2542 auto &info = getBlockByrefInfo(var);
2543 return emitBlockByrefAddress(baseAddr, info, followForward, var->getName());
2544}
2545
2546Address CodeGenFunction::emitBlockByrefAddress(Address baseAddr,
2547 const BlockByrefInfo &info,
2548 bool followForward,
2549 const llvm::Twine &name) {
2550 // Chase the forwarding address if requested.
2551 if (followForward) {
2552 Address forwardingAddr = Builder.CreateStructGEP(Addr: baseAddr, Index: 1, Name: "forwarding");
2553 baseAddr = Address(Builder.CreateLoad(Addr: forwardingAddr), info.Type,
2554 info.ByrefAlignment);
2555 }
2556
2557 return Builder.CreateStructGEP(Addr: baseAddr, Index: info.FieldIndex, Name: name);
2558}
2559
2560/// BuildByrefInfo - This routine changes a __block variable declared as T x
2561/// into:
2562///
2563/// struct {
2564/// void *__isa;
2565/// void *__forwarding;
2566/// int32_t __flags;
2567/// int32_t __size;
2568/// void *__copy_helper; // only if needed
2569/// void *__destroy_helper; // only if needed
2570/// void *__byref_variable_layout;// only if needed
2571/// char padding[X]; // only if needed
2572/// T x;
2573/// } x
2574///
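/// For illustration (assuming 8-byte pointers and a variable that needs none
/// of the optional fields), '__block double d;' becomes roughly:
///
///   struct {
///     void *__isa;         // offset 0
///     void *__forwarding;  // offset 8
///     int32_t __flags;     // offset 16
///     int32_t __size;      // offset 20
///     double d;            // offset 24
///   } d;                   // 32 bytes total
///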
2575const BlockByrefInfo &CodeGenFunction::getBlockByrefInfo(const VarDecl *D) {
2576 auto it = BlockByrefInfos.find(D);
2577 if (it != BlockByrefInfos.end())
2578 return it->second;
2579
2580 llvm::StructType *byrefType =
2581 llvm::StructType::create(getLLVMContext(),
2582 "struct.__block_byref_" + D->getNameAsString());
2583
2584 QualType Ty = D->getType();
2585
2586 CharUnits size;
2587 SmallVector<llvm::Type *, 8> types;
2588
2589 // void *__isa;
2590 types.push_back(Elt: VoidPtrTy);
2591 size += getPointerSize();
2592
2593 // void *__forwarding;
2594 types.push_back(Elt: VoidPtrTy);
2595 size += getPointerSize();
2596
2597 // int32_t __flags;
2598 types.push_back(Elt: Int32Ty);
2599 size += CharUnits::fromQuantity(Quantity: 4);
2600
2601 // int32_t __size;
2602 types.push_back(Elt: Int32Ty);
2603 size += CharUnits::fromQuantity(Quantity: 4);
2604
2605 // Note that this must match *exactly* the logic in buildByrefHelpers.
2606 bool hasCopyAndDispose = getContext().BlockRequiresCopying(Ty, D);
2607 if (hasCopyAndDispose) {
2608 /// void *__copy_helper;
2609 types.push_back(Elt: VoidPtrTy);
2610 size += getPointerSize();
2611
2612 /// void *__destroy_helper;
2613 types.push_back(Elt: VoidPtrTy);
2614 size += getPointerSize();
2615 }
2616
2617 bool HasByrefExtendedLayout = false;
2618 Qualifiers::ObjCLifetime Lifetime = Qualifiers::OCL_None;
2619 if (getContext().getByrefLifetime(Ty, Lifetime, HasByrefExtendedLayout) &&
2620 HasByrefExtendedLayout) {
2621 /// void *__byref_variable_layout;
2622 types.push_back(Elt: VoidPtrTy);
2623 size += CharUnits::fromQuantity(Quantity: PointerSizeInBytes);
2624 }
2625
2626 // T x;
2627 llvm::Type *varTy = ConvertTypeForMem(T: Ty);
2628
2629 bool packed = false;
2630 CharUnits varAlign = getContext().getDeclAlign(D);
2631 CharUnits varOffset = size.alignTo(Align: varAlign);
2632
2633 // We may have to insert padding.
2634 if (varOffset != size) {
2635 llvm::Type *paddingTy =
2636 llvm::ArrayType::get(ElementType: Int8Ty, NumElements: (varOffset - size).getQuantity());
2637
2638 types.push_back(Elt: paddingTy);
2639 size = varOffset;
2640
2641 // Conversely, we might have to prevent LLVM from inserting padding.
2642 } else if (CGM.getDataLayout().getABITypeAlign(Ty: varTy) >
2643 uint64_t(varAlign.getQuantity())) {
2644 packed = true;
2645 }
2646 types.push_back(Elt: varTy);
2647
2648 byrefType->setBody(Elements: types, isPacked: packed);
2649
2650 BlockByrefInfo info;
2651 info.Type = byrefType;
2652 info.FieldIndex = types.size() - 1;
2653 info.FieldOffset = varOffset;
2654 info.ByrefAlignment = std::max(varAlign, getPointerAlign());
2655
2656 auto pair = BlockByrefInfos.insert({D, info});
2657 assert(pair.second && "info was inserted recursively?");
2658 return pair.first->second;
2659}
2660
2661/// Initialize the structural components of a __block variable, i.e.
2662/// everything but the actual object.
2663void CodeGenFunction::emitByrefStructureInit(const AutoVarEmission &emission) {
2664 // Find the address of the local.
2665 Address addr = emission.Addr;
2666
2667 // That's an alloca of the byref structure type.
2668 llvm::StructType *byrefType = cast<llvm::StructType>(Val: addr.getElementType());
2669
2670 unsigned nextHeaderIndex = 0;
2671 CharUnits nextHeaderOffset;
2672 auto storeHeaderField = [&](llvm::Value *value, CharUnits fieldSize,
2673 const Twine &name) {
2674 auto fieldAddr = Builder.CreateStructGEP(Addr: addr, Index: nextHeaderIndex, Name: name);
2675 Builder.CreateStore(Val: value, Addr: fieldAddr);
2676
2677 nextHeaderIndex++;
2678 nextHeaderOffset += fieldSize;
2679 };
2680
2681 // Build the byref helpers if necessary. This is null if we don't need any.
2682 BlockByrefHelpers *helpers = buildByrefHelpers(byrefType&: *byrefType, emission);
2683
2684 const VarDecl &D = *emission.Variable;
2685 QualType type = D.getType();
2686
2687 bool HasByrefExtendedLayout = false;
2688 Qualifiers::ObjCLifetime ByrefLifetime = Qualifiers::OCL_None;
2689 bool ByRefHasLifetime =
2690 getContext().getByrefLifetime(Ty: type, Lifetime&: ByrefLifetime, HasByrefExtendedLayout);
2691
2692 llvm::Value *V;
2693
2694 // Initialize the 'isa', which is just 0 or 1.
2695 int isa = 0;
2696 if (type.isObjCGCWeak())
2697 isa = 1;
2698 V = Builder.CreateIntToPtr(V: Builder.getInt32(C: isa), DestTy: Int8PtrTy, Name: "isa");
2699 storeHeaderField(V, getPointerSize(), "byref.isa");
2700
2701 // Store the address of the variable into its own forwarding pointer.
2702 storeHeaderField(addr.emitRawPointer(CGF&: *this), getPointerSize(),
2703 "byref.forwarding");
2704
2705 // Blocks ABI:
2706 // c) the flags field is set to either 0 if no helper functions are
2707 // needed or BLOCK_BYREF_HAS_COPY_DISPOSE if they are,
2708 BlockFlags flags;
2709 if (helpers) flags |= BLOCK_BYREF_HAS_COPY_DISPOSE;
2710 if (ByRefHasLifetime) {
2711 if (HasByrefExtendedLayout) flags |= BLOCK_BYREF_LAYOUT_EXTENDED;
2712 else switch (ByrefLifetime) {
2713 case Qualifiers::OCL_Strong:
2714 flags |= BLOCK_BYREF_LAYOUT_STRONG;
2715 break;
2716 case Qualifiers::OCL_Weak:
2717 flags |= BLOCK_BYREF_LAYOUT_WEAK;
2718 break;
2719 case Qualifiers::OCL_ExplicitNone:
2720 flags |= BLOCK_BYREF_LAYOUT_UNRETAINED;
2721 break;
2722 case Qualifiers::OCL_None:
2723 if (!type->isObjCObjectPointerType() && !type->isBlockPointerType())
2724 flags |= BLOCK_BYREF_LAYOUT_NON_OBJECT;
2725 break;
2726 default:
2727 break;
2728 }
2729 if (CGM.getLangOpts().ObjCGCBitmapPrint) {
2730 printf(format: "\n Inline flag for BYREF variable layout (%d):", flags.getBitMask());
2731 if (flags & BLOCK_BYREF_HAS_COPY_DISPOSE)
2732 printf(format: " BLOCK_BYREF_HAS_COPY_DISPOSE");
2733 if (flags & BLOCK_BYREF_LAYOUT_MASK) {
2734 BlockFlags ThisFlag(flags.getBitMask() & BLOCK_BYREF_LAYOUT_MASK);
2735 if (ThisFlag == BLOCK_BYREF_LAYOUT_EXTENDED)
2736 printf(format: " BLOCK_BYREF_LAYOUT_EXTENDED");
2737 if (ThisFlag == BLOCK_BYREF_LAYOUT_STRONG)
2738 printf(format: " BLOCK_BYREF_LAYOUT_STRONG");
2739 if (ThisFlag == BLOCK_BYREF_LAYOUT_WEAK)
2740 printf(format: " BLOCK_BYREF_LAYOUT_WEAK");
2741 if (ThisFlag == BLOCK_BYREF_LAYOUT_UNRETAINED)
2742 printf(format: " BLOCK_BYREF_LAYOUT_UNRETAINED");
2743 if (ThisFlag == BLOCK_BYREF_LAYOUT_NON_OBJECT)
2744 printf(format: " BLOCK_BYREF_LAYOUT_NON_OBJECT");
2745 }
2746 printf(format: "\n");
2747 }
2748 }
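  // (Illustration: under ARC, a __strong __block object pointer therefore ends
  //  up with BLOCK_BYREF_HAS_COPY_DISPOSE | BLOCK_BYREF_LAYOUT_STRONG here.)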
2749 storeHeaderField(llvm::ConstantInt::get(Ty: IntTy, V: flags.getBitMask()),
2750 getIntSize(), "byref.flags");
2751
2752 CharUnits byrefSize = CGM.GetTargetTypeStoreSize(Ty: byrefType);
2753 V = llvm::ConstantInt::get(Ty: IntTy, V: byrefSize.getQuantity());
2754 storeHeaderField(V, getIntSize(), "byref.size");
2755
2756 if (helpers) {
2757 storeHeaderField(helpers->CopyHelper, getPointerSize(),
2758 "byref.copyHelper");
2759 storeHeaderField(helpers->DisposeHelper, getPointerSize(),
2760 "byref.disposeHelper");
2761 }
2762
2763 if (ByRefHasLifetime && HasByrefExtendedLayout) {
2764 auto layoutInfo = CGM.getObjCRuntime().BuildByrefLayout(CGM, T: type);
2765 storeHeaderField(layoutInfo, getPointerSize(), "byref.layout");
2766 }
2767}
2768
2769void CodeGenFunction::BuildBlockRelease(llvm::Value *V, BlockFieldFlags flags,
2770 bool CanThrow) {
2771 llvm::FunctionCallee F = CGM.getBlockObjectDispose();
2772 llvm::Value *args[] = {V,
2773 llvm::ConstantInt::get(Ty: Int32Ty, V: flags.getBitMask())};
2774
2775 if (CanThrow)
2776 EmitRuntimeCallOrInvoke(callee: F, args);
2777 else
2778 EmitNounwindRuntimeCall(callee: F, args);
2779}
2780
2781void CodeGenFunction::enterByrefCleanup(CleanupKind Kind, Address Addr,
2782 BlockFieldFlags Flags,
2783 bool LoadBlockVarAddr, bool CanThrow) {
2784 EHStack.pushCleanup<CallBlockRelease>(Kind, A: Addr, A: Flags, A: LoadBlockVarAddr,
2785 A: CanThrow);
2786}
2787
2788/// Adjust the declaration of something from the blocks API.
2789static void configureBlocksRuntimeObject(CodeGenModule &CGM,
2790 llvm::Constant *C) {
2791 auto *GV = cast<llvm::GlobalValue>(Val: C->stripPointerCasts());
2792
2793 if (CGM.getTarget().getTriple().isOSBinFormatCOFF()) {
2794 const IdentifierInfo &II = CGM.getContext().Idents.get(Name: C->getName());
2795 TranslationUnitDecl *TUDecl = CGM.getContext().getTranslationUnitDecl();
2796 DeclContext *DC = TranslationUnitDecl::castToDeclContext(D: TUDecl);
2797
2798 assert((isa<llvm::Function>(C->stripPointerCasts()) ||
2799 isa<llvm::GlobalVariable>(C->stripPointerCasts())) &&
2800 "expected Function or GlobalVariable");
2801
2802 const NamedDecl *ND = nullptr;
2803 for (const auto *Result : DC->lookup(Name: &II))
2804 if ((ND = dyn_cast<FunctionDecl>(Val: Result)) ||
2805 (ND = dyn_cast<VarDecl>(Val: Result)))
2806 break;
2807
2808 // TODO: support static blocks runtime
2809 if (GV->isDeclaration() && (!ND || !ND->hasAttr<DLLExportAttr>())) {
2810 GV->setDLLStorageClass(llvm::GlobalValue::DLLImportStorageClass);
2811 GV->setLinkage(llvm::GlobalValue::ExternalLinkage);
2812 } else {
2813 GV->setDLLStorageClass(llvm::GlobalValue::DLLExportStorageClass);
2814 GV->setLinkage(llvm::GlobalValue::ExternalLinkage);
2815 }
2816 }
2817
2818 if (CGM.getLangOpts().BlocksRuntimeOptional && GV->isDeclaration() &&
2819 GV->hasExternalLinkage())
2820 GV->setLinkage(llvm::GlobalValue::ExternalWeakLinkage);
2821
2822 CGM.setDSOLocal(GV);
2823}
2824
2825llvm::FunctionCallee CodeGenModule::getBlockObjectDispose() {
2826 if (BlockObjectDispose)
2827 return BlockObjectDispose;
2828
2829 llvm::Type *args[] = { Int8PtrTy, Int32Ty };
2830 llvm::FunctionType *fty
2831 = llvm::FunctionType::get(Result: VoidTy, Params: args, isVarArg: false);
2832 BlockObjectDispose = CreateRuntimeFunction(Ty: fty, Name: "_Block_object_dispose");
2833 configureBlocksRuntimeObject(
2834 CGM&: *this, C: cast<llvm::Constant>(Val: BlockObjectDispose.getCallee()));
2835 return BlockObjectDispose;
2836}
2837
2838llvm::FunctionCallee CodeGenModule::getBlockObjectAssign() {
2839 if (BlockObjectAssign)
2840 return BlockObjectAssign;
2841
2842 llvm::Type *args[] = { Int8PtrTy, Int8PtrTy, Int32Ty };
2843 llvm::FunctionType *fty
2844 = llvm::FunctionType::get(Result: VoidTy, Params: args, isVarArg: false);
2845 BlockObjectAssign = CreateRuntimeFunction(Ty: fty, Name: "_Block_object_assign");
2846 configureBlocksRuntimeObject(
2847 CGM&: *this, C: cast<llvm::Constant>(Val: BlockObjectAssign.getCallee()));
2848 return BlockObjectAssign;
2849}
2850
2851llvm::Constant *CodeGenModule::getNSConcreteGlobalBlock() {
2852 if (NSConcreteGlobalBlock)
2853 return NSConcreteGlobalBlock;
2854
2855 NSConcreteGlobalBlock = GetOrCreateLLVMGlobal(
2856 MangledName: "_NSConcreteGlobalBlock", Ty: Int8PtrTy, AddrSpace: LangAS::Default, D: nullptr);
2857 configureBlocksRuntimeObject(CGM&: *this, C: NSConcreteGlobalBlock);
2858 return NSConcreteGlobalBlock;
2859}
2860
2861llvm::Constant *CodeGenModule::getNSConcreteStackBlock() {
2862 if (NSConcreteStackBlock)
2863 return NSConcreteStackBlock;
2864
2865 NSConcreteStackBlock = GetOrCreateLLVMGlobal(
2866 MangledName: "_NSConcreteStackBlock", Ty: Int8PtrTy, AddrSpace: LangAS::Default, D: nullptr);
2867 configureBlocksRuntimeObject(CGM&: *this, C: NSConcreteStackBlock);
2868 return NSConcreteStackBlock;
2869}
2870
