1 | /* |
2 | * Copyright (C) 2008 Apple Inc. All rights reserved. |
3 | * |
4 | * Redistribution and use in source and binary forms, with or without |
5 | * modification, are permitted provided that the following conditions |
6 | * are met: |
7 | * 1. Redistributions of source code must retain the above copyright |
8 | * notice, this list of conditions and the following disclaimer. |
9 | * 2. Redistributions in binary form must reproduce the above copyright |
10 | * notice, this list of conditions and the following disclaimer in the |
11 | * documentation and/or other materials provided with the distribution. |
12 | * |
13 | * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY |
14 | * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE |
15 | * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR |
16 | * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR |
17 | * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, |
18 | * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, |
19 | * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR |
20 | * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY |
21 | * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT |
22 | * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE |
23 | * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
24 | */ |
25 | |
26 | #ifndef AbstractMacroAssembler_h |
27 | #define AbstractMacroAssembler_h |
28 | |
29 | #include <wtf/Platform.h> |
30 | |
31 | #include <MacroAssemblerCodeRef.h> |
32 | #include <CodeLocation.h> |
#include <wtf/Noncopyable.h>
#include <wtf/UnusedParam.h>
#include <wtf/Vector.h>
35 | |
36 | #if ENABLE(ASSEMBLER) |
37 | |
38 | namespace JSC { |
39 | |
40 | class LinkBuffer; |
41 | class RepatchBuffer; |
42 | |
43 | template <class AssemblerType> |
44 | class AbstractMacroAssembler { |
45 | public: |
46 | typedef AssemblerType AssemblerType_T; |
47 | |
48 | typedef MacroAssemblerCodePtr CodePtr; |
49 | typedef MacroAssemblerCodeRef CodeRef; |
50 | |
51 | class Jump; |
52 | |
53 | typedef typename AssemblerType::RegisterID RegisterID; |
54 | typedef typename AssemblerType::FPRegisterID FPRegisterID; |
55 | typedef typename AssemblerType::JmpSrc JmpSrc; |
56 | typedef typename AssemblerType::JmpDst JmpDst; |
57 | |
58 | |
59 | // Section 1: MacroAssembler operand types |
60 | // |
61 | // The following types are used as operands to MacroAssembler operations, |
62 | // describing immediate and memory operands to the instructions to be planted. |
63 | |
64 | |
65 | enum Scale { |
66 | TimesOne, |
67 | TimesTwo, |
68 | TimesFour, |
69 | TimesEight, |
70 | }; |
71 | |
72 | // Address: |
73 | // |
74 | // Describes a simple base-offset address. |
75 | struct Address { |
76 | explicit Address(RegisterID base, int32_t offset = 0) |
77 | : base(base) |
78 | , offset(offset) |
79 | { |
80 | } |
81 | |
82 | RegisterID base; |
83 | int32_t offset; |
84 | }; |
85 | |
86 | // ImplicitAddress: |
87 | // |
88 | // This class is used for explicit 'load' and 'store' operations |
89 | // (as opposed to situations in which a memory operand is provided |
90 | // to a generic operation, such as an integer arithmetic instruction). |
91 | // |
92 | // In the case of a load (or store) operation we want to permit |
93 | // addresses to be implicitly constructed, e.g. the two calls: |
94 | // |
95 | // load32(Address(addrReg), destReg); |
96 | // load32(addrReg, destReg); |
97 | // |
    // are equivalent, and the explicit wrapping of the Address in the former
99 | // is unnecessary. |
100 | struct ImplicitAddress { |
101 | ImplicitAddress(RegisterID base) |
102 | : base(base) |
103 | , offset(0) |
104 | { |
105 | } |
106 | |
107 | ImplicitAddress(Address address) |
108 | : base(address.base) |
109 | , offset(address.offset) |
110 | { |
111 | } |
112 | |
113 | RegisterID base; |
114 | int32_t offset; |
115 | }; |
116 | |
    // BaseIndex:
    //
    // Describes a base-index addressing mode of the form base + (index * scale) + offset.
120 | struct BaseIndex { |
121 | BaseIndex(RegisterID base, RegisterID index, Scale scale, int32_t offset = 0) |
122 | : base(base) |
123 | , index(index) |
124 | , scale(scale) |
125 | , offset(offset) |
126 | { |
127 | } |
128 | |
129 | RegisterID base; |
130 | RegisterID index; |
131 | Scale scale; |
132 | int32_t offset; |
133 | }; |
134 | |
135 | // AbsoluteAddress: |
136 | // |
    // Describes a memory operand given by a pointer. For regular load & store
    // operations an unwrapped void* is used, rather than this wrapper.
139 | struct AbsoluteAddress { |
140 | explicit AbsoluteAddress(void* ptr) |
141 | : m_ptr(ptr) |
142 | { |
143 | } |
144 | |
145 | void* m_ptr; |
146 | }; |
147 | |
148 | // ImmPtr: |
149 | // |
    // A pointer-sized immediate operand to an instruction - this is wrapped
    // in a class requiring explicit construction in order to differentiate
    // it from pointers used as absolute addresses to memory operations.
153 | struct ImmPtr { |
154 | explicit ImmPtr(void* value) |
155 | : m_value(value) |
156 | { |
157 | } |
158 | |
159 | intptr_t asIntptr() |
160 | { |
161 | return reinterpret_cast<intptr_t>(m_value); |
162 | } |
163 | |
164 | void* m_value; |
165 | }; |
166 | |
167 | // Imm32: |
168 | // |
    // A 32-bit immediate operand to an instruction - this is wrapped in a
170 | // class requiring explicit construction in order to prevent RegisterIDs |
171 | // (which are implemented as an enum) from accidentally being passed as |
172 | // immediate values. |
173 | struct Imm32 { |
174 | explicit Imm32(int32_t value) |
175 | : m_value(value) |
176 | #if CPU(ARM) |
177 | , m_isPointer(false) |
178 | #endif |
179 | { |
180 | } |
181 | |
182 | #if !CPU(X86_64) |
183 | explicit Imm32(ImmPtr ptr) |
184 | : m_value(ptr.asIntptr()) |
185 | #if CPU(ARM) |
186 | , m_isPointer(true) |
187 | #endif |
188 | { |
189 | } |
190 | #endif |
191 | |
192 | int32_t m_value; |
193 | #if CPU(ARM) |
194 | // We rely on being able to regenerate code to recover exception handling |
        // information. Since ARMv7 supports 16-bit immediates, there is a danger
        // that, if pointer values change, the layout of the generated code will change.
197 | // To avoid this problem, always generate pointers (and thus Imm32s constructed |
198 | // from ImmPtrs) with a code sequence that is able to represent any pointer |
199 | // value - don't use a more compact form in these cases. |
200 | bool m_isPointer; |
201 | #endif |
202 | }; |
203 | |
204 | |
205 | // Section 2: MacroAssembler code buffer handles |
206 | // |
207 | // The following types are used to reference items in the code buffer |
208 | // during JIT code generation. For example, the type Jump is used to |
209 | // track the location of a jump instruction so that it may later be |
210 | // linked to a label marking its destination. |
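
    // A minimal illustrative sketch of a forward jump (the branch operation is
    // provided by a concrete MacroAssembler subclass, and the register name is
    // hypothetical):
    //
    //     Jump isZero = branch32(Equal, valueReg, Imm32(0)); // destination not yet known
    //     // ... code for the non-zero case ...
    //     isZero.link(this);                                 // the jump now lands here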
211 | |
212 | |
213 | // Label: |
214 | // |
215 | // A Label records a point in the generated instruction stream, typically such that |
216 | // it may be used as a destination for a jump. |
217 | class Label { |
218 | template<class TemplateAssemblerType> |
219 | friend class AbstractMacroAssembler; |
220 | friend class Jump; |
221 | friend class MacroAssemblerCodeRef; |
222 | friend class LinkBuffer; |
223 | |
224 | public: |
225 | Label() |
226 | { |
227 | } |
228 | |
229 | Label(AbstractMacroAssembler<AssemblerType>* masm) |
230 | : m_label(masm->m_assembler.label()) |
231 | { |
232 | } |
233 | |
234 | bool isUsed() const { return m_label.isUsed(); } |
235 | void used() { m_label.used(); } |
236 | private: |
237 | JmpDst m_label; |
238 | }; |
239 | |
240 | // DataLabelPtr: |
241 | // |
242 | // A DataLabelPtr is used to refer to a location in the code containing a pointer to be |
243 | // patched after the code has been generated. |
244 | class DataLabelPtr { |
245 | template<class TemplateAssemblerType> |
246 | friend class AbstractMacroAssembler; |
247 | friend class LinkBuffer; |
248 | public: |
249 | DataLabelPtr() |
250 | { |
251 | } |
252 | |
253 | DataLabelPtr(AbstractMacroAssembler<AssemblerType>* masm) |
254 | : m_label(masm->m_assembler.label()) |
255 | { |
256 | } |
257 | |
258 | private: |
259 | JmpDst m_label; |
260 | }; |
261 | |
262 | // DataLabel32: |
263 | // |
    // A DataLabel32 is used to refer to a location in the code containing a
    // 32-bit constant to be patched after the code has been generated.
266 | class DataLabel32 { |
267 | template<class TemplateAssemblerType> |
268 | friend class AbstractMacroAssembler; |
269 | friend class LinkBuffer; |
270 | public: |
271 | DataLabel32() |
272 | { |
273 | } |
274 | |
275 | DataLabel32(AbstractMacroAssembler<AssemblerType>* masm) |
276 | : m_label(masm->m_assembler.label()) |
277 | { |
278 | } |
279 | |
280 | private: |
281 | JmpDst m_label; |
282 | }; |
283 | |
284 | // Call: |
285 | // |
286 | // A Call object is a reference to a call instruction that has been planted |
287 | // into the code buffer - it is typically used to link the call, setting the |
    // relative offset such that, when executed, it will call the desired
    // destination.
290 | class Call { |
291 | template<class TemplateAssemblerType> |
292 | friend class AbstractMacroAssembler; |
293 | |
294 | public: |
295 | enum Flags { |
296 | None = 0x0, |
297 | Linkable = 0x1, |
298 | Near = 0x2, |
299 | LinkableNear = 0x3, |
300 | }; |
301 | |
302 | Call() |
303 | : m_flags(None) |
304 | { |
305 | } |
306 | |
307 | Call(JmpSrc jmp, Flags flags) |
308 | : m_jmp(jmp) |
309 | , m_flags(flags) |
310 | { |
311 | } |
312 | |
313 | bool isFlagSet(Flags flag) |
314 | { |
315 | return m_flags & flag; |
316 | } |
317 | |
318 | static Call fromTailJump(Jump jump) |
319 | { |
320 | return Call(jump.m_jmp, Linkable); |
321 | } |
322 | |
323 | JmpSrc m_jmp; |
324 | private: |
325 | Flags m_flags; |
326 | }; |
327 | |
328 | // Jump: |
329 | // |
    // A Jump object is a reference to a jump instruction that has been planted
    // into the code buffer - it is typically used to link the jump, setting the
    // relative offset such that, when executed, it will jump to the desired
    // destination.
334 | class Jump { |
335 | template<class TemplateAssemblerType> |
336 | friend class AbstractMacroAssembler; |
337 | friend class Call; |
338 | friend class LinkBuffer; |
339 | public: |
340 | Jump() |
341 | { |
342 | } |
343 | |
344 | Jump(JmpSrc jmp) |
345 | : m_jmp(jmp) |
346 | { |
347 | } |
348 | |
349 | void link(AbstractMacroAssembler<AssemblerType>* masm) |
350 | { |
351 | masm->m_assembler.linkJump(m_jmp, masm->m_assembler.label()); |
352 | } |
353 | |
354 | void linkTo(Label label, AbstractMacroAssembler<AssemblerType>* masm) |
355 | { |
356 | masm->m_assembler.linkJump(m_jmp, label.m_label); |
357 | } |
358 | |
359 | private: |
360 | JmpSrc m_jmp; |
361 | }; |
362 | |
363 | // JumpList: |
364 | // |
365 | // A JumpList is a set of Jump objects. |
366 | // All jumps in the set will be linked to the same destination. |
367 | class JumpList { |
368 | friend class LinkBuffer; |
369 | |
370 | public: |
371 | typedef Vector<Jump, 16> JumpVector; |
372 | |
373 | void link(AbstractMacroAssembler<AssemblerType>* masm) |
374 | { |
375 | size_t size = m_jumps.size(); |
376 | for (size_t i = 0; i < size; ++i) |
377 | m_jumps[i].link(masm); |
378 | m_jumps.clear(); |
379 | } |
380 | |
381 | void linkTo(Label label, AbstractMacroAssembler<AssemblerType>* masm) |
382 | { |
383 | size_t size = m_jumps.size(); |
384 | for (size_t i = 0; i < size; ++i) |
385 | m_jumps[i].linkTo(label, masm); |
386 | m_jumps.clear(); |
387 | } |
388 | |
389 | void append(Jump jump) |
390 | { |
391 | m_jumps.append(jump); |
392 | } |
393 | |
394 | void append(JumpList& other) |
395 | { |
396 | m_jumps.append(other.m_jumps.begin(), other.m_jumps.size()); |
397 | } |
398 | |
399 | bool empty() |
400 | { |
401 | return !m_jumps.size(); |
402 | } |
403 | |
404 | const JumpVector& jumps() { return m_jumps; } |
405 | |
406 | private: |
407 | JumpVector m_jumps; |
408 | }; |
409 | |
410 | |
411 | // Section 3: Misc admin methods |
412 | |
413 | static CodePtr trampolineAt(CodeRef ref, Label label) |
414 | { |
415 | return CodePtr(AssemblerType::getRelocatedAddress(ref.m_code.dataLocation(), label.m_label)); |
416 | } |
417 | |
418 | size_t size() |
419 | { |
420 | return m_assembler.size(); |
421 | } |
422 | |
423 | Label label() |
424 | { |
425 | return Label(this); |
426 | } |
427 | |
428 | Label align() |
429 | { |
430 | m_assembler.align(16); |
431 | return Label(this); |
432 | } |
433 | |
434 | ptrdiff_t differenceBetween(Label from, Jump to) |
435 | { |
436 | return AssemblerType::getDifferenceBetweenLabels(from.m_label, to.m_jmp); |
437 | } |
438 | |
439 | ptrdiff_t differenceBetween(Label from, Call to) |
440 | { |
441 | return AssemblerType::getDifferenceBetweenLabels(from.m_label, to.m_jmp); |
442 | } |
443 | |
444 | ptrdiff_t differenceBetween(Label from, Label to) |
445 | { |
446 | return AssemblerType::getDifferenceBetweenLabels(from.m_label, to.m_label); |
447 | } |
448 | |
449 | ptrdiff_t differenceBetween(Label from, DataLabelPtr to) |
450 | { |
451 | return AssemblerType::getDifferenceBetweenLabels(from.m_label, to.m_label); |
452 | } |
453 | |
454 | ptrdiff_t differenceBetween(Label from, DataLabel32 to) |
455 | { |
456 | return AssemblerType::getDifferenceBetweenLabels(from.m_label, to.m_label); |
457 | } |
458 | |
459 | ptrdiff_t differenceBetween(DataLabelPtr from, Jump to) |
460 | { |
461 | return AssemblerType::getDifferenceBetweenLabels(from.m_label, to.m_jmp); |
462 | } |
463 | |
464 | ptrdiff_t differenceBetween(DataLabelPtr from, DataLabelPtr to) |
465 | { |
466 | return AssemblerType::getDifferenceBetweenLabels(from.m_label, to.m_label); |
467 | } |
468 | |
469 | ptrdiff_t differenceBetween(DataLabelPtr from, Call to) |
470 | { |
471 | return AssemblerType::getDifferenceBetweenLabels(from.m_label, to.m_jmp); |
472 | } |
473 | |
474 | protected: |
475 | AssemblerType m_assembler; |
476 | |
477 | friend class LinkBuffer; |
478 | friend class RepatchBuffer; |
479 | |
480 | static void linkJump(void* code, Jump jump, CodeLocationLabel target) |
481 | { |
482 | AssemblerType::linkJump(code, jump.m_jmp, target.dataLocation()); |
483 | } |
484 | |
485 | static void linkPointer(void* code, typename AssemblerType::JmpDst label, void* value) |
486 | { |
487 | AssemblerType::linkPointer(code, label, value); |
488 | } |
489 | |
490 | static void* getLinkerAddress(void* code, typename AssemblerType::JmpSrc label) |
491 | { |
492 | return AssemblerType::getRelocatedAddress(code, label); |
493 | } |
494 | |
495 | static void* getLinkerAddress(void* code, typename AssemblerType::JmpDst label) |
496 | { |
497 | return AssemblerType::getRelocatedAddress(code, label); |
498 | } |
499 | |
500 | static unsigned getLinkerCallReturnOffset(Call call) |
501 | { |
502 | return AssemblerType::getCallReturnOffset(call.m_jmp); |
503 | } |
504 | |
505 | static void repatchJump(CodeLocationJump jump, CodeLocationLabel destination) |
506 | { |
507 | AssemblerType::relinkJump(jump.dataLocation(), destination.dataLocation()); |
508 | } |
509 | |
510 | static void repatchNearCall(CodeLocationNearCall nearCall, CodeLocationLabel destination) |
511 | { |
512 | AssemblerType::relinkCall(nearCall.dataLocation(), destination.executableAddress()); |
513 | } |
514 | |
515 | static void repatchInt32(CodeLocationDataLabel32 dataLabel32, int32_t value) |
516 | { |
517 | AssemblerType::repatchInt32(dataLabel32.dataLocation(), value); |
518 | } |
519 | |
520 | static void repatchPointer(CodeLocationDataLabelPtr dataLabelPtr, void* value) |
521 | { |
522 | AssemblerType::repatchPointer(dataLabelPtr.dataLocation(), value); |
523 | } |
524 | |
525 | static void repatchLoadPtrToLEA(CodeLocationInstruction instruction) |
526 | { |
527 | AssemblerType::repatchLoadPtrToLEA(instruction.dataLocation()); |
528 | } |
529 | }; |
530 | |
531 | } // namespace JSC |
532 | |
533 | #endif // ENABLE(ASSEMBLER) |
534 | |
535 | #endif // AbstractMacroAssembler_h |
536 | |