/*
 * Copyright (C) 2008 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef JITInlineMethods_h
#define JITInlineMethods_h

#include <wtf/Platform.h>

#if ENABLE(JIT)

namespace JSC {

/* Deprecated: Please use JITStubCall instead. */

// puts an arg onto the stack, as an arg to a context threaded function.
ALWAYS_INLINE void JIT::emitPutJITStubArg(RegisterID src, unsigned argumentNumber)
{
    unsigned argumentStackOffset = (argumentNumber * (sizeof(JSValue) / sizeof(void*))) + JITSTACKFRAME_ARGS_INDEX;
    poke(src, argumentStackOffset);
}

/* Deprecated: Please use JITStubCall instead. */

ALWAYS_INLINE void JIT::emitPutJITStubArgConstant(unsigned value, unsigned argumentNumber)
{
    unsigned argumentStackOffset = (argumentNumber * (sizeof(JSValue) / sizeof(void*))) + JITSTACKFRAME_ARGS_INDEX;
    poke(Imm32(value), argumentStackOffset);
}

/* Deprecated: Please use JITStubCall instead. */

ALWAYS_INLINE void JIT::emitPutJITStubArgConstant(void* value, unsigned argumentNumber)
{
    unsigned argumentStackOffset = (argumentNumber * (sizeof(JSValue) / sizeof(void*))) + JITSTACKFRAME_ARGS_INDEX;
    poke(ImmPtr(value), argumentStackOffset);
}

/* Deprecated: Please use JITStubCall instead. */

ALWAYS_INLINE void JIT::emitGetJITStubArg(unsigned argumentNumber, RegisterID dst)
{
    unsigned argumentStackOffset = (argumentNumber * (sizeof(JSValue) / sizeof(void*))) + JITSTACKFRAME_ARGS_INDEX;
    peek(dst, argumentStackOffset);
}

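// Constant-operand helpers: a virtual register index may refer to an entry in
// the CodeBlock's constant pool; these return the constant JSValue it holds.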
ALWAYS_INLINE bool JIT::isOperandConstantImmediateDouble(unsigned src)
{
    return m_codeBlock->isConstantRegisterIndex(src) && getConstantOperand(src).isDouble();
}

ALWAYS_INLINE JSValue JIT::getConstantOperand(unsigned src)
{
    ASSERT(m_codeBlock->isConstantRegisterIndex(src));
    return m_codeBlock->getConstant(src);
}

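// Call frame header accessors: read and write the fixed slots at the base of
// the current call frame (indexed by RegisterFile::CallFrameHeaderEntry).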
ALWAYS_INLINE void JIT::emitPutToCallFrameHeader(RegisterID from, RegisterFile::CallFrameHeaderEntry entry)
{
    storePtr(from, Address(callFrameRegister, entry * sizeof(Register)));
}

ALWAYS_INLINE void JIT::emitPutImmediateToCallFrameHeader(void* value, RegisterFile::CallFrameHeaderEntry entry)
{
    storePtr(ImmPtr(value), Address(callFrameRegister, entry * sizeof(Register)));
}

ALWAYS_INLINE void JIT::emitGetFromCallFrameHeaderPtr(RegisterFile::CallFrameHeaderEntry entry, RegisterID to, RegisterID from)
{
    loadPtr(Address(from, entry * sizeof(Register)), to);
#if !USE(JSVALUE32_64)
    killLastResultRegister();
#endif
}

ALWAYS_INLINE void JIT::emitGetFromCallFrameHeader32(RegisterFile::CallFrameHeaderEntry entry, RegisterID to, RegisterID from)
{
    load32(Address(from, entry * sizeof(Register)), to);
#if !USE(JSVALUE32_64)
    killLastResultRegister();
#endif
}

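// Emits a near call and records it against the current bytecode index so it
// can be linked to the target function when the code is finalized.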
ALWAYS_INLINE JIT::Call JIT::emitNakedCall(CodePtr function)
{
    ASSERT(m_bytecodeIndex != (unsigned)-1); // This method should only be called during hot/cold path generation, so that m_bytecodeIndex is set.

    Call nakedCall = nearCall();
    m_calls.append(CallRecord(nakedCall, m_bytecodeIndex, function.executableAddress()));
    return nakedCall;
}

#if defined(ASSEMBLER_HAS_CONSTANT_POOL) && ASSEMBLER_HAS_CONSTANT_POOL

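// An "uninterrupted sequence" reserves assembler and constant-pool space up
// front so that a constant-pool flush cannot land in the middle of the
// sequence; in debug builds the begin/end pair verifies that exactly the
// declared amount of space was used.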
ALWAYS_INLINE void JIT::beginUninterruptedSequence(int insnSpace, int constSpace)
{
#if CPU(ARM_TRADITIONAL)
#ifndef NDEBUG
    // Ensure the label after the sequence can also fit
    insnSpace += sizeof(ARMWord);
    constSpace += sizeof(uint64_t);
#endif

    ensureSpace(insnSpace, constSpace);

#endif

#if defined(ASSEMBLER_HAS_CONSTANT_POOL) && ASSEMBLER_HAS_CONSTANT_POOL
#ifndef NDEBUG
    m_uninterruptedInstructionSequenceBegin = label();
    m_uninterruptedConstantSequenceBegin = sizeOfConstantPool();
#endif
#endif
}

ALWAYS_INLINE void JIT::endUninterruptedSequence(int insnSpace, int constSpace)
{
#if defined(ASSEMBLER_HAS_CONSTANT_POOL) && ASSEMBLER_HAS_CONSTANT_POOL
    ASSERT(differenceBetween(m_uninterruptedInstructionSequenceBegin, label()) == insnSpace);
    ASSERT(sizeOfConstantPool() - m_uninterruptedConstantSequenceBegin == constSpace);
#endif
}

#endif

#if CPU(ARM)

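// On ARM the return address is passed in the link register; on x86/x86-64 it
// lives on the stack, so the two groups of helpers below differ accordingly.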
ALWAYS_INLINE void JIT::preserveReturnAddressAfterCall(RegisterID reg)
{
    move(linkRegister, reg);
}

ALWAYS_INLINE void JIT::restoreReturnAddressBeforeReturn(RegisterID reg)
{
    move(reg, linkRegister);
}

ALWAYS_INLINE void JIT::restoreReturnAddressBeforeReturn(Address address)
{
    loadPtr(address, linkRegister);
}

#else // CPU(X86) || CPU(X86_64)

ALWAYS_INLINE void JIT::preserveReturnAddressAfterCall(RegisterID reg)
{
    pop(reg);
}

ALWAYS_INLINE void JIT::restoreReturnAddressBeforeReturn(RegisterID reg)
{
    push(reg);
}

ALWAYS_INLINE void JIT::restoreReturnAddressBeforeReturn(Address address)
{
    push(address);
}

#endif

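// Before a call into a JIT stub: record the current call frame in the
// JITStackFrame and, except in the va_list configuration, point the first
// argument register at the stack so the stub can locate its arguments.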
#if USE(JIT_STUB_ARGUMENT_VA_LIST)
ALWAYS_INLINE void JIT::restoreArgumentReference()
{
    poke(callFrameRegister, OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof (void*));
}
ALWAYS_INLINE void JIT::restoreArgumentReferenceForTrampoline() {}
#else
ALWAYS_INLINE void JIT::restoreArgumentReference()
{
    move(stackPointerRegister, firstArgumentRegister);
    poke(callFrameRegister, OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof (void*));
}
ALWAYS_INLINE void JIT::restoreArgumentReferenceForTrampoline()
{
#if CPU(X86)
    // Within a trampoline the return address will be on the stack at this point.
    addPtr(Imm32(sizeof(void*)), stackPointerRegister, firstArgumentRegister);
#elif CPU(ARM)
    move(stackPointerRegister, firstArgumentRegister);
#endif
    // In the trampoline on x86-64, the first argument register is not overwritten.
}
#endif

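// Branches if the cell in 'reg' does not have the expected Structure.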
ALWAYS_INLINE JIT::Jump JIT::checkStructure(RegisterID reg, Structure* structure)
{
    return branchPtr(NotEqual, Address(reg, OBJECT_OFFSETOF(JSCell, m_structure)), ImmPtr(structure));
}

ALWAYS_INLINE void JIT::linkSlowCaseIfNotJSCell(Vector<SlowCaseEntry>::iterator& iter, int vReg)
{
    if (!m_codeBlock->isKnownNotImmediate(vReg))
        linkSlowCase(iter);
}

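// Slow-case and jump bookkeeping: jumps are recorded against the current
// bytecode index and linked to their targets once code generation is complete.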
ALWAYS_INLINE void JIT::addSlowCase(Jump jump)
{
    ASSERT(m_bytecodeIndex != (unsigned)-1); // This method should only be called during hot/cold path generation, so that m_bytecodeIndex is set.

    m_slowCases.append(SlowCaseEntry(jump, m_bytecodeIndex));
}

ALWAYS_INLINE void JIT::addSlowCase(JumpList jumpList)
{
    ASSERT(m_bytecodeIndex != (unsigned)-1); // This method should only be called during hot/cold path generation, so that m_bytecodeIndex is set.

    const JumpList::JumpVector& jumpVector = jumpList.jumps();
    size_t size = jumpVector.size();
    for (size_t i = 0; i < size; ++i)
        m_slowCases.append(SlowCaseEntry(jumpVector[i], m_bytecodeIndex));
}

ALWAYS_INLINE void JIT::addJump(Jump jump, int relativeOffset)
{
    ASSERT(m_bytecodeIndex != (unsigned)-1); // This method should only be called during hot/cold path generation, so that m_bytecodeIndex is set.

    m_jmpTable.append(JumpTable(jump, m_bytecodeIndex + relativeOffset));
}

ALWAYS_INLINE void JIT::emitJumpSlowToHot(Jump jump, int relativeOffset)
{
    ASSERT(m_bytecodeIndex != (unsigned)-1); // This method should only be called during hot/cold path generation, so that m_bytecodeIndex is set.

    jump.linkTo(m_labels[m_bytecodeIndex + relativeOffset], this);
}

#if ENABLE(SAMPLING_FLAGS)
ALWAYS_INLINE void JIT::setSamplingFlag(int32_t flag)
{
    ASSERT(flag >= 1);
    ASSERT(flag <= 32);
    or32(Imm32(1u << (flag - 1)), AbsoluteAddress(&SamplingFlags::s_flags));
}

ALWAYS_INLINE void JIT::clearSamplingFlag(int32_t flag)
{
    ASSERT(flag >= 1);
    ASSERT(flag <= 32);
    and32(Imm32(~(1u << (flag - 1))), AbsoluteAddress(&SamplingFlags::s_flags));
}
#endif

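// Adds 'count' to a 64-bit sampling counter in memory; on 32-bit x86 the
// counter is updated as an add / add-with-carry pair over its two halves.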
#if ENABLE(SAMPLING_COUNTERS)
ALWAYS_INLINE void JIT::emitCount(AbstractSamplingCounter& counter, uint32_t count)
{
#if CPU(X86_64) // Or any other 64-bit platform.
    addPtr(Imm32(count), AbsoluteAddress(&counter.m_counter));
#elif CPU(X86) // Or any other little-endian 32-bit platform.
    intptr_t hiWord = reinterpret_cast<intptr_t>(&counter.m_counter) + sizeof(int32_t);
    add32(Imm32(count), AbsoluteAddress(&counter.m_counter));
    addWithCarry32(Imm32(0), AbsoluteAddress(reinterpret_cast<void*>(hiWord)));
#else
#error "SAMPLING_COUNTERS not implemented on this platform."
#endif
}
#endif

#if ENABLE(OPCODE_SAMPLING)
#if CPU(X86_64)
ALWAYS_INLINE void JIT::sampleInstruction(Instruction* instruction, bool inHostFunction)
{
    move(ImmPtr(m_interpreter->sampler()->sampleSlot()), X86Registers::ecx);
    storePtr(ImmPtr(m_interpreter->sampler()->encodeSample(instruction, inHostFunction)), X86Registers::ecx);
}
#else
ALWAYS_INLINE void JIT::sampleInstruction(Instruction* instruction, bool inHostFunction)
{
    storePtr(ImmPtr(m_interpreter->sampler()->encodeSample(instruction, inHostFunction)), m_interpreter->sampler()->sampleSlot());
}
#endif
#endif

#if ENABLE(CODEBLOCK_SAMPLING)
#if CPU(X86_64)
ALWAYS_INLINE void JIT::sampleCodeBlock(CodeBlock* codeBlock)
{
    move(ImmPtr(m_interpreter->sampler()->codeBlockSlot()), X86Registers::ecx);
    storePtr(ImmPtr(codeBlock), X86Registers::ecx);
}
#else
ALWAYS_INLINE void JIT::sampleCodeBlock(CodeBlock* codeBlock)
{
    storePtr(ImmPtr(codeBlock), m_interpreter->sampler()->codeBlockSlot());
}
#endif
#endif

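// Returns the stack address of the slot for virtual register 'index', relative to 'base'.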
inline JIT::Address JIT::addressFor(unsigned index, RegisterID base)
{
    return Address(base, (index * sizeof(Register)));
}

#if USE(JSVALUE32_64)

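// In the JSVALUE32_64 encoding each register slot holds a 32-bit tag and a
// 32-bit payload; tagFor and payloadFor address the two halves of a slot.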
inline JIT::Address JIT::tagFor(unsigned index, RegisterID base)
{
    return Address(base, (index * sizeof(Register)) + OBJECT_OFFSETOF(JSValue, u.asBits.tag));
}

inline JIT::Address JIT::payloadFor(unsigned index, RegisterID base)
{
    return Address(base, (index * sizeof(Register)) + OBJECT_OFFSETOF(JSValue, u.asBits.payload));
}

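// Loads the tag or payload word of a virtual register, preferring the
// register-mapping cache and the constant pool over a load from memory.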
inline void JIT::emitLoadTag(unsigned index, RegisterID tag)
{
    RegisterID mappedTag;
    if (getMappedTag(index, mappedTag)) {
        move(mappedTag, tag);
        unmap(tag);
        return;
    }

    if (m_codeBlock->isConstantRegisterIndex(index)) {
        move(Imm32(getConstantOperand(index).tag()), tag);
        unmap(tag);
        return;
    }

    load32(tagFor(index), tag);
    unmap(tag);
}

inline void JIT::emitLoadPayload(unsigned index, RegisterID payload)
{
    RegisterID mappedPayload;
    if (getMappedPayload(index, mappedPayload)) {
        move(mappedPayload, payload);
        unmap(payload);
        return;
    }

    if (m_codeBlock->isConstantRegisterIndex(index)) {
        move(Imm32(getConstantOperand(index).payload()), payload);
        unmap(payload);
        return;
    }

    load32(payloadFor(index), payload);
    unmap(payload);
}

inline void JIT::emitLoad(const JSValue& v, RegisterID tag, RegisterID payload)
{
    move(Imm32(v.payload()), payload);
    move(Imm32(v.tag()), tag);
}

inline void JIT::emitLoad(unsigned index, RegisterID tag, RegisterID payload, RegisterID base)
{
    ASSERT(tag != payload);

    if (base == callFrameRegister) {
        ASSERT(payload != base);
        emitLoadPayload(index, payload);
        emitLoadTag(index, tag);
        return;
    }

    if (payload == base) { // avoid stomping base
        load32(tagFor(index, base), tag);
        load32(payloadFor(index, base), payload);
        return;
    }

    load32(payloadFor(index, base), payload);
    load32(tagFor(index, base), tag);
}

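// Loads two operands; if either is currently mapped to registers it is loaded
// first, so the cached registers are consumed before the second load can
// overwrite them.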
inline void JIT::emitLoad2(unsigned index1, RegisterID tag1, RegisterID payload1, unsigned index2, RegisterID tag2, RegisterID payload2)
{
    if (isMapped(index1)) {
        emitLoad(index1, tag1, payload1);
        emitLoad(index2, tag2, payload2);
        return;
    }
    emitLoad(index2, tag2, payload2);
    emitLoad(index1, tag1, payload1);
}

inline void JIT::emitLoadDouble(unsigned index, FPRegisterID value)
{
    if (m_codeBlock->isConstantRegisterIndex(index)) {
        Register& inConstantPool = m_codeBlock->constantRegister(index);
        loadDouble(&inConstantPool, value);
    } else
        loadDouble(addressFor(index), value);
}

inline void JIT::emitLoadInt32ToDouble(unsigned index, FPRegisterID value)
{
    if (m_codeBlock->isConstantRegisterIndex(index)) {
        Register& inConstantPool = m_codeBlock->constantRegister(index);
        char* bytePointer = reinterpret_cast<char*>(&inConstantPool);
        convertInt32ToDouble(AbsoluteAddress(bytePointer + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), value);
    } else
        convertInt32ToDouble(payloadFor(index), value);
}

inline void JIT::emitStore(unsigned index, RegisterID tag, RegisterID payload, RegisterID base)
{
    store32(payload, payloadFor(index, base));
    store32(tag, tagFor(index, base));
}

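// Typed store helpers: the tag word is only written when the slot is not
// already known to hold a value of the corresponding type.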
inline void JIT::emitStoreInt32(unsigned index, RegisterID payload, bool indexIsInt32)
{
    store32(payload, payloadFor(index, callFrameRegister));
    if (!indexIsInt32)
        store32(Imm32(JSValue::Int32Tag), tagFor(index, callFrameRegister));
}

inline void JIT::emitStoreInt32(unsigned index, Imm32 payload, bool indexIsInt32)
{
    store32(payload, payloadFor(index, callFrameRegister));
    if (!indexIsInt32)
        store32(Imm32(JSValue::Int32Tag), tagFor(index, callFrameRegister));
}

inline void JIT::emitStoreCell(unsigned index, RegisterID payload, bool indexIsCell)
{
    store32(payload, payloadFor(index, callFrameRegister));
    if (!indexIsCell)
        store32(Imm32(JSValue::CellTag), tagFor(index, callFrameRegister));
}

inline void JIT::emitStoreBool(unsigned index, RegisterID tag, bool indexIsBool)
{
    if (!indexIsBool)
        store32(Imm32(0), payloadFor(index, callFrameRegister));
    store32(tag, tagFor(index, callFrameRegister));
}

inline void JIT::emitStoreDouble(unsigned index, FPRegisterID value)
{
    storeDouble(value, addressFor(index));
}

inline void JIT::emitStore(unsigned index, const JSValue constant, RegisterID base)
{
    store32(Imm32(constant.payload()), payloadFor(index, base));
    store32(Imm32(constant.tag()), tagFor(index, base));
}

ALWAYS_INLINE void JIT::emitInitRegister(unsigned dst)
{
    emitStore(dst, jsUndefined());
}

inline bool JIT::isLabeled(unsigned bytecodeIndex)
{
    for (size_t numberOfJumpTargets = m_codeBlock->numberOfJumpTargets(); m_jumpTargetIndex != numberOfJumpTargets; ++m_jumpTargetIndex) {
        unsigned jumpTarget = m_codeBlock->jumpTarget(m_jumpTargetIndex);
        if (jumpTarget == bytecodeIndex)
            return true;
        if (jumpTarget > bytecodeIndex)
            return false;
    }
    return false;
}

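// A single { bytecode index, virtual register } -> { tag register, payload
// register } mapping is cached between instructions so redundant loads can be
// avoided; it is never established at a jump target, and is invalidated
// whenever a cached register is repurposed.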
inline void JIT::map(unsigned bytecodeIndex, unsigned virtualRegisterIndex, RegisterID tag, RegisterID payload)
{
    if (isLabeled(bytecodeIndex))
        return;

    m_mappedBytecodeIndex = bytecodeIndex;
    m_mappedVirtualRegisterIndex = virtualRegisterIndex;
    m_mappedTag = tag;
    m_mappedPayload = payload;
}

inline void JIT::unmap(RegisterID registerID)
{
    if (m_mappedTag == registerID)
        m_mappedTag = (RegisterID)-1;
    else if (m_mappedPayload == registerID)
        m_mappedPayload = (RegisterID)-1;
}

inline void JIT::unmap()
{
    m_mappedBytecodeIndex = (unsigned)-1;
    m_mappedVirtualRegisterIndex = (unsigned)-1;
    m_mappedTag = (RegisterID)-1;
    m_mappedPayload = (RegisterID)-1;
}

inline bool JIT::isMapped(unsigned virtualRegisterIndex)
{
    if (m_mappedBytecodeIndex != m_bytecodeIndex)
        return false;
    if (m_mappedVirtualRegisterIndex != virtualRegisterIndex)
        return false;
    return true;
}

inline bool JIT::getMappedPayload(unsigned virtualRegisterIndex, RegisterID& payload)
{
    if (m_mappedBytecodeIndex != m_bytecodeIndex)
        return false;
    if (m_mappedVirtualRegisterIndex != virtualRegisterIndex)
        return false;
    if (m_mappedPayload == (RegisterID)-1)
        return false;
    payload = m_mappedPayload;
    return true;
}

inline bool JIT::getMappedTag(unsigned virtualRegisterIndex, RegisterID& tag)
{
    if (m_mappedBytecodeIndex != m_bytecodeIndex)
        return false;
    if (m_mappedVirtualRegisterIndex != virtualRegisterIndex)
        return false;
    if (m_mappedTag == (RegisterID)-1)
        return false;
    tag = m_mappedTag;
    return true;
}

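// Cell checks: in the JSVALUE32_64 encoding a value is a cell when its tag
// word equals JSValue::CellTag; the check is skipped when the operand is
// statically known not to be an immediate.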
inline void JIT::emitJumpSlowCaseIfNotJSCell(unsigned virtualRegisterIndex)
{
    if (!m_codeBlock->isKnownNotImmediate(virtualRegisterIndex))
        addSlowCase(branch32(NotEqual, tagFor(virtualRegisterIndex), Imm32(JSValue::CellTag)));
}

inline void JIT::emitJumpSlowCaseIfNotJSCell(unsigned virtualRegisterIndex, RegisterID tag)
{
    if (!m_codeBlock->isKnownNotImmediate(virtualRegisterIndex))
        addSlowCase(branch32(NotEqual, tag, Imm32(JSValue::CellTag)));
}

inline void JIT::linkSlowCaseIfNotJSCell(Vector<SlowCaseEntry>::iterator& iter, unsigned virtualRegisterIndex)
{
    if (!m_codeBlock->isKnownNotImmediate(virtualRegisterIndex))
        linkSlowCase(iter);
}

ALWAYS_INLINE bool JIT::isOperandConstantImmediateInt(unsigned src)
{
    return m_codeBlock->isConstantRegisterIndex(src) && getConstantOperand(src).isInt32();
}

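// If either operand is a constant int32, returns its value in 'constant' and
// the index of the other operand in 'op'.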
ALWAYS_INLINE bool JIT::getOperandConstantImmediateInt(unsigned op1, unsigned op2, unsigned& op, int32_t& constant)
{
    if (isOperandConstantImmediateInt(op1)) {
        constant = getConstantOperand(op1).asInt32();
        op = op2;
        return true;
    }

    if (isOperandConstantImmediateInt(op2)) {
        constant = getConstantOperand(op2).asInt32();
        op = op1;
        return true;
    }

    return false;
}

/* Deprecated: Please use JITStubCall instead. */

ALWAYS_INLINE void JIT::emitPutJITStubArg(RegisterID tag, RegisterID payload, unsigned argumentNumber)
{
    unsigned argumentStackOffset = (argumentNumber * (sizeof(JSValue) / sizeof(void*))) + JITSTACKFRAME_ARGS_INDEX;
    poke(payload, argumentStackOffset);
    poke(tag, argumentStackOffset + 1);
}

/* Deprecated: Please use JITStubCall instead. */

ALWAYS_INLINE void JIT::emitPutJITStubArgFromVirtualRegister(unsigned src, unsigned argumentNumber, RegisterID scratch1, RegisterID scratch2)
{
    unsigned argumentStackOffset = (argumentNumber * (sizeof(JSValue) / sizeof(void*))) + JITSTACKFRAME_ARGS_INDEX;
    if (m_codeBlock->isConstantRegisterIndex(src)) {
        JSValue constant = m_codeBlock->getConstant(src);
        poke(Imm32(constant.payload()), argumentStackOffset);
        poke(Imm32(constant.tag()), argumentStackOffset + 1);
    } else {
        emitLoad(src, scratch1, scratch2);
        poke(scratch2, argumentStackOffset);
        poke(scratch1, argumentStackOffset + 1);
    }
}

#else // USE(JSVALUE32_64)

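// In the single-word value representation the result of the previous bytecode
// may still be live in cachedResultRegister; m_lastResultBytecodeRegister
// records which virtual register it holds, and killLastResultRegister
// invalidates that cache.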
ALWAYS_INLINE void JIT::killLastResultRegister()
{
    m_lastResultBytecodeRegister = std::numeric_limits<int>::max();
}

// get arg puts an arg from the SF register array into a h/w register
ALWAYS_INLINE void JIT::emitGetVirtualRegister(int src, RegisterID dst)
{
    ASSERT(m_bytecodeIndex != (unsigned)-1); // This method should only be called during hot/cold path generation, so that m_bytecodeIndex is set.

    // TODO: we want to reuse values that are already in registers if we can - add a register allocator!
    if (m_codeBlock->isConstantRegisterIndex(src)) {
        JSValue value = m_codeBlock->getConstant(src);
        move(ImmPtr(JSValue::encode(value)), dst);
        killLastResultRegister();
        return;
    }

    if (src == m_lastResultBytecodeRegister && m_codeBlock->isTemporaryRegisterIndex(src)) {
        bool atJumpTarget = false;
        while (m_jumpTargetsPosition < m_codeBlock->numberOfJumpTargets() && m_codeBlock->jumpTarget(m_jumpTargetsPosition) <= m_bytecodeIndex) {
            if (m_codeBlock->jumpTarget(m_jumpTargetsPosition) == m_bytecodeIndex)
                atJumpTarget = true;
            ++m_jumpTargetsPosition;
        }

        if (!atJumpTarget) {
            // The argument we want is already stored in eax
            if (dst != cachedResultRegister)
                move(cachedResultRegister, dst);
            killLastResultRegister();
            return;
        }
    }

    loadPtr(Address(callFrameRegister, src * sizeof(Register)), dst);
    killLastResultRegister();
}

ALWAYS_INLINE void JIT::emitGetVirtualRegisters(int src1, RegisterID dst1, int src2, RegisterID dst2)
{
    if (src2 == m_lastResultBytecodeRegister) {
        emitGetVirtualRegister(src2, dst2);
        emitGetVirtualRegister(src1, dst1);
    } else {
        emitGetVirtualRegister(src1, dst1);
        emitGetVirtualRegister(src2, dst2);
    }
}

ALWAYS_INLINE int32_t JIT::getConstantOperandImmediateInt(unsigned src)
{
    return getConstantOperand(src).asInt32();
}

ALWAYS_INLINE bool JIT::isOperandConstantImmediateInt(unsigned src)
{
    return m_codeBlock->isConstantRegisterIndex(src) && getConstantOperand(src).isInt32();
}

ALWAYS_INLINE void JIT::emitPutVirtualRegister(unsigned dst, RegisterID from)
{
    storePtr(from, Address(callFrameRegister, dst * sizeof(Register)));
    m_lastResultBytecodeRegister = (from == cachedResultRegister) ? dst : std::numeric_limits<int>::max();
}

ALWAYS_INLINE void JIT::emitInitRegister(unsigned dst)
{
    storePtr(ImmPtr(JSValue::encode(jsUndefined())), Address(callFrameRegister, dst * sizeof(Register)));
}

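// Immediate/cell tests: with JSVALUE64 a cell is identified by testing the
// encoded value against the tag-mask register, with JSVALUE32 by testing the
// immediate tag bits of the encoded value.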
ALWAYS_INLINE JIT::Jump JIT::emitJumpIfJSCell(RegisterID reg)
{
#if USE(JSVALUE64)
    return branchTestPtr(Zero, reg, tagMaskRegister);
#else
    return branchTest32(Zero, reg, Imm32(JSImmediate::TagMask));
#endif
}

ALWAYS_INLINE JIT::Jump JIT::emitJumpIfBothJSCells(RegisterID reg1, RegisterID reg2, RegisterID scratch)
{
    move(reg1, scratch);
    orPtr(reg2, scratch);
    return emitJumpIfJSCell(scratch);
}

ALWAYS_INLINE void JIT::emitJumpSlowCaseIfJSCell(RegisterID reg)
{
    addSlowCase(emitJumpIfJSCell(reg));
}

ALWAYS_INLINE JIT::Jump JIT::emitJumpIfNotJSCell(RegisterID reg)
{
#if USE(JSVALUE64)
    return branchTestPtr(NonZero, reg, tagMaskRegister);
#else
    return branchTest32(NonZero, reg, Imm32(JSImmediate::TagMask));
#endif
}

ALWAYS_INLINE void JIT::emitJumpSlowCaseIfNotJSCell(RegisterID reg)
{
    addSlowCase(emitJumpIfNotJSCell(reg));
}

ALWAYS_INLINE void JIT::emitJumpSlowCaseIfNotJSCell(RegisterID reg, int vReg)
{
    if (!m_codeBlock->isKnownNotImmediate(vReg))
        emitJumpSlowCaseIfNotJSCell(reg);
}

#if USE(JSVALUE64)
ALWAYS_INLINE JIT::Jump JIT::emitJumpIfImmediateNumber(RegisterID reg)
{
    return branchTestPtr(NonZero, reg, tagTypeNumberRegister);
}
ALWAYS_INLINE JIT::Jump JIT::emitJumpIfNotImmediateNumber(RegisterID reg)
{
    return branchTestPtr(Zero, reg, tagTypeNumberRegister);
}

inline void JIT::emitLoadDouble(unsigned index, FPRegisterID value)
{
    if (m_codeBlock->isConstantRegisterIndex(index)) {
        Register& inConstantPool = m_codeBlock->constantRegister(index);
        loadDouble(&inConstantPool, value);
    } else
        loadDouble(addressFor(index), value);
}

inline void JIT::emitLoadInt32ToDouble(unsigned index, FPRegisterID value)
{
    if (m_codeBlock->isConstantRegisterIndex(index)) {
        Register& inConstantPool = m_codeBlock->constantRegister(index);
        convertInt32ToDouble(AbsoluteAddress(&inConstantPool), value);
    } else
        convertInt32ToDouble(addressFor(index), value);
}
#endif

ALWAYS_INLINE JIT::Jump JIT::emitJumpIfImmediateInteger(RegisterID reg)
{
#if USE(JSVALUE64)
    return branchPtr(AboveOrEqual, reg, tagTypeNumberRegister);
#else
    return branchTest32(NonZero, reg, Imm32(JSImmediate::TagTypeNumber));
#endif
}

ALWAYS_INLINE JIT::Jump JIT::emitJumpIfNotImmediateInteger(RegisterID reg)
{
#if USE(JSVALUE64)
    return branchPtr(Below, reg, tagTypeNumberRegister);
#else
    return branchTest32(Zero, reg, Imm32(JSImmediate::TagTypeNumber));
#endif
}

ALWAYS_INLINE JIT::Jump JIT::emitJumpIfNotImmediateIntegers(RegisterID reg1, RegisterID reg2, RegisterID scratch)
{
    move(reg1, scratch);
    andPtr(reg2, scratch);
    return emitJumpIfNotImmediateInteger(scratch);
}

ALWAYS_INLINE void JIT::emitJumpSlowCaseIfNotImmediateInteger(RegisterID reg)
{
    addSlowCase(emitJumpIfNotImmediateInteger(reg));
}

ALWAYS_INLINE void JIT::emitJumpSlowCaseIfNotImmediateIntegers(RegisterID reg1, RegisterID reg2, RegisterID scratch)
{
    addSlowCase(emitJumpIfNotImmediateIntegers(reg1, reg2, scratch));
}

ALWAYS_INLINE void JIT::emitJumpSlowCaseIfNotImmediateNumber(RegisterID reg)
{
    addSlowCase(emitJumpIfNotImmediateNumber(reg));
}

#if !USE(JSVALUE64)
ALWAYS_INLINE void JIT::emitFastArithDeTagImmediate(RegisterID reg)
{
    subPtr(Imm32(JSImmediate::TagTypeNumber), reg);
}

ALWAYS_INLINE JIT::Jump JIT::emitFastArithDeTagImmediateJumpIfZero(RegisterID reg)
{
    return branchSubPtr(Zero, Imm32(JSImmediate::TagTypeNumber), reg);
}
#endif

ALWAYS_INLINE void JIT::emitFastArithReTagImmediate(RegisterID src, RegisterID dest)
{
#if USE(JSVALUE64)
    emitFastArithIntToImmNoCheck(src, dest);
#else
    if (src != dest)
        move(src, dest);
    addPtr(Imm32(JSImmediate::TagTypeNumber), dest);
#endif
}

ALWAYS_INLINE void JIT::emitFastArithImmToInt(RegisterID reg)
{
#if USE(JSVALUE64)
    UNUSED_PARAM(reg);
#else
    rshift32(Imm32(JSImmediate::IntegerPayloadShift), reg);
#endif
}

// operand is int32_t, must have been zero-extended if register is 64-bit.
ALWAYS_INLINE void JIT::emitFastArithIntToImmNoCheck(RegisterID src, RegisterID dest)
{
#if USE(JSVALUE64)
    if (src != dest)
        move(src, dest);
    orPtr(tagTypeNumberRegister, dest);
#else
    signExtend32ToPtr(src, dest);
    addPtr(dest, dest);
    emitFastArithReTagImmediate(dest, dest);
#endif
}

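// Converts an integer 0/1 in 'reg' into an immediate boolean by shifting the
// payload into place and or-ing in the boolean tag bits.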
ALWAYS_INLINE void JIT::emitTagAsBoolImmediate(RegisterID reg)
{
    lshift32(Imm32(JSImmediate::ExtendedPayloadShift), reg);
    or32(Imm32(static_cast<int32_t>(JSImmediate::FullTagTypeBool)), reg);
}

/* Deprecated: Please use JITStubCall instead. */

// get arg puts an arg from the SF register array onto the stack, as an arg to a context threaded function.
ALWAYS_INLINE void JIT::emitPutJITStubArgFromVirtualRegister(unsigned src, unsigned argumentNumber, RegisterID scratch)
{
    unsigned argumentStackOffset = (argumentNumber * (sizeof(JSValue) / sizeof(void*))) + JITSTACKFRAME_ARGS_INDEX;
    if (m_codeBlock->isConstantRegisterIndex(src)) {
        JSValue value = m_codeBlock->getConstant(src);
        poke(ImmPtr(JSValue::encode(value)), argumentStackOffset);
    } else {
        loadPtr(Address(callFrameRegister, src * sizeof(Register)), scratch);
        poke(scratch, argumentStackOffset);
    }

    killLastResultRegister();
}

#endif // USE(JSVALUE32_64)

} // namespace JSC

#endif // ENABLE(JIT)

#endif
