/*
 * Copyright (C) 2009 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "JIT.h"

#if ENABLE(JIT)

#include "JITInlineMethods.h"
#include "JITStubCall.h"
#include "JSArray.h"
#include "JSCell.h"
#include "JSFunction.h"
#include "JSPropertyNameIterator.h"
#include "LinkBuffer.h"

namespace JSC {

#if USE(JSVALUE32_64)

void JIT::privateCompileCTIMachineTrampolines(RefPtr<ExecutablePool>* executablePool, JSGlobalData* globalData, CodePtr* ctiStringLengthTrampoline, CodePtr* ctiVirtualCallLink, CodePtr* ctiVirtualCall, CodePtr* ctiNativeCallThunk)
{
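    // This function emits the common trampolines into one contiguous allocation:
    // a fast path for String.length property access, the virtual call link and
    // virtual call stubs for op_call / op_construct, and the native call thunk.
    // A pointer to each trampoline is handed back through the out-parameters above.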
#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
    // (1) This function provides fast property access for string length
    Label stringLengthBegin = align();

    // regT0 holds payload, regT1 holds tag

    Jump string_failureCases1 = branch32(NotEqual, regT1, Imm32(JSValue::CellTag));
    Jump string_failureCases2 = branchPtr(NotEqual, Address(regT0), ImmPtr(m_globalData->jsStringVPtr));

    // Checks out okay! - get the length from the UString.
    load32(Address(regT0, OBJECT_OFFSETOF(JSString, m_stringLength)), regT2);

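    // A length above INT_MAX cannot be represented as an Int32-tagged immediate,
    // so bail to the slow case rather than returning a truncated value.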
    Jump string_failureCases3 = branch32(Above, regT2, Imm32(INT_MAX));
    move(regT2, regT0);
    move(Imm32(JSValue::Int32Tag), regT1);

    ret();
#endif

    // (2) Trampolines for the slow cases of op_call / op_call_eval / op_construct.

#if ENABLE(JIT_OPTIMIZE_CALL)
    // VirtualCallLink Trampoline
    // regT0 holds callee, regT1 holds argCount. regT2 will hold the FunctionExecutable.
    Label virtualCallLinkBegin = align();
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);

    Jump isNativeFunc2 = branch32(Equal, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParameters)), Imm32(0));

    Jump hasCodeBlock2 = branch32(GreaterThan, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParameters)), Imm32(0));
    preserveReturnAddressAfterCall(regT3);
    restoreArgumentReference();
    Call callJSFunction2 = call();
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
    emitGetJITStubArg(2, regT1); // argCount
    restoreReturnAddressBeforeReturn(regT3);
    hasCodeBlock2.link(this);

    // Check argCount matches callee arity.
    Jump arityCheckOkay2 = branch32(Equal, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParameters)), regT1);
    preserveReturnAddressAfterCall(regT3);
    emitPutJITStubArg(regT3, 1); // return address
    restoreArgumentReference();
    Call callArityCheck2 = call();
    move(regT1, callFrameRegister);
    emitGetJITStubArg(2, regT1); // argCount
    restoreReturnAddressBeforeReturn(regT3);
    arityCheckOkay2.link(this);

    isNativeFunc2.link(this);

    compileOpCallInitializeCallFrame();

    preserveReturnAddressAfterCall(regT3);
    emitPutJITStubArg(regT3, 1); // return address
    restoreArgumentReference();
    Call callLazyLinkCall = call();
    restoreReturnAddressBeforeReturn(regT3);
    jump(regT0);
#endif // ENABLE(JIT_OPTIMIZE_CALL)

    // VirtualCall Trampoline
    // regT0 holds callee, regT1 holds argCount. regT2 will hold the FunctionExecutable.
    Label virtualCallBegin = align();
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);

    Jump isNativeFunc3 = branch32(Equal, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParameters)), Imm32(0));

    Jump hasCodeBlock3 = branch32(GreaterThan, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParameters)), Imm32(0));
    preserveReturnAddressAfterCall(regT3);
    restoreArgumentReference();
    Call callJSFunction1 = call();
    emitGetJITStubArg(2, regT1); // argCount
    restoreReturnAddressBeforeReturn(regT3);
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
    hasCodeBlock3.link(this);

    // Check argCount matches callee arity.
    Jump arityCheckOkay3 = branch32(Equal, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParameters)), regT1);
    preserveReturnAddressAfterCall(regT3);
    emitPutJITStubArg(regT3, 1); // return address
    restoreArgumentReference();
    Call callArityCheck1 = call();
    move(regT1, callFrameRegister);
    emitGetJITStubArg(2, regT1); // argCount
    restoreReturnAddressBeforeReturn(regT3);
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
    arityCheckOkay3.link(this);

    isNativeFunc3.link(this);

    compileOpCallInitializeCallFrame();
    loadPtr(Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_jitCode)), regT0);
    jump(regT0);

#if CPU(X86) || CPU(ARM_TRADITIONAL)
    Label nativeCallThunk = align();
    preserveReturnAddressAfterCall(regT0);
    emitPutToCallFrameHeader(regT0, RegisterFile::ReturnPC); // Push return address

    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT1);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT1);
    emitPutToCallFrameHeader(regT1, RegisterFile::ScopeChain);

#if CPU(X86)
    emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT0);

    /* We have two structs that we use to describe the stackframe we set up for our
     * call to native code. NativeCallFrameStructure describes how we set up the stack
     * in advance of the call. NativeFunctionCalleeSignature describes the callframe
     * as the native code expects it. We do this because we use the fastcall calling
     * convention, which results in the callee popping its arguments off the stack but
     * not the rest of the callframe, so we need a reliable way to increment the
     * stack pointer by the right amount after the call.
     */

#if COMPILER(MSVC) || OS(LINUX)
#if COMPILER(MSVC)
#pragma pack(push)
#pragma pack(4)
#endif // COMPILER(MSVC)
    struct NativeCallFrameStructure {
        // CallFrame* callFrame; // passed in EDX
        JSObject* callee;
        JSValue thisValue;
        ArgList* argPointer;
        ArgList args;
        JSValue result;
    };
    struct NativeFunctionCalleeSignature {
        JSObject* callee;
        JSValue thisValue;
        ArgList* argPointer;
    };
#if COMPILER(MSVC)
#pragma pack(pop)
#endif // COMPILER(MSVC)
#else
    struct NativeCallFrameStructure {
        // CallFrame* callFrame; // passed in ECX
        // JSObject* callee; // passed in EDX
        JSValue thisValue;
        ArgList* argPointer;
        ArgList args;
    };
    struct NativeFunctionCalleeSignature {
        JSValue thisValue;
        ArgList* argPointer;
    };
#endif

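    // Round the frame size up to a 16-byte multiple so the native stack stays
    // 16-byte aligned across the call (required by some 32-bit ABIs, e.g. Darwin).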
    const int NativeCallFrameSize = (sizeof(NativeCallFrameStructure) + 15) & ~15;
    // Allocate system stack frame
    subPtr(Imm32(NativeCallFrameSize), stackPointerRegister);

    // Set up arguments
    subPtr(Imm32(1), regT0); // Don't include 'this' in argcount

    // push argcount
    storePtr(regT0, Address(stackPointerRegister, OBJECT_OFFSETOF(NativeCallFrameStructure, args) + OBJECT_OFFSETOF(ArgList, m_argCount)));

    // Calculate the start of the callframe header, and store in regT1
    addPtr(Imm32(-RegisterFile::CallFrameHeaderSize * (int)sizeof(Register)), callFrameRegister, regT1);

    // Calculate start of arguments as callframe header - sizeof(Register) * argcount (regT0)
    mul32(Imm32(sizeof(Register)), regT0, regT0);
    subPtr(regT0, regT1);
    storePtr(regT1, Address(stackPointerRegister, OBJECT_OFFSETOF(NativeCallFrameStructure, args) + OBJECT_OFFSETOF(ArgList, m_args)));

    // ArgList is passed by reference so is stackPointerRegister + 4 * sizeof(Register)
    addPtr(Imm32(OBJECT_OFFSETOF(NativeCallFrameStructure, args)), stackPointerRegister, regT0);
    storePtr(regT0, Address(stackPointerRegister, OBJECT_OFFSETOF(NativeCallFrameStructure, argPointer)));

    // regT1 currently points to the first argument, regT1 - sizeof(Register) points to 'this'
    loadPtr(Address(regT1, -(int)sizeof(Register) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT2);
    loadPtr(Address(regT1, -(int)sizeof(Register) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT3);
    storePtr(regT2, Address(stackPointerRegister, OBJECT_OFFSETOF(NativeCallFrameStructure, thisValue) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)));
    storePtr(regT3, Address(stackPointerRegister, OBJECT_OFFSETOF(NativeCallFrameStructure, thisValue) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)));

#if COMPILER(MSVC) || OS(LINUX)
    // The native function returns its JSValue result through a pointer; pass the address of the result slot in ecx.
    addPtr(Imm32(OBJECT_OFFSETOF(NativeCallFrameStructure, result)), stackPointerRegister, X86Registers::ecx);

    // Plant callee
    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, X86Registers::eax);
    storePtr(X86Registers::eax, Address(stackPointerRegister, OBJECT_OFFSETOF(NativeCallFrameStructure, callee)));

    // Plant callframe
    move(callFrameRegister, X86Registers::edx);

    call(Address(X86Registers::eax, OBJECT_OFFSETOF(JSFunction, m_data)));

    // JSValue is a non-POD type, so eax points to it
    emitLoad(0, regT1, regT0, X86Registers::eax);
#else
    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, X86Registers::edx); // callee
    move(callFrameRegister, X86Registers::ecx); // callFrame
    call(Address(X86Registers::edx, OBJECT_OFFSETOF(JSFunction, m_data)));
#endif

    // We've put a few temporaries on the stack in addition to the actual arguments
    // so pull them off now
    addPtr(Imm32(NativeCallFrameSize - sizeof(NativeFunctionCalleeSignature)), stackPointerRegister);

#elif CPU(ARM_TRADITIONAL)
    emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT0);

    // Allocate stack space for our arglist
    COMPILE_ASSERT((sizeof(ArgList) & 0x7) == 0 && sizeof(JSValue) == 8 && sizeof(Register) == 8, ArgList_should_be_8byte_aligned);
    subPtr(Imm32(sizeof(ArgList)), stackPointerRegister);

    // Set up arguments
    subPtr(Imm32(1), regT0); // Don't include 'this' in argcount

    // Push argcount
    storePtr(regT0, Address(stackPointerRegister, OBJECT_OFFSETOF(ArgList, m_argCount)));

    // Calculate the start of the callframe header, and store in regT1
    move(callFrameRegister, regT1);
    sub32(Imm32(RegisterFile::CallFrameHeaderSize * (int32_t)sizeof(Register)), regT1);

    // Calculate start of arguments as callframe header - sizeof(Register) * argcount (regT1)
    mul32(Imm32(sizeof(Register)), regT0, regT0);
    subPtr(regT0, regT1);

    // push pointer to arguments
    storePtr(regT1, Address(stackPointerRegister, OBJECT_OFFSETOF(ArgList, m_args)));

    // Argument passing method:
    // r0 - points to return value
    // r1 - callFrame
    // r2 - callee
    // stack: this(JSValue) and a pointer to ArgList

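    // Stack layout assembled below, relative to the final stack pointer:
    //   [sp + 0..7]   'this' JSValue (payload, tag) - passed on the stack
    //   [sp + 8]      pointer to the ArgList filled in above
    //   [sp + 12]     padding
    //   [sp + 16..23] 8-byte JSValue return slot, which r0 points at
    //   [sp + 24..]   the ArgList itself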
    move(stackPointerRegister, regT3);
    subPtr(Imm32(8), stackPointerRegister);
    move(stackPointerRegister, regT0);
    subPtr(Imm32(8 + 4 + 4 /* padding */), stackPointerRegister);

    // Setup arg4:
    storePtr(regT3, Address(stackPointerRegister, 8));

    // Setup arg3
    // regT1 currently points to the first argument, regT1 - sizeof(Register) points to 'this'
    load32(Address(regT1, -(int32_t)sizeof(void*) * 2), regT3);
    storePtr(regT3, Address(stackPointerRegister, 0));
    load32(Address(regT1, -(int32_t)sizeof(void*)), regT3);
    storePtr(regT3, Address(stackPointerRegister, 4));

    // Setup arg2:
    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, regT2);

    // Setup arg1:
    move(callFrameRegister, regT1);

    call(Address(regT2, OBJECT_OFFSETOF(JSFunction, m_data)));

    // Load return value
    load32(Address(stackPointerRegister, 16), regT0);
    load32(Address(stackPointerRegister, 20), regT1);

    addPtr(Imm32(sizeof(ArgList) + 16 + 8), stackPointerRegister);
#endif

    // Check for an exception
    move(ImmPtr(&globalData->exception), regT2);
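    // globalData->exception is itself a JSValue slot; an EmptyValueTag tag means
    // no exception is pending, so any other tag sends us to the handler below.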
    Jump sawException = branch32(NotEqual, tagFor(0, regT2), Imm32(JSValue::EmptyValueTag));

    // Grab the return address.
    emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT3);

    // Restore our caller's call frame pointer ("r").
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);

    // Return.
    restoreReturnAddressBeforeReturn(regT3);
    ret();

    // Handle an exception
    sawException.link(this);
    // Grab the return address.
    emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT1);
    move(ImmPtr(&globalData->exceptionLocation), regT2);
    storePtr(regT1, regT2);
    move(ImmPtr(FunctionPtr(ctiVMThrowTrampoline).value()), regT2);
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);
    poke(callFrameRegister, OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof(void*));
    restoreReturnAddressBeforeReturn(regT2);
    ret();

#elif ENABLE(JIT_OPTIMIZE_NATIVE_CALL)
#error "JIT_OPTIMIZE_NATIVE_CALL not yet supported on this platform."
#else
    breakpoint();
#endif

#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
    Call string_failureCases1Call = makeTailRecursiveCall(string_failureCases1);
    Call string_failureCases2Call = makeTailRecursiveCall(string_failureCases2);
    Call string_failureCases3Call = makeTailRecursiveCall(string_failureCases3);
#endif

    // All trampolines constructed! Copy the code, link up calls, and set the pointers on the Machine object.
    LinkBuffer patchBuffer(this, m_globalData->executableAllocator.poolForSize(m_assembler.size()));

#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
    patchBuffer.link(string_failureCases1Call, FunctionPtr(cti_op_get_by_id_string_fail));
    patchBuffer.link(string_failureCases2Call, FunctionPtr(cti_op_get_by_id_string_fail));
    patchBuffer.link(string_failureCases3Call, FunctionPtr(cti_op_get_by_id_string_fail));
#endif
    patchBuffer.link(callArityCheck1, FunctionPtr(cti_op_call_arityCheck));
    patchBuffer.link(callJSFunction1, FunctionPtr(cti_op_call_JSFunction));
#if ENABLE(JIT_OPTIMIZE_CALL)
    patchBuffer.link(callArityCheck2, FunctionPtr(cti_op_call_arityCheck));
    patchBuffer.link(callJSFunction2, FunctionPtr(cti_op_call_JSFunction));
    patchBuffer.link(callLazyLinkCall, FunctionPtr(cti_vm_lazyLinkCall));
#endif

    CodeRef finalCode = patchBuffer.finalizeCode();
    *executablePool = finalCode.m_executablePool;

    *ctiVirtualCall = trampolineAt(finalCode, virtualCallBegin);
    *ctiNativeCallThunk = trampolineAt(finalCode, nativeCallThunk);
#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
    *ctiStringLengthTrampoline = trampolineAt(finalCode, stringLengthBegin);
#else
    UNUSED_PARAM(ctiStringLengthTrampoline);
#endif
#if ENABLE(JIT_OPTIMIZE_CALL)
    *ctiVirtualCallLink = trampolineAt(finalCode, virtualCallLinkBegin);
#else
    UNUSED_PARAM(ctiVirtualCallLink);
#endif
}

void JIT::emit_op_mov(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src = currentInstruction[2].u.operand;

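    // A constant source can be stored directly. Otherwise copy the value's tag
    // and payload through regT1/regT0, and record the mapping so the next opcode
    // can reuse the registers without reloading dst.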
    if (m_codeBlock->isConstantRegisterIndex(src))
        emitStore(dst, getConstantOperand(src));
    else {
        emitLoad(src, regT1, regT0);
        emitStore(dst, regT1, regT0);
        map(m_bytecodeIndex + OPCODE_LENGTH(op_mov), dst, regT1, regT0);
    }
}

void JIT::emit_op_end(Instruction* currentInstruction)
{
    if (m_codeBlock->needsFullScopeChain())
        JITStubCall(this, cti_op_end).call();
    ASSERT(returnValueRegister != callFrameRegister);
    emitLoad(currentInstruction[1].u.operand, regT1, regT0);
    restoreReturnAddressBeforeReturn(Address(callFrameRegister, RegisterFile::ReturnPC * static_cast<int>(sizeof(Register))));
    ret();
}

void JIT::emit_op_jmp(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[1].u.operand;
    addJump(jump(), target);
}

void JIT::emit_op_loop_if_lesseq(Instruction* currentInstruction)
{
    unsigned op1 = currentInstruction[1].u.operand;
    unsigned op2 = currentInstruction[2].u.operand;
    unsigned target = currentInstruction[3].u.operand;

    emitTimeoutCheck();

    if (isOperandConstantImmediateInt(op1)) {
        emitLoad(op2, regT1, regT0);
        addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
        addJump(branch32(GreaterThanOrEqual, regT0, Imm32(getConstantOperand(op1).asInt32())), target);
        return;
    }

    if (isOperandConstantImmediateInt(op2)) {
        emitLoad(op1, regT1, regT0);
        addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
        addJump(branch32(LessThanOrEqual, regT0, Imm32(getConstantOperand(op2).asInt32())), target);
        return;
    }

    emitLoad2(op1, regT1, regT0, op2, regT3, regT2);
    addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
    addSlowCase(branch32(NotEqual, regT3, Imm32(JSValue::Int32Tag)));
    addJump(branch32(LessThanOrEqual, regT0, regT2), target);
}

void JIT::emitSlow_op_loop_if_lesseq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned op1 = currentInstruction[1].u.operand;
    unsigned op2 = currentInstruction[2].u.operand;
    unsigned target = currentInstruction[3].u.operand;

    if (!isOperandConstantImmediateInt(op1) && !isOperandConstantImmediateInt(op2))
        linkSlowCase(iter); // int32 check
    linkSlowCase(iter); // int32 check

    JITStubCall stubCall(this, cti_op_loop_if_lesseq);
    stubCall.addArgument(op1);
    stubCall.addArgument(op2);
    stubCall.call();
    emitJumpSlowToHot(branchTest32(NonZero, regT0), target);
}

void JIT::emit_op_new_object(Instruction* currentInstruction)
{
    JITStubCall(this, cti_op_new_object).call(currentInstruction[1].u.operand);
}

void JIT::emit_op_instanceof(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;
    unsigned baseVal = currentInstruction[3].u.operand;
    unsigned proto = currentInstruction[4].u.operand;

    // Load the operands into registers.
    // We use regT0 for baseVal since we will be done with it first, and we can then reuse it for the result.
    emitLoadPayload(value, regT2);
    emitLoadPayload(baseVal, regT0);
    emitLoadPayload(proto, regT1);

    // Check that value, baseVal, and proto are cells.
    emitJumpSlowCaseIfNotJSCell(value);
    emitJumpSlowCaseIfNotJSCell(baseVal);
    emitJumpSlowCaseIfNotJSCell(proto);

    // Check that baseVal 'ImplementsDefaultHasInstance'.
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT0);
    addSlowCase(branchTest32(Zero, Address(regT0, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(ImplementsDefaultHasInstance)));

    // Optimistically load the result true, and start looping.
    // Initially, regT1 still contains proto and regT2 still contains value.
    // As we loop regT2 will be updated with its prototype, recursively walking the prototype chain.
    move(Imm32(JSValue::TrueTag), regT0);
    Label loop(this);

    // Load the prototype of the cell in regT2. If this is equal to regT1 - WIN!
    // Otherwise, check if we've hit null - if we have then drop out of the loop, if not go again.
    loadPtr(Address(regT2, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
    load32(Address(regT2, OBJECT_OFFSETOF(Structure, m_prototype) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT2);
    Jump isInstance = branchPtr(Equal, regT2, regT1);
    branchTest32(NonZero, regT2).linkTo(loop, this);

    // We get here either by dropping out of the loop, or if value was not an Object. Result is false.
    move(Imm32(JSValue::FalseTag), regT0);

    // isInstance jumps right down to here, to skip setting the result to false (it has already set true).
    isInstance.link(this);
    emitStoreBool(dst, regT0);
}

void JIT::emitSlow_op_instanceof(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;
    unsigned baseVal = currentInstruction[3].u.operand;
    unsigned proto = currentInstruction[4].u.operand;

    linkSlowCaseIfNotJSCell(iter, value);
    linkSlowCaseIfNotJSCell(iter, baseVal);
    linkSlowCaseIfNotJSCell(iter, proto);
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_instanceof);
    stubCall.addArgument(value);
    stubCall.addArgument(baseVal);
    stubCall.addArgument(proto);
    stubCall.call(dst);
}

void JIT::emit_op_new_func(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_new_func);
    stubCall.addArgument(ImmPtr(m_codeBlock->functionDecl(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_get_global_var(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    JSGlobalObject* globalObject = static_cast<JSGlobalObject*>(currentInstruction[2].u.jsCell);
    ASSERT(globalObject->isGlobalObject());
    int index = currentInstruction[3].u.operand;

    loadPtr(&globalObject->d()->registers, regT2);

    emitLoad(index, regT1, regT0, regT2);
    emitStore(dst, regT1, regT0);
    map(m_bytecodeIndex + OPCODE_LENGTH(op_get_global_var), dst, regT1, regT0);
}

void JIT::emit_op_put_global_var(Instruction* currentInstruction)
{
    JSGlobalObject* globalObject = static_cast<JSGlobalObject*>(currentInstruction[1].u.jsCell);
    ASSERT(globalObject->isGlobalObject());
    int index = currentInstruction[2].u.operand;
    int value = currentInstruction[3].u.operand;

    emitLoad(value, regT1, regT0);

    loadPtr(&globalObject->d()->registers, regT2);
    emitStore(index, regT1, regT0, regT2);
    map(m_bytecodeIndex + OPCODE_LENGTH(op_put_global_var), value, regT1, regT0);
}

void JIT::emit_op_get_scoped_var(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int index = currentInstruction[2].u.operand;
    int skip = currentInstruction[3].u.operand + m_codeBlock->needsFullScopeChain();

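    // Walk 'skip' links up the scope chain, then read the variable out of the
    // terminal node's JSVariableObject register array.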
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT2);
    while (skip--)
        loadPtr(Address(regT2, OBJECT_OFFSETOF(ScopeChainNode, next)), regT2);

    loadPtr(Address(regT2, OBJECT_OFFSETOF(ScopeChainNode, object)), regT2);
    loadPtr(Address(regT2, OBJECT_OFFSETOF(JSVariableObject, d)), regT2);
    loadPtr(Address(regT2, OBJECT_OFFSETOF(JSVariableObject::JSVariableObjectData, registers)), regT2);

    emitLoad(index, regT1, regT0, regT2);
    emitStore(dst, regT1, regT0);
    map(m_bytecodeIndex + OPCODE_LENGTH(op_get_scoped_var), dst, regT1, regT0);
}

void JIT::emit_op_put_scoped_var(Instruction* currentInstruction)
{
    int index = currentInstruction[1].u.operand;
    int skip = currentInstruction[2].u.operand + m_codeBlock->needsFullScopeChain();
    int value = currentInstruction[3].u.operand;

    emitLoad(value, regT1, regT0);

    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT2);
    while (skip--)
        loadPtr(Address(regT2, OBJECT_OFFSETOF(ScopeChainNode, next)), regT2);

    loadPtr(Address(regT2, OBJECT_OFFSETOF(ScopeChainNode, object)), regT2);
    loadPtr(Address(regT2, OBJECT_OFFSETOF(JSVariableObject, d)), regT2);
    loadPtr(Address(regT2, OBJECT_OFFSETOF(JSVariableObject::JSVariableObjectData, registers)), regT2);

    emitStore(index, regT1, regT0, regT2);
    map(m_bytecodeIndex + OPCODE_LENGTH(op_put_scoped_var), value, regT1, regT0);
}

void JIT::emit_op_tear_off_activation(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_tear_off_activation);
    stubCall.addArgument(currentInstruction[1].u.operand);
    stubCall.call();
}

void JIT::emit_op_tear_off_arguments(Instruction*)
{
    JITStubCall(this, cti_op_tear_off_arguments).call();
}

void JIT::emit_op_new_array(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_new_array);
    stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
    stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_resolve(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_resolve);
    stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_to_primitive(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);

    Jump isImm = branch32(NotEqual, regT1, Imm32(JSValue::CellTag));
    addSlowCase(branchPtr(NotEqual, Address(regT0), ImmPtr(m_globalData->jsStringVPtr)));
    isImm.link(this);

    if (dst != src)
        emitStore(dst, regT1, regT0);
    map(m_bytecodeIndex + OPCODE_LENGTH(op_to_primitive), dst, regT1, regT0);
}

void JIT::emitSlow_op_to_primitive(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;

    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_to_primitive);
    stubCall.addArgument(regT1, regT0);
    stubCall.call(dst);
}

void JIT::emit_op_strcat(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_strcat);
    stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
    stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_resolve_base(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_resolve_base);
    stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_resolve_skip(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_resolve_skip);
    stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.addArgument(Imm32(currentInstruction[3].u.operand + m_codeBlock->needsFullScopeChain()));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_resolve_global(Instruction* currentInstruction)
{
    // FIXME: Optimize to use patching instead of so many memory accesses.

    unsigned dst = currentInstruction[1].u.operand;
    void* globalObject = currentInstruction[2].u.jsCell;

    unsigned currentIndex = m_globalResolveInfoIndex++;
    void* structureAddress = &(m_codeBlock->globalResolveInfo(currentIndex).structure);
    void* offsetAddr = &(m_codeBlock->globalResolveInfo(currentIndex).offset);

    // Verify structure.
    move(ImmPtr(globalObject), regT0);
    loadPtr(structureAddress, regT1);
    addSlowCase(branchPtr(NotEqual, regT1, Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure))));

    // Load property.
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSGlobalObject, m_externalStorage)), regT2);
    load32(offsetAddr, regT3);
    load32(BaseIndex(regT2, regT3, TimesEight), regT0); // payload
    load32(BaseIndex(regT2, regT3, TimesEight, 4), regT1); // tag
    emitStore(dst, regT1, regT0);
    map(m_bytecodeIndex + OPCODE_LENGTH(op_resolve_global), dst, regT1, regT0);
}

void JIT::emitSlow_op_resolve_global(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    void* globalObject = currentInstruction[2].u.jsCell;
    Identifier* ident = &m_codeBlock->identifier(currentInstruction[3].u.operand);

    unsigned currentIndex = m_globalResolveInfoIndex++;

    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_resolve_global);
    stubCall.addArgument(ImmPtr(globalObject));
    stubCall.addArgument(ImmPtr(ident));
    stubCall.addArgument(Imm32(currentIndex));
    stubCall.call(dst);
}

void JIT::emit_op_not(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src = currentInstruction[2].u.operand;

    emitLoadTag(src, regT0);

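    // TrueTag and FalseTag differ only in their low bit, so xor'ing with FalseTag
    // maps false -> 0 and true -> 1; any other tag leaves high bits set, which the
    // branchTest against ~1 catches as a slow case. A second xor with TrueTag then
    // produces the tag of the negated boolean.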
    xor32(Imm32(JSValue::FalseTag), regT0);
    addSlowCase(branchTest32(NonZero, regT0, Imm32(~1)));
    xor32(Imm32(JSValue::TrueTag), regT0);

    emitStoreBool(dst, regT0, (dst == src));
}

void JIT::emitSlow_op_not(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src = currentInstruction[2].u.operand;

    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_not);
    stubCall.addArgument(src);
    stubCall.call(dst);
}

void JIT::emit_op_jfalse(Instruction* currentInstruction)
{
    unsigned cond = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitLoad(cond, regT1, regT0);

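    // Fast paths: boolean tags branch directly; an Int32 is false only when its
    // payload is zero. Doubles fall through to a compare against +0.0, where an
    // unordered result (NaN) also counts as false.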
    Jump isTrue = branch32(Equal, regT1, Imm32(JSValue::TrueTag));
    addJump(branch32(Equal, regT1, Imm32(JSValue::FalseTag)), target);

    Jump isNotInteger = branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag));
    Jump isTrue2 = branch32(NotEqual, regT0, Imm32(0));
    addJump(jump(), target);

    if (supportsFloatingPoint()) {
        isNotInteger.link(this);

        addSlowCase(branch32(Above, regT1, Imm32(JSValue::LowestTag)));

        zeroDouble(fpRegT0);
        emitLoadDouble(cond, fpRegT1);
        addJump(branchDouble(DoubleEqualOrUnordered, fpRegT0, fpRegT1), target);
    } else
        addSlowCase(isNotInteger);

    isTrue.link(this);
    isTrue2.link(this);
}

void JIT::emitSlow_op_jfalse(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned cond = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_jtrue);
    stubCall.addArgument(cond);
    stubCall.call();
    emitJumpSlowToHot(branchTest32(Zero, regT0), target); // Inverted.
}

void JIT::emit_op_jtrue(Instruction* currentInstruction)
{
    unsigned cond = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitLoad(cond, regT1, regT0);

    Jump isFalse = branch32(Equal, regT1, Imm32(JSValue::FalseTag));
    addJump(branch32(Equal, regT1, Imm32(JSValue::TrueTag)), target);

    Jump isNotInteger = branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag));
    Jump isFalse2 = branch32(Equal, regT0, Imm32(0));
    addJump(jump(), target);

    if (supportsFloatingPoint()) {
        isNotInteger.link(this);

        addSlowCase(branch32(Above, regT1, Imm32(JSValue::LowestTag)));

        zeroDouble(fpRegT0);
        emitLoadDouble(cond, fpRegT1);
        addJump(branchDouble(DoubleNotEqual, fpRegT0, fpRegT1), target);
    } else
        addSlowCase(isNotInteger);

    isFalse.link(this);
    isFalse2.link(this);
}

void JIT::emitSlow_op_jtrue(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned cond = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_jtrue);
    stubCall.addArgument(cond);
    stubCall.call();
    emitJumpSlowToHot(branchTest32(NonZero, regT0), target);
}

void JIT::emit_op_jeq_null(Instruction* currentInstruction)
{
    unsigned src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);

    Jump isImmediate = branch32(NotEqual, regT1, Imm32(JSValue::CellTag));

    // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
    addJump(branchTest32(NonZero, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(MasqueradesAsUndefined)), target);

    Jump wasNotImmediate = jump();

    // Now handle the immediate cases - undefined & null
    isImmediate.link(this);

    set32(Equal, regT1, Imm32(JSValue::NullTag), regT2);
    set32(Equal, regT1, Imm32(JSValue::UndefinedTag), regT1);
    or32(regT2, regT1);

    addJump(branchTest32(NonZero, regT1), target);

    wasNotImmediate.link(this);
}

void JIT::emit_op_jneq_null(Instruction* currentInstruction)
{
    unsigned src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);

    Jump isImmediate = branch32(NotEqual, regT1, Imm32(JSValue::CellTag));

    // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
    addJump(branchTest32(Zero, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(MasqueradesAsUndefined)), target);

    Jump wasNotImmediate = jump();

    // Now handle the immediate cases - undefined & null
    isImmediate.link(this);

    set32(Equal, regT1, Imm32(JSValue::NullTag), regT2);
    set32(Equal, regT1, Imm32(JSValue::UndefinedTag), regT1);
    or32(regT2, regT1);

    addJump(branchTest32(Zero, regT1), target);

    wasNotImmediate.link(this);
}

void JIT::emit_op_jneq_ptr(Instruction* currentInstruction)
{
    unsigned src = currentInstruction[1].u.operand;
    JSCell* ptr = currentInstruction[2].u.jsCell;
    unsigned target = currentInstruction[3].u.operand;

    emitLoad(src, regT1, regT0);
    addJump(branch32(NotEqual, regT1, Imm32(JSValue::CellTag)), target);
    addJump(branchPtr(NotEqual, regT0, ImmPtr(ptr)), target);
}

void JIT::emit_op_jsr(Instruction* currentInstruction)
{
    int retAddrDst = currentInstruction[1].u.operand;
    int target = currentInstruction[2].u.operand;
    DataLabelPtr storeLocation = storePtrWithPatch(ImmPtr(0), Address(callFrameRegister, sizeof(Register) * retAddrDst));
    addJump(jump(), target);
    m_jsrSites.append(JSRInfo(storeLocation, label()));
}

void JIT::emit_op_sret(Instruction* currentInstruction)
{
    jump(Address(callFrameRegister, sizeof(Register) * currentInstruction[1].u.operand));
}

void JIT::emit_op_eq(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;
    unsigned src2 = currentInstruction[3].u.operand;

    emitLoad2(src1, regT1, regT0, src2, regT3, regT2);
    addSlowCase(branch32(NotEqual, regT1, regT3));
    addSlowCase(branch32(Equal, regT1, Imm32(JSValue::CellTag)));
    addSlowCase(branch32(Below, regT1, Imm32(JSValue::LowestTag)));

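    // With identical non-cell, non-double tags, equality is decided by the
    // payloads alone. set8 yields 0 or 1; or'ing in FalseTag turns that into the
    // FalseTag/TrueTag encoding of the boolean result.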
    set8(Equal, regT0, regT2, regT0);
    or32(Imm32(JSValue::FalseTag), regT0);

    emitStoreBool(dst, regT0);
}

void JIT::emitSlow_op_eq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned op1 = currentInstruction[2].u.operand;
    unsigned op2 = currentInstruction[3].u.operand;

    JumpList storeResult;
    JumpList genericCase;

    genericCase.append(getSlowCase(iter)); // tags not equal

    linkSlowCase(iter); // tags equal and JSCell
    genericCase.append(branchPtr(NotEqual, Address(regT0), ImmPtr(m_globalData->jsStringVPtr)));
    genericCase.append(branchPtr(NotEqual, Address(regT2), ImmPtr(m_globalData->jsStringVPtr)));

    // String case.
    JITStubCall stubCallEqStrings(this, cti_op_eq_strings);
    stubCallEqStrings.addArgument(regT0);
    stubCallEqStrings.addArgument(regT2);
    stubCallEqStrings.call();
    storeResult.append(jump());

    // Generic case.
    genericCase.append(getSlowCase(iter)); // doubles
    genericCase.link(this);
    JITStubCall stubCallEq(this, cti_op_eq);
    stubCallEq.addArgument(op1);
    stubCallEq.addArgument(op2);
    stubCallEq.call(regT0);

    storeResult.link(this);
    or32(Imm32(JSValue::FalseTag), regT0);
    emitStoreBool(dst, regT0);
}

void JIT::emit_op_neq(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;
    unsigned src2 = currentInstruction[3].u.operand;

    emitLoad2(src1, regT1, regT0, src2, regT3, regT2);
    addSlowCase(branch32(NotEqual, regT1, regT3));
    addSlowCase(branch32(Equal, regT1, Imm32(JSValue::CellTag)));
    addSlowCase(branch32(Below, regT1, Imm32(JSValue::LowestTag)));

    set8(NotEqual, regT0, regT2, regT0);
    or32(Imm32(JSValue::FalseTag), regT0);

    emitStoreBool(dst, regT0);
}

void JIT::emitSlow_op_neq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;

    JumpList storeResult;
    JumpList genericCase;

    genericCase.append(getSlowCase(iter)); // tags not equal

    linkSlowCase(iter); // tags equal and JSCell
    genericCase.append(branchPtr(NotEqual, Address(regT0), ImmPtr(m_globalData->jsStringVPtr)));
    genericCase.append(branchPtr(NotEqual, Address(regT2), ImmPtr(m_globalData->jsStringVPtr)));

    // String case.
    JITStubCall stubCallEqStrings(this, cti_op_eq_strings);
    stubCallEqStrings.addArgument(regT0);
    stubCallEqStrings.addArgument(regT2);
    stubCallEqStrings.call(regT0);
    storeResult.append(jump());

    // Generic case.
    genericCase.append(getSlowCase(iter)); // doubles
    genericCase.link(this);
    JITStubCall stubCallEq(this, cti_op_eq);
    stubCallEq.addArgument(regT1, regT0);
    stubCallEq.addArgument(regT3, regT2);
    stubCallEq.call(regT0);

    storeResult.link(this);
    xor32(Imm32(0x1), regT0);
    or32(Imm32(JSValue::FalseTag), regT0);
    emitStoreBool(dst, regT0);
}

void JIT::compileOpStrictEq(Instruction* currentInstruction, CompileOpStrictEqType type)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;
    unsigned src2 = currentInstruction[3].u.operand;

    emitLoadTag(src1, regT0);
    emitLoadTag(src2, regT1);

    // Jump to a slow case if either operand is double, or if both operands are
    // cells and/or Int32s.
    move(regT0, regT2);
    and32(regT1, regT2);
    addSlowCase(branch32(Below, regT2, Imm32(JSValue::LowestTag)));
    addSlowCase(branch32(AboveOrEqual, regT2, Imm32(JSValue::CellTag)));
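    // For the remaining tags (true/false/null/undefined), tag equality alone
    // decides strict equality. Cells and Int32s need their payloads compared,
    // and doubles need a numeric comparison, so those pairs went to the slow case.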

    if (type == OpStrictEq)
        set8(Equal, regT0, regT1, regT0);
    else
        set8(NotEqual, regT0, regT1, regT0);

    or32(Imm32(JSValue::FalseTag), regT0);

    emitStoreBool(dst, regT0);
}

void JIT::emit_op_stricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpStrictEq);
}

void JIT::emitSlow_op_stricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;
    unsigned src2 = currentInstruction[3].u.operand;

    linkSlowCase(iter);
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_stricteq);
    stubCall.addArgument(src1);
    stubCall.addArgument(src2);
    stubCall.call(dst);
}

void JIT::emit_op_nstricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpNStrictEq);
}

void JIT::emitSlow_op_nstricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;
    unsigned src2 = currentInstruction[3].u.operand;

    linkSlowCase(iter);
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_nstricteq);
    stubCall.addArgument(src1);
    stubCall.addArgument(src2);
    stubCall.call(dst);
}

void JIT::emit_op_eq_null(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);
    Jump isImmediate = branch32(NotEqual, regT1, Imm32(JSValue::CellTag));

    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT1);
    setTest8(NonZero, Address(regT1, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(MasqueradesAsUndefined), regT1);

    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    set8(Equal, regT1, Imm32(JSValue::NullTag), regT2);
    set8(Equal, regT1, Imm32(JSValue::UndefinedTag), regT1);
    or32(regT2, regT1);

    wasNotImmediate.link(this);

    or32(Imm32(JSValue::FalseTag), regT1);

    emitStoreBool(dst, regT1);
}

void JIT::emit_op_neq_null(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);
    Jump isImmediate = branch32(NotEqual, regT1, Imm32(JSValue::CellTag));

    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT1);
    setTest8(Zero, Address(regT1, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(MasqueradesAsUndefined), regT1);

    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    set8(NotEqual, regT1, Imm32(JSValue::NullTag), regT2);
    set8(NotEqual, regT1, Imm32(JSValue::UndefinedTag), regT1);
    and32(regT2, regT1);

    wasNotImmediate.link(this);

    or32(Imm32(JSValue::FalseTag), regT1);

    emitStoreBool(dst, regT1);
}

void JIT::emit_op_resolve_with_base(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_resolve_with_base);
    stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[3].u.operand)));
    stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
    stubCall.call(currentInstruction[2].u.operand);
}

void JIT::emit_op_new_func_exp(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_new_func_exp);
    stubCall.addArgument(ImmPtr(m_codeBlock->functionExpr(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_new_regexp(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_new_regexp);
    stubCall.addArgument(ImmPtr(m_codeBlock->regexp(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_throw(Instruction* currentInstruction)
{
    unsigned exception = currentInstruction[1].u.operand;
    JITStubCall stubCall(this, cti_op_throw);
    stubCall.addArgument(exception);
    stubCall.call();

#ifndef NDEBUG
    // cti_op_throw always changes its return address,
    // so this point in the code should never be reached.
    breakpoint();
#endif
}

void JIT::emit_op_get_pnames(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int i = currentInstruction[3].u.operand;
    int size = currentInstruction[4].u.operand;
    int breakTarget = currentInstruction[5].u.operand;

    JumpList isNotObject;

    emitLoad(base, regT1, regT0);
    if (!m_codeBlock->isKnownNotImmediate(base))
        isNotObject.append(branch32(NotEqual, regT1, Imm32(JSValue::CellTag)));
    if (base != m_codeBlock->thisRegister()) {
        loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
        isNotObject.append(branch32(NotEqual, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo.m_type)), Imm32(ObjectType)));
    }

    // We could inline the case where you have a valid cache, but
    // this call doesn't seem to be hot.
    Label isObject(this);
    JITStubCall getPnamesStubCall(this, cti_op_get_pnames);
    getPnamesStubCall.addArgument(regT0);
    getPnamesStubCall.call(dst);
    load32(Address(regT0, OBJECT_OFFSETOF(JSPropertyNameIterator, m_jsStringsSize)), regT3);
    store32(Imm32(0), addressFor(i));
    store32(regT3, addressFor(size));
    Jump end = jump();

    isNotObject.link(this);
    addJump(branch32(Equal, regT1, Imm32(JSValue::NullTag)), breakTarget);
    addJump(branch32(Equal, regT1, Imm32(JSValue::UndefinedTag)), breakTarget);
    JITStubCall toObjectStubCall(this, cti_to_object);
    toObjectStubCall.addArgument(regT1, regT0);
    toObjectStubCall.call(base);
    jump().linkTo(isObject, this);

    end.link(this);
}

void JIT::emit_op_next_pname(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int i = currentInstruction[3].u.operand;
    int size = currentInstruction[4].u.operand;
    int it = currentInstruction[5].u.operand;
    int target = currentInstruction[6].u.operand;

    JumpList callHasProperty;

    Label begin(this);
    load32(addressFor(i), regT0);
    Jump end = branch32(Equal, regT0, addressFor(size));

    // Grab key @ i
    loadPtr(addressFor(it), regT1);
    loadPtr(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_jsStrings)), regT2);
    load32(BaseIndex(regT2, regT0, TimesEight), regT2);
    store32(Imm32(JSValue::CellTag), tagFor(dst));
    store32(regT2, payloadFor(dst));

    // Increment i
    add32(Imm32(1), regT0);
    store32(regT0, addressFor(i));

    // Verify that i is valid:
    loadPtr(addressFor(base), regT0);

    // Test base's structure
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
    callHasProperty.append(branchPtr(NotEqual, regT2, Address(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedStructure)))));

    // Test base's prototype chain
    loadPtr(Address(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedPrototypeChain))), regT3);
    loadPtr(Address(regT3, OBJECT_OFFSETOF(StructureChain, m_vector)), regT3);
    addJump(branchTestPtr(Zero, Address(regT3)), target);

    Label checkPrototype(this);
    callHasProperty.append(branch32(Equal, Address(regT2, OBJECT_OFFSETOF(Structure, m_prototype) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), Imm32(JSValue::NullTag)));
    loadPtr(Address(regT2, OBJECT_OFFSETOF(Structure, m_prototype) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT2);
    loadPtr(Address(regT2, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
    callHasProperty.append(branchPtr(NotEqual, regT2, Address(regT3)));
    addPtr(Imm32(sizeof(Structure*)), regT3);
    branchTestPtr(NonZero, Address(regT3)).linkTo(checkPrototype, this);

    // Continue loop.
    addJump(jump(), target);

    // Slow case: Ask the object if i is valid.
    callHasProperty.link(this);
    loadPtr(addressFor(dst), regT1);
    JITStubCall stubCall(this, cti_has_property);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call();

    // Test for valid key.
    addJump(branchTest32(NonZero, regT0), target);
    jump().linkTo(begin, this);

    // End of loop.
    end.link(this);
}

void JIT::emit_op_push_scope(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_push_scope);
    stubCall.addArgument(currentInstruction[1].u.operand);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_pop_scope(Instruction*)
{
    JITStubCall(this, cti_op_pop_scope).call();
}

void JIT::emit_op_to_jsnumber(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);

    Jump isInt32 = branch32(Equal, regT1, Imm32(JSValue::Int32Tag));
    addSlowCase(branch32(AboveOrEqual, regT1, Imm32(JSValue::EmptyValueTag)));
    isInt32.link(this);

    if (src != dst)
        emitStore(dst, regT1, regT0);
    map(m_bytecodeIndex + OPCODE_LENGTH(op_to_jsnumber), dst, regT1, regT0);
}

void JIT::emitSlow_op_to_jsnumber(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;

    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_to_jsnumber);
    stubCall.addArgument(regT1, regT0);
    stubCall.call(dst);
}

void JIT::emit_op_push_new_scope(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_push_new_scope);
    stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.addArgument(currentInstruction[3].u.operand);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_catch(Instruction* currentInstruction)
{
    unsigned exception = currentInstruction[1].u.operand;

    // This opcode only executes after a return from cti_op_throw.

    // cti_op_throw may have taken us to a call frame further up the stack; reload
    // the call frame pointer to adjust.
    peek(callFrameRegister, OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof(void*));

    // Now store the exception returned by cti_op_throw.
    emitStore(exception, regT1, regT0);
    map(m_bytecodeIndex + OPCODE_LENGTH(op_catch), exception, regT1, regT0);
#ifdef QT_BUILD_SCRIPT_LIB
    JITStubCall stubCall(this, cti_op_debug_catch);
    stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
    stubCall.call();
#endif
}

void JIT::emit_op_jmp_scopes(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_jmp_scopes);
    stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
    stubCall.call();
    addJump(jump(), currentInstruction[2].u.operand);
}

void JIT::emit_op_switch_imm(Instruction* currentInstruction)
{
    unsigned tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->immediateSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeIndex, defaultOffset, SwitchRecord::Immediate));
    jumpTable->ctiOffsets.grow(jumpTable->branchOffsets.size());

    JITStubCall stubCall(this, cti_op_switch_imm);
    stubCall.addArgument(scrutinee);
    stubCall.addArgument(Imm32(tableIndex));
    stubCall.call();
    jump(regT0);
}

void JIT::emit_op_switch_char(Instruction* currentInstruction)
{
    unsigned tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->characterSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeIndex, defaultOffset, SwitchRecord::Character));
    jumpTable->ctiOffsets.grow(jumpTable->branchOffsets.size());

    JITStubCall stubCall(this, cti_op_switch_char);
    stubCall.addArgument(scrutinee);
    stubCall.addArgument(Imm32(tableIndex));
    stubCall.call();
    jump(regT0);
}

void JIT::emit_op_switch_string(Instruction* currentInstruction)
{
    unsigned tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    StringJumpTable* jumpTable = &m_codeBlock->stringSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeIndex, defaultOffset));

    JITStubCall stubCall(this, cti_op_switch_string);
    stubCall.addArgument(scrutinee);
    stubCall.addArgument(Imm32(tableIndex));
    stubCall.call();
    jump(regT0);
}

void JIT::emit_op_new_error(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned type = currentInstruction[2].u.operand;
    unsigned message = currentInstruction[3].u.operand;

    JITStubCall stubCall(this, cti_op_new_error);
    stubCall.addArgument(Imm32(type));
    stubCall.addArgument(m_codeBlock->getConstant(message));
    stubCall.addArgument(Imm32(m_bytecodeIndex));
    stubCall.call(dst);
}

void JIT::emit_op_debug(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_debug);
    stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
    stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
    stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
    stubCall.call();
}

void JIT::emit_op_enter(Instruction*)
{
    // Even though JIT code doesn't use them, we initialize our constant
    // registers to zap stale pointers, to avoid unnecessarily prolonging
    // object lifetime and increasing GC pressure.
    for (int i = 0; i < m_codeBlock->m_numVars; ++i)
        emitStore(i, jsUndefined());
}

void JIT::emit_op_enter_with_activation(Instruction* currentInstruction)
{
    emit_op_enter(currentInstruction);

    JITStubCall(this, cti_op_push_activation).call(currentInstruction[1].u.operand);
}

void JIT::emit_op_create_arguments(Instruction*)
{
    Jump argsCreated = branch32(NotEqual, tagFor(RegisterFile::ArgumentsRegister, callFrameRegister), Imm32(JSValue::EmptyValueTag));

    // If we get here the arguments pointer is a null cell - i.e. arguments need lazy creation.
    if (m_codeBlock->m_numParameters == 1)
        JITStubCall(this, cti_op_create_arguments_no_params).call();
    else
        JITStubCall(this, cti_op_create_arguments).call();

    argsCreated.link(this);
}

void JIT::emit_op_init_arguments(Instruction*)
{
    emitStore(RegisterFile::ArgumentsRegister, JSValue(), callFrameRegister);
}

void JIT::emit_op_convert_this(Instruction* currentInstruction)
{
    unsigned thisRegister = currentInstruction[1].u.operand;

    emitLoad(thisRegister, regT1, regT0);

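    // A non-cell 'this' always needs conversion, and a cell does too when its
    // structure sets NeedsThisConversion (JSString, for example); both fall to the slow case.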
    addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::CellTag)));

    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
    addSlowCase(branchTest32(NonZero, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(NeedsThisConversion)));

    map(m_bytecodeIndex + OPCODE_LENGTH(op_convert_this), thisRegister, regT1, regT0);
}

void JIT::emitSlow_op_convert_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned thisRegister = currentInstruction[1].u.operand;

    linkSlowCase(iter);
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_convert_this);
    stubCall.addArgument(regT1, regT0);
    stubCall.call(thisRegister);
}

void JIT::emit_op_profile_will_call(Instruction* currentInstruction)
{
    peek(regT2, OBJECT_OFFSETOF(JITStackFrame, enabledProfilerReference) / sizeof(void*));
    Jump noProfiler = branchTestPtr(Zero, Address(regT2));

    JITStubCall stubCall(this, cti_op_profile_will_call);
    stubCall.addArgument(currentInstruction[1].u.operand);
    stubCall.call();
    noProfiler.link(this);
}

void JIT::emit_op_profile_did_call(Instruction* currentInstruction)
{
    peek(regT2, OBJECT_OFFSETOF(JITStackFrame, enabledProfilerReference) / sizeof(void*));
    Jump noProfiler = branchTestPtr(Zero, Address(regT2));

    JITStubCall stubCall(this, cti_op_profile_did_call);
    stubCall.addArgument(currentInstruction[1].u.operand);
    stubCall.call();
    noProfiler.link(this);
}

1498#else // USE(JSVALUE32_64)
1499
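// Mark a bytecode offset as a jump target, so that a label is planted for it
// during code generation.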
#define RECORD_JUMP_TARGET(targetOffset) \
    do { m_labels[m_bytecodeIndex + (targetOffset)].used(); } while (false)

void JIT::privateCompileCTIMachineTrampolines(RefPtr<ExecutablePool>* executablePool, JSGlobalData* globalData, CodePtr* ctiStringLengthTrampoline, CodePtr* ctiVirtualCallLink, CodePtr* ctiVirtualCall, CodePtr* ctiNativeCallThunk)
{
#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
    // (2) The second function provides fast property access for string length
    Label stringLengthBegin = align();

    // Check eax is a string
    Jump string_failureCases1 = emitJumpIfNotJSCell(regT0);
    Jump string_failureCases2 = branchPtr(NotEqual, Address(regT0), ImmPtr(m_globalData->jsStringVPtr));

    // Checks out okay! - get the length from the UString.
    load32(Address(regT0, OBJECT_OFFSETOF(JSString, m_stringLength)), regT0);

    Jump string_failureCases3 = branch32(Above, regT0, Imm32(JSImmediate::maxImmediateInt));

    // regT0 contains a 64 bit value (is positive, is zero extended) so we don't need sign extend here.
    emitFastArithIntToImmNoCheck(regT0, regT0);

    ret();
#endif

    // (3) Trampolines for the slow cases of op_call / op_call_eval / op_construct.
    COMPILE_ASSERT(sizeof(CodeType) == 4, CodeTypeEnumMustBe32Bit);

    // VirtualCallLink Trampoline
    // regT0 holds callee, regT1 holds argCount. regT2 will hold the FunctionExecutable.
    Label virtualCallLinkBegin = align();
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);

    Jump isNativeFunc2 = branch32(Equal, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParameters)), Imm32(0));

    Jump hasCodeBlock2 = branch32(GreaterThan, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParameters)), Imm32(0));
    preserveReturnAddressAfterCall(regT3);
    restoreArgumentReference();
    Call callJSFunction2 = call();
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
    emitGetJITStubArg(2, regT1); // argCount
    restoreReturnAddressBeforeReturn(regT3);
    hasCodeBlock2.link(this);

    // Check argCount matches callee arity.
    Jump arityCheckOkay2 = branch32(Equal, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParameters)), regT1);
    preserveReturnAddressAfterCall(regT3);
    emitPutJITStubArg(regT3, 1); // return address
    restoreArgumentReference();
    Call callArityCheck2 = call();
    move(regT1, callFrameRegister);
    emitGetJITStubArg(2, regT1); // argCount
    restoreReturnAddressBeforeReturn(regT3);
    arityCheckOkay2.link(this);

    isNativeFunc2.link(this);

    compileOpCallInitializeCallFrame();
    preserveReturnAddressAfterCall(regT3);
    emitPutJITStubArg(regT3, 1); // return address
    restoreArgumentReference();
    Call callLazyLinkCall = call();
    restoreReturnAddressBeforeReturn(regT3);
    jump(regT0);

    // VirtualCall Trampoline
    // regT0 holds callee, regT1 holds argCount. regT2 will hold the FunctionExecutable.
    Label virtualCallBegin = align();
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);

    Jump isNativeFunc3 = branch32(Equal, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParameters)), Imm32(0));

    Jump hasCodeBlock3 = branch32(GreaterThan, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParameters)), Imm32(0));
    preserveReturnAddressAfterCall(regT3);
    restoreArgumentReference();
    Call callJSFunction1 = call();
    emitGetJITStubArg(2, regT1); // argCount
    restoreReturnAddressBeforeReturn(regT3);
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
    hasCodeBlock3.link(this);

    // Check argCount matches callee arity.
    Jump arityCheckOkay3 = branch32(Equal, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParameters)), regT1);
    preserveReturnAddressAfterCall(regT3);
    emitPutJITStubArg(regT3, 1); // return address
    restoreArgumentReference();
    Call callArityCheck1 = call();
    move(regT1, callFrameRegister);
    emitGetJITStubArg(2, regT1); // argCount
    restoreReturnAddressBeforeReturn(regT3);
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
    arityCheckOkay3.link(this);

    isNativeFunc3.link(this);

    compileOpCallInitializeCallFrame();
    loadPtr(Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_jitCode)), regT0);
    jump(regT0);

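    // NativeCall Thunk: entry point for host (native) functions. It marshals
    // the JS call frame into the platform calling convention, calls the
    // function's host entry point, then checks for an exception before
    // returning to JS code.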
    Label nativeCallThunk = align();
    preserveReturnAddressAfterCall(regT0);
    emitPutToCallFrameHeader(regT0, RegisterFile::ReturnPC); // Push return address

    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT1);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT1);
    emitPutToCallFrameHeader(regT1, RegisterFile::ScopeChain);

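    // Each architecture block below builds the ArgList and loads the argument
    // registers according to its native calling convention, then calls through
    // the JSFunction's m_data entry point.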
#if CPU(X86_64)
    emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, X86Registers::ecx);

    // Allocate stack space for our arglist
    subPtr(Imm32(sizeof(ArgList)), stackPointerRegister);
    COMPILE_ASSERT((sizeof(ArgList) & 0xf) == 0, ArgList_should_be_16byte_aligned);

    // Set up arguments
    subPtr(Imm32(1), X86Registers::ecx); // Don't include 'this' in argcount

    // Push argcount
    storePtr(X86Registers::ecx, Address(stackPointerRegister, OBJECT_OFFSETOF(ArgList, m_argCount)));

    // Calculate the start of the callframe header, and store in edx
    addPtr(Imm32(-RegisterFile::CallFrameHeaderSize * (int32_t)sizeof(Register)), callFrameRegister, X86Registers::edx);

    // Calculate start of arguments as callframe header - sizeof(Register) * argcount (ecx)
    mul32(Imm32(sizeof(Register)), X86Registers::ecx, X86Registers::ecx);
    subPtr(X86Registers::ecx, X86Registers::edx);

    // Push pointer to arguments
    storePtr(X86Registers::edx, Address(stackPointerRegister, OBJECT_OFFSETOF(ArgList, m_args)));

    // ArgList is passed by reference, so pass stackPointerRegister as its address
    move(stackPointerRegister, X86Registers::ecx);

    // edx currently points to the first argument; edx - sizeof(Register) points to 'this'
    loadPtr(Address(X86Registers::edx, -(int32_t)sizeof(Register)), X86Registers::edx);

    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, X86Registers::esi);

    move(callFrameRegister, X86Registers::edi);

    call(Address(X86Registers::esi, OBJECT_OFFSETOF(JSFunction, m_data)));

    addPtr(Imm32(sizeof(ArgList)), stackPointerRegister);
#elif CPU(X86)
    emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT0);

    /* We have two structs that we use to describe the stack frame we set up for our
     * call to native code. NativeCallFrameStructure describes how we set up the stack
     * in advance of the call. NativeFunctionCalleeSignature describes the callframe
     * as the native code expects it. We do this because we are using the fastcall calling
     * convention, which results in the callee popping its arguments off the stack but
     * not the rest of the callframe, so we need a reliable way to ensure we increment the
     * stack pointer by the right amount after the call.
     */
#if COMPILER(MSVC) || OS(LINUX)
    struct NativeCallFrameStructure {
        // CallFrame* callFrame; // passed in EDX
        JSObject* callee;
        JSValue thisValue;
        ArgList* argPointer;
        ArgList args;
        JSValue result;
    };
    struct NativeFunctionCalleeSignature {
        JSObject* callee;
        JSValue thisValue;
        ArgList* argPointer;
    };
#else
    struct NativeCallFrameStructure {
        // CallFrame* callFrame; // passed in ECX
        // JSObject* callee; // passed in EDX
        JSValue thisValue;
        ArgList* argPointer;
        ArgList args;
    };
    struct NativeFunctionCalleeSignature {
        JSValue thisValue;
        ArgList* argPointer;
    };
#endif
    const int NativeCallFrameSize = (sizeof(NativeCallFrameStructure) + 15) & ~15;
    // Allocate system stack frame
    subPtr(Imm32(NativeCallFrameSize), stackPointerRegister);

    // Set up arguments
    subPtr(Imm32(1), regT0); // Don't include 'this' in argcount

    // Push argcount
    storePtr(regT0, Address(stackPointerRegister, OBJECT_OFFSETOF(NativeCallFrameStructure, args) + OBJECT_OFFSETOF(ArgList, m_argCount)));

    // Calculate the start of the callframe header, and store in regT1
    addPtr(Imm32(-RegisterFile::CallFrameHeaderSize * (int)sizeof(Register)), callFrameRegister, regT1);

    // Calculate start of arguments as callframe header - sizeof(Register) * argcount (regT0)
    mul32(Imm32(sizeof(Register)), regT0, regT0);
    subPtr(regT0, regT1);
    storePtr(regT1, Address(stackPointerRegister, OBJECT_OFFSETOF(NativeCallFrameStructure, args) + OBJECT_OFFSETOF(ArgList, m_args)));

    // ArgList is passed by reference, so plant the address of the args member
    addPtr(Imm32(OBJECT_OFFSETOF(NativeCallFrameStructure, args)), stackPointerRegister, regT0);
    storePtr(regT0, Address(stackPointerRegister, OBJECT_OFFSETOF(NativeCallFrameStructure, argPointer)));

    // regT1 currently points to the first argument; regT1 - sizeof(Register) points to 'this'
    loadPtr(Address(regT1, -(int)sizeof(Register)), regT1);
    storePtr(regT1, Address(stackPointerRegister, OBJECT_OFFSETOF(NativeCallFrameStructure, thisValue)));

#if COMPILER(MSVC) || OS(LINUX)
    // The result is returned indirectly; pass the address of the result slot in ecx
    addPtr(Imm32(OBJECT_OFFSETOF(NativeCallFrameStructure, result)), stackPointerRegister, X86Registers::ecx);

    // Plant callee
    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, X86Registers::eax);
    storePtr(X86Registers::eax, Address(stackPointerRegister, OBJECT_OFFSETOF(NativeCallFrameStructure, callee)));

    // Plant callframe
    move(callFrameRegister, X86Registers::edx);

    call(Address(X86Registers::eax, OBJECT_OFFSETOF(JSFunction, m_data)));

    // JSValue is a non-POD type, so eax points to the returned value
    loadPtr(Address(X86Registers::eax), X86Registers::eax);
#else
    // Plant callee
    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, X86Registers::edx);

    // Plant callframe
    move(callFrameRegister, X86Registers::ecx);
    call(Address(X86Registers::edx, OBJECT_OFFSETOF(JSFunction, m_data)));
#endif

    // We've put a few temporaries on the stack in addition to the actual arguments,
    // so pull them off now
    addPtr(Imm32(NativeCallFrameSize - sizeof(NativeFunctionCalleeSignature)), stackPointerRegister);

#elif CPU(ARM)
    emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT0);

    // Allocate stack space for our arglist
    COMPILE_ASSERT((sizeof(ArgList) & 0x7) == 0, ArgList_should_be_8byte_aligned);
    subPtr(Imm32(sizeof(ArgList)), stackPointerRegister);

    // Set up arguments
    subPtr(Imm32(1), regT0); // Don't include 'this' in argcount

    // Push argcount
    storePtr(regT0, Address(stackPointerRegister, OBJECT_OFFSETOF(ArgList, m_argCount)));

    // Calculate the start of the callframe header, and store in regT1
    move(callFrameRegister, regT1);
    sub32(Imm32(RegisterFile::CallFrameHeaderSize * (int32_t)sizeof(Register)), regT1);

    // Calculate start of arguments as callframe header - sizeof(Register) * argcount (regT0)
    mul32(Imm32(sizeof(Register)), regT0, regT0);
    subPtr(regT0, regT1);

    // Push pointer to arguments
    storePtr(regT1, Address(stackPointerRegister, OBJECT_OFFSETOF(ArgList, m_args)));

    // Set up arg3: regT1 currently points to the first argument; regT1 - sizeof(Register) points to 'this'
    loadPtr(Address(regT1, -(int32_t)sizeof(Register)), regT2);

    // Set up arg2: callee
    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, regT1);

    // Set up arg1: callframe
    move(callFrameRegister, regT0);

    // Set up arg4: pass the ArgList pointer in r3 (a plain hack)
    move(stackPointerRegister, ARMRegisters::r3);

    call(Address(regT1, OBJECT_OFFSETOF(JSFunction, m_data)));

    addPtr(Imm32(sizeof(ArgList)), stackPointerRegister);

#elif ENABLE(JIT_OPTIMIZE_NATIVE_CALL)
#error "JIT_OPTIMIZE_NATIVE_CALL not yet supported on this platform."
#else
    breakpoint();
#endif

    // Check for an exception
    loadPtr(&(globalData->exception), regT2);
    Jump exceptionHandler = branchTestPtr(NonZero, regT2);

    // Grab the return address.
    emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT1);

    // Restore our caller's "r".
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);

    // Return.
    restoreReturnAddressBeforeReturn(regT1);
    ret();

    // Handle an exception
    exceptionHandler.link(this);
    // Grab the return address.
    emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT1);
    move(ImmPtr(&globalData->exceptionLocation), regT2);
    storePtr(regT1, regT2);
    move(ImmPtr(FunctionPtr(ctiVMThrowTrampoline).value()), regT2);
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);
    poke(callFrameRegister, OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof (void*));
    restoreReturnAddressBeforeReturn(regT2);
    ret();

#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
    Call string_failureCases1Call = makeTailRecursiveCall(string_failureCases1);
    Call string_failureCases2Call = makeTailRecursiveCall(string_failureCases2);
    Call string_failureCases3Call = makeTailRecursiveCall(string_failureCases3);
#endif

    // All trampolines constructed! Copy the code, link up calls, and set the pointers on the Machine object.
    LinkBuffer patchBuffer(this, m_globalData->executableAllocator.poolForSize(m_assembler.size()));

#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
    patchBuffer.link(string_failureCases1Call, FunctionPtr(cti_op_get_by_id_string_fail));
    patchBuffer.link(string_failureCases2Call, FunctionPtr(cti_op_get_by_id_string_fail));
    patchBuffer.link(string_failureCases3Call, FunctionPtr(cti_op_get_by_id_string_fail));
#endif
    patchBuffer.link(callArityCheck1, FunctionPtr(cti_op_call_arityCheck));
    patchBuffer.link(callJSFunction1, FunctionPtr(cti_op_call_JSFunction));
#if ENABLE(JIT_OPTIMIZE_CALL)
    patchBuffer.link(callArityCheck2, FunctionPtr(cti_op_call_arityCheck));
    patchBuffer.link(callJSFunction2, FunctionPtr(cti_op_call_JSFunction));
    patchBuffer.link(callLazyLinkCall, FunctionPtr(cti_vm_lazyLinkCall));
#endif

    CodeRef finalCode = patchBuffer.finalizeCode();
    *executablePool = finalCode.m_executablePool;

    *ctiVirtualCallLink = trampolineAt(finalCode, virtualCallLinkBegin);
    *ctiVirtualCall = trampolineAt(finalCode, virtualCallBegin);
    *ctiNativeCallThunk = trampolineAt(finalCode, nativeCallThunk);
#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
    *ctiStringLengthTrampoline = trampolineAt(finalCode, stringLengthBegin);
#else
    UNUSED_PARAM(ctiStringLengthTrampoline);
#endif
}

void JIT::emit_op_mov(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    if (m_codeBlock->isConstantRegisterIndex(src)) {
        storePtr(ImmPtr(JSValue::encode(getConstantOperand(src))), Address(callFrameRegister, dst * sizeof(Register)));
        if (dst == m_lastResultBytecodeRegister)
            killLastResultRegister();
    } else if ((src == m_lastResultBytecodeRegister) || (dst == m_lastResultBytecodeRegister)) {
        // If either the src or dst is the cached register, go through
        // get/put registers to make sure we track this correctly.
        emitGetVirtualRegister(src, regT0);
        emitPutVirtualRegister(dst);
    } else {
        // Perform the copy via regT1; do not disturb any mapping in regT0.
        loadPtr(Address(callFrameRegister, src * sizeof(Register)), regT1);
        storePtr(regT1, Address(callFrameRegister, dst * sizeof(Register)));
    }
}

void JIT::emit_op_end(Instruction* currentInstruction)
{
    if (m_codeBlock->needsFullScopeChain())
        JITStubCall(this, cti_op_end).call();
    ASSERT(returnValueRegister != callFrameRegister);
    emitGetVirtualRegister(currentInstruction[1].u.operand, returnValueRegister);
    restoreReturnAddressBeforeReturn(Address(callFrameRegister, RegisterFile::ReturnPC * static_cast<int>(sizeof(Register))));
    ret();
}

void JIT::emit_op_jmp(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[1].u.operand;
    addJump(jump(), target);
    RECORD_JUMP_TARGET(target);
}

void JIT::emit_op_loop_if_lesseq(Instruction* currentInstruction)
{
    emitTimeoutCheck();

    unsigned op1 = currentInstruction[1].u.operand;
    unsigned op2 = currentInstruction[2].u.operand;
    unsigned target = currentInstruction[3].u.operand;
    if (isOperandConstantImmediateInt(op2)) {
        emitGetVirtualRegister(op1, regT0);
        emitJumpSlowCaseIfNotImmediateInteger(regT0);
#if USE(JSVALUE64)
        int32_t op2imm = getConstantOperandImmediateInt(op2);
#else
        int32_t op2imm = static_cast<int32_t>(JSImmediate::rawValue(getConstantOperand(op2)));
#endif
        addJump(branch32(LessThanOrEqual, regT0, Imm32(op2imm)), target);
    } else {
        emitGetVirtualRegisters(op1, regT0, op2, regT1);
        emitJumpSlowCaseIfNotImmediateInteger(regT0);
        emitJumpSlowCaseIfNotImmediateInteger(regT1);
        addJump(branch32(LessThanOrEqual, regT0, regT1), target);
    }
}

void JIT::emit_op_new_object(Instruction* currentInstruction)
{
    JITStubCall(this, cti_op_new_object).call(currentInstruction[1].u.operand);
}

void JIT::emit_op_instanceof(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;
    unsigned baseVal = currentInstruction[3].u.operand;
    unsigned proto = currentInstruction[4].u.operand;

    // Load the operands (baseVal, proto, and value respectively) into registers.
    // We use regT0 for baseVal since we will be done with this first, and we can then use it for the result.
    emitGetVirtualRegister(value, regT2);
    emitGetVirtualRegister(baseVal, regT0);
    emitGetVirtualRegister(proto, regT1);

    // Check that baseVal & proto are cells.
    emitJumpSlowCaseIfNotJSCell(regT2, value);
    emitJumpSlowCaseIfNotJSCell(regT0, baseVal);
    emitJumpSlowCaseIfNotJSCell(regT1, proto);

    // Check that baseVal 'ImplementsDefaultHasInstance'.
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT0);
    addSlowCase(branchTest32(Zero, Address(regT0, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(ImplementsDefaultHasInstance)));

    // Optimistically load the result true, and start looping.
    // Initially, regT1 still contains proto and regT2 still contains value.
    // As we loop regT2 will be updated with its prototype, recursively walking the prototype chain.
    move(ImmPtr(JSValue::encode(jsBoolean(true))), regT0);
    Label loop(this);

    // Load the prototype of the object in regT2. If this is equal to regT1 - WIN!
    // Otherwise, check if we've hit null - if we have then drop out of the loop, if not go again.
    loadPtr(Address(regT2, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
    loadPtr(Address(regT2, OBJECT_OFFSETOF(Structure, m_prototype)), regT2);
    Jump isInstance = branchPtr(Equal, regT2, regT1);
    emitJumpIfJSCell(regT2).linkTo(loop, this);

    // We get here either by dropping out of the loop, or if value was not an Object. Result is false.
    move(ImmPtr(JSValue::encode(jsBoolean(false))), regT0);

    // isInstance jumps right down to here, to skip setting the result to false (it has already set true).
    isInstance.link(this);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_new_func(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_new_func);
    stubCall.addArgument(ImmPtr(m_codeBlock->functionDecl(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_call(Instruction* currentInstruction)
{
    compileOpCall(op_call, currentInstruction, m_callLinkInfoIndex++);
}

void JIT::emit_op_call_eval(Instruction* currentInstruction)
{
    compileOpCall(op_call_eval, currentInstruction, m_callLinkInfoIndex++);
}

void JIT::emit_op_load_varargs(Instruction* currentInstruction)
{
    int argCountDst = currentInstruction[1].u.operand;
    int argsOffset = currentInstruction[2].u.operand;

    JITStubCall stubCall(this, cti_op_load_varargs);
    stubCall.addArgument(Imm32(argsOffset));
    stubCall.call();
    // Stores a naked int32 in the register file.
    store32(returnValueRegister, Address(callFrameRegister, argCountDst * sizeof(Register)));
}

void JIT::emit_op_call_varargs(Instruction* currentInstruction)
{
    compileOpCallVarargs(currentInstruction);
}

void JIT::emit_op_construct(Instruction* currentInstruction)
{
    compileOpCall(op_construct, currentInstruction, m_callLinkInfoIndex++);
}

void JIT::emit_op_get_global_var(Instruction* currentInstruction)
{
    JSVariableObject* globalObject = static_cast<JSVariableObject*>(currentInstruction[2].u.jsCell);
    move(ImmPtr(globalObject), regT0);
    emitGetVariableObjectRegister(regT0, currentInstruction[3].u.operand, regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_put_global_var(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[3].u.operand, regT1);
    JSVariableObject* globalObject = static_cast<JSVariableObject*>(currentInstruction[1].u.jsCell);
    move(ImmPtr(globalObject), regT0);
    emitPutVariableObjectRegister(regT1, regT0, currentInstruction[2].u.operand);
}

void JIT::emit_op_get_scoped_var(Instruction* currentInstruction)
{
    int skip = currentInstruction[3].u.operand + m_codeBlock->needsFullScopeChain();

    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT0);
    while (skip--)
        loadPtr(Address(regT0, OBJECT_OFFSETOF(ScopeChainNode, next)), regT0);

    loadPtr(Address(regT0, OBJECT_OFFSETOF(ScopeChainNode, object)), regT0);
    emitGetVariableObjectRegister(regT0, currentInstruction[2].u.operand, regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_put_scoped_var(Instruction* currentInstruction)
{
    int skip = currentInstruction[2].u.operand + m_codeBlock->needsFullScopeChain();

    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1);
    emitGetVirtualRegister(currentInstruction[3].u.operand, regT0);
    while (skip--)
        loadPtr(Address(regT1, OBJECT_OFFSETOF(ScopeChainNode, next)), regT1);

    loadPtr(Address(regT1, OBJECT_OFFSETOF(ScopeChainNode, object)), regT1);
    emitPutVariableObjectRegister(regT0, regT1, currentInstruction[1].u.operand);
}

void JIT::emit_op_tear_off_activation(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_tear_off_activation);
    stubCall.addArgument(currentInstruction[1].u.operand, regT2);
    stubCall.call();
}

void JIT::emit_op_tear_off_arguments(Instruction*)
{
    JITStubCall(this, cti_op_tear_off_arguments).call();
}

void JIT::emit_op_ret(Instruction* currentInstruction)
{
#ifdef QT_BUILD_SCRIPT_LIB
    JITStubCall stubCall(this, cti_op_debug_return);
    stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
    stubCall.call();
#endif
    // We could JIT generate the deref, only calling out to C when the refcount hits zero.
    if (m_codeBlock->needsFullScopeChain())
        JITStubCall(this, cti_op_ret_scopeChain).call();

    ASSERT(callFrameRegister != regT1);
    ASSERT(regT1 != returnValueRegister);
    ASSERT(returnValueRegister != callFrameRegister);

    // Return the result in %eax.
    emitGetVirtualRegister(currentInstruction[1].u.operand, returnValueRegister);

    // Grab the return address.
    emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT1);

    // Restore our caller's "r".
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);

    // Return.
    restoreReturnAddressBeforeReturn(regT1);
    ret();
}

void JIT::emit_op_new_array(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_new_array);
    stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
    stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_resolve(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_resolve);
    stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_construct_verify(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);

    emitJumpSlowCaseIfNotJSCell(regT0);
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
    addSlowCase(branch32(NotEqual, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo) + OBJECT_OFFSETOF(TypeInfo, m_type)), Imm32(ObjectType)));
}

void JIT::emit_op_to_primitive(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);

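    // Immediates are already primitive; among cells only strings are primitive,
    // so any cell other than a string takes the slow case.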
    Jump isImm = emitJumpIfNotJSCell(regT0);
    addSlowCase(branchPtr(NotEqual, Address(regT0), ImmPtr(m_globalData->jsStringVPtr)));
    isImm.link(this);

    if (dst != src)
        emitPutVirtualRegister(dst);
}

void JIT::emit_op_strcat(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_strcat);
    stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
    stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_resolve_base(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_resolve_base);
    stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_resolve_skip(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_resolve_skip);
    stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.addArgument(Imm32(currentInstruction[3].u.operand + m_codeBlock->needsFullScopeChain()));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_resolve_global(Instruction* currentInstruction)
{
    // Fast case
    void* globalObject = currentInstruction[2].u.jsCell;
    Identifier* ident = &m_codeBlock->identifier(currentInstruction[3].u.operand);

    unsigned currentIndex = m_globalResolveInfoIndex++;
    void* structureAddress = &(m_codeBlock->globalResolveInfo(currentIndex).structure);
    void* offsetAddr = &(m_codeBlock->globalResolveInfo(currentIndex).offset);

    // Check Structure of global object
    move(ImmPtr(globalObject), regT0);
    loadPtr(structureAddress, regT1);
    Jump noMatch = branchPtr(NotEqual, regT1, Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure))); // Structures don't match

    // Load cached property
    // Assume that the global object always uses external storage.
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSGlobalObject, m_externalStorage)), regT0);
    load32(offsetAddr, regT1);
    loadPtr(BaseIndex(regT0, regT1, ScalePtr), regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
    Jump end = jump();

    // Slow case
    noMatch.link(this);
    JITStubCall stubCall(this, cti_op_resolve_global);
    stubCall.addArgument(ImmPtr(globalObject));
    stubCall.addArgument(ImmPtr(ident));
    stubCall.addArgument(Imm32(currentIndex));
    stubCall.call(currentInstruction[1].u.operand);
    end.link(this);
}

void JIT::emit_op_not(Instruction* currentInstruction)
{
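    // Fast path: the operand must be a boolean immediate. The first xor strips
    // the bool tag, leaving only the payload bit; if any other bit is set, the
    // operand wasn't a boolean, so take the slow case. The final xor restores
    // the tag and flips the payload bit, computing the logical not.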
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);
    xorPtr(Imm32(static_cast<int32_t>(JSImmediate::FullTagTypeBool)), regT0);
    addSlowCase(branchTestPtr(NonZero, regT0, Imm32(static_cast<int32_t>(~JSImmediate::ExtendedPayloadBitBoolValue))));
    xorPtr(Imm32(static_cast<int32_t>(JSImmediate::FullTagTypeBool | JSImmediate::ExtendedPayloadBitBoolValue)), regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_jfalse(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[2].u.operand;
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);

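    // Fast paths: integer zero and false jump to the target; any other integer
    // or true falls through. Everything else takes the slow case.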
    addJump(branchPtr(Equal, regT0, ImmPtr(JSValue::encode(jsNumber(m_globalData, 0)))), target);
    Jump isNonZero = emitJumpIfImmediateInteger(regT0);

    addJump(branchPtr(Equal, regT0, ImmPtr(JSValue::encode(jsBoolean(false)))), target);
    addSlowCase(branchPtr(NotEqual, regT0, ImmPtr(JSValue::encode(jsBoolean(true)))));

    isNonZero.link(this);
    RECORD_JUMP_TARGET(target);
}

void JIT::emit_op_jeq_null(Instruction* currentInstruction)
{
    unsigned src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
    addJump(branchTest32(NonZero, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(MasqueradesAsUndefined)), target);
    Jump wasNotImmediate = jump();

    // Now handle the immediate cases - undefined & null
    isImmediate.link(this);
    andPtr(Imm32(~JSImmediate::ExtendedTagBitUndefined), regT0);
    addJump(branchPtr(Equal, regT0, ImmPtr(JSValue::encode(jsNull()))), target);

    wasNotImmediate.link(this);
    RECORD_JUMP_TARGET(target);
}

void JIT::emit_op_jneq_null(Instruction* currentInstruction)
{
    unsigned src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
    addJump(branchTest32(Zero, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(MasqueradesAsUndefined)), target);
    Jump wasNotImmediate = jump();

    // Now handle the immediate cases - undefined & null
    isImmediate.link(this);
    andPtr(Imm32(~JSImmediate::ExtendedTagBitUndefined), regT0);
    addJump(branchPtr(NotEqual, regT0, ImmPtr(JSValue::encode(jsNull()))), target);

    wasNotImmediate.link(this);
    RECORD_JUMP_TARGET(target);
}

void JIT::emit_op_jneq_ptr(Instruction* currentInstruction)
{
    unsigned src = currentInstruction[1].u.operand;
    JSCell* ptr = currentInstruction[2].u.jsCell;
    unsigned target = currentInstruction[3].u.operand;

    emitGetVirtualRegister(src, regT0);
    addJump(branchPtr(NotEqual, regT0, ImmPtr(JSValue::encode(JSValue(ptr)))), target);

    RECORD_JUMP_TARGET(target);
}

void JIT::emit_op_jsr(Instruction* currentInstruction)
{
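    // op_jsr/op_sret implement entry to and exit from 'finally' blocks. The
    // return address is not known until link time, so plant a patchable store
    // and record it in m_jsrSites; the LinkBuffer later patches in the address
    // of the label following the jump.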
    int retAddrDst = currentInstruction[1].u.operand;
    int target = currentInstruction[2].u.operand;
    DataLabelPtr storeLocation = storePtrWithPatch(ImmPtr(0), Address(callFrameRegister, sizeof(Register) * retAddrDst));
    addJump(jump(), target);
    m_jsrSites.append(JSRInfo(storeLocation, label()));
    killLastResultRegister();
    RECORD_JUMP_TARGET(target);
}

void JIT::emit_op_sret(Instruction* currentInstruction)
{
    jump(Address(callFrameRegister, sizeof(Register) * currentInstruction[1].u.operand));
    killLastResultRegister();
}

void JIT::emit_op_eq(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
    set32(Equal, regT1, regT0, regT0);
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_bitnot(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);
    emitJumpSlowCaseIfNotImmediateInteger(regT0);
#if USE(JSVALUE64)
    not32(regT0);
    emitFastArithIntToImmNoCheck(regT0, regT0);
#else
    xorPtr(Imm32(~JSImmediate::TagTypeNumber), regT0);
#endif
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_resolve_with_base(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_resolve_with_base);
    stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[3].u.operand)));
    stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
    stubCall.call(currentInstruction[2].u.operand);
}

void JIT::emit_op_new_func_exp(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_new_func_exp);
    stubCall.addArgument(ImmPtr(m_codeBlock->functionExpr(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_jtrue(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[2].u.operand;
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);

    Jump isZero = branchPtr(Equal, regT0, ImmPtr(JSValue::encode(jsNumber(m_globalData, 0))));
    addJump(emitJumpIfImmediateInteger(regT0), target);

    addJump(branchPtr(Equal, regT0, ImmPtr(JSValue::encode(jsBoolean(true)))), target);
    addSlowCase(branchPtr(NotEqual, regT0, ImmPtr(JSValue::encode(jsBoolean(false)))));

    isZero.link(this);
    RECORD_JUMP_TARGET(target);
}

void JIT::emit_op_neq(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
    set32(NotEqual, regT1, regT0, regT0);
    emitTagAsBoolImmediate(regT0);

    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_bitxor(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
    xorPtr(regT1, regT0);
    emitFastArithReTagImmediate(regT0, regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_new_regexp(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_new_regexp);
    stubCall.addArgument(ImmPtr(m_codeBlock->regexp(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_bitor(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
    orPtr(regT1, regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_throw(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_throw);
    stubCall.addArgument(currentInstruction[1].u.operand, regT2);
    stubCall.call();
    ASSERT(regT0 == returnValueRegister);
#ifndef NDEBUG
    // cti_op_throw always changes its return address,
    // so this point in the code should never be reached.
    breakpoint();
#endif
}

void JIT::emit_op_get_pnames(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int i = currentInstruction[3].u.operand;
    int size = currentInstruction[4].u.operand;
    int breakTarget = currentInstruction[5].u.operand;

    JumpList isNotObject;

    emitGetVirtualRegister(base, regT0);
    if (!m_codeBlock->isKnownNotImmediate(base))
        isNotObject.append(emitJumpIfNotJSCell(regT0));
    if (base != m_codeBlock->thisRegister()) {
        loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
        isNotObject.append(branch32(NotEqual, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo.m_type)), Imm32(ObjectType)));
    }

    // We could inline the case where you have a valid cache, but
    // this call doesn't seem to be hot.
    Label isObject(this);
    JITStubCall getPnamesStubCall(this, cti_op_get_pnames);
    getPnamesStubCall.addArgument(regT0);
    getPnamesStubCall.call(dst);
    load32(Address(regT0, OBJECT_OFFSETOF(JSPropertyNameIterator, m_jsStringsSize)), regT3);
    store32(Imm32(0), addressFor(i));
    store32(regT3, addressFor(size));
    Jump end = jump();

    isNotObject.link(this);
    move(regT0, regT1);
    and32(Imm32(~JSImmediate::ExtendedTagBitUndefined), regT1);
    addJump(branch32(Equal, regT1, Imm32(JSImmediate::FullTagTypeNull)), breakTarget);

    JITStubCall toObjectStubCall(this, cti_to_object);
    toObjectStubCall.addArgument(regT0);
    toObjectStubCall.call(base);
    jump().linkTo(isObject, this);

    end.link(this);
}

void JIT::emit_op_next_pname(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int i = currentInstruction[3].u.operand;
    int size = currentInstruction[4].u.operand;
    int it = currentInstruction[5].u.operand;
    int target = currentInstruction[6].u.operand;

    JumpList callHasProperty;

    Label begin(this);
    load32(addressFor(i), regT0);
    Jump end = branch32(Equal, regT0, addressFor(size));

    // Grab key @ i
    loadPtr(addressFor(it), regT1);
    loadPtr(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_jsStrings)), regT2);

#if USE(JSVALUE64)
    loadPtr(BaseIndex(regT2, regT0, TimesEight), regT2);
#else
    loadPtr(BaseIndex(regT2, regT0, TimesFour), regT2);
#endif

    emitPutVirtualRegister(dst, regT2);

    // Increment i
    add32(Imm32(1), regT0);
    store32(regT0, addressFor(i));

    // Verify that i is valid:
    emitGetVirtualRegister(base, regT0);

    // Test base's structure
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
    callHasProperty.append(branchPtr(NotEqual, regT2, Address(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedStructure)))));

    // Test base's prototype chain
    loadPtr(Address(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedPrototypeChain))), regT3);
    loadPtr(Address(regT3, OBJECT_OFFSETOF(StructureChain, m_vector)), regT3);
    addJump(branchTestPtr(Zero, Address(regT3)), target);

    Label checkPrototype(this);
    loadPtr(Address(regT2, OBJECT_OFFSETOF(Structure, m_prototype)), regT2);
    callHasProperty.append(emitJumpIfNotJSCell(regT2));
    loadPtr(Address(regT2, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
    callHasProperty.append(branchPtr(NotEqual, regT2, Address(regT3)));
    addPtr(Imm32(sizeof(Structure*)), regT3);
    branchTestPtr(NonZero, Address(regT3)).linkTo(checkPrototype, this);

    // Continue loop.
    addJump(jump(), target);

    // Slow case: Ask the object if i is valid.
    callHasProperty.link(this);
    emitGetVirtualRegister(dst, regT1);
    JITStubCall stubCall(this, cti_has_property);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call();

    // Test for valid key.
    addJump(branchTest32(NonZero, regT0), target);
    jump().linkTo(begin, this);

    // End of loop.
    end.link(this);
}

void JIT::emit_op_push_scope(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_push_scope);
    stubCall.addArgument(currentInstruction[1].u.operand, regT2);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_pop_scope(Instruction*)
{
    JITStubCall(this, cti_op_pop_scope).call();
}

void JIT::compileOpStrictEq(Instruction* currentInstruction, CompileOpStrictEqType type)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;
    unsigned src2 = currentInstruction[3].u.operand;

    emitGetVirtualRegisters(src1, regT0, src2, regT1);

    // Jump to a slow case if either operand is a number, or if both are JSCell*s.
    move(regT0, regT2);
    orPtr(regT1, regT2);
    addSlowCase(emitJumpIfJSCell(regT2));
    addSlowCase(emitJumpIfImmediateNumber(regT2));
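    // Immediate integers can be compared bitwise below. Numbers need value
    // comparison (0 === -0, NaN !== NaN) and cells may be strings, which
    // compare by value, so both fall back to the stub.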

    if (type == OpStrictEq)
        set32(Equal, regT1, regT0, regT0);
    else
        set32(NotEqual, regT1, regT0, regT0);
    emitTagAsBoolImmediate(regT0);

    emitPutVirtualRegister(dst);
}

void JIT::emit_op_stricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpStrictEq);
}

void JIT::emit_op_nstricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpNStrictEq);
}

void JIT::emit_op_to_jsnumber(Instruction* currentInstruction)
{
    int srcVReg = currentInstruction[2].u.operand;
    emitGetVirtualRegister(srcVReg, regT0);

    Jump wasImmediate = emitJumpIfImmediateInteger(regT0);

    emitJumpSlowCaseIfNotJSCell(regT0, srcVReg);
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
    addSlowCase(branch32(NotEqual, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo.m_type)), Imm32(NumberType)));

    wasImmediate.link(this);

    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_push_new_scope(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_push_new_scope);
    stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.addArgument(currentInstruction[3].u.operand, regT2);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_catch(Instruction* currentInstruction)
{
    killLastResultRegister(); // FIXME: Implicitly treat op_catch as a labeled statement, and remove this line of code.
    peek(callFrameRegister, OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof (void*));
    emitPutVirtualRegister(currentInstruction[1].u.operand);
#ifdef QT_BUILD_SCRIPT_LIB
    JITStubCall stubCall(this, cti_op_debug_catch);
    stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
    stubCall.call();
#endif
}

void JIT::emit_op_jmp_scopes(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_jmp_scopes);
    stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
    stubCall.call();
    addJump(jump(), currentInstruction[2].u.operand);
    RECORD_JUMP_TARGET(currentInstruction[2].u.operand);
}

void JIT::emit_op_switch_imm(Instruction* currentInstruction)
{
    unsigned tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // Create jump table for switch destinations, track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->immediateSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeIndex, defaultOffset, SwitchRecord::Immediate));
    jumpTable->ctiOffsets.grow(jumpTable->branchOffsets.size());

    JITStubCall stubCall(this, cti_op_switch_imm);
    stubCall.addArgument(scrutinee, regT2);
    stubCall.addArgument(Imm32(tableIndex));
    stubCall.call();
    jump(regT0);
}

void JIT::emit_op_switch_char(Instruction* currentInstruction)
{
    unsigned tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // Create jump table for switch destinations, track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->characterSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeIndex, defaultOffset, SwitchRecord::Character));
    jumpTable->ctiOffsets.grow(jumpTable->branchOffsets.size());

    JITStubCall stubCall(this, cti_op_switch_char);
    stubCall.addArgument(scrutinee, regT2);
    stubCall.addArgument(Imm32(tableIndex));
    stubCall.call();
    jump(regT0);
}

void JIT::emit_op_switch_string(Instruction* currentInstruction)
{
    unsigned tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // Create jump table for switch destinations, track this switch statement.
    StringJumpTable* jumpTable = &m_codeBlock->stringSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeIndex, defaultOffset));

    JITStubCall stubCall(this, cti_op_switch_string);
    stubCall.addArgument(scrutinee, regT2);
    stubCall.addArgument(Imm32(tableIndex));
    stubCall.call();
    jump(regT0);
}

void JIT::emit_op_new_error(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_new_error);
    stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
    stubCall.addArgument(ImmPtr(JSValue::encode(m_codeBlock->getConstant(currentInstruction[3].u.operand))));
    stubCall.addArgument(Imm32(m_bytecodeIndex));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_debug(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_debug);
    stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
    stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
    stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
    stubCall.call();
}

void JIT::emit_op_eq_null(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src1, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

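    // Cells compare equal to null only if they masquerade as undefined;
    // immediates do so only if they are null or undefined (the tag-bit mask
    // below folds undefined onto null).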
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
    setTest32(NonZero, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(MasqueradesAsUndefined), regT0);

    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    andPtr(Imm32(~JSImmediate::ExtendedTagBitUndefined), regT0);
    setPtr(Equal, regT0, Imm32(JSImmediate::FullTagTypeNull), regT0);

    wasNotImmediate.link(this);

    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_neq_null(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src1, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
    setTest32(Zero, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(MasqueradesAsUndefined), regT0);

    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    andPtr(Imm32(~JSImmediate::ExtendedTagBitUndefined), regT0);
    setPtr(NotEqual, regT0, Imm32(JSImmediate::FullTagTypeNull), regT0);

    wasNotImmediate.link(this);

    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_enter(Instruction*)
{
    // Even though CTI doesn't use them, we initialize our constant
    // registers to zap stale pointers, to avoid unnecessarily prolonging
    // object lifetime and increasing GC pressure.
    size_t count = m_codeBlock->m_numVars;
    for (size_t j = 0; j < count; ++j)
        emitInitRegister(j);
}

void JIT::emit_op_enter_with_activation(Instruction* currentInstruction)
{
    // Even though CTI doesn't use them, we initialize our constant
    // registers to zap stale pointers, to avoid unnecessarily prolonging
    // object lifetime and increasing GC pressure.
    size_t count = m_codeBlock->m_numVars;
    for (size_t j = 0; j < count; ++j)
        emitInitRegister(j);

    JITStubCall(this, cti_op_push_activation).call(currentInstruction[1].u.operand);
}

void JIT::emit_op_create_arguments(Instruction*)
{
    Jump argsCreated = branchTestPtr(NonZero, Address(callFrameRegister, sizeof(Register) * RegisterFile::ArgumentsRegister));
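    // The arguments object is created lazily: if the arguments register is
    // still null, call out to create it now.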
    if (m_codeBlock->m_numParameters == 1)
        JITStubCall(this, cti_op_create_arguments_no_params).call();
    else
        JITStubCall(this, cti_op_create_arguments).call();
    argsCreated.link(this);
}

void JIT::emit_op_init_arguments(Instruction*)
{
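    // Null out the arguments register so op_create_arguments knows the
    // arguments object has not been created yet.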
    storePtr(ImmPtr(0), Address(callFrameRegister, sizeof(Register) * RegisterFile::ArgumentsRegister));
}

void JIT::emit_op_convert_this(Instruction* currentInstruction)
{
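    // Fast path: 'this' is already a cell that does not need conversion.
    // Non-cells and cells flagged NeedsThisConversion fall to the slow case.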
2732 emitGetVirtualRegister(src: currentInstruction[1].u.operand, dst: regT0);
2733
2734 emitJumpSlowCaseIfNotJSCell(reg: regT0);
2735 loadPtr(address: Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), dest: regT1);
2736 addSlowCase(jump: branchTest32(cond: NonZero, address: Address(regT1, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), mask: Imm32(NeedsThisConversion)));
2737
2738}
2739
void JIT::emit_op_profile_will_call(Instruction* currentInstruction)
{
    peek(regT1, OBJECT_OFFSETOF(JITStackFrame, enabledProfilerReference) / sizeof (void*));
    Jump noProfiler = branchTestPtr(Zero, Address(regT1));

    JITStubCall stubCall(this, cti_op_profile_will_call);
    stubCall.addArgument(currentInstruction[1].u.operand, regT1);
    stubCall.call();
    noProfiler.link(this);
}

void JIT::emit_op_profile_did_call(Instruction* currentInstruction)
{
    peek(regT1, OBJECT_OFFSETOF(JITStackFrame, enabledProfilerReference) / sizeof (void*));
    Jump noProfiler = branchTestPtr(Zero, Address(regT1));

    JITStubCall stubCall(this, cti_op_profile_did_call);
    stubCall.addArgument(currentInstruction[1].u.operand, regT1);
    stubCall.call();
    noProfiler.link(this);
}


// Slow cases

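// Each emitSlow_* handler must call linkSlowCase once per slow case that
// the corresponding fast path emitted, in the same order the fast path
// added them.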
void JIT::emitSlow_op_convert_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_convert_this);
    stubCall.addArgument(regT0);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_construct_verify(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_to_primitive(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_to_primitive);
    stubCall.addArgument(regT0);
    stubCall.call(currentInstruction[1].u.operand);
}

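// The five links below must match, in order, the five slow cases emitted by
// the fast path for op_get_by_val.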
void JIT::emitSlow_op_get_by_val(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned base = currentInstruction[2].u.operand;
    unsigned property = currentInstruction[3].u.operand;

    linkSlowCase(iter); // property int32 check
    linkSlowCaseIfNotJSCell(iter, base); // base cell check
    linkSlowCase(iter); // base array check
    linkSlowCase(iter); // vector length check
    linkSlowCase(iter); // empty value

    JITStubCall stubCall(this, cti_op_get_by_val);
    stubCall.addArgument(base, regT2);
    stubCall.addArgument(property, regT2);
    stubCall.call(dst);
}

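// When the right operand is a constant integer, the fast path emitted a
// single slow case (left operand not an int); otherwise it emitted two
// (either operand not an int), hence the differing numbers of linkSlowCase
// calls below.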
void JIT::emitSlow_op_loop_if_lesseq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned op2 = currentInstruction[2].u.operand;
    unsigned target = currentInstruction[3].u.operand;
    if (isOperandConstantImmediateInt(op2)) {
        linkSlowCase(iter);
        JITStubCall stubCall(this, cti_op_loop_if_lesseq);
        stubCall.addArgument(regT0);
        stubCall.addArgument(currentInstruction[2].u.operand, regT2);
        stubCall.call();
        emitJumpSlowToHot(branchTest32(NonZero, regT0), target);
    } else {
        linkSlowCase(iter);
        linkSlowCase(iter);
        JITStubCall stubCall(this, cti_op_loop_if_lesseq);
        stubCall.addArgument(regT0);
        stubCall.addArgument(regT1);
        stubCall.call();
        emitJumpSlowToHot(branchTest32(NonZero, regT0), target);
    }
}

void JIT::emitSlow_op_put_by_val(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned base = currentInstruction[1].u.operand;
    unsigned property = currentInstruction[2].u.operand;
    unsigned value = currentInstruction[3].u.operand;

    linkSlowCase(iter); // property int32 check
    linkSlowCaseIfNotJSCell(iter, base); // base cell check
    linkSlowCase(iter); // base not array check
    linkSlowCase(iter); // in vector check

    JITStubCall stubPutByValCall(this, cti_op_put_by_val);
    stubPutByValCall.addArgument(regT0);
    stubPutByValCall.addArgument(property, regT2);
    stubPutByValCall.addArgument(value, regT2);
    stubPutByValCall.call();
}

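// The fast path speculatively xor'ed the boolean tag out of regT0 before
// discovering the operand was not a boolean; xor it back in to recover the
// original value before handing it to the stub.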
void JIT::emitSlow_op_not(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    xorPtr(Imm32(static_cast<int32_t>(JSImmediate::FullTagTypeBool)), regT0);
    JITStubCall stubCall(this, cti_op_not);
    stubCall.addArgument(regT0);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_jfalse(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_jtrue);
    stubCall.addArgument(regT0);
    stubCall.call();
    emitJumpSlowToHot(branchTest32(Zero, regT0), currentInstruction[2].u.operand); // Inverted: we reuse the op_jtrue stub, so jump when it returns false.
}

void JIT::emitSlow_op_bitnot(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_bitnot);
    stubCall.addArgument(regT0);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_jtrue(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_jtrue);
    stubCall.addArgument(regT0);
    stubCall.call();
    emitJumpSlowToHot(branchTest32(NonZero, regT0), currentInstruction[2].u.operand);
}

void JIT::emitSlow_op_bitxor(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_bitxor);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_bitor(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_bitor);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_eq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_eq);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call();
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

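// op_neq reuses the op_eq stub and simply flips the low bit of the boolean
// result before tagging it.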
void JIT::emitSlow_op_neq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_eq);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call();
    xor32(Imm32(0x1), regT0);
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_stricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_stricteq);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_nstricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_nstricteq);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call(currentInstruction[1].u.operand);
}

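// Four slow cases: one per operand that may turn out not to be a cell
// (value, baseVal, proto), plus one more check from the fast path (that the
// base value can use the default hasInstance behavior).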
void JIT::emitSlow_op_instanceof(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;
    unsigned baseVal = currentInstruction[3].u.operand;
    unsigned proto = currentInstruction[4].u.operand;

    linkSlowCaseIfNotJSCell(iter, value);
    linkSlowCaseIfNotJSCell(iter, baseVal);
    linkSlowCaseIfNotJSCell(iter, proto);
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_instanceof);
    stubCall.addArgument(value, regT2);
    stubCall.addArgument(baseVal, regT2);
    stubCall.addArgument(proto, regT2);
    stubCall.call(dst);
}

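// The call/construct slow paths share a single implementation; these thin
// wrappers just pass the opcode (and a fresh call link info index) through
// to the shared helper.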
void JIT::emitSlow_op_call(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase(currentInstruction, iter, m_callLinkInfoIndex++, op_call);
}

void JIT::emitSlow_op_call_eval(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase(currentInstruction, iter, m_callLinkInfoIndex++, op_call_eval);
}

void JIT::emitSlow_op_call_varargs(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallVarargsSlowCase(currentInstruction, iter);
}

void JIT::emitSlow_op_construct(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase(currentInstruction, iter, m_callLinkInfoIndex++, op_construct);
}

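// Two slow cases: the operand was an immediate that wasn't an integer, or
// it was a cell that is not a number cell.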
void JIT::emitSlow_op_to_jsnumber(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCaseIfNotJSCell(iter, currentInstruction[2].u.operand);
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_to_jsnumber);
    stubCall.addArgument(regT0);
    stubCall.call(currentInstruction[1].u.operand);
}

#endif // USE(JSVALUE32_64)

} // namespace JSC

#endif // ENABLE(JIT)