/*
 * Copyright (C) 2008 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "JIT.h"

#if ENABLE(JIT)

#include "CodeBlock.h"
#include "JITInlineMethods.h"
#include "JITStubCall.h"
#include "JSArray.h"
#include "JSFunction.h"
#include "Interpreter.h"
#include "ResultType.h"
#include "SamplingTool.h"

#ifndef NDEBUG
#include <stdio.h>
#endif

using namespace std;

namespace JSC {

#if USE(JSVALUE32_64)

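// Note on the value representation assumed below: with USE(JSVALUE32_64) a
// JSValue is split into a 32-bit tag and a 32-bit payload, so values are moved
// around in register pairs - in this file the payload travels in regT0 and the
// tag in regT1 (see the emitLoad/emitStore calls below).
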
void JIT::compileOpCallInitializeCallFrame()
{
    // regT0 holds callee, regT1 holds argCount
    store32(regT1, Address(callFrameRegister, RegisterFile::ArgumentCount * static_cast<int>(sizeof(Register))));

    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_data) + OBJECT_OFFSETOF(ScopeChain, m_node)), regT1); // scopeChain

    emitStore(static_cast<unsigned>(RegisterFile::OptionalCalleeArguments), JSValue());
    storePtr(regT0, Address(callFrameRegister, RegisterFile::Callee * static_cast<int>(sizeof(Register)))); // callee
    storePtr(regT1, Address(callFrameRegister, RegisterFile::ScopeChain * static_cast<int>(sizeof(Register)))); // scopeChain
}

void JIT::compileOpCallSetupArgs(Instruction* instruction)
{
    int argCount = instruction[3].u.operand;
    int registerOffset = instruction[4].u.operand;

    emitPutJITStubArg(regT1, regT0, 0);
    emitPutJITStubArgConstant(registerOffset, 1);
    emitPutJITStubArgConstant(argCount, 2);
}

void JIT::compileOpConstructSetupArgs(Instruction* instruction)
{
    int argCount = instruction[3].u.operand;
    int registerOffset = instruction[4].u.operand;
    int proto = instruction[5].u.operand;
    int thisRegister = instruction[6].u.operand;

    emitPutJITStubArg(regT1, regT0, 0);
    emitPutJITStubArgConstant(registerOffset, 1);
    emitPutJITStubArgConstant(argCount, 2);
    emitPutJITStubArgFromVirtualRegister(proto, 3, regT2, regT3);
    emitPutJITStubArgConstant(thisRegister, 4);
}

void JIT::compileOpCallVarargsSetupArgs(Instruction*)
{
    emitPutJITStubArg(regT1, regT0, 0);
    emitPutJITStubArg(regT3, 1); // registerOffset
    emitPutJITStubArg(regT2, 2); // argCount
}

void JIT::compileOpCallVarargs(Instruction* instruction)
{
    int dst = instruction[1].u.operand;
    int callee = instruction[2].u.operand;
    int argCountRegister = instruction[3].u.operand;
    int registerOffset = instruction[4].u.operand;

    emitLoad(callee, regT1, regT0);
    emitLoadPayload(argCountRegister, regT2); // argCount
    addPtr(Imm32(registerOffset), regT2, regT3); // registerOffset

    compileOpCallVarargsSetupArgs(instruction);

    emitJumpSlowCaseIfNotJSCell(callee, regT1);
    addSlowCase(branchPtr(NotEqual, Address(regT0), ImmPtr(m_globalData->jsFunctionVPtr)));

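    // (A note on the fast path below: the frame is rolled before the callee's
    // arity is known; broadly, the ctiVirtualCall trampoline is then expected
    // to check the arity, fix the frame up if it does not match, and compile
    // the callee on first use before jumping to its code.)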
    // Speculatively roll the callframe, assuming argCount will match the arity.
    mul32(Imm32(sizeof(Register)), regT3, regT3);
    addPtr(callFrameRegister, regT3);
    storePtr(callFrameRegister, Address(regT3, RegisterFile::CallerFrame * static_cast<int>(sizeof(Register))));
    move(regT3, callFrameRegister);

    move(regT2, regT1); // argCount

    emitNakedCall(m_globalData->jitStubs.ctiVirtualCall());

    emitStore(dst, regT1, regT0);

    sampleCodeBlock(m_codeBlock);
}

void JIT::compileOpCallVarargsSlowCase(Instruction* instruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = instruction[1].u.operand;
    int callee = instruction[2].u.operand;

    linkSlowCaseIfNotJSCell(iter, callee);
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_call_NotJSFunction);
    stubCall.call(dst); // In the interpreter, the callee puts the return value in dst.

    map(m_bytecodeIndex + OPCODE_LENGTH(op_call_varargs), dst, regT1, regT0);
    sampleCodeBlock(m_codeBlock);
}

void JIT::emit_op_ret(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;

#ifdef QT_BUILD_SCRIPT_LIB
    JITStubCall stubCall(this, cti_op_debug_return);
    stubCall.addArgument(Imm32(dst));
    stubCall.call();
#endif

    // We could JIT generate the deref, only calling out to C when the refcount hits zero.
    if (m_codeBlock->needsFullScopeChain())
        JITStubCall(this, cti_op_ret_scopeChain).call();

    emitLoad(dst, regT1, regT0);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT2);
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);

    restoreReturnAddressBeforeReturn(regT2);
    ret();
}

void JIT::emit_op_construct_verify(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;

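    // construct_verify checks that the value the constructor returned is really
    // an object; if it is not, the slow case below falls back to the original
    // 'this' object (operand 2), matching [[Construct]] semantics.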
    emitLoad(dst, regT1, regT0);
    addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::CellTag)));
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
    addSlowCase(branch32(NotEqual, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo) + OBJECT_OFFSETOF(TypeInfo, m_type)), Imm32(ObjectType)));
}

void JIT::emitSlow_op_construct_verify(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src = currentInstruction[2].u.operand;

    linkSlowCase(iter);
    linkSlowCase(iter);
    emitLoad(src, regT1, regT0);
    emitStore(dst, regT1, regT0);
}

void JIT::emitSlow_op_call(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase(currentInstruction, iter, m_callLinkInfoIndex++, op_call);
}

void JIT::emitSlow_op_call_eval(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase(currentInstruction, iter, m_callLinkInfoIndex++, op_call_eval);
}

void JIT::emitSlow_op_call_varargs(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallVarargsSlowCase(currentInstruction, iter);
}

void JIT::emitSlow_op_construct(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase(currentInstruction, iter, m_callLinkInfoIndex++, op_construct);
}

void JIT::emit_op_call(Instruction* currentInstruction)
{
    compileOpCall(op_call, currentInstruction, m_callLinkInfoIndex++);
}

void JIT::emit_op_call_eval(Instruction* currentInstruction)
{
    compileOpCall(op_call_eval, currentInstruction, m_callLinkInfoIndex++);
}

void JIT::emit_op_load_varargs(Instruction* currentInstruction)
{
    int argCountDst = currentInstruction[1].u.operand;
    int argsOffset = currentInstruction[2].u.operand;

    JITStubCall stubCall(this, cti_op_load_varargs);
    stubCall.addArgument(Imm32(argsOffset));
    stubCall.call();
    // Stores a naked int32 in the register file.
    store32(returnValueRegister, Address(callFrameRegister, argCountDst * sizeof(Register)));
}

void JIT::emit_op_call_varargs(Instruction* currentInstruction)
{
    compileOpCallVarargs(currentInstruction);
}

void JIT::emit_op_construct(Instruction* currentInstruction)
{
    compileOpCall(op_construct, currentInstruction, m_callLinkInfoIndex++);
}

#if !ENABLE(JIT_OPTIMIZE_CALL)

/* ------------------------------ BEGIN: !ENABLE(JIT_OPTIMIZE_CALL) ------------------------------ */

void JIT::compileOpCall(OpcodeID opcodeID, Instruction* instruction, unsigned)
{
    int dst = instruction[1].u.operand;
    int callee = instruction[2].u.operand;
    int argCount = instruction[3].u.operand;
    int registerOffset = instruction[4].u.operand;

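    // For op_call_eval we first call the cti_op_call_eval stub: if the callee
    // really is eval it performs the eval and returns its result, otherwise it
    // returns the empty JSValue and we fall through to treat this as a normal
    // call (hence the wasEval branch on the returned tag).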
    Jump wasEval;
    if (opcodeID == op_call_eval) {
        JITStubCall stubCall(this, cti_op_call_eval);
        stubCall.addArgument(callee);
        stubCall.addArgument(JIT::Imm32(registerOffset));
        stubCall.addArgument(JIT::Imm32(argCount));
        stubCall.call();
        wasEval = branch32(NotEqual, regT1, Imm32(JSValue::EmptyValueTag));
    }

    emitLoad(callee, regT1, regT0);

    if (opcodeID == op_call)
        compileOpCallSetupArgs(instruction);
    else if (opcodeID == op_construct)
        compileOpConstructSetupArgs(instruction);

    emitJumpSlowCaseIfNotJSCell(callee, regT1);
    addSlowCase(branchPtr(NotEqual, Address(regT0), ImmPtr(m_globalData->jsFunctionVPtr)));

    // First, in the case of a construct, allocate the new object.
    if (opcodeID == op_construct) {
        JITStubCall(this, cti_op_construct_JSConstruct).call(registerOffset - RegisterFile::CallFrameHeaderSize - argCount);
        emitLoad(callee, regT1, regT0);
    }

    // Speculatively roll the callframe, assuming argCount will match the arity.
    storePtr(callFrameRegister, Address(callFrameRegister, (RegisterFile::CallerFrame + registerOffset) * static_cast<int>(sizeof(Register))));
    addPtr(Imm32(registerOffset * static_cast<int>(sizeof(Register))), callFrameRegister);
    move(Imm32(argCount), regT1);

    emitNakedCall(m_globalData->jitStubs.ctiVirtualCall());

    if (opcodeID == op_call_eval)
        wasEval.link(this);

    emitStore(dst, regT1, regT0);

    sampleCodeBlock(m_codeBlock);
}

void JIT::compileOpCallSlowCase(Instruction* instruction, Vector<SlowCaseEntry>::iterator& iter, unsigned, OpcodeID opcodeID)
{
    int dst = instruction[1].u.operand;
    int callee = instruction[2].u.operand;

    linkSlowCaseIfNotJSCell(iter, callee);
    linkSlowCase(iter);

    JITStubCall stubCall(this, opcodeID == op_construct ? cti_op_construct_NotJSConstruct : cti_op_call_NotJSFunction);
    stubCall.call(dst); // In the interpreter, the callee puts the return value in dst.

    sampleCodeBlock(m_codeBlock);
}

#else // !ENABLE(JIT_OPTIMIZE_CALL)

/* ------------------------------ BEGIN: ENABLE(JIT_OPTIMIZE_CALL) ------------------------------ */

void JIT::compileOpCall(OpcodeID opcodeID, Instruction* instruction, unsigned callLinkInfoIndex)
{
    int dst = instruction[1].u.operand;
    int callee = instruction[2].u.operand;
    int argCount = instruction[3].u.operand;
    int registerOffset = instruction[4].u.operand;

    Jump wasEval;
    if (opcodeID == op_call_eval) {
        JITStubCall stubCall(this, cti_op_call_eval);
        stubCall.addArgument(callee);
        stubCall.addArgument(JIT::Imm32(registerOffset));
        stubCall.addArgument(JIT::Imm32(argCount));
        stubCall.call();
        wasEval = branch32(NotEqual, regT1, Imm32(JSValue::EmptyValueTag));
    }

    emitLoad(callee, regT1, regT0);

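    // Plant a comparison against a patchable pointer constant. It starts out
    // null, so the check always fails and we go to the slow case; once the call
    // has been linked the constant is patched to point at the callee, letting
    // later calls to the same function drop straight into its compiled code.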
    DataLabelPtr addressOfLinkedFunctionCheck;

    BEGIN_UNINTERRUPTED_SEQUENCE(sequenceOpCall);

    Jump jumpToSlow = branchPtrWithPatch(NotEqual, regT0, addressOfLinkedFunctionCheck, ImmPtr(0));

    END_UNINTERRUPTED_SEQUENCE(sequenceOpCall);

    addSlowCase(jumpToSlow);
    ASSERT(differenceBetween(addressOfLinkedFunctionCheck, jumpToSlow) == patchOffsetOpCallCompareToJump);
    m_callStructureStubCompilationInfo[callLinkInfoIndex].hotPathBegin = addressOfLinkedFunctionCheck;

    addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::CellTag)));

    // The following is the fast case, only used when a callee can be linked.

    // In the case of OpConstruct, call out to a cti_ function to create the new object.
    if (opcodeID == op_construct) {
        int proto = instruction[5].u.operand;
        int thisRegister = instruction[6].u.operand;

        JITStubCall stubCall(this, cti_op_construct_JSConstruct);
        stubCall.addArgument(regT1, regT0);
        stubCall.addArgument(Imm32(0)); // FIXME: Remove this unused JITStub argument.
        stubCall.addArgument(Imm32(0)); // FIXME: Remove this unused JITStub argument.
        stubCall.addArgument(proto);
        stubCall.call(thisRegister);

        emitLoad(callee, regT1, regT0);
    }

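    // The OptionalCalleeArguments slot is written as the empty JSValue here;
    // the callee's 'arguments' object, if needed at all, is created lazily.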
    // Fast version of stack frame initialization, directly relative to edi.
    // Note that this omits setting up RegisterFile::CodeBlock, which is set in the callee.
    emitStore(registerOffset + RegisterFile::OptionalCalleeArguments, JSValue());
    emitStore(registerOffset + RegisterFile::Callee, regT1, regT0);

    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_data) + OBJECT_OFFSETOF(ScopeChain, m_node)), regT1); // newScopeChain
    store32(Imm32(argCount), Address(callFrameRegister, (registerOffset + RegisterFile::ArgumentCount) * static_cast<int>(sizeof(Register))));
    storePtr(callFrameRegister, Address(callFrameRegister, (registerOffset + RegisterFile::CallerFrame) * static_cast<int>(sizeof(Register))));
    storePtr(regT1, Address(callFrameRegister, (registerOffset + RegisterFile::ScopeChain) * static_cast<int>(sizeof(Register))));
    addPtr(Imm32(registerOffset * sizeof(Register)), callFrameRegister);

    // Call to the callee
    m_callStructureStubCompilationInfo[callLinkInfoIndex].hotPathOther = emitNakedCall();

    if (opcodeID == op_call_eval)
        wasEval.link(this);

    // Put the return value in dst. In the interpreter, op_ret does this.
    emitStore(dst, regT1, regT0);
    map(m_bytecodeIndex + opcodeLengths[opcodeID], dst, regT1, regT0);

    sampleCodeBlock(m_codeBlock);
}

void JIT::compileOpCallSlowCase(Instruction* instruction, Vector<SlowCaseEntry>::iterator& iter, unsigned callLinkInfoIndex, OpcodeID opcodeID)
{
    int dst = instruction[1].u.operand;
    int callee = instruction[2].u.operand;
    int argCount = instruction[3].u.operand;
    int registerOffset = instruction[4].u.operand;

    linkSlowCase(iter);
    linkSlowCase(iter);

    // The arguments have been set up on the hot path for op_call_eval
    if (opcodeID == op_call)
        compileOpCallSetupArgs(instruction);
    else if (opcodeID == op_construct)
        compileOpConstructSetupArgs(instruction);

    // Fast check for JS function.
    Jump callLinkFailNotObject = branch32(NotEqual, regT1, Imm32(JSValue::CellTag));
    Jump callLinkFailNotJSFunction = branchPtr(NotEqual, Address(regT0), ImmPtr(m_globalData->jsFunctionVPtr));

    // First, in the case of a construct, allocate the new object.
    if (opcodeID == op_construct) {
        JITStubCall(this, cti_op_construct_JSConstruct).call(registerOffset - RegisterFile::CallFrameHeaderSize - argCount);
        emitLoad(callee, regT1, regT0);
    }

    // Speculatively roll the callframe, assuming argCount will match the arity.
    storePtr(callFrameRegister, Address(callFrameRegister, (RegisterFile::CallerFrame + registerOffset) * static_cast<int>(sizeof(Register))));
    addPtr(Imm32(registerOffset * static_cast<int>(sizeof(Register))), callFrameRegister);
    move(Imm32(argCount), regT1);

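    // ctiVirtualCallLink performs this call and, where it can, also links the
    // patchable check planted on the hot path so future calls skip this slow case.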
    m_callStructureStubCompilationInfo[callLinkInfoIndex].callReturnLocation = emitNakedCall(m_globalData->jitStubs.ctiVirtualCallLink());

    // Put the return value in dst.
    emitStore(dst, regT1, regT0);
    sampleCodeBlock(m_codeBlock);

    // If not, we need an extra case in the if below!
    ASSERT(OPCODE_LENGTH(op_call) == OPCODE_LENGTH(op_call_eval));

    // Done! - return back to the hot path.
    if (opcodeID == op_construct)
        emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_construct));
    else
        emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_call));

    // This handles host functions
    callLinkFailNotObject.link(this);
    callLinkFailNotJSFunction.link(this);
    JITStubCall(this, opcodeID == op_construct ? cti_op_construct_NotJSConstruct : cti_op_call_NotJSFunction).call();

    emitStore(dst, regT1, regT0);
    sampleCodeBlock(m_codeBlock);
}

/* ------------------------------ END: !ENABLE / ENABLE(JIT_OPTIMIZE_CALL) ------------------------------ */

#endif // !ENABLE(JIT_OPTIMIZE_CALL)

#else // USE(JSVALUE32_64)

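// In the non-JSVALUE32_64 configurations a JSValue is encoded in a single
// pointer-sized word, so the code below moves whole values in one register
// (typically regT0) rather than in tag/payload pairs.
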
void JIT::compileOpCallInitializeCallFrame()
{
    store32(regT1, Address(callFrameRegister, RegisterFile::ArgumentCount * static_cast<int>(sizeof(Register))));

    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_data) + OBJECT_OFFSETOF(ScopeChain, m_node)), regT1); // newScopeChain

    storePtr(ImmPtr(JSValue::encode(JSValue())), Address(callFrameRegister, RegisterFile::OptionalCalleeArguments * static_cast<int>(sizeof(Register))));
    storePtr(regT0, Address(callFrameRegister, RegisterFile::Callee * static_cast<int>(sizeof(Register))));
    storePtr(regT1, Address(callFrameRegister, RegisterFile::ScopeChain * static_cast<int>(sizeof(Register))));
}

void JIT::compileOpCallSetupArgs(Instruction* instruction)
{
    int argCount = instruction[3].u.operand;
    int registerOffset = instruction[4].u.operand;

    // ecx holds func
    emitPutJITStubArg(regT0, 0);
    emitPutJITStubArgConstant(argCount, 2);
    emitPutJITStubArgConstant(registerOffset, 1);
}

void JIT::compileOpCallVarargsSetupArgs(Instruction* instruction)
{
    int registerOffset = instruction[4].u.operand;

    // ecx holds func
    emitPutJITStubArg(regT0, 0);
    emitPutJITStubArg(regT1, 2);
    addPtr(Imm32(registerOffset), regT1, regT2);
    emitPutJITStubArg(regT2, 1);
}

void JIT::compileOpConstructSetupArgs(Instruction* instruction)
{
    int argCount = instruction[3].u.operand;
    int registerOffset = instruction[4].u.operand;
    int proto = instruction[5].u.operand;
    int thisRegister = instruction[6].u.operand;

    // ecx holds func
    emitPutJITStubArg(regT0, 0);
    emitPutJITStubArgConstant(registerOffset, 1);
    emitPutJITStubArgConstant(argCount, 2);
    emitPutJITStubArgFromVirtualRegister(proto, 3, regT2);
    emitPutJITStubArgConstant(thisRegister, 4);
}

void JIT::compileOpCallVarargs(Instruction* instruction)
{
    int dst = instruction[1].u.operand;
    int callee = instruction[2].u.operand;
    int argCountRegister = instruction[3].u.operand;

    emitGetVirtualRegister(argCountRegister, regT1);
    emitGetVirtualRegister(callee, regT0);
    compileOpCallVarargsSetupArgs(instruction);

    // Check for JSFunctions.
    emitJumpSlowCaseIfNotJSCell(regT0);
    addSlowCase(branchPtr(NotEqual, Address(regT0), ImmPtr(m_globalData->jsFunctionVPtr)));

    // Speculatively roll the callframe, assuming argCount will match the arity.
    mul32(Imm32(sizeof(Register)), regT2, regT2);
    intptr_t offset = (intptr_t)sizeof(Register) * (intptr_t)RegisterFile::CallerFrame;
    addPtr(Imm32((int32_t)offset), regT2, regT3);
    addPtr(callFrameRegister, regT3);
    storePtr(callFrameRegister, regT3);
    addPtr(regT2, callFrameRegister);
    emitNakedCall(m_globalData->jitStubs.ctiVirtualCall());

    // Put the return value in dst. In the interpreter, op_ret does this.
    emitPutVirtualRegister(dst);

    sampleCodeBlock(m_codeBlock);
}

void JIT::compileOpCallVarargsSlowCase(Instruction* instruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = instruction[1].u.operand;

    linkSlowCase(iter);
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_call_NotJSFunction);
    stubCall.call(dst); // In the interpreter, the callee puts the return value in dst.

    sampleCodeBlock(m_codeBlock);
}

#if !ENABLE(JIT_OPTIMIZE_CALL)

/* ------------------------------ BEGIN: !ENABLE(JIT_OPTIMIZE_CALL) ------------------------------ */

void JIT::compileOpCall(OpcodeID opcodeID, Instruction* instruction, unsigned)
{
    int dst = instruction[1].u.operand;
    int callee = instruction[2].u.operand;
    int argCount = instruction[3].u.operand;
    int registerOffset = instruction[4].u.operand;

    // Handle eval
    Jump wasEval;
    if (opcodeID == op_call_eval) {
        JITStubCall stubCall(this, cti_op_call_eval);
        stubCall.addArgument(callee, regT0);
        stubCall.addArgument(JIT::Imm32(registerOffset));
        stubCall.addArgument(JIT::Imm32(argCount));
        stubCall.call();
        wasEval = branchPtr(NotEqual, regT0, ImmPtr(JSValue::encode(JSValue())));
    }

    emitGetVirtualRegister(callee, regT0);
    // The arguments have been set up on the hot path for op_call_eval
    if (opcodeID == op_call)
        compileOpCallSetupArgs(instruction);
    else if (opcodeID == op_construct)
        compileOpConstructSetupArgs(instruction);

    // Check for JSFunctions.
    emitJumpSlowCaseIfNotJSCell(regT0);
    addSlowCase(branchPtr(NotEqual, Address(regT0), ImmPtr(m_globalData->jsFunctionVPtr)));

    // First, in the case of a construct, allocate the new object.
    if (opcodeID == op_construct) {
        JITStubCall(this, cti_op_construct_JSConstruct).call(registerOffset - RegisterFile::CallFrameHeaderSize - argCount);
        emitGetVirtualRegister(callee, regT0);
    }

    // Speculatively roll the callframe, assuming argCount will match the arity.
    storePtr(callFrameRegister, Address(callFrameRegister, (RegisterFile::CallerFrame + registerOffset) * static_cast<int>(sizeof(Register))));
    addPtr(Imm32(registerOffset * static_cast<int>(sizeof(Register))), callFrameRegister);
    move(Imm32(argCount), regT1);

    emitNakedCall(m_globalData->jitStubs.ctiVirtualCall());

    if (opcodeID == op_call_eval)
        wasEval.link(this);

    // Put the return value in dst. In the interpreter, op_ret does this.
    emitPutVirtualRegister(dst);

    sampleCodeBlock(m_codeBlock);
}

void JIT::compileOpCallSlowCase(Instruction* instruction, Vector<SlowCaseEntry>::iterator& iter, unsigned, OpcodeID opcodeID)
{
    int dst = instruction[1].u.operand;

    linkSlowCase(iter);
    linkSlowCase(iter);
    JITStubCall stubCall(this, opcodeID == op_construct ? cti_op_construct_NotJSConstruct : cti_op_call_NotJSFunction);
    stubCall.call(dst); // In the interpreter, the callee puts the return value in dst.

    sampleCodeBlock(m_codeBlock);
}

#else // !ENABLE(JIT_OPTIMIZE_CALL)

/* ------------------------------ BEGIN: ENABLE(JIT_OPTIMIZE_CALL) ------------------------------ */

void JIT::compileOpCall(OpcodeID opcodeID, Instruction* instruction, unsigned callLinkInfoIndex)
{
    int dst = instruction[1].u.operand;
    int callee = instruction[2].u.operand;
    int argCount = instruction[3].u.operand;
    int registerOffset = instruction[4].u.operand;

    // Handle eval
    Jump wasEval;
    if (opcodeID == op_call_eval) {
        JITStubCall stubCall(this, cti_op_call_eval);
        stubCall.addArgument(callee, regT0);
        stubCall.addArgument(JIT::Imm32(registerOffset));
        stubCall.addArgument(JIT::Imm32(argCount));
        stubCall.call();
        wasEval = branchPtr(NotEqual, regT0, ImmPtr(JSValue::encode(JSValue())));
    }

    // This plants a check for a cached JSFunction value, so we can plant a fast link to the callee.
    // This deliberately leaves the callee in ecx, used when setting up the stack frame below
    emitGetVirtualRegister(callee, regT0);
    DataLabelPtr addressOfLinkedFunctionCheck;

    BEGIN_UNINTERRUPTED_SEQUENCE(sequenceOpCall);

    Jump jumpToSlow = branchPtrWithPatch(NotEqual, regT0, addressOfLinkedFunctionCheck, ImmPtr(JSValue::encode(JSValue())));

    END_UNINTERRUPTED_SEQUENCE(sequenceOpCall);

    addSlowCase(jumpToSlow);
    ASSERT_JIT_OFFSET(differenceBetween(addressOfLinkedFunctionCheck, jumpToSlow), patchOffsetOpCallCompareToJump);
    m_callStructureStubCompilationInfo[callLinkInfoIndex].hotPathBegin = addressOfLinkedFunctionCheck;

    // The following is the fast case, only used when a callee can be linked.

    // In the case of OpConstruct, call out to a cti_ function to create the new object.
    if (opcodeID == op_construct) {
        int proto = instruction[5].u.operand;
        int thisRegister = instruction[6].u.operand;

        emitPutJITStubArg(regT0, 0);
        emitPutJITStubArgFromVirtualRegister(proto, 3, regT2);
        JITStubCall stubCall(this, cti_op_construct_JSConstruct);
        stubCall.call(thisRegister);
        emitGetVirtualRegister(callee, regT0);
    }

    // Fast version of stack frame initialization, directly relative to edi.
    // Note that this omits setting up RegisterFile::CodeBlock, which is set in the callee.
    storePtr(ImmPtr(JSValue::encode(JSValue())), Address(callFrameRegister, (registerOffset + RegisterFile::OptionalCalleeArguments) * static_cast<int>(sizeof(Register))));
    storePtr(regT0, Address(callFrameRegister, (registerOffset + RegisterFile::Callee) * static_cast<int>(sizeof(Register))));
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_data) + OBJECT_OFFSETOF(ScopeChain, m_node)), regT1); // newScopeChain
    store32(Imm32(argCount), Address(callFrameRegister, (registerOffset + RegisterFile::ArgumentCount) * static_cast<int>(sizeof(Register))));
    storePtr(callFrameRegister, Address(callFrameRegister, (registerOffset + RegisterFile::CallerFrame) * static_cast<int>(sizeof(Register))));
    storePtr(regT1, Address(callFrameRegister, (registerOffset + RegisterFile::ScopeChain) * static_cast<int>(sizeof(Register))));
    addPtr(Imm32(registerOffset * sizeof(Register)), callFrameRegister);

    // Call to the callee
    m_callStructureStubCompilationInfo[callLinkInfoIndex].hotPathOther = emitNakedCall();

    if (opcodeID == op_call_eval)
        wasEval.link(this);

    // Put the return value in dst. In the interpreter, op_ret does this.
    emitPutVirtualRegister(dst);

    sampleCodeBlock(m_codeBlock);
}

void JIT::compileOpCallSlowCase(Instruction* instruction, Vector<SlowCaseEntry>::iterator& iter, unsigned callLinkInfoIndex, OpcodeID opcodeID)
{
    int dst = instruction[1].u.operand;
    int callee = instruction[2].u.operand;
    int argCount = instruction[3].u.operand;
    int registerOffset = instruction[4].u.operand;

    linkSlowCase(iter);

    // The arguments have been set up on the hot path for op_call_eval
    if (opcodeID == op_call)
        compileOpCallSetupArgs(instruction);
    else if (opcodeID == op_construct)
        compileOpConstructSetupArgs(instruction);

    // Fast check for JS function.
    Jump callLinkFailNotObject = emitJumpIfNotJSCell(regT0);
    Jump callLinkFailNotJSFunction = branchPtr(NotEqual, Address(regT0), ImmPtr(m_globalData->jsFunctionVPtr));

    // First, in the case of a construct, allocate the new object.
    if (opcodeID == op_construct) {
        JITStubCall(this, cti_op_construct_JSConstruct).call(registerOffset - RegisterFile::CallFrameHeaderSize - argCount);
        emitGetVirtualRegister(callee, regT0);
    }

    // Speculatively roll the callframe, assuming argCount will match the arity.
    storePtr(callFrameRegister, Address(callFrameRegister, (RegisterFile::CallerFrame + registerOffset) * static_cast<int>(sizeof(Register))));
    addPtr(Imm32(registerOffset * static_cast<int>(sizeof(Register))), callFrameRegister);
    move(Imm32(argCount), regT1);

    move(regT0, regT2);

    m_callStructureStubCompilationInfo[callLinkInfoIndex].callReturnLocation = emitNakedCall(m_globalData->jitStubs.ctiVirtualCallLink());

    // Put the return value in dst.
    emitPutVirtualRegister(dst);
    sampleCodeBlock(m_codeBlock);

    // If not, we need an extra case in the if below!
    ASSERT(OPCODE_LENGTH(op_call) == OPCODE_LENGTH(op_call_eval));

    // Done! - return back to the hot path.
    if (opcodeID == op_construct)
        emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_construct));
    else
        emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_call));

    // This handles host functions
    callLinkFailNotObject.link(this);
    callLinkFailNotJSFunction.link(this);
    JITStubCall(this, opcodeID == op_construct ? cti_op_construct_NotJSConstruct : cti_op_call_NotJSFunction).call();

    emitPutVirtualRegister(dst);
    sampleCodeBlock(m_codeBlock);
}

/* ------------------------------ END: !ENABLE / ENABLE(JIT_OPTIMIZE_CALL) ------------------------------ */

#endif // !ENABLE(JIT_OPTIMIZE_CALL)

#endif // USE(JSVALUE32_64)

} // namespace JSC

#endif // ENABLE(JIT)