2 * Copyright (C) 2008 Apple Inc. All rights reserved.
4 * Redistribution and use in source and binary forms, with or without
5 * modification, are permitted provided that the following conditions
7 * 1. Redistributions of source code must retain the above copyright
8 * notice, this list of conditions and the following disclaimer.
9 * 2. Redistributions in binary form must reproduce the above copyright
10 * notice, this list of conditions and the following disclaimer in the
11 * documentation and/or other materials provided with the distribution.
13 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
17 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
31 #include "CodeBlock.h"
32 #include "JITInlineMethods.h"
34 #include "JSFunction.h"
35 #include "Interpreter.h"
36 #include "ResultType.h"
37 #include "SamplingTool.h"
// The hand-written trampolines below hard-code the stack offsets of the
// 'code' and 'callFrame' stub arguments (offset = index * sizeof(void*)).
// These assertions keep those asm constants in sync with the STUB_ARGS layout.
47 COMPILE_ASSERT(STUB_ARGS_code == 0xC, STUB_ARGS_code_is_C);
48 COMPILE_ASSERT(STUB_ARGS_callFrame == 0xE, STUB_ARGS_callFrame_is_E);
50 #if COMPILER(GCC) && PLATFORM(X86)
// Mach-O (Darwin) C symbols carry a leading underscore; ELF symbols do not.
// NOTE(review): the platform #if/#else around these two definitions is not
// visible in this excerpt — confirm against the full file.
53 #define SYMBOL_STRING(name) "_" #name
55 #define SYMBOL_STRING(name) #name
// ctiTrampoline: entry point from C++ into JIT-generated code.  Sets up the
// frame, loads the callFrame stub argument into %edi, then calls through the
// JIT code pointer passed on the stack.
59 ".globl " SYMBOL_STRING(ctiTrampoline) "\n"
60 SYMBOL_STRING(ctiTrampoline) ":" "\n"
62 "movl %esp, %ebp" "\n"
66 "subl $0x1c, %esp" "\n"
67 "movl $512, %esi" "\n" // presumably the initial timeout-check tick count (see emitSlowScriptCheck) — confirm
68 "movl 0x38(%esp), %edi" "\n" // 0x38 = 0x0E * 4, 0x0E = STUB_ARGS_callFrame (see assertion above)
69 "call *0x30(%esp)" "\n" // 0x30 = 0x0C * 4, 0x0C = STUB_ARGS_code (see assertion above)
70 "addl $0x1c, %esp" "\n"
// ctiVMThrowTrampoline: reached when JIT code throws.  Forwards to
// Interpreter::cti_vm_throw using whichever stub-argument passing convention
// was selected at build time, then unwinds the trampoline stack frame.
79 ".globl " SYMBOL_STRING(ctiVMThrowTrampoline) "\n"
80 SYMBOL_STRING(ctiVMThrowTrampoline) ":" "\n"
81 #if USE(JIT_STUB_ARGUMENT_VA_LIST)
82 "call " SYMBOL_STRING(_ZN3JSC11Interpreter12cti_vm_throwEPvz) "\n"
84 #if USE(JIT_STUB_ARGUMENT_REGISTER)
85 "movl %esp, %ecx" "\n"
86 #else // JIT_STUB_ARGUMENT_STACK
87 "movl %esp, 0(%esp)" "\n"
89 "call " SYMBOL_STRING(_ZN3JSC11Interpreter12cti_vm_throwEPPv) "\n"
91 "addl $0x1c, %esp" "\n"
99 #elif COMPILER(GCC) && PLATFORM(X86_64)
// Mach-O (Darwin) C symbols carry a leading underscore; ELF symbols do not.
// NOTE(review): the platform #if/#else around these two definitions is not
// visible in this excerpt — confirm against the full file.
102 #define SYMBOL_STRING(name) "_" #name
104 #define SYMBOL_STRING(name) #name
// x86-64 flavour of ctiTrampoline (see x86 version above).  Stack slots are
// pointer-sized here, so the stub-argument offsets are index * 8.
108 ".globl " SYMBOL_STRING(ctiTrampoline) "\n"
109 SYMBOL_STRING(ctiTrampoline) ":" "\n"
111 "movq %rsp, %rbp" "\n"
115 "subq $0x38, %rsp" "\n"
116 "movq $512, %r12" "\n" // presumably the initial timeout-check tick count (see emitSlowScriptCheck) — confirm
117 "movq 0x70(%rsp), %r13" "\n" // 0x70 = 0x0E * 8, 0x0E = STUB_ARGS_callFrame (see assertion above)
118 "call *0x60(%rsp)" "\n" // 0x60 = 0x0C * 8, 0x0C = STUB_ARGS_code (see assertion above)
119 "addq $0x38, %rsp" "\n"
// x86-64 throw trampoline: only the register-argument stub convention is
// supported on this architecture (stack pointer passed in %rdi).
128 ".globl " SYMBOL_STRING(ctiVMThrowTrampoline) "\n"
129 SYMBOL_STRING(ctiVMThrowTrampoline) ":" "\n"
130 #if USE(JIT_STUB_ARGUMENT_REGISTER)
131 "movq %rsp, %rdi" "\n"
132 "call " SYMBOL_STRING(_ZN3JSC11Interpreter12cti_vm_throwEPPv) "\n"
133 #else // JIT_STUB_ARGUMENT_VA_LIST or JIT_STUB_ARGUMENT_STACK
134 #error "JIT_STUB_ARGUMENT configuration not supported."
136 "addq $0x38, %rsp" "\n"
// MSVC flavour of the entry trampoline: a naked thunk using inline asm.
// Mirrors the GCC/x86 version above.
148 __declspec(naked) JSValue* ctiTrampoline(void* code, RegisterFile*, CallFrame*, JSValue** exception, Profiler**, JSGlobalData*)
159 mov edi, [esp + 0x38]; // 0x38 = 0x0E * 4, 0x0E = STUB_ARGS_callFrame (see assertion above)
160 call [esp + 0x30]; // 0x30 = 0x0C * 4, 0x0C = STUB_ARGS_code (see assertion above)
// MSVC flavour of the throw trampoline: forwards to cti_vm_throw.
170 __declspec(naked) void ctiVMThrowTrampoline()
173 #if USE(JIT_STUB_ARGUMENT_REGISTER)
175 #else // JIT_STUB_ARGUMENT_VA_LIST or JIT_STUB_ARGUMENT_STACK
176 #error "JIT_STUB_ARGUMENT configuration not supported."
178 call JSC::Interpreter::cti_vm_throw;
192 void ctiSetReturnAddress(void** where, void* what)
// Redirect the patchable branch/call whose machine-code location is
// identified by 'where' so that it targets 'what'.  Delegates to the
// MacroAssembler's jump-patching support.
197 void ctiPatchCallByReturnAddress(void* where, void* what)
199 MacroAssembler::Jump::patch(where, what);
// Construct a JIT compiler for one CodeBlock.  'codeBlock' may be null
// (every size computation below guards against it), e.g. when the JIT
// object is used without a block to compile — confirm against callers.
202 JIT::JIT(JSGlobalData* globalData, CodeBlock* codeBlock)
203 : m_interpreter(globalData->interpreter)
204 , m_globalData(globalData)
205 , m_codeBlock(codeBlock)
206 , m_labels(codeBlock ? codeBlock->instructions().size() : 0) // one Label slot per bytecode instruction (jump targets)
207 , m_propertyAccessCompilationInfo(codeBlock ? codeBlock->numberOfStructureStubInfos() : 0)
208 , m_callStructureStubCompilationInfo(codeBlock ? codeBlock->numberOfCallLinkInfos() : 0)
209 , m_lastResultBytecodeRegister(std::numeric_limits<int>::max()) // sentinel: no bytecode result currently cached in a machine register
210 , m_jumpTargetsPosition(0)
// Emit the fast path for op_stricteq / op_nstricteq ('type' selects which;
// for the negated form the true/false results are simply swapped).  Only
// pairs of non-zero immediates are handled inline; operands that are
// JSCells, or equal to the zero immediate, defer to slow cases.
214 void JIT::compileOpStrictEq(Instruction* currentInstruction, CompileOpStrictEqType type)
216 bool negated = (type == OpNStrictEq);
218 unsigned dst = currentInstruction[1].u.operand;
219 unsigned src1 = currentInstruction[2].u.operand;
220 unsigned src2 = currentInstruction[3].u.operand;
222 emitGetVirtualRegisters(src1, X86::eax, src2, X86::edx);
224 // Check that both are immediates, if so check if they're equal
225 Jump firstNotImmediate = emitJumpIfJSCell(X86::eax);
226 Jump secondNotImmediate = emitJumpIfJSCell(X86::edx);
227 Jump bothWereImmediatesButNotEqual = jne32(X86::edx, X86::eax);
229 // They are equal - set the result to true. (Or false, if negated).
230 move(Imm32(asInteger(jsBoolean(!negated))), X86::eax);
231 Jump bothWereImmediatesAndEqual = jump();
233 // eax was not an immediate, we haven't yet checked edx.
234 // If edx is also a JSCell, or is 0, then jump to a slow case,
235 // otherwise these values are not equal.
236 firstNotImmediate.link(this);
237 emitJumpSlowCaseIfJSCell(X86::edx);
238 addSlowCase(je32(X86::edx, Imm32(asInteger(JSImmediate::zeroImmediate()))));
239 Jump firstWasNotImmediate = jump();
241 // eax was an immediate, but edx wasn't.
242 // If eax is 0 jump to a slow case, otherwise these values are not equal.
243 secondNotImmediate.link(this);
244 addSlowCase(je32(X86::eax, Imm32(asInteger(JSImmediate::zeroImmediate()))));
246 // We get here if the two values are different immediates, or one is 0 and the other is a JSCell.
247 // Values are not equal, set the result to false.
248 bothWereImmediatesButNotEqual.link(this);
249 firstWasNotImmediate.link(this);
250 move(Imm32(asInteger(jsBoolean(negated))), X86::eax);
252 bothWereImmediatesAndEqual.link(this);
253 emitPutVirtualRegister(dst);
// Emit the "slow script" watchdog check: decrement the tick counter held in
// timeoutCheckRegister and, only when it reaches zero, call out to
// cti_timeout_check, which leaves a fresh tick count in eax.
256 void JIT::emitSlowScriptCheck()
258 Jump skipTimeout = jnzSub32(Imm32(1), timeoutCheckRegister); // skip the call while the counter stays non-zero
259 emitCTICall(Interpreter::cti_timeout_check);
260 move(X86::eax, timeoutCheckRegister); // reload the counter from the CTI call's result
261 skipTimeout.link(this);
263 killLastResultRegister(); // the CTI call path clobbers registers, so the cached bytecode result is no longer valid
267 #define NEXT_OPCODE(name) \
268 m_bytecodeIndex += OPCODE_LENGTH(name); \
271 #define CTI_COMPILE_BINARY_OP(name) \
273 emitPutJITStubArgFromVirtualRegister(currentInstruction[2].u.operand, 1, X86::ecx); \
274 emitPutJITStubArgFromVirtualRegister(currentInstruction[3].u.operand, 2, X86::ecx); \
275 emitCTICall(Interpreter::cti_##name); \
276 emitPutVirtualRegister(currentInstruction[1].u.operand); \
280 #define CTI_COMPILE_UNARY_OP(name) \
282 emitPutJITStubArgFromVirtualRegister(currentInstruction[2].u.operand, 1, X86::ecx); \
283 emitCTICall(Interpreter::cti_##name); \
284 emitPutVirtualRegister(currentInstruction[1].u.operand); \
288 void JIT::privateCompileMainPass()
290 Instruction* instructionsBegin = m_codeBlock->instructions().begin();
291 unsigned instructionCount = m_codeBlock->instructions().size();
292 unsigned propertyAccessInstructionIndex = 0;
293 unsigned globalResolveInfoIndex = 0;
294 unsigned callLinkInfoIndex = 0;
296 for (m_bytecodeIndex = 0; m_bytecodeIndex < instructionCount; ) {
297 Instruction* currentInstruction = instructionsBegin + m_bytecodeIndex;
298 ASSERT_WITH_MESSAGE(m_interpreter->isOpcode(currentInstruction->u.opcode), "privateCompileMainPass gone bad @ %d", m_bytecodeIndex);
300 #if ENABLE(OPCODE_SAMPLING)
301 if (m_bytecodeIndex > 0) // Avoid the overhead of sampling op_enter twice.
302 store32(m_interpreter->sampler()->encodeSample(currentInstruction), m_interpreter->sampler()->sampleSlot());
305 m_labels[m_bytecodeIndex] = label();
306 OpcodeID opcodeID = m_interpreter->getOpcodeID(currentInstruction->u.opcode);
310 emitGetVirtualRegister(currentInstruction[2].u.operand, X86::eax);
311 emitPutVirtualRegister(currentInstruction[1].u.operand);
315 unsigned dst = currentInstruction[1].u.operand;
316 unsigned src1 = currentInstruction[2].u.operand;
317 unsigned src2 = currentInstruction[3].u.operand;
319 if (JSValue* value = getConstantImmediateNumericArg(src1)) {
320 emitGetVirtualRegister(src2, X86::eax);
321 emitJumpSlowCaseIfNotImmNum(X86::eax);
322 addSlowCase(joAdd32(Imm32(getDeTaggedConstantImmediate(value)), X86::eax));
323 signExtend32ToPtr(X86::eax, X86::eax);
324 emitPutVirtualRegister(dst);
325 } else if (JSValue* value = getConstantImmediateNumericArg(src2)) {
326 emitGetVirtualRegister(src1, X86::eax);
327 emitJumpSlowCaseIfNotImmNum(X86::eax);
328 addSlowCase(joAdd32(Imm32(getDeTaggedConstantImmediate(value)), X86::eax));
329 signExtend32ToPtr(X86::eax, X86::eax);
330 emitPutVirtualRegister(dst);
332 OperandTypes types = OperandTypes::fromInt(currentInstruction[4].u.operand);
333 if (types.first().mightBeNumber() && types.second().mightBeNumber())
334 compileBinaryArithOp(op_add, currentInstruction[1].u.operand, currentInstruction[2].u.operand, currentInstruction[3].u.operand, OperandTypes::fromInt(currentInstruction[4].u.operand));
336 emitPutJITStubArgFromVirtualRegister(currentInstruction[2].u.operand, 1, X86::ecx);
337 emitPutJITStubArgFromVirtualRegister(currentInstruction[3].u.operand, 2, X86::ecx);
338 emitCTICall(Interpreter::cti_op_add);
339 emitPutVirtualRegister(currentInstruction[1].u.operand);
345 if (m_codeBlock->needsFullScopeChain())
346 emitCTICall(Interpreter::cti_op_end);
347 emitGetVirtualRegister(currentInstruction[1].u.operand, X86::eax);
348 push(Address(callFrameRegister, RegisterFile::ReturnPC * static_cast<int>(sizeof(Register))));
353 unsigned target = currentInstruction[1].u.operand;
354 addJump(jump(), target + 1);
358 int srcDst = currentInstruction[1].u.operand;
359 emitGetVirtualRegister(srcDst, X86::eax);
360 emitJumpSlowCaseIfNotImmNum(X86::eax);
361 addSlowCase(joAdd32(Imm32(getDeTaggedConstantImmediate(JSImmediate::oneImmediate())), X86::eax));
362 signExtend32ToPtr(X86::eax, X86::eax);
363 emitPutVirtualRegister(srcDst);
364 NEXT_OPCODE(op_pre_inc);
367 emitSlowScriptCheck();
369 unsigned target = currentInstruction[1].u.operand;
370 addJump(jump(), target + 1);
373 case op_loop_if_less: {
374 emitSlowScriptCheck();
376 unsigned target = currentInstruction[3].u.operand;
377 JSValue* src2imm = getConstantImmediateNumericArg(currentInstruction[2].u.operand);
379 emitGetVirtualRegister(currentInstruction[1].u.operand, X86::eax);
380 emitJumpSlowCaseIfNotImmNum(X86::eax);
381 addJump(jl32(X86::eax, Imm32(asInteger(src2imm))), target + 3);
383 emitGetVirtualRegisters(currentInstruction[1].u.operand, X86::eax, currentInstruction[2].u.operand, X86::edx);
384 emitJumpSlowCaseIfNotImmNum(X86::eax);
385 emitJumpSlowCaseIfNotImmNum(X86::edx);
386 addJump(jl32(X86::eax, X86::edx), target + 3);
388 NEXT_OPCODE(op_loop_if_less);
390 case op_loop_if_lesseq: {
391 emitSlowScriptCheck();
393 unsigned target = currentInstruction[3].u.operand;
394 JSValue* src2imm = getConstantImmediateNumericArg(currentInstruction[2].u.operand);
396 emitGetVirtualRegister(currentInstruction[1].u.operand, X86::eax);
397 emitJumpSlowCaseIfNotImmNum(X86::eax);
398 addJump(jle32(X86::eax, Imm32(asInteger(src2imm))), target + 3);
400 emitGetVirtualRegisters(currentInstruction[1].u.operand, X86::eax, currentInstruction[2].u.operand, X86::edx);
401 emitJumpSlowCaseIfNotImmNum(X86::eax);
402 emitJumpSlowCaseIfNotImmNum(X86::edx);
403 addJump(jle32(X86::eax, X86::edx), target + 3);
405 NEXT_OPCODE(op_loop_if_lesseq);
407 case op_new_object: {
408 emitCTICall(Interpreter::cti_op_new_object);
409 emitPutVirtualRegister(currentInstruction[1].u.operand);
410 NEXT_OPCODE(op_new_object);
413 compilePutByIdHotPath(currentInstruction[1].u.operand, &(m_codeBlock->identifier(currentInstruction[2].u.operand)), currentInstruction[3].u.operand, propertyAccessInstructionIndex++);
414 NEXT_OPCODE(op_put_by_id);
417 compileGetByIdHotPath(currentInstruction[1].u.operand, currentInstruction[2].u.operand, &(m_codeBlock->identifier(currentInstruction[3].u.operand)), propertyAccessInstructionIndex++);
418 NEXT_OPCODE(op_get_by_id);
420 case op_instanceof: {
421 emitGetVirtualRegister(currentInstruction[2].u.operand, X86::eax); // value
422 emitGetVirtualRegister(currentInstruction[3].u.operand, X86::ecx); // baseVal
423 emitGetVirtualRegister(currentInstruction[4].u.operand, X86::edx); // proto
425 // check if any are immediates
426 move(X86::eax, X86::ebx);
427 or32(X86::ecx, X86::ebx);
428 or32(X86::edx, X86::ebx);
429 emitJumpSlowCaseIfNotJSCell(X86::ebx);
431 // check that all are object type - this is a bit of a bithack to avoid excess branching;
432 // we check that the sum of the three type codes from Structures is exactly 3 * ObjectType,
433 // this works because NumberType and StringType are smaller
434 move(Imm32(3 * ObjectType), X86::ebx);
435 loadPtr(Address(X86::eax, FIELD_OFFSET(JSCell, m_structure)), X86::eax);
436 loadPtr(Address(X86::ecx, FIELD_OFFSET(JSCell, m_structure)), X86::ecx);
437 loadPtr(Address(X86::edx, FIELD_OFFSET(JSCell, m_structure)), X86::edx);
438 sub32(Address(X86::eax, FIELD_OFFSET(Structure, m_typeInfo.m_type)), X86::ebx);
439 sub32(Address(X86::ecx, FIELD_OFFSET(Structure, m_typeInfo.m_type)), X86::ebx);
440 addSlowCase(jne32(Address(X86::edx, FIELD_OFFSET(Structure, m_typeInfo.m_type)), X86::ebx));
442 // check that baseVal's flags include ImplementsHasInstance but not OverridesHasInstance
443 load32(Address(X86::ecx, FIELD_OFFSET(Structure, m_typeInfo.m_flags)), X86::ecx);
444 and32(Imm32(ImplementsHasInstance | OverridesHasInstance), X86::ecx);
445 addSlowCase(jne32(X86::ecx, Imm32(ImplementsHasInstance)));
447 emitGetVirtualRegister(currentInstruction[2].u.operand, X86::ecx); // reload value
448 emitGetVirtualRegister(currentInstruction[4].u.operand, X86::edx); // reload proto
450 // optimistically load true result
451 move(Imm32(asInteger(jsBoolean(true))), X86::eax);
455 // load value's prototype
456 loadPtr(Address(X86::ecx, FIELD_OFFSET(JSCell, m_structure)), X86::ecx);
457 loadPtr(Address(X86::ecx, FIELD_OFFSET(Structure, m_prototype)), X86::ecx);
459 Jump exit = jePtr(X86::ecx, X86::edx);
461 jne32(X86::ecx, Imm32(asInteger(jsNull())), loop);
463 move(Imm32(asInteger(jsBoolean(false))), X86::eax);
467 emitPutVirtualRegister(currentInstruction[1].u.operand);
469 NEXT_OPCODE(op_instanceof);
472 emitPutJITStubArgFromVirtualRegister(currentInstruction[2].u.operand, 1, X86::ecx);
473 Identifier* ident = &(m_codeBlock->identifier(currentInstruction[3].u.operand));
474 emitPutJITStubArgConstant(ident, 2);
475 emitCTICall(Interpreter::cti_op_del_by_id);
476 emitPutVirtualRegister(currentInstruction[1].u.operand);
477 NEXT_OPCODE(op_del_by_id);
480 unsigned dst = currentInstruction[1].u.operand;
481 unsigned src1 = currentInstruction[2].u.operand;
482 unsigned src2 = currentInstruction[3].u.operand;
484 // For now, only plant a fast int case if the constant operand is greater than zero.
485 JSValue* src1Value = getConstantImmediateNumericArg(src1);
486 JSValue* src2Value = getConstantImmediateNumericArg(src2);
488 if (src1Value && ((value = JSImmediate::intValue(src1Value)) > 0)) {
489 emitGetVirtualRegister(src2, X86::eax);
490 emitJumpSlowCaseIfNotImmNum(X86::eax);
491 emitFastArithDeTagImmediate(X86::eax);
492 addSlowCase(joMul32(Imm32(value), X86::eax, X86::eax));
493 emitFastArithReTagImmediate(X86::eax);
494 emitPutVirtualRegister(dst);
495 } else if (src2Value && ((value = JSImmediate::intValue(src2Value)) > 0)) {
496 emitGetVirtualRegister(src1, X86::eax);
497 emitJumpSlowCaseIfNotImmNum(X86::eax);
498 emitFastArithDeTagImmediate(X86::eax);
499 addSlowCase(joMul32(Imm32(value), X86::eax, X86::eax));
500 emitFastArithReTagImmediate(X86::eax);
501 emitPutVirtualRegister(dst);
503 compileBinaryArithOp(op_mul, currentInstruction[1].u.operand, currentInstruction[2].u.operand, currentInstruction[3].u.operand, OperandTypes::fromInt(currentInstruction[4].u.operand));
508 FuncDeclNode* func = m_codeBlock->function(currentInstruction[2].u.operand);
509 emitPutJITStubArgConstant(func, 1);
510 emitCTICall(Interpreter::cti_op_new_func);
511 emitPutVirtualRegister(currentInstruction[1].u.operand);
512 NEXT_OPCODE(op_new_func);
515 compileOpCall(opcodeID, currentInstruction, callLinkInfoIndex++);
516 NEXT_OPCODE(op_call);
519 compileOpCall(opcodeID, currentInstruction, callLinkInfoIndex++);
520 NEXT_OPCODE(op_call_eval);
523 compileOpCall(opcodeID, currentInstruction, callLinkInfoIndex++);
524 NEXT_OPCODE(op_construct);
526 case op_get_global_var: {
527 JSVariableObject* globalObject = static_cast<JSVariableObject*>(currentInstruction[2].u.jsCell);
528 move(ImmPtr(globalObject), X86::eax);
529 emitGetVariableObjectRegister(X86::eax, currentInstruction[3].u.operand, X86::eax);
530 emitPutVirtualRegister(currentInstruction[1].u.operand);
531 NEXT_OPCODE(op_get_global_var);
533 case op_put_global_var: {
534 emitGetVirtualRegister(currentInstruction[3].u.operand, X86::edx);
535 JSVariableObject* globalObject = static_cast<JSVariableObject*>(currentInstruction[1].u.jsCell);
536 move(ImmPtr(globalObject), X86::eax);
537 emitPutVariableObjectRegister(X86::edx, X86::eax, currentInstruction[2].u.operand);
538 NEXT_OPCODE(op_put_global_var);
540 case op_get_scoped_var: {
541 int skip = currentInstruction[3].u.operand + m_codeBlock->needsFullScopeChain();
543 emitGetFromCallFrameHeader(RegisterFile::ScopeChain, X86::eax);
545 loadPtr(Address(X86::eax, FIELD_OFFSET(ScopeChainNode, next)), X86::eax);
547 loadPtr(Address(X86::eax, FIELD_OFFSET(ScopeChainNode, object)), X86::eax);
548 emitGetVariableObjectRegister(X86::eax, currentInstruction[2].u.operand, X86::eax);
549 emitPutVirtualRegister(currentInstruction[1].u.operand);
550 NEXT_OPCODE(op_get_scoped_var);
552 case op_put_scoped_var: {
553 int skip = currentInstruction[2].u.operand + m_codeBlock->needsFullScopeChain();
555 emitGetFromCallFrameHeader(RegisterFile::ScopeChain, X86::edx);
556 emitGetVirtualRegister(currentInstruction[3].u.operand, X86::eax);
558 loadPtr(Address(X86::edx, FIELD_OFFSET(ScopeChainNode, next)), X86::edx);
560 loadPtr(Address(X86::edx, FIELD_OFFSET(ScopeChainNode, object)), X86::edx);
561 emitPutVariableObjectRegister(X86::eax, X86::edx, currentInstruction[1].u.operand);
562 NEXT_OPCODE(op_put_scoped_var);
564 case op_tear_off_activation: {
565 emitPutJITStubArgFromVirtualRegister(currentInstruction[1].u.operand, 1, X86::ecx);
566 emitCTICall(Interpreter::cti_op_tear_off_activation);
567 NEXT_OPCODE(op_tear_off_activation);
569 case op_tear_off_arguments: {
570 emitCTICall(Interpreter::cti_op_tear_off_arguments);
571 NEXT_OPCODE(op_tear_off_arguments);
574 // We could JIT generate the deref, only calling out to C when the refcount hits zero.
575 if (m_codeBlock->needsFullScopeChain())
576 emitCTICall(Interpreter::cti_op_ret_scopeChain);
578 // Return the result in %eax.
579 emitGetVirtualRegister(currentInstruction[1].u.operand, X86::eax);
581 // Grab the return address.
582 emitGetFromCallFrameHeader(RegisterFile::ReturnPC, X86::edx);
584 // Restore our caller's "r".
585 emitGetFromCallFrameHeader(RegisterFile::CallerFrame, callFrameRegister);
594 emitPutJITStubArgConstant(currentInstruction[2].u.operand, 1);
595 emitPutJITStubArgConstant(currentInstruction[3].u.operand, 2);
596 emitCTICall(Interpreter::cti_op_new_array);
597 emitPutVirtualRegister(currentInstruction[1].u.operand);
598 NEXT_OPCODE(op_new_array);
601 Identifier* ident = &(m_codeBlock->identifier(currentInstruction[2].u.operand));
602 emitPutJITStubArgConstant(ident, 1);
603 emitCTICall(Interpreter::cti_op_resolve);
604 emitPutVirtualRegister(currentInstruction[1].u.operand);
605 NEXT_OPCODE(op_resolve);
607 case op_construct_verify: {
608 emitGetVirtualRegister(currentInstruction[1].u.operand, X86::eax);
610 emitJumpSlowCaseIfNotJSCell(X86::eax);
611 loadPtr(Address(X86::eax, FIELD_OFFSET(JSCell, m_structure)), X86::ecx);
612 addSlowCase(jne32(Address(X86::ecx, FIELD_OFFSET(Structure, m_typeInfo) + FIELD_OFFSET(TypeInfo, m_type)), Imm32(ObjectType)));
614 NEXT_OPCODE(op_construct_verify);
616 case op_get_by_val: {
617 emitGetVirtualRegisters(currentInstruction[2].u.operand, X86::eax, currentInstruction[3].u.operand, X86::edx);
618 emitJumpSlowCaseIfNotImmNum(X86::edx);
619 emitFastArithImmToInt(X86::edx);
620 emitJumpSlowCaseIfNotJSCell(X86::eax);
621 addSlowCase(jnePtr(Address(X86::eax), ImmPtr(m_interpreter->m_jsArrayVptr)));
623 // This is an array; get the m_storage pointer into ecx, then check if the index is below the fast cutoff
624 loadPtr(Address(X86::eax, FIELD_OFFSET(JSArray, m_storage)), X86::ecx);
625 addSlowCase(jae32(X86::edx, Address(X86::eax, FIELD_OFFSET(JSArray, m_fastAccessCutoff))));
627 // Get the value from the vector
628 loadPtr(BaseIndex(X86::ecx, X86::edx, ScalePtr, FIELD_OFFSET(ArrayStorage, m_vector[0])), X86::eax);
629 emitPutVirtualRegister(currentInstruction[1].u.operand);
630 NEXT_OPCODE(op_get_by_val);
632 case op_resolve_func: {
633 Identifier* ident = &(m_codeBlock->identifier(currentInstruction[3].u.operand));
634 emitPutJITStubArgConstant(ident, 1);
635 emitCTICall(Interpreter::cti_op_resolve_func);
636 emitPutVirtualRegister(currentInstruction[2].u.operand, X86::edx);
637 emitPutVirtualRegister(currentInstruction[1].u.operand);
638 NEXT_OPCODE(op_resolve_func);
641 compileBinaryArithOp(op_sub, currentInstruction[1].u.operand, currentInstruction[2].u.operand, currentInstruction[3].u.operand, OperandTypes::fromInt(currentInstruction[4].u.operand));
644 case op_put_by_val: {
645 emitGetVirtualRegisters(currentInstruction[1].u.operand, X86::eax, currentInstruction[2].u.operand, X86::edx);
646 emitJumpSlowCaseIfNotImmNum(X86::edx);
647 emitFastArithImmToInt(X86::edx);
648 emitJumpSlowCaseIfNotJSCell(X86::eax);
649 addSlowCase(jnePtr(Address(X86::eax), ImmPtr(m_interpreter->m_jsArrayVptr)));
651 // This is an array; get the m_storage pointer into ecx, then check if the index is below the fast cutoff
652 loadPtr(Address(X86::eax, FIELD_OFFSET(JSArray, m_storage)), X86::ecx);
653 Jump inFastVector = jb32(X86::edx, Address(X86::eax, FIELD_OFFSET(JSArray, m_fastAccessCutoff)));
654 // No; oh well, check if the access is within the vector - if so, we may still be okay.
655 addSlowCase(jae32(X86::edx, Address(X86::ecx, FIELD_OFFSET(ArrayStorage, m_vectorLength))));
657 // This is a write to the slow part of the vector; first, we have to check if this would be the first write to this location.
658 // FIXME: should be able to handle initial write to array; increment the number of items in the array, and potentially update fast access cutoff.
659 addSlowCase(jzPtr(BaseIndex(X86::ecx, X86::edx, ScalePtr, FIELD_OFFSET(ArrayStorage, m_vector[0]))));
661 // All good - put the value into the array.
662 inFastVector.link(this);
663 emitGetVirtualRegister(currentInstruction[3].u.operand, X86::eax);
664 storePtr(X86::eax, BaseIndex(X86::ecx, X86::edx, ScalePtr, FIELD_OFFSET(ArrayStorage, m_vector[0])));
665 NEXT_OPCODE(op_put_by_val);
667 CTI_COMPILE_BINARY_OP(op_lesseq)
668 case op_loop_if_true: {
669 emitSlowScriptCheck();
671 unsigned target = currentInstruction[2].u.operand;
672 emitGetVirtualRegister(currentInstruction[1].u.operand, X86::eax);
674 Jump isZero = je32(X86::eax, Imm32(asInteger(JSImmediate::zeroImmediate())));
675 addJump(jnz32(X86::eax, Imm32(JSImmediate::TagBitTypeInteger)), target + 2);
677 addJump(je32(X86::eax, Imm32(asInteger(JSImmediate::trueImmediate()))), target + 2);
678 addSlowCase(jne32(X86::eax, Imm32(asInteger(JSImmediate::falseImmediate()))));
681 NEXT_OPCODE(op_loop_if_true);
683 case op_resolve_base: {
684 Identifier* ident = &(m_codeBlock->identifier(currentInstruction[2].u.operand));
685 emitPutJITStubArgConstant(ident, 1);
686 emitCTICall(Interpreter::cti_op_resolve_base);
687 emitPutVirtualRegister(currentInstruction[1].u.operand);
688 NEXT_OPCODE(op_resolve_base);
691 emitPutJITStubArgFromVirtualRegister(currentInstruction[2].u.operand, 1, X86::ecx);
692 emitCTICall(Interpreter::cti_op_negate);
693 emitPutVirtualRegister(currentInstruction[1].u.operand);
694 NEXT_OPCODE(op_negate);
696 case op_resolve_skip: {
697 Identifier* ident = &(m_codeBlock->identifier(currentInstruction[2].u.operand));
698 emitPutJITStubArgConstant(ident, 1);
699 emitPutJITStubArgConstant(currentInstruction[3].u.operand + m_codeBlock->needsFullScopeChain(), 2);
700 emitCTICall(Interpreter::cti_op_resolve_skip);
701 emitPutVirtualRegister(currentInstruction[1].u.operand);
702 NEXT_OPCODE(op_resolve_skip);
704 case op_resolve_global: {
706 void* globalObject = currentInstruction[2].u.jsCell;
707 Identifier* ident = &(m_codeBlock->identifier(currentInstruction[3].u.operand));
709 unsigned currentIndex = globalResolveInfoIndex++;
710 void* structureAddress = &(m_codeBlock->globalResolveInfo(currentIndex).structure);
711 void* offsetAddr = &(m_codeBlock->globalResolveInfo(currentIndex).offset);
713 // Check Structure of global object
714 move(ImmPtr(globalObject), X86::eax);
715 loadPtr(structureAddress, X86::edx);
716 Jump noMatch = jnePtr(X86::edx, Address(X86::eax, FIELD_OFFSET(JSCell, m_structure))); // Structures don't match
718 // Load cached property
719 loadPtr(Address(X86::eax, FIELD_OFFSET(JSGlobalObject, m_propertyStorage)), X86::eax);
720 load32(offsetAddr, X86::edx);
721 loadPtr(BaseIndex(X86::eax, X86::edx, ScalePtr), X86::eax);
722 emitPutVirtualRegister(currentInstruction[1].u.operand);
727 emitPutJITStubArgConstant(globalObject, 1);
728 emitPutJITStubArgConstant(ident, 2);
729 emitPutJITStubArgConstant(currentIndex, 3);
730 emitCTICall(Interpreter::cti_op_resolve_global);
731 emitPutVirtualRegister(currentInstruction[1].u.operand);
733 NEXT_OPCODE(op_resolve_global);
735 CTI_COMPILE_BINARY_OP(op_div)
737 int srcDst = currentInstruction[1].u.operand;
738 emitGetVirtualRegister(srcDst, X86::eax);
739 emitJumpSlowCaseIfNotImmNum(X86::eax);
740 addSlowCase(joSub32(Imm32(getDeTaggedConstantImmediate(JSImmediate::oneImmediate())), X86::eax));
741 signExtend32ToPtr(X86::eax, X86::eax);
742 emitPutVirtualRegister(srcDst);
743 NEXT_OPCODE(op_pre_dec);
746 unsigned target = currentInstruction[3].u.operand;
747 JSValue* src2imm = getConstantImmediateNumericArg(currentInstruction[2].u.operand);
749 emitGetVirtualRegister(currentInstruction[1].u.operand, X86::edx);
750 emitJumpSlowCaseIfNotImmNum(X86::edx);
751 addJump(jge32(X86::edx, Imm32(asInteger(src2imm))), target + 3);
753 emitGetVirtualRegisters(currentInstruction[1].u.operand, X86::eax, currentInstruction[2].u.operand, X86::edx);
754 emitJumpSlowCaseIfNotImmNum(X86::eax);
755 emitJumpSlowCaseIfNotImmNum(X86::edx);
756 addJump(jge32(X86::eax, X86::edx), target + 3);
758 NEXT_OPCODE(op_jnless);
761 emitGetVirtualRegister(currentInstruction[2].u.operand, X86::eax);
762 xorPtr(Imm32(JSImmediate::FullTagTypeBool), X86::eax);
763 addSlowCase(jnz32(X86::eax, Imm32(JSImmediate::FullTagTypeMask)));
764 xorPtr(Imm32(JSImmediate::FullTagTypeBool | JSImmediate::ExtendedPayloadBitBoolValue), X86::eax);
765 emitPutVirtualRegister(currentInstruction[1].u.operand);
769 unsigned target = currentInstruction[2].u.operand;
770 emitGetVirtualRegister(currentInstruction[1].u.operand, X86::eax);
772 addJump(je32(X86::eax, Imm32(asInteger(JSImmediate::zeroImmediate()))), target + 2);
773 Jump isNonZero = jnz32(X86::eax, Imm32(JSImmediate::TagBitTypeInteger));
775 addJump(je32(X86::eax, Imm32(asInteger(JSImmediate::falseImmediate()))), target + 2);
776 addSlowCase(jne32(X86::eax, Imm32(asInteger(JSImmediate::trueImmediate()))));
778 isNonZero.link(this);
779 NEXT_OPCODE(op_jfalse);
782 unsigned src = currentInstruction[1].u.operand;
783 unsigned target = currentInstruction[2].u.operand;
785 emitGetVirtualRegister(src, X86::eax);
786 Jump isImmediate = emitJumpIfNotJSCell(X86::eax);
788 // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
789 loadPtr(Address(X86::eax, FIELD_OFFSET(JSCell, m_structure)), X86::ecx);
790 addJump(jnz32(Address(X86::ecx, FIELD_OFFSET(Structure, m_typeInfo.m_flags)), Imm32(MasqueradesAsUndefined)), target + 2);
791 Jump wasNotImmediate = jump();
793 // Now handle the immediate cases - undefined & null
794 isImmediate.link(this);
795 and32(Imm32(~JSImmediate::ExtendedTagBitUndefined), X86::eax);
796 addJump(je32(X86::eax, Imm32(asInteger(jsNull()))), target + 2);
798 wasNotImmediate.link(this);
799 NEXT_OPCODE(op_jeq_null);
802 unsigned src = currentInstruction[1].u.operand;
803 unsigned target = currentInstruction[2].u.operand;
805 emitGetVirtualRegister(src, X86::eax);
806 Jump isImmediate = emitJumpIfNotJSCell(X86::eax);
808 // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
809 loadPtr(Address(X86::eax, FIELD_OFFSET(JSCell, m_structure)), X86::ecx);
810 addJump(jz32(Address(X86::ecx, FIELD_OFFSET(Structure, m_typeInfo.m_flags)), Imm32(MasqueradesAsUndefined)), target + 2);
811 Jump wasNotImmediate = jump();
813 // Now handle the immediate cases - undefined & null
814 isImmediate.link(this);
815 and32(Imm32(~JSImmediate::ExtendedTagBitUndefined), X86::eax);
816 addJump(jne32(X86::eax, Imm32(asInteger(jsNull()))), target + 2);
818 wasNotImmediate.link(this);
819 NEXT_OPCODE(op_jneq_null);
822 int srcDst = currentInstruction[2].u.operand;
823 emitGetVirtualRegister(srcDst, X86::eax);
824 move(X86::eax, X86::edx);
825 emitJumpSlowCaseIfNotImmNum(X86::eax);
826 addSlowCase(joAdd32(Imm32(getDeTaggedConstantImmediate(JSImmediate::oneImmediate())), X86::edx));
827 signExtend32ToPtr(X86::edx, X86::edx);
828 emitPutVirtualRegister(srcDst, X86::edx);
829 emitPutVirtualRegister(currentInstruction[1].u.operand);
830 NEXT_OPCODE(op_post_inc);
832 case op_unexpected_load: {
833 JSValue* v = m_codeBlock->unexpectedConstant(currentInstruction[2].u.operand);
834 move(ImmPtr(v), X86::eax);
835 emitPutVirtualRegister(currentInstruction[1].u.operand);
836 NEXT_OPCODE(op_unexpected_load);
839 int retAddrDst = currentInstruction[1].u.operand;
840 int target = currentInstruction[2].u.operand;
841 DataLabelPtr storeLocation = storePtrWithPatch(Address(callFrameRegister, sizeof(Register) * retAddrDst));
842 addJump(jump(), target + 2);
843 m_jsrSites.append(JSRInfo(storeLocation, label()));
847 jump(Address(callFrameRegister, sizeof(Register) * currentInstruction[1].u.operand));
848 NEXT_OPCODE(op_sret);
851 emitGetVirtualRegisters(currentInstruction[2].u.operand, X86::eax, currentInstruction[3].u.operand, X86::edx);
852 emitJumpSlowCaseIfNotImmNums(X86::eax, X86::edx, X86::ecx);
853 sete32(X86::edx, X86::eax);
854 emitTagAsBoolImmediate(X86::eax);
855 emitPutVirtualRegister(currentInstruction[1].u.operand);
859 emitGetVirtualRegisters(currentInstruction[2].u.operand, X86::eax, currentInstruction[3].u.operand, X86::ecx);
860 emitJumpSlowCaseIfNotImmNum(X86::eax);
861 emitJumpSlowCaseIfNotImmNum(X86::ecx);
862 emitFastArithImmToInt(X86::eax);
863 emitFastArithImmToInt(X86::ecx);
864 lshift32(X86::ecx, X86::eax);
865 emitFastArithIntToImmOrSlowCase(X86::eax);
866 emitPutVirtualRegister(currentInstruction[1].u.operand);
867 NEXT_OPCODE(op_lshift);
870 unsigned src1 = currentInstruction[2].u.operand;
871 unsigned src2 = currentInstruction[3].u.operand;
872 unsigned dst = currentInstruction[1].u.operand;
873 if (JSValue* value = getConstantImmediateNumericArg(src1)) {
874 emitGetVirtualRegister(src2, X86::eax);
875 emitJumpSlowCaseIfNotImmNum(X86::eax);
876 andPtr(Imm32(asInteger(value)), X86::eax); // FIXME: make it more obvious this is relying on the format of JSImmediate
877 emitPutVirtualRegister(dst);
878 } else if (JSValue* value = getConstantImmediateNumericArg(src2)) {
879 emitGetVirtualRegister(src1, X86::eax);
880 emitJumpSlowCaseIfNotImmNum(X86::eax);
881 andPtr(Imm32(asInteger(value)), X86::eax);
882 emitPutVirtualRegister(dst);
884 emitGetVirtualRegisters(src1, X86::eax, src2, X86::edx);
885 andPtr(X86::edx, X86::eax);
886 emitJumpSlowCaseIfNotImmNum(X86::eax);
887 emitPutVirtualRegister(dst);
889 NEXT_OPCODE(op_bitand);
892 unsigned src1 = currentInstruction[2].u.operand;
893 unsigned src2 = currentInstruction[3].u.operand;
894 if (JSValue* value = getConstantImmediateNumericArg(src2)) {
895 emitGetVirtualRegister(src1, X86::eax);
896 emitJumpSlowCaseIfNotImmNum(X86::eax);
897 // Mask with 0x1f as per ecma-262 11.7.2 step 7.
898 rshift32(Imm32(JSImmediate::getTruncatedUInt32(value) & 0x1f), X86::eax);
900 emitGetVirtualRegisters(src1, X86::eax, src2, X86::ecx);
901 emitJumpSlowCaseIfNotImmNum(X86::eax);
902 emitJumpSlowCaseIfNotImmNum(X86::ecx);
903 emitFastArithImmToInt(X86::ecx);
904 rshift32(X86::ecx, X86::eax);
906 emitFastArithPotentiallyReTagImmediate(X86::eax);
907 emitPutVirtualRegister(currentInstruction[1].u.operand);
908 NEXT_OPCODE(op_rshift);
911 emitGetVirtualRegister(currentInstruction[2].u.operand, X86::eax);
912 emitJumpSlowCaseIfNotImmNum(X86::eax);
913 xorPtr(Imm32(~JSImmediate::TagBitTypeInteger), X86::eax);
914 emitPutVirtualRegister(currentInstruction[1].u.operand);
915 NEXT_OPCODE(op_bitnot);
917 case op_resolve_with_base: {
918 Identifier* ident = &(m_codeBlock->identifier(currentInstruction[3].u.operand));
919 emitPutJITStubArgConstant(ident, 1);
920 emitCTICall(Interpreter::cti_op_resolve_with_base);
921 emitPutVirtualRegister(currentInstruction[2].u.operand, X86::edx);
922 emitPutVirtualRegister(currentInstruction[1].u.operand);
923 NEXT_OPCODE(op_resolve_with_base);
925 case op_new_func_exp: {
926 FuncExprNode* func = m_codeBlock->functionExpression(currentInstruction[2].u.operand);
927 emitPutJITStubArgConstant(func, 1);
928 emitCTICall(Interpreter::cti_op_new_func_exp);
929 emitPutVirtualRegister(currentInstruction[1].u.operand);
930 NEXT_OPCODE(op_new_func_exp);
933 emitGetVirtualRegisters(currentInstruction[2].u.operand, X86::eax, currentInstruction[3].u.operand, X86::ecx);
934 emitJumpSlowCaseIfNotImmNum(X86::eax);
935 emitJumpSlowCaseIfNotImmNum(X86::ecx);
936 emitFastArithDeTagImmediate(X86::eax);
937 addSlowCase(emitFastArithDeTagImmediateJumpIfZero(X86::ecx));
938 mod32(X86::ecx, X86::eax, X86::edx);
939 emitFastArithReTagImmediate(X86::edx);
940 move(X86::edx, X86::eax);
941 emitPutVirtualRegister(currentInstruction[1].u.operand);
945 unsigned target = currentInstruction[2].u.operand;
946 emitGetVirtualRegister(currentInstruction[1].u.operand, X86::eax);
948 Jump isZero = je32(X86::eax, Imm32(asInteger(JSImmediate::zeroImmediate())));
949 addJump(jnz32(X86::eax, Imm32(JSImmediate::TagBitTypeInteger)), target + 2);
951 addJump(je32(X86::eax, Imm32(asInteger(JSImmediate::trueImmediate()))), target + 2);
952 addSlowCase(jne32(X86::eax, Imm32(asInteger(JSImmediate::falseImmediate()))));
955 NEXT_OPCODE(op_jtrue);
957 CTI_COMPILE_BINARY_OP(op_less)
959 emitGetVirtualRegisters(currentInstruction[2].u.operand, X86::eax, currentInstruction[3].u.operand, X86::edx);
960 emitJumpSlowCaseIfNotImmNums(X86::eax, X86::edx, X86::ecx);
961 setne32(X86::edx, X86::eax);
962 emitTagAsBoolImmediate(X86::eax);
964 emitPutVirtualRegister(currentInstruction[1].u.operand);
969 int srcDst = currentInstruction[2].u.operand;
970 emitGetVirtualRegister(srcDst, X86::eax);
971 move(X86::eax, X86::edx);
972 emitJumpSlowCaseIfNotImmNum(X86::eax);
973 addSlowCase(joSub32(Imm32(getDeTaggedConstantImmediate(JSImmediate::oneImmediate())), X86::edx));
974 signExtend32ToPtr(X86::edx, X86::edx);
975 emitPutVirtualRegister(srcDst, X86::edx);
976 emitPutVirtualRegister(currentInstruction[1].u.operand);
977 NEXT_OPCODE(op_post_dec);
979 CTI_COMPILE_BINARY_OP(op_urshift)
981 emitGetVirtualRegisters(currentInstruction[2].u.operand, X86::eax, currentInstruction[3].u.operand, X86::edx);
982 emitJumpSlowCaseIfNotImmNums(X86::eax, X86::edx, X86::ecx);
983 xor32(X86::edx, X86::eax);
984 emitFastArithReTagImmediate(X86::eax);
985 emitPutVirtualRegister(currentInstruction[1].u.operand);
986 NEXT_OPCODE(op_bitxor);
988 case op_new_regexp: {
989 RegExp* regExp = m_codeBlock->regexp(currentInstruction[2].u.operand);
990 emitPutJITStubArgConstant(regExp, 1);
991 emitCTICall(Interpreter::cti_op_new_regexp);
992 emitPutVirtualRegister(currentInstruction[1].u.operand);
993 NEXT_OPCODE(op_new_regexp);
996 emitGetVirtualRegisters(currentInstruction[2].u.operand, X86::eax, currentInstruction[3].u.operand, X86::edx);
997 emitJumpSlowCaseIfNotImmNums(X86::eax, X86::edx, X86::ecx);
998 orPtr(X86::edx, X86::eax);
999 emitPutVirtualRegister(currentInstruction[1].u.operand);
1000 NEXT_OPCODE(op_bitor);
1003 emitPutJITStubArgFromVirtualRegister(currentInstruction[1].u.operand, 1, X86::ecx);
1004 emitCTICall(Interpreter::cti_op_throw);
1005 #if PLATFORM(X86_64)
1006 addPtr(Imm32(0x38), X86::esp);
1013 addPtr(Imm32(0x1c), X86::esp);
1020 NEXT_OPCODE(op_throw);
1022 case op_get_pnames: {
1023 emitPutJITStubArgFromVirtualRegister(currentInstruction[2].u.operand, 1, X86::ecx);
1024 emitCTICall(Interpreter::cti_op_get_pnames);
1025 emitPutVirtualRegister(currentInstruction[1].u.operand);
1026 NEXT_OPCODE(op_get_pnames);
1028 case op_next_pname: {
1029 emitPutJITStubArgFromVirtualRegister(currentInstruction[2].u.operand, 1, X86::ecx);
1030 unsigned target = currentInstruction[3].u.operand;
1031 emitCTICall(Interpreter::cti_op_next_pname);
1032 Jump endOfIter = jzPtr(X86::eax);
1033 emitPutVirtualRegister(currentInstruction[1].u.operand);
1034 addJump(jump(), target + 3);
1035 endOfIter.link(this);
1036 NEXT_OPCODE(op_next_pname);
1038 case op_push_scope: {
1039 emitPutJITStubArgFromVirtualRegister(currentInstruction[1].u.operand, 1, X86::ecx);
1040 emitCTICall(Interpreter::cti_op_push_scope);
1041 emitPutVirtualRegister(currentInstruction[1].u.operand);
1042 NEXT_OPCODE(op_push_scope);
1044 case op_pop_scope: {
1045 emitCTICall(Interpreter::cti_op_pop_scope);
1046 NEXT_OPCODE(op_pop_scope);
1048 CTI_COMPILE_UNARY_OP(op_typeof)
1049 CTI_COMPILE_UNARY_OP(op_is_undefined)
1050 CTI_COMPILE_UNARY_OP(op_is_boolean)
1051 CTI_COMPILE_UNARY_OP(op_is_number)
1052 CTI_COMPILE_UNARY_OP(op_is_string)
1053 CTI_COMPILE_UNARY_OP(op_is_object)
1054 CTI_COMPILE_UNARY_OP(op_is_function)
1056 compileOpStrictEq(currentInstruction, OpStrictEq);
1057 NEXT_OPCODE(op_stricteq);
1059 case op_nstricteq: {
1060 compileOpStrictEq(currentInstruction, OpNStrictEq);
1061 NEXT_OPCODE(op_nstricteq);
1063 case op_to_jsnumber: {
1064 int srcVReg = currentInstruction[2].u.operand;
1065 emitGetVirtualRegister(srcVReg, X86::eax);
1067 Jump wasImmediate = jnz32(X86::eax, Imm32(JSImmediate::TagBitTypeInteger));
1069 emitJumpSlowCaseIfNotJSCell(X86::eax, srcVReg);
1070 loadPtr(Address(X86::eax, FIELD_OFFSET(JSCell, m_structure)), X86::ecx);
1071 addSlowCase(jne32(Address(X86::ecx, FIELD_OFFSET(Structure, m_typeInfo.m_type)), Imm32(NumberType)));
1073 wasImmediate.link(this);
1075 emitPutVirtualRegister(currentInstruction[1].u.operand);
1076 NEXT_OPCODE(op_to_jsnumber);
1078 CTI_COMPILE_BINARY_OP(op_in)
1079 case op_push_new_scope: {
1080 Identifier* ident = &(m_codeBlock->identifier(currentInstruction[2].u.operand));
1081 emitPutJITStubArgConstant(ident, 1);
1082 emitPutJITStubArgFromVirtualRegister(currentInstruction[3].u.operand, 2, X86::ecx);
1083 emitCTICall(Interpreter::cti_op_push_new_scope);
1084 emitPutVirtualRegister(currentInstruction[1].u.operand);
1085 NEXT_OPCODE(op_push_new_scope);
1088 emitGetCTIParam(STUB_ARGS_callFrame, callFrameRegister);
1089 emitPutVirtualRegister(currentInstruction[1].u.operand);
1090 NEXT_OPCODE(op_catch);
1092 case op_jmp_scopes: {
1093 unsigned count = currentInstruction[1].u.operand;
1094 emitPutJITStubArgConstant(count, 1);
1095 emitCTICall(Interpreter::cti_op_jmp_scopes);
1096 unsigned target = currentInstruction[2].u.operand;
1097 addJump(jump(), target + 2);
1098 NEXT_OPCODE(op_jmp_scopes);
1100 case op_put_by_index: {
1101 emitPutJITStubArgFromVirtualRegister(currentInstruction[1].u.operand, 1, X86::ecx);
1102 emitPutJITStubArgConstant(currentInstruction[2].u.operand, 2);
1103 emitPutJITStubArgFromVirtualRegister(currentInstruction[3].u.operand, 3, X86::ecx);
1104 emitCTICall(Interpreter::cti_op_put_by_index);
1105 NEXT_OPCODE(op_put_by_index);
1107 case op_switch_imm: {
1108 unsigned tableIndex = currentInstruction[1].u.operand;
1109 unsigned defaultOffset = currentInstruction[2].u.operand;
1110 unsigned scrutinee = currentInstruction[3].u.operand;
1112 // create jump table for switch destinations, track this switch statement.
1113 SimpleJumpTable* jumpTable = &m_codeBlock->immediateSwitchJumpTable(tableIndex);
1114 m_switches.append(SwitchRecord(jumpTable, m_bytecodeIndex, defaultOffset, SwitchRecord::Immediate));
1115 jumpTable->ctiOffsets.grow(jumpTable->branchOffsets.size());
1117 emitPutJITStubArgFromVirtualRegister(scrutinee, 1, X86::ecx);
1118 emitPutJITStubArgConstant(tableIndex, 2);
1119 emitCTICall(Interpreter::cti_op_switch_imm);
1121 NEXT_OPCODE(op_switch_imm);
1123 case op_switch_char: {
1124 unsigned tableIndex = currentInstruction[1].u.operand;
1125 unsigned defaultOffset = currentInstruction[2].u.operand;
1126 unsigned scrutinee = currentInstruction[3].u.operand;
1128 // create jump table for switch destinations, track this switch statement.
1129 SimpleJumpTable* jumpTable = &m_codeBlock->characterSwitchJumpTable(tableIndex);
1130 m_switches.append(SwitchRecord(jumpTable, m_bytecodeIndex, defaultOffset, SwitchRecord::Character));
1131 jumpTable->ctiOffsets.grow(jumpTable->branchOffsets.size());
1133 emitPutJITStubArgFromVirtualRegister(scrutinee, 1, X86::ecx);
1134 emitPutJITStubArgConstant(tableIndex, 2);
1135 emitCTICall(Interpreter::cti_op_switch_char);
1137 NEXT_OPCODE(op_switch_char);
1139 case op_switch_string: {
1140 unsigned tableIndex = currentInstruction[1].u.operand;
1141 unsigned defaultOffset = currentInstruction[2].u.operand;
1142 unsigned scrutinee = currentInstruction[3].u.operand;
1144 // create jump table for switch destinations, track this switch statement.
1145 StringJumpTable* jumpTable = &m_codeBlock->stringSwitchJumpTable(tableIndex);
1146 m_switches.append(SwitchRecord(jumpTable, m_bytecodeIndex, defaultOffset));
1148 emitPutJITStubArgFromVirtualRegister(scrutinee, 1, X86::ecx);
1149 emitPutJITStubArgConstant(tableIndex, 2);
1150 emitCTICall(Interpreter::cti_op_switch_string);
1152 NEXT_OPCODE(op_switch_string);
1154 case op_del_by_val: {
1155 emitPutJITStubArgFromVirtualRegister(currentInstruction[2].u.operand, 1, X86::ecx);
1156 emitPutJITStubArgFromVirtualRegister(currentInstruction[3].u.operand, 2, X86::ecx);
1157 emitCTICall(Interpreter::cti_op_del_by_val);
1158 emitPutVirtualRegister(currentInstruction[1].u.operand);
1159 NEXT_OPCODE(op_del_by_val);
1161 case op_put_getter: {
1162 emitPutJITStubArgFromVirtualRegister(currentInstruction[1].u.operand, 1, X86::ecx);
1163 Identifier* ident = &(m_codeBlock->identifier(currentInstruction[2].u.operand));
1164 emitPutJITStubArgConstant(ident, 2);
1165 emitPutJITStubArgFromVirtualRegister(currentInstruction[3].u.operand, 3, X86::ecx);
1166 emitCTICall(Interpreter::cti_op_put_getter);
1167 NEXT_OPCODE(op_put_getter);
1169 case op_put_setter: {
1170 emitPutJITStubArgFromVirtualRegister(currentInstruction[1].u.operand, 1, X86::ecx);
1171 Identifier* ident = &(m_codeBlock->identifier(currentInstruction[2].u.operand));
1172 emitPutJITStubArgConstant(ident, 2);
1173 emitPutJITStubArgFromVirtualRegister(currentInstruction[3].u.operand, 3, X86::ecx);
1174 emitCTICall(Interpreter::cti_op_put_setter);
1175 NEXT_OPCODE(op_put_setter);
1177 case op_new_error: {
1178 JSValue* message = m_codeBlock->unexpectedConstant(currentInstruction[3].u.operand);
1179 emitPutJITStubArgConstant(currentInstruction[2].u.operand, 1);
1180 emitPutJITStubArgConstant(message, 2);
1181 emitPutJITStubArgConstant(m_codeBlock->lineNumberForBytecodeOffset(m_bytecodeIndex), 3);
1182 emitCTICall(Interpreter::cti_op_new_error);
1183 emitPutVirtualRegister(currentInstruction[1].u.operand);
1184 NEXT_OPCODE(op_new_error);
1187 emitPutJITStubArgConstant(currentInstruction[1].u.operand, 1);
1188 emitPutJITStubArgConstant(currentInstruction[2].u.operand, 2);
1189 emitPutJITStubArgConstant(currentInstruction[3].u.operand, 3);
1190 emitCTICall(Interpreter::cti_op_debug);
1191 NEXT_OPCODE(op_debug);
1194 unsigned dst = currentInstruction[1].u.operand;
1195 unsigned src1 = currentInstruction[2].u.operand;
1197 emitGetVirtualRegister(src1, X86::eax);
1198 Jump isImmediate = emitJumpIfNotJSCell(X86::eax);
1200 loadPtr(Address(X86::eax, FIELD_OFFSET(JSCell, m_structure)), X86::ecx);
1201 setnz32(Address(X86::ecx, FIELD_OFFSET(Structure, m_typeInfo.m_flags)), Imm32(MasqueradesAsUndefined), X86::eax);
1203 Jump wasNotImmediate = jump();
1205 isImmediate.link(this);
1207 and32(Imm32(~JSImmediate::ExtendedTagBitUndefined), X86::eax);
1208 sete32(Imm32(JSImmediate::FullTagTypeNull), X86::eax);
1210 wasNotImmediate.link(this);
1212 emitTagAsBoolImmediate(X86::eax);
1213 emitPutVirtualRegister(dst);
1215 NEXT_OPCODE(op_eq_null);
1218 unsigned dst = currentInstruction[1].u.operand;
1219 unsigned src1 = currentInstruction[2].u.operand;
1221 emitGetVirtualRegister(src1, X86::eax);
1222 Jump isImmediate = emitJumpIfNotJSCell(X86::eax);
1224 loadPtr(Address(X86::eax, FIELD_OFFSET(JSCell, m_structure)), X86::ecx);
1225 setz32(Address(X86::ecx, FIELD_OFFSET(Structure, m_typeInfo.m_flags)), Imm32(MasqueradesAsUndefined), X86::eax);
1227 Jump wasNotImmediate = jump();
1229 isImmediate.link(this);
1231 and32(Imm32(~JSImmediate::ExtendedTagBitUndefined), X86::eax);
1232 setne32(Imm32(JSImmediate::FullTagTypeNull), X86::eax);
1234 wasNotImmediate.link(this);
1236 emitTagAsBoolImmediate(X86::eax);
1237 emitPutVirtualRegister(dst);
1239 NEXT_OPCODE(op_neq_null);
1242 // Even though CTI doesn't use them, we initialize our constant
1243 // registers to zap stale pointers, to avoid unnecessarily prolonging
1244 // object lifetime and increasing GC pressure.
1245 size_t count = m_codeBlock->m_numVars + m_codeBlock->numberOfConstantRegisters();
1246 for (size_t j = 0; j < count; ++j)
1247 emitInitRegister(j);
1249 NEXT_OPCODE(op_enter);
1251 case op_enter_with_activation: {
1252 // Even though CTI doesn't use them, we initialize our constant
1253 // registers to zap stale pointers, to avoid unnecessarily prolonging
1254 // object lifetime and increasing GC pressure.
1255 size_t count = m_codeBlock->m_numVars + m_codeBlock->numberOfConstantRegisters();
1256 for (size_t j = 0; j < count; ++j)
1257 emitInitRegister(j);
1259 emitCTICall(Interpreter::cti_op_push_activation);
1260 emitPutVirtualRegister(currentInstruction[1].u.operand);
1262 NEXT_OPCODE(op_enter_with_activation);
1264 case op_create_arguments: {
1265 if (m_codeBlock->m_numParameters == 1)
1266 emitCTICall(Interpreter::cti_op_create_arguments_no_params);
1268 emitCTICall(Interpreter::cti_op_create_arguments);
1269 NEXT_OPCODE(op_create_arguments);
1271 case op_convert_this: {
1272 emitGetVirtualRegister(currentInstruction[1].u.operand, X86::eax);
1274 emitJumpSlowCaseIfNotJSCell(X86::eax);
1275 loadPtr(Address(X86::eax, FIELD_OFFSET(JSCell, m_structure)), X86::edx);
1276 addSlowCase(jnz32(Address(X86::edx, FIELD_OFFSET(Structure, m_typeInfo.m_flags)), Imm32(NeedsThisConversion)));
1278 NEXT_OPCODE(op_convert_this);
1280 case op_profile_will_call: {
1281 emitGetCTIParam(STUB_ARGS_profilerReference, X86::eax);
1282 Jump noProfiler = jzPtr(Address(X86::eax));
1283 emitPutJITStubArgFromVirtualRegister(currentInstruction[1].u.operand, 1, X86::eax);
1284 emitCTICall(Interpreter::cti_op_profile_will_call);
1285 noProfiler.link(this);
1287 NEXT_OPCODE(op_profile_will_call);
1289 case op_profile_did_call: {
1290 emitGetCTIParam(STUB_ARGS_profilerReference, X86::eax);
1291 Jump noProfiler = jzPtr(Address(X86::eax));
1292 emitPutJITStubArgFromVirtualRegister(currentInstruction[1].u.operand, 1, X86::eax);
1293 emitCTICall(Interpreter::cti_op_profile_did_call);
1294 noProfiler.link(this);
1296 NEXT_OPCODE(op_profile_did_call);
1298 case op_get_array_length:
1299 case op_get_by_id_chain:
1300 case op_get_by_id_generic:
1301 case op_get_by_id_proto:
1302 case op_get_by_id_proto_list:
1303 case op_get_by_id_self:
1304 case op_get_by_id_self_list:
1305 case op_get_string_length:
1306 case op_put_by_id_generic:
1307 case op_put_by_id_replace:
1308 case op_put_by_id_transition:
1309 ASSERT_NOT_REACHED();
1313 ASSERT(propertyAccessInstructionIndex == m_codeBlock->numberOfStructureStubInfos());
1314 ASSERT(callLinkInfoIndex == m_codeBlock->numberOfCallLinkInfos());
1317 // reset this, in order to guard it's use with asserts
1318 m_bytecodeIndex = (unsigned)-1;
// Link pass of JIT compilation (runs between privateCompileMainPass and
// privateCompileSlowCases — see privateCompile below): resolves every
// intra-function jump recorded in m_jmpTable during the main pass by binding
// it to the Label recorded in m_labels for its target bytecode index.
// NOTE(review): this listing elides lines here (braces and any trailing
// cleanup are hidden by the line-number gaps) — confirm against full source.
1323 void JIT::privateCompileLinkPass()
1325 unsigned jmpTableCount = m_jmpTable.size();
1326 for (unsigned i = 0; i < jmpTableCount; ++i)
// Bind the recorded Jump to the label of the bytecode it targets.
1327 m_jmpTable[i].from.linkTo(m_labels[m_jmpTable[i].toBytecodeIndex], this);
1331 void JIT::privateCompileSlowCases()
1333 Instruction* instructionsBegin = m_codeBlock->instructions().begin();
1334 unsigned propertyAccessInstructionIndex = 0;
1335 unsigned callLinkInfoIndex = 0;
1337 for (Vector<SlowCaseEntry>::iterator iter = m_slowCases.begin(); iter != m_slowCases.end();) {
1338 // FIXME: enable peephole optimizations for slow cases when applicable
1339 killLastResultRegister();
1341 m_bytecodeIndex = iter->to;
1343 unsigned firstTo = m_bytecodeIndex;
1345 Instruction* currentInstruction = instructionsBegin + m_bytecodeIndex;
1347 switch (OpcodeID opcodeID = m_interpreter->getOpcodeID(currentInstruction->u.opcode)) {
1348 case op_convert_this: {
1351 emitPutJITStubArg(X86::eax, 1);
1352 emitCTICall(Interpreter::cti_op_convert_this);
1353 emitPutVirtualRegister(currentInstruction[1].u.operand);
1354 NEXT_OPCODE(op_convert_this);
1357 unsigned dst = currentInstruction[1].u.operand;
1358 unsigned src1 = currentInstruction[2].u.operand;
1359 unsigned src2 = currentInstruction[3].u.operand;
1360 if (JSValue* value = getConstantImmediateNumericArg(src1)) {
1361 Jump notImm = getSlowCase(iter);
1363 sub32(Imm32(getDeTaggedConstantImmediate(value)), X86::eax);
1365 emitPutJITStubArgFromVirtualRegister(src1, 1, X86::ecx);
1366 emitPutJITStubArg(X86::eax, 2);
1367 emitCTICall(Interpreter::cti_op_add);
1368 emitPutVirtualRegister(dst);
1369 } else if (JSValue* value = getConstantImmediateNumericArg(src2)) {
1370 Jump notImm = getSlowCase(iter);
1372 sub32(Imm32(getDeTaggedConstantImmediate(value)), X86::eax);
1374 emitPutJITStubArg(X86::eax, 1);
1375 emitPutJITStubArgFromVirtualRegister(src2, 2, X86::ecx);
1376 emitCTICall(Interpreter::cti_op_add);
1377 emitPutVirtualRegister(dst);
1379 OperandTypes types = OperandTypes::fromInt(currentInstruction[4].u.operand);
1380 ASSERT(types.first().mightBeNumber() && types.second().mightBeNumber());
1381 compileBinaryArithOpSlowCase(op_add, iter, dst, src1, src2, types);
1384 NEXT_OPCODE(op_add);
1386 case op_construct_verify: {
1389 emitGetVirtualRegister(currentInstruction[2].u.operand, X86::eax);
1390 emitPutVirtualRegister(currentInstruction[1].u.operand);
1392 NEXT_OPCODE(op_construct_verify);
1394 case op_get_by_val: {
1395 // The slow case that handles accesses to arrays (below) may jump back up to here.
1396 Label beginGetByValSlow(this);
1398 Jump notImm = getSlowCase(iter);
1401 emitFastArithIntToImmNoCheck(X86::edx);
1403 emitPutJITStubArg(X86::eax, 1);
1404 emitPutJITStubArg(X86::edx, 2);
1405 emitCTICall(Interpreter::cti_op_get_by_val);
1406 emitPutVirtualRegister(currentInstruction[1].u.operand);
1407 emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_get_by_val));
1409 // This is slow case that handles accesses to arrays above the fast cut-off.
1410 // First, check if this is an access to the vector
1412 jae32(X86::edx, Address(X86::ecx, FIELD_OFFSET(ArrayStorage, m_vectorLength)), beginGetByValSlow);
1414 // okay, missed the fast region, but it is still in the vector. Get the value.
1415 loadPtr(BaseIndex(X86::ecx, X86::edx, ScalePtr, FIELD_OFFSET(ArrayStorage, m_vector[0])), X86::ecx);
1416 // Check whether the value loaded is zero; if so we need to return undefined.
1417 jzPtr(X86::ecx, beginGetByValSlow);
1418 move(X86::ecx, X86::eax);
1419 emitPutVirtualRegister(currentInstruction[1].u.operand, X86::eax);
1421 NEXT_OPCODE(op_get_by_val);
1424 compileBinaryArithOpSlowCase(op_sub, iter, currentInstruction[1].u.operand, currentInstruction[2].u.operand, currentInstruction[3].u.operand, OperandTypes::fromInt(currentInstruction[4].u.operand));
1425 NEXT_OPCODE(op_sub);
1428 unsigned src2 = currentInstruction[3].u.operand;
1430 if (getConstantImmediateNumericArg(src2))
1431 emitPutJITStubArgFromVirtualRegister(src2, 2, X86::ecx);
1434 emitPutJITStubArg(X86::ecx, 2);
1437 emitPutJITStubArg(X86::eax, 1);
1438 emitCTICall(Interpreter::cti_op_rshift);
1439 emitPutVirtualRegister(currentInstruction[1].u.operand);
1440 NEXT_OPCODE(op_rshift);
1443 Jump notImm1 = getSlowCase(iter);
1444 Jump notImm2 = getSlowCase(iter);
1446 emitGetVirtualRegisters(currentInstruction[2].u.operand, X86::eax, currentInstruction[3].u.operand, X86::ecx);
1449 emitPutJITStubArg(X86::eax, 1);
1450 emitPutJITStubArg(X86::ecx, 2);
1451 emitCTICall(Interpreter::cti_op_lshift);
1452 emitPutVirtualRegister(currentInstruction[1].u.operand);
1453 NEXT_OPCODE(op_lshift);
1455 case op_loop_if_less: {
1456 unsigned target = currentInstruction[3].u.operand;
1457 JSValue* src2imm = getConstantImmediateNumericArg(currentInstruction[2].u.operand);
1460 emitPutJITStubArg(X86::eax, 1);
1461 emitPutJITStubArgFromVirtualRegister(currentInstruction[2].u.operand, 2, X86::ecx);
1462 emitCTICall(Interpreter::cti_op_loop_if_less);
1463 emitJumpSlowToHot(jnz32(X86::eax), target + 3);
1467 emitPutJITStubArg(X86::eax, 1);
1468 emitPutJITStubArg(X86::edx, 2);
1469 emitCTICall(Interpreter::cti_op_loop_if_less);
1470 emitJumpSlowToHot(jnz32(X86::eax), target + 3);
1472 NEXT_OPCODE(op_loop_if_less);
1474 case op_put_by_id: {
1475 compilePutByIdSlowCase(currentInstruction[1].u.operand, &(m_codeBlock->identifier(currentInstruction[2].u.operand)), currentInstruction[3].u.operand, iter, propertyAccessInstructionIndex++);
1476 NEXT_OPCODE(op_put_by_id);
1478 case op_get_by_id: {
1479 compileGetByIdSlowCase(currentInstruction[1].u.operand, currentInstruction[2].u.operand, &(m_codeBlock->identifier(currentInstruction[3].u.operand)), iter, propertyAccessInstructionIndex++);
1480 NEXT_OPCODE(op_get_by_id);
1482 case op_loop_if_lesseq: {
1483 unsigned target = currentInstruction[3].u.operand;
1484 JSValue* src2imm = getConstantImmediateNumericArg(currentInstruction[2].u.operand);
1487 emitPutJITStubArg(X86::eax, 1);
1488 emitPutJITStubArgFromVirtualRegister(currentInstruction[2].u.operand, 2, X86::ecx);
1489 emitCTICall(Interpreter::cti_op_loop_if_lesseq);
1490 emitJumpSlowToHot(jnz32(X86::eax), target + 3);
1494 emitPutJITStubArg(X86::eax, 1);
1495 emitPutJITStubArg(X86::edx, 2);
1496 emitCTICall(Interpreter::cti_op_loop_if_lesseq);
1497 emitJumpSlowToHot(jnz32(X86::eax), target + 3);
1499 NEXT_OPCODE(op_loop_if_lesseq);
1502 unsigned srcDst = currentInstruction[1].u.operand;
1503 Jump notImm = getSlowCase(iter);
1505 sub32(Imm32(getDeTaggedConstantImmediate(JSImmediate::oneImmediate())), X86::eax);
1507 emitPutJITStubArg(X86::eax, 1);
1508 emitCTICall(Interpreter::cti_op_pre_inc);
1509 emitPutVirtualRegister(srcDst);
1510 NEXT_OPCODE(op_pre_inc);
1512 case op_put_by_val: {
1513 // Normal slow cases - either is not an immediate imm, or is an array.
1514 Jump notImm = getSlowCase(iter);
1517 emitFastArithIntToImmNoCheck(X86::edx);
1519 emitGetVirtualRegister(currentInstruction[3].u.operand, X86::ecx);
1520 emitPutJITStubArg(X86::eax, 1);
1521 emitPutJITStubArg(X86::edx, 2);
1522 emitPutJITStubArg(X86::ecx, 3);
1523 emitCTICall(Interpreter::cti_op_put_by_val);
1524 emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_put_by_val));
1526 // slow cases for immediate int accesses to arrays
1529 emitGetVirtualRegister(currentInstruction[3].u.operand, X86::ecx);
1530 emitPutJITStubArg(X86::eax, 1);
1531 emitPutJITStubArg(X86::edx, 2);
1532 emitPutJITStubArg(X86::ecx, 3);
1533 emitCTICall(Interpreter::cti_op_put_by_val_array);
1535 NEXT_OPCODE(op_put_by_val);
1537 case op_loop_if_true: {
1539 emitPutJITStubArg(X86::eax, 1);
1540 emitCTICall(Interpreter::cti_op_jtrue);
1541 unsigned target = currentInstruction[2].u.operand;
1542 emitJumpSlowToHot(jnz32(X86::eax), target + 2);
1543 NEXT_OPCODE(op_loop_if_true);
1546 unsigned srcDst = currentInstruction[1].u.operand;
1547 Jump notImm = getSlowCase(iter);
1549 add32(Imm32(getDeTaggedConstantImmediate(JSImmediate::oneImmediate())), X86::eax);
1551 emitPutJITStubArg(X86::eax, 1);
1552 emitCTICall(Interpreter::cti_op_pre_dec);
1553 emitPutVirtualRegister(srcDst);
1554 NEXT_OPCODE(op_pre_dec);
1557 unsigned target = currentInstruction[3].u.operand;
1558 JSValue* src2imm = getConstantImmediateNumericArg(currentInstruction[2].u.operand);
1561 emitPutJITStubArg(X86::edx, 1);
1562 emitPutJITStubArgFromVirtualRegister(currentInstruction[2].u.operand, 2, X86::ecx);
1563 emitCTICall(Interpreter::cti_op_jless);
1564 emitJumpSlowToHot(jz32(X86::eax), target + 3);
1568 emitPutJITStubArg(X86::eax, 1);
1569 emitPutJITStubArg(X86::edx, 2);
1570 emitCTICall(Interpreter::cti_op_jless);
1571 emitJumpSlowToHot(jz32(X86::eax), target + 3);
1573 NEXT_OPCODE(op_jnless);
1577 xorPtr(Imm32(JSImmediate::FullTagTypeBool), X86::eax);
1578 emitPutJITStubArg(X86::eax, 1);
1579 emitCTICall(Interpreter::cti_op_not);
1580 emitPutVirtualRegister(currentInstruction[1].u.operand);
1581 NEXT_OPCODE(op_not);
1585 emitPutJITStubArg(X86::eax, 1);
1586 emitCTICall(Interpreter::cti_op_jtrue);
1587 unsigned target = currentInstruction[2].u.operand;
1588 emitJumpSlowToHot(jz32(X86::eax), target + 2); // inverted!
1589 NEXT_OPCODE(op_jfalse);
1592 unsigned srcDst = currentInstruction[2].u.operand;
1595 emitPutJITStubArg(X86::eax, 1);
1596 emitCTICall(Interpreter::cti_op_post_inc);
1597 emitPutVirtualRegister(srcDst, X86::edx);
1598 emitPutVirtualRegister(currentInstruction[1].u.operand);
1599 NEXT_OPCODE(op_post_inc);
1603 emitPutJITStubArg(X86::eax, 1);
1604 emitCTICall(Interpreter::cti_op_bitnot);
1605 emitPutVirtualRegister(currentInstruction[1].u.operand);
1606 NEXT_OPCODE(op_bitnot);
1610 unsigned src1 = currentInstruction[2].u.operand;
1611 unsigned src2 = currentInstruction[3].u.operand;
1612 unsigned dst = currentInstruction[1].u.operand;
1613 if (getConstantImmediateNumericArg(src1)) {
1614 emitPutJITStubArgFromVirtualRegister(src1, 1, X86::ecx);
1615 emitPutJITStubArg(X86::eax, 2);
1616 emitCTICall(Interpreter::cti_op_bitand);
1617 emitPutVirtualRegister(dst);
1618 } else if (getConstantImmediateNumericArg(src2)) {
1619 emitPutJITStubArg(X86::eax, 1);
1620 emitPutJITStubArgFromVirtualRegister(src2, 2, X86::ecx);
1621 emitCTICall(Interpreter::cti_op_bitand);
1622 emitPutVirtualRegister(dst);
1624 emitPutJITStubArgFromVirtualRegister(src1, 1, X86::ecx);
1625 emitPutJITStubArg(X86::edx, 2);
1626 emitCTICall(Interpreter::cti_op_bitand);
1627 emitPutVirtualRegister(dst);
1629 NEXT_OPCODE(op_bitand);
1633 emitPutJITStubArg(X86::eax, 1);
1634 emitCTICall(Interpreter::cti_op_jtrue);
1635 unsigned target = currentInstruction[2].u.operand;
1636 emitJumpSlowToHot(jnz32(X86::eax), target + 2);
1637 NEXT_OPCODE(op_jtrue);
1640 unsigned srcDst = currentInstruction[2].u.operand;
1643 emitPutJITStubArg(X86::eax, 1);
1644 emitCTICall(Interpreter::cti_op_post_dec);
1645 emitPutVirtualRegister(srcDst, X86::edx);
1646 emitPutVirtualRegister(currentInstruction[1].u.operand);
1647 NEXT_OPCODE(op_post_dec);
1651 emitPutJITStubArg(X86::eax, 1);
1652 emitPutJITStubArg(X86::edx, 2);
1653 emitCTICall(Interpreter::cti_op_bitxor);
1654 emitPutVirtualRegister(currentInstruction[1].u.operand);
1655 NEXT_OPCODE(op_bitxor);
1659 emitPutJITStubArg(X86::eax, 1);
1660 emitPutJITStubArg(X86::edx, 2);
1661 emitCTICall(Interpreter::cti_op_bitor);
1662 emitPutVirtualRegister(currentInstruction[1].u.operand);
1663 NEXT_OPCODE(op_bitor);
1667 emitPutJITStubArg(X86::eax, 1);
1668 emitPutJITStubArg(X86::edx, 2);
1669 emitCTICall(Interpreter::cti_op_eq);
1670 emitPutVirtualRegister(currentInstruction[1].u.operand);
1675 emitPutJITStubArg(X86::eax, 1);
1676 emitPutJITStubArg(X86::edx, 2);
1677 emitCTICall(Interpreter::cti_op_neq);
1678 emitPutVirtualRegister(currentInstruction[1].u.operand);
1679 NEXT_OPCODE(op_neq);
1685 emitPutJITStubArg(X86::eax, 1);
1686 emitPutJITStubArg(X86::edx, 2);
1687 emitCTICall(Interpreter::cti_op_stricteq);
1688 emitPutVirtualRegister(currentInstruction[1].u.operand);
1689 NEXT_OPCODE(op_stricteq);
1691 case op_nstricteq: {
1695 emitPutJITStubArg(X86::eax, 1);
1696 emitPutJITStubArg(X86::edx, 2);
1697 emitCTICall(Interpreter::cti_op_nstricteq);
1698 emitPutVirtualRegister(currentInstruction[1].u.operand);
1699 NEXT_OPCODE(op_nstricteq);
1701 case op_instanceof: {
1705 emitPutJITStubArgFromVirtualRegister(currentInstruction[2].u.operand, 1, X86::ecx);
1706 emitPutJITStubArgFromVirtualRegister(currentInstruction[3].u.operand, 2, X86::ecx);
1707 emitPutJITStubArgFromVirtualRegister(currentInstruction[4].u.operand, 3, X86::ecx);
1708 emitCTICall(Interpreter::cti_op_instanceof);
1709 emitPutVirtualRegister(currentInstruction[1].u.operand);
1710 NEXT_OPCODE(op_instanceof);
1713 Jump notImm1 = getSlowCase(iter);
1714 Jump notImm2 = getSlowCase(iter);
1716 emitFastArithReTagImmediate(X86::eax);
1717 emitFastArithReTagImmediate(X86::ecx);
1720 emitPutJITStubArg(X86::eax, 1);
1721 emitPutJITStubArg(X86::ecx, 2);
1722 emitCTICall(Interpreter::cti_op_mod);
1723 emitPutVirtualRegister(currentInstruction[1].u.operand);
1724 NEXT_OPCODE(op_mod);
1727 int dst = currentInstruction[1].u.operand;
1728 int src1 = currentInstruction[2].u.operand;
1729 int src2 = currentInstruction[3].u.operand;
1730 JSValue* src1Value = getConstantImmediateNumericArg(src1);
1731 JSValue* src2Value = getConstantImmediateNumericArg(src2);
1733 if (src1Value && ((value = JSImmediate::intValue(src1Value)) > 0)) {
1736 // There is an extra slow case for (op1 * -N) or (-N * op2), to check for 0 since this should produce a result of -0.
1737 emitPutJITStubArgFromVirtualRegister(src1, 1, X86::ecx);
1738 emitPutJITStubArgFromVirtualRegister(src2, 2, X86::ecx);
1739 emitCTICall(Interpreter::cti_op_mul);
1740 emitPutVirtualRegister(dst);
1741 } else if (src2Value && ((value = JSImmediate::intValue(src2Value)) > 0)) {
1744 // There is an extra slow case for (op1 * -N) or (-N * op2), to check for 0 since this should produce a result of -0.
1745 emitPutJITStubArgFromVirtualRegister(src1, 1, X86::ecx);
1746 emitPutJITStubArgFromVirtualRegister(src2, 2, X86::ecx);
1747 emitCTICall(Interpreter::cti_op_mul);
1748 emitPutVirtualRegister(dst);
1750 compileBinaryArithOpSlowCase(op_mul, iter, dst, src1, src2, OperandTypes::fromInt(currentInstruction[4].u.operand));
1751 NEXT_OPCODE(op_mul);
1755 compileOpCallSlowCase(currentInstruction, iter, callLinkInfoIndex++, opcodeID);
1756 NEXT_OPCODE(op_call);
1758 case op_call_eval: {
1759 compileOpCallSlowCase(currentInstruction, iter, callLinkInfoIndex++, opcodeID);
1760 NEXT_OPCODE(op_call_eval);
1762 case op_construct: {
1763 compileOpCallSlowCase(currentInstruction, iter, callLinkInfoIndex++, opcodeID);
1764 NEXT_OPCODE(op_construct);
1766 case op_to_jsnumber: {
1767 linkSlowCaseIfNotJSCell(iter, currentInstruction[2].u.operand);
1770 emitPutJITStubArg(X86::eax, 1);
1771 emitCTICall(Interpreter::cti_op_to_jsnumber);
1773 emitPutVirtualRegister(currentInstruction[1].u.operand);
1774 NEXT_OPCODE(op_to_jsnumber);
1778 ASSERT_NOT_REACHED();
1781 ASSERT_WITH_MESSAGE(iter == m_slowCases.end() || firstTo != iter->to,"Not enough jumps linked in slow case codegen.");
1782 ASSERT_WITH_MESSAGE(firstTo == (iter - 1)->to, "Too many jumps linked in slow case codegen.");
1784 emitJumpSlowToHot(jump(), 0);
1787 #if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
1788 ASSERT(propertyAccessInstructionIndex == m_codeBlock->numberOfStructureStubInfos());
1790 ASSERT(callLinkInfoIndex == m_codeBlock->numberOfCallLinkInfos());
1793 // reset this, in order to guard its use with asserts
1794 m_bytecodeIndex = (unsigned)-1;
// Top-level JIT driver for a single CodeBlock.  Emits the prologue
// (sampling hooks, return-PC spill, and — for function code — a register
// file overflow check), runs the three code generation passes, copies the
// assembled code into executable memory, and then back-patches every
// recorded site: switch jump tables, exception handlers, call records,
// jsr targets, and the property-access / call-link stub info tables.
1798 void JIT::privateCompile()
// Record the currently-executing CodeBlock / opcode for the sampling
// profiler, when the corresponding sampling modes are compiled in.
1800 #if ENABLE(CODEBLOCK_SAMPLING)
1801 storePtr(ImmPtr(m_codeBlock), m_interpreter->sampler()->codeBlockSlot());
1803 #if ENABLE(OPCODE_SAMPLING)
1804 store32(Imm32(m_interpreter->sampler()->encodeSample(m_codeBlock->instructions().begin())), m_interpreter->sampler()->sampleSlot());
1807 // Could use a pop_m, but would need to offset the following instruction if so.
// Spill the native return address into the frame's ReturnPC slot.
// NOTE(review): ecx presumably holds the just-popped return address — the
// pop itself is emitted on a line outside this excerpt; confirm.
1809 emitPutToCallFrameHeader(X86::ecx, RegisterFile::ReturnPC);
1811 Jump slowRegisterFileCheck;
1812 Label afterRegisterFileCheck;
// Function code only: initialize the CodeBlock slot of the call frame and
// verify the register file has room for this frame's callee registers,
// branching to a slow path (emitted after the main body) on overflow.
1813 if (m_codeBlock->codeType() == FunctionCode) {
1814 // In the case of a fast linked call, we do not set this up in the caller.
1815 emitPutImmediateToCallFrameHeader(m_codeBlock, RegisterFile::CodeBlock);
1817 emitGetCTIParam(STUB_ARGS_registerFile, X86::eax)
1818 addPtr(Imm32(m_codeBlock->m_numCalleeRegisters * sizeof(Register)), callFrameRegister, X86::edx);
// Overflow when the prospective frame top (edx) passes RegisterFile::m_end.
1820 slowRegisterFileCheck = jg32(X86::edx, Address(X86::eax, FIELD_OFFSET(RegisterFile, m_end)));
1821 afterRegisterFileCheck = label();
// The three code generation passes: fast paths, jump linking, slow paths.
1824 privateCompileMainPass();
1825 privateCompileLinkPass();
1826 privateCompileSlowCases();
// Slow path for the register-file check above: call into the stub to grow
// the register file (or throw), then resume just after the fast-path check.
1828 if (m_codeBlock->codeType() == FunctionCode) {
1829 slowRegisterFileCheck.link(this);
1830 m_bytecodeIndex = 0; // emitCTICall will add to the map, but doesn't actually need this...
1831 emitCTICall(Interpreter::cti_register_file_check);
1833 // reset this, in order to guard its use with asserts
1834 m_bytecodeIndex = (unsigned)-1;
1836 jump(afterRegisterFileCheck);
1839 ASSERT(m_jmpTable.isEmpty());
// Copy the assembled code into an executable allocation; codeRef keeps the
// ExecutablePool alive for the lifetime of the generated code.
1841 RefPtr<ExecutablePool> allocator = m_globalData->poolForSize(m_assembler.size());
1842 void* code = m_assembler.executableCopy(allocator.get());
1843 JITCodeRef codeRef(code, allocator);
1845 PatchBuffer patchBuffer(code);
1847 // Translate vPC offsets into addresses in JIT generated code, for switch tables.
1848 for (unsigned i = 0; i < m_switches.size(); ++i) {
1849 SwitchRecord record = m_switches[i];
1850 unsigned bytecodeIndex = record.bytecodeIndex;
1852 if (record.type != SwitchRecord::String) {
1853 ASSERT(record.type == SwitchRecord::Immediate || record.type == SwitchRecord::Character);
1854 ASSERT(record.jumpTable.simpleJumpTable->branchOffsets.size() == record.jumpTable.simpleJumpTable->ctiOffsets.size());
// The "+ 3" presumably skips the switch opcode's own operand words so that
// branch offsets are relative to the instruction — TODO confirm opcode length.
1856 record.jumpTable.simpleJumpTable->ctiDefault = patchBuffer.addressOf(m_labels[bytecodeIndex + 3 + record.defaultOffset]);
1858 for (unsigned j = 0; j < record.jumpTable.simpleJumpTable->branchOffsets.size(); ++j) {
1859 unsigned offset = record.jumpTable.simpleJumpTable->branchOffsets[j];
// A zero branch offset marks an unused table slot; route it to the default.
1860 record.jumpTable.simpleJumpTable->ctiOffsets[j] = offset ? patchBuffer.addressOf(m_labels[bytecodeIndex + 3 + offset]) : record.jumpTable.simpleJumpTable->ctiDefault;
1863 ASSERT(record.type == SwitchRecord::String);
1865 record.jumpTable.stringJumpTable->ctiDefault = patchBuffer.addressOf(m_labels[bytecodeIndex + 3 + record.defaultOffset]);
1867 StringJumpTable::StringOffsetTable::iterator end = record.jumpTable.stringJumpTable->offsetTable.end();
1868 for (StringJumpTable::StringOffsetTable::iterator it = record.jumpTable.stringJumpTable->offsetTable.begin(); it != end; ++it) {
1869 unsigned offset = it->second.branchOffset;
// Same zero-means-default convention as the simple jump table above.
1870 it->second.ctiOffset = offset ? patchBuffer.addressOf(m_labels[bytecodeIndex + 3 + offset]) : record.jumpTable.stringJumpTable->ctiDefault;
// Resolve each exception handler's bytecode target to its native address.
1875 for (size_t i = 0; i < m_codeBlock->numberOfExceptionHandlers(); ++i) {
1876 HandlerInfo& handler = m_codeBlock->exceptionHandler(i);
1877 handler.nativeCode = patchBuffer.addressOf(m_labels[handler.target]);
// Link every recorded call site, and build the return-address -> bytecode
// index map (pcVector) used to recover the vPC after a stub call.
1880 m_codeBlock->pcVector().reserveCapacity(m_calls.size());
1881 for (Vector<CallRecord>::iterator iter = m_calls.begin(); iter != m_calls.end(); ++iter) {
1883 patchBuffer.link(iter->from, iter->to);
1884 m_codeBlock->pcVector().append(PC(patchBuffer.addressOf(iter->from), iter->bytecodeIndex));
1887 // Link absolute addresses for jsr
1888 for (Vector<JSRInfo>::iterator iter = m_jsrSites.begin(); iter != m_jsrSites.end(); ++iter)
1889 patchBuffer.setPtr(iter->storeLocation, patchBuffer.addressOf(iter->target));
// Record patch locations for the property-access stubs — or zero the info
// out when the optimization is compiled out.
1891 for (unsigned i = 0; i < m_codeBlock->numberOfStructureStubInfos(); ++i) {
1892 StructureStubInfo& info = m_codeBlock->structureStubInfo(i);
1893 #if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
1894 info.callReturnLocation = patchBuffer.addressOf(m_propertyAccessCompilationInfo[i].callReturnLocation);
1895 info.hotPathBegin = patchBuffer.addressOf(m_propertyAccessCompilationInfo[i].hotPathBegin);
1897 info.callReturnLocation = 0;
1898 info.hotPathBegin = 0;
// Likewise for the call-linking stubs.
1901 for (unsigned i = 0; i < m_codeBlock->numberOfCallLinkInfos(); ++i) {
1902 CallLinkInfo& info = m_codeBlock->callLinkInfo(i);
1903 #if ENABLE(JIT_OPTIMIZE_CALL)
1904 info.callReturnLocation = patchBuffer.addressOf(m_callStructureStubCompilationInfo[i].callReturnLocation);
1905 info.hotPathBegin = patchBuffer.addressOf(m_callStructureStubCompilationInfo[i].hotPathBegin);
1906 info.hotPathOther = patchBuffer.addressOf(m_callStructureStubCompilationInfo[i].hotPathOther);
1907 info.coldPathOther = patchBuffer.addressOf(m_callStructureStubCompilationInfo[i].coldPathOther);
1909 info.callReturnLocation = 0;
1910 info.hotPathBegin = 0;
1911 info.hotPathOther = 0;
1912 info.coldPathOther = 0;
// Publish the finished machine code on the CodeBlock.
1916 m_codeBlock->setJITCode(codeRef);
// Generates the process-wide trampolines shared by all JIT code: fast-path
// stubs for array-length and string-length property access (when
// JIT_OPTIMIZE_PROPERTY_ACCESS is enabled), and three virtual-call
// trampolines (pre-link, link, and fully virtual) used by the slow cases of
// op_call / op_call_eval / op_construct.  The code is copied into an
// executable pool owned by the Interpreter and the entry points are stored
// on the Interpreter for later use.
1919 void JIT::privateCompileCTIMachineTrampolines()
1921 #if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
1922 // (1) The first function provides fast property access for array length
1923 Label arrayLengthBegin = align();
1925 // Check eax is an array
1926 Jump array_failureCases1 = emitJumpIfNotJSCell(X86::eax);
// Vtable-pointer compare: only genuine JSArray cells take the fast path.
1927 Jump array_failureCases2 = jnePtr(Address(X86::eax), ImmPtr(m_interpreter->m_jsArrayVptr));
1929 // Checks out okay! - get the length from the storage
1930 loadPtr(Address(X86::eax, FIELD_OFFSET(JSArray, m_storage)), X86::eax);
1931 load32(Address(X86::eax, FIELD_OFFSET(ArrayStorage, m_length)), X86::eax);
// Lengths too large to encode as an immediate integer fall back to the stub.
1933 Jump array_failureCases3 = ja32(X86::eax, Imm32(JSImmediate::maxImmediateInt));
// Re-tag the raw length as an immediate integer: (value << 1) with the low
// tag bit set (eax + eax doubles, then + 1 sets the tag).
1935 add32(X86::eax, X86::eax);
1936 add32(Imm32(1), X86::eax);
1940 // (2) The second function provides fast property access for string length
1941 Label stringLengthBegin = align();
1943 // Check eax is a string
1944 Jump string_failureCases1 = emitJumpIfNotJSCell(X86::eax);
1945 Jump string_failureCases2 = jnePtr(Address(X86::eax), ImmPtr(m_interpreter->m_jsStringVptr));
1947 // Checks out okay! - get the length from the Ustring.
1948 loadPtr(Address(X86::eax, FIELD_OFFSET(JSString, m_value) + FIELD_OFFSET(UString, m_rep)), X86::eax);
1949 load32(Address(X86::eax, FIELD_OFFSET(UString::Rep, len)), X86::eax);
// Same immediate-integer range check and tagging as the array case above.
1951 Jump string_failureCases3 = ja32(X86::eax, Imm32(JSImmediate::maxImmediateInt));
1953 add32(X86::eax, X86::eax);
1954 add32(Imm32(1), X86::eax);
1959 // (3) Trampolines for the slow cases of op_call / op_call_eval / op_construct.
// Register contract (established by the call-site code): ecx holds the
// callee JSFunction*, edx the argument count.
1961 Label virtualCallPreLinkBegin = align();
1963 // Load the callee CodeBlock* into eax
1964 loadPtr(Address(X86::ecx, FIELD_OFFSET(JSFunction, m_body)), X86::eax);
1965 loadPtr(Address(X86::eax, FIELD_OFFSET(FunctionBodyNode, m_code)), X86::eax);
1966 Jump hasCodeBlock1 = jnzPtr(X86::eax);
// Not yet compiled: call out to cti_op_call_JSFunction, then re-fetch the
// callee (arg 1) and argument count (arg 3), which the stub call clobbers.
1968 restoreArgumentReference();
1969 Jump callJSFunction1 = call();
1970 emitGetJITStubArg(1, X86::ecx);
1971 emitGetJITStubArg(3, X86::edx);
1973 hasCodeBlock1.link(this);
1975 // Check argCount matches callee arity.
1976 Jump arityCheckOkay1 = je32(Address(X86::eax, FIELD_OFFSET(CodeBlock, m_numParameters)), X86::edx);
// Arity mismatch: call cti_op_call_arityCheck, which returns the (possibly
// moved) call frame in edx; re-fetch the clobbered stub arguments.
1978 emitPutJITStubArg(X86::ebx, 2);
1979 emitPutJITStubArg(X86::eax, 4);
1980 restoreArgumentReference();
1981 Jump callArityCheck1 = call();
1982 move(X86::edx, callFrameRegister);
1983 emitGetJITStubArg(1, X86::ecx);
1984 emitGetJITStubArg(3, X86::edx);
1986 arityCheckOkay1.link(this);
1988 compileOpCallInitializeCallFrame();
// Hand off to cti_vm_dontLazyLinkCall — this pre-link trampoline never
// links the call site.
1991 emitPutJITStubArg(X86::ebx, 2);
1992 restoreArgumentReference();
1993 Jump callDontLazyLinkCall = call();
// Second trampoline: identical shape, but finishes via cti_vm_lazyLinkCall
// so the call site gets linked on first use.
1998 Label virtualCallLinkBegin = align();
2000 // Load the callee CodeBlock* into eax
2001 loadPtr(Address(X86::ecx, FIELD_OFFSET(JSFunction, m_body)), X86::eax);
2002 loadPtr(Address(X86::eax, FIELD_OFFSET(FunctionBodyNode, m_code)), X86::eax);
2003 Jump hasCodeBlock2 = jnzPtr(X86::eax);
2005 restoreArgumentReference();
2006 Jump callJSFunction2 = call();
2007 emitGetJITStubArg(1, X86::ecx);
2008 emitGetJITStubArg(3, X86::edx);
2010 hasCodeBlock2.link(this);
2012 // Check argCount matches callee arity.
2013 Jump arityCheckOkay2 = je32(Address(X86::eax, FIELD_OFFSET(CodeBlock, m_numParameters)), X86::edx);
2015 emitPutJITStubArg(X86::ebx, 2);
2016 emitPutJITStubArg(X86::eax, 4);
2017 restoreArgumentReference();
2018 Jump callArityCheck2 = call();
2019 move(X86::edx, callFrameRegister);
2020 emitGetJITStubArg(1, X86::ecx);
2021 emitGetJITStubArg(3, X86::edx);
2023 arityCheckOkay2.link(this);
2025 compileOpCallInitializeCallFrame();
2028 emitPutJITStubArg(X86::ebx, 2);
2029 restoreArgumentReference();
2030 Jump callLazyLinkCall = call();
// Third trampoline: the fully virtual call — never links the call site,
// always dispatches through the callee's compiled code.
2035 Label virtualCallBegin = align();
2037 // Load the callee CodeBlock* into eax
2038 loadPtr(Address(X86::ecx, FIELD_OFFSET(JSFunction, m_body)), X86::eax);
2039 loadPtr(Address(X86::eax, FIELD_OFFSET(FunctionBodyNode, m_code)), X86::eax);
2040 Jump hasCodeBlock3 = jnzPtr(X86::eax);
2042 restoreArgumentReference();
2043 Jump callJSFunction3 = call();
2044 emitGetJITStubArg(1, X86::ecx);
2045 emitGetJITStubArg(3, X86::edx);
2047 hasCodeBlock3.link(this);
2049 // Check argCount matches callee arity.
2050 Jump arityCheckOkay3 = je32(Address(X86::eax, FIELD_OFFSET(CodeBlock, m_numParameters)), X86::edx);
2052 emitPutJITStubArg(X86::ebx, 2);
2053 emitPutJITStubArg(X86::eax, 4);
2054 restoreArgumentReference();
2055 Jump callArityCheck3 = call();
2056 move(X86::edx, callFrameRegister);
2057 emitGetJITStubArg(1, X86::ecx);
2058 emitGetJITStubArg(3, X86::edx);
2060 arityCheckOkay3.link(this);
2062 compileOpCallInitializeCallFrame();
2064 // load ctiCode from the new codeBlock.
// NOTE(review): the indirect jump through eax presumably follows on a line
// outside this excerpt; confirm.
2065 loadPtr(Address(X86::eax, FIELD_OFFSET(CodeBlock, m_jitCode)), X86::eax);
2069 // All trampolines constructed! copy the code, link up calls, and set the pointers on the Machine object.
2070 m_interpreter->m_executablePool = m_globalData->poolForSize(m_assembler.size());
2071 void* code = m_assembler.executableCopy(m_interpreter->m_executablePool.get());
2072 PatchBuffer patchBuffer(code);
2074 #if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
// All length fast-path failures fall back to the generic get_by_id stubs.
2075 patchBuffer.link(array_failureCases1, reinterpret_cast<void*>(Interpreter::cti_op_get_by_id_array_fail));
2076 patchBuffer.link(array_failureCases2, reinterpret_cast<void*>(Interpreter::cti_op_get_by_id_array_fail));
2077 patchBuffer.link(array_failureCases3, reinterpret_cast<void*>(Interpreter::cti_op_get_by_id_array_fail));
2078 patchBuffer.link(string_failureCases1, reinterpret_cast<void*>(Interpreter::cti_op_get_by_id_string_fail));
2079 patchBuffer.link(string_failureCases2, reinterpret_cast<void*>(Interpreter::cti_op_get_by_id_string_fail));
2080 patchBuffer.link(string_failureCases3, reinterpret_cast<void*>(Interpreter::cti_op_get_by_id_string_fail));
2082 m_interpreter->m_ctiArrayLengthTrampoline = patchBuffer.addressOf(arrayLengthBegin);
2083 m_interpreter->m_ctiStringLengthTrampoline = patchBuffer.addressOf(stringLengthBegin);
// Link the stub calls emitted inside the three call trampolines.
2085 patchBuffer.link(callArityCheck1, reinterpret_cast<void*>(Interpreter::cti_op_call_arityCheck));
2086 patchBuffer.link(callArityCheck2, reinterpret_cast<void*>(Interpreter::cti_op_call_arityCheck));
2087 patchBuffer.link(callArityCheck3, reinterpret_cast<void*>(Interpreter::cti_op_call_arityCheck));
2088 patchBuffer.link(callJSFunction1, reinterpret_cast<void*>(Interpreter::cti_op_call_JSFunction));
2089 patchBuffer.link(callJSFunction2, reinterpret_cast<void*>(Interpreter::cti_op_call_JSFunction));
2090 patchBuffer.link(callJSFunction3, reinterpret_cast<void*>(Interpreter::cti_op_call_JSFunction));
2091 patchBuffer.link(callDontLazyLinkCall, reinterpret_cast<void*>(Interpreter::cti_vm_dontLazyLinkCall));
2092 patchBuffer.link(callLazyLinkCall, reinterpret_cast<void*>(Interpreter::cti_vm_lazyLinkCall));
// Publish the trampoline entry points on the Interpreter.
2094 m_interpreter->m_ctiVirtualCallPreLink = patchBuffer.addressOf(virtualCallPreLinkBegin);
2095 m_interpreter->m_ctiVirtualCallLink = patchBuffer.addressOf(virtualCallLinkBegin);
2096 m_interpreter->m_ctiVirtualCall = patchBuffer.addressOf(virtualCallBegin);
// Emits code to read register `index` of a JSVariableObject into `dst`:
// dst = variableObject->d->registers[index].  `dst` is used as the scratch
// register for the pointer chase, so its previous contents are clobbered.
2099 void JIT::emitGetVariableObjectRegister(RegisterID variableObject, int index, RegisterID dst)
2101 loadPtr(Address(variableObject, FIELD_OFFSET(JSVariableObject, d)), dst);
2102 loadPtr(Address(dst, FIELD_OFFSET(JSVariableObject::JSVariableObjectData, registers)), dst);
2103 loadPtr(Address(dst, index * sizeof(Register)), dst);
// Emits code to write `src` into register `index` of a JSVariableObject:
// variableObject->d->registers[index] = src.  Note: `variableObject` itself
// is reused as the scratch register for the pointer chase, so the caller's
// object pointer in that register is clobbered.
2106 void JIT::emitPutVariableObjectRegister(RegisterID src, RegisterID variableObject, int index)
2108 loadPtr(Address(variableObject, FIELD_OFFSET(JSVariableObject, d)), variableObject);
2109 loadPtr(Address(variableObject, FIELD_OFFSET(JSVariableObject::JSVariableObjectData, registers)), variableObject);
2110 storePtr(src, Address(variableObject, index * sizeof(Register)));
2115 #endif // ENABLE(JIT)