/*
 * Copyright (C) 2009 Apple Inc. All rights reserved.
 * Copyright (C) 2010 Patrick Gansterer <paroga@paroga.com>
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#if ENABLE(JIT)
#include "JIT.h"

#include "Arguments.h"
#include "JITInlineMethods.h"
#include "JITStubCall.h"
#include "JSArray.h"
#include "JSCell.h"
#include "JSFunction.h"
#include "JSPropertyNameIterator.h"
#include "LinkBuffer.h"

namespace JSC {

#if USE(JSVALUE64)

#define RECORD_JUMP_TARGET(targetOffset) \
    do { m_labels[m_bytecodeOffset + (targetOffset)].used(); } while (false)

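// Generates the shared trampolines that JIT-compiled code calls through: the soft-modulo
// helper, the string-length fast path, the virtual call/construct (link) stubs, and the
// native call thunks. The generated code is copied into an ExecutablePool and each entry
// point is recorded in the TrampolineStructure.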
void JIT::privateCompileCTIMachineTrampolines(RefPtr<ExecutablePool>* executablePool, JSGlobalData* globalData, TrampolineStructure* trampolines)
{
#if ENABLE(JIT_USE_SOFT_MODULO)
    Label softModBegin = align();
    softModulo();
#endif
#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
    // Trampoline providing fast property access for string length.
    Label stringLengthBegin = align();

    // Check that regT0 is a string.
    Jump string_failureCases1 = emitJumpIfNotJSCell(regT0);
    Jump string_failureCases2 = branchPtr(NotEqual, Address(regT0), ImmPtr(m_globalData->jsStringVPtr));

    // Checks out okay! Get the length from the UString.
    load32(Address(regT0, OBJECT_OFFSETOF(JSString, m_length)), regT0);

    Jump string_failureCases3 = branch32(Above, regT0, Imm32(JSImmediate::maxImmediateInt));

    // regT0 contains a 64-bit value (positive, zero-extended), so no sign extension is needed here.
    emitFastArithIntToImmNoCheck(regT0, regT0);

    ret();
#endif

    // Trampolines for the slow cases of op_call / op_call_eval / op_construct.
    COMPILE_ASSERT(sizeof(CodeType) == 4, CodeTypeEnumMustBe32Bit);

    // VirtualCallLink Trampoline
    // regT0 holds callee, regT1 holds argCount.  regT2 will hold the FunctionExecutable.
    JumpList callLinkFailures;
    Label virtualCallLinkBegin = align();
    compileOpCallInitializeCallFrame();
    preserveReturnAddressAfterCall(regT3);
    emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);
    restoreArgumentReference();
    Call callLazyLinkCall = call();
    callLinkFailures.append(branchTestPtr(Zero, regT0));
    restoreReturnAddressBeforeReturn(regT3);
    emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT1);
    jump(regT0);

    // VirtualConstructLink Trampoline
    // regT0 holds callee, regT1 holds argCount.  regT2 will hold the FunctionExecutable.
    Label virtualConstructLinkBegin = align();
    compileOpCallInitializeCallFrame();
    preserveReturnAddressAfterCall(regT3);
    emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);
    restoreArgumentReference();
    Call callLazyLinkConstruct = call();
    callLinkFailures.append(branchTestPtr(Zero, regT0));
    restoreReturnAddressBeforeReturn(regT3);
    emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT1);
    jump(regT0);

    // VirtualCall Trampoline
    // regT0 holds callee, regT1 holds argCount.  regT2 will hold the FunctionExecutable.
    Label virtualCallBegin = align();
    compileOpCallInitializeCallFrame();

    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);

    Jump hasCodeBlock3 = branch32(GreaterThanOrEqual, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParametersForCall)), Imm32(0));
    preserveReturnAddressAfterCall(regT3);
    restoreArgumentReference();
    Call callCompileCall = call();
    callLinkFailures.append(branchTestPtr(Zero, regT0));
    emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT1);
    restoreReturnAddressBeforeReturn(regT3);
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
    hasCodeBlock3.link(this);

    loadPtr(Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_jitCodeForCallWithArityCheck)), regT0);
    jump(regT0);

    // VirtualConstruct Trampoline
    // regT0 holds callee, regT1 holds argCount.  regT2 will hold the FunctionExecutable.
    Label virtualConstructBegin = align();
    compileOpCallInitializeCallFrame();

    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);

    Jump hasCodeBlock4 = branch32(GreaterThanOrEqual, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParametersForConstruct)), Imm32(0));
    preserveReturnAddressAfterCall(regT3);
    restoreArgumentReference();
    Call callCompileConstruct = call();
    callLinkFailures.append(branchTestPtr(Zero, regT0));
    emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT1);
    restoreReturnAddressBeforeReturn(regT3);
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
    hasCodeBlock4.link(this);

    loadPtr(Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_jitCodeForConstructWithArityCheck)), regT0);
    jump(regT0);

    // If the parser fails we want to be able to keep going,
    // so we handle this as a parse failure.
    callLinkFailures.link(this);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT1);
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);
    restoreReturnAddressBeforeReturn(regT1);
    move(ImmPtr(&globalData->exceptionLocation), regT2);
    storePtr(regT1, regT2);
    poke(callFrameRegister, 1 + OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof(void*));
    poke(ImmPtr(FunctionPtr(ctiVMThrowTrampoline).value()));
    ret();

    // NativeCall Trampoline
    Label nativeCallThunk = privateCompileCTINativeCall(globalData);
    Label nativeConstructThunk = privateCompileCTINativeCall(globalData, true);

#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
    Call string_failureCases1Call = makeTailRecursiveCall(string_failureCases1);
    Call string_failureCases2Call = makeTailRecursiveCall(string_failureCases2);
    Call string_failureCases3Call = makeTailRecursiveCall(string_failureCases3);
#endif

    // All trampolines constructed! Copy the code, link up calls, and set the pointers on the Machine object.
    LinkBuffer patchBuffer(this, m_globalData->executableAllocator.poolForSize(m_assembler.size()), 0);

#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
    patchBuffer.link(string_failureCases1Call, FunctionPtr(cti_op_get_by_id_string_fail));
    patchBuffer.link(string_failureCases2Call, FunctionPtr(cti_op_get_by_id_string_fail));
    patchBuffer.link(string_failureCases3Call, FunctionPtr(cti_op_get_by_id_string_fail));
#endif
#if ENABLE(JIT_OPTIMIZE_CALL)
    patchBuffer.link(callLazyLinkCall, FunctionPtr(cti_vm_lazyLinkCall));
    patchBuffer.link(callLazyLinkConstruct, FunctionPtr(cti_vm_lazyLinkConstruct));
#endif
    patchBuffer.link(callCompileCall, FunctionPtr(cti_op_call_jitCompile));
    patchBuffer.link(callCompileConstruct, FunctionPtr(cti_op_construct_jitCompile));

    CodeRef finalCode = patchBuffer.finalizeCode();
    *executablePool = finalCode.m_executablePool;

    trampolines->ctiVirtualCallLink = patchBuffer.trampolineAt(virtualCallLinkBegin);
    trampolines->ctiVirtualConstructLink = patchBuffer.trampolineAt(virtualConstructLinkBegin);
    trampolines->ctiVirtualCall = patchBuffer.trampolineAt(virtualCallBegin);
    trampolines->ctiVirtualConstruct = patchBuffer.trampolineAt(virtualConstructBegin);
    trampolines->ctiNativeCall = patchBuffer.trampolineAt(nativeCallThunk);
    trampolines->ctiNativeConstruct = patchBuffer.trampolineAt(nativeConstructThunk);
#if ENABLE(JIT_USE_SOFT_MODULO)
    trampolines->ctiSoftModulo = patchBuffer.trampolineAt(softModBegin);
#endif
#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
    trampolines->ctiStringLengthTrampoline = patchBuffer.trampolineAt(stringLengthBegin);
#endif
}

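// Builds the thunk used to call a host (native) function: it stores the caller's scope
// chain and return PC into the callee's call frame, passes the ExecState* in the first
// argument register of the platform ABI, calls through the NativeExecutable's m_function
// or m_constructor, and checks for a pending exception on return.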
JIT::Label JIT::privateCompileCTINativeCall(JSGlobalData* globalData, bool isConstruct)
{
    int executableOffsetToFunction = isConstruct ? OBJECT_OFFSETOF(NativeExecutable, m_constructor) : OBJECT_OFFSETOF(NativeExecutable, m_function);

    Label nativeCallThunk = align();

    emitPutImmediateToCallFrameHeader(0, RegisterFile::CodeBlock);

#if CPU(X86_64)
    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT0);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT0);
    emitPutToCallFrameHeader(regT1, RegisterFile::ScopeChain);

    peek(regT1);
    emitPutToCallFrameHeader(regT1, RegisterFile::ReturnPC);

    // Calling convention:      f(edi, esi, edx, ecx, ...);
    // Host function signature: f(ExecState*);
    move(callFrameRegister, X86Registers::edi);

    subPtr(Imm32(16 - sizeof(void*)), stackPointerRegister); // Align stack after call.

    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, X86Registers::esi);
    loadPtr(Address(X86Registers::esi, OBJECT_OFFSETOF(JSFunction, m_executable)), X86Registers::r9);
    move(regT0, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.
    call(Address(X86Registers::r9, executableOffsetToFunction));

    addPtr(Imm32(16 - sizeof(void*)), stackPointerRegister);

#elif CPU(ARM)
    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT2);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT2);
    emitPutToCallFrameHeader(regT1, RegisterFile::ScopeChain);

    preserveReturnAddressAfterCall(regT3); // Callee preserved
    emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);

    // Calling convention:      f(r0 == regT0, r1 == regT1, ...);
    // Host function signature: f(ExecState*);
    move(callFrameRegister, ARMRegisters::r0);

    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, ARMRegisters::r1);
    move(regT2, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.
    loadPtr(Address(ARMRegisters::r1, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
    call(Address(regT2, executableOffsetToFunction));

    restoreReturnAddressBeforeReturn(regT3);

#elif CPU(MIPS)
    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT0);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT0);
    emitPutToCallFrameHeader(regT1, RegisterFile::ScopeChain);

    preserveReturnAddressAfterCall(regT3); // Callee preserved
    emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);

    // Calling convention:      f(a0, a1, a2, a3);
    // Host function signature: f(ExecState*);

    // Allocate stack space for 16 bytes (8-byte aligned)
    // 16 bytes (unused) for 4 arguments
    subPtr(Imm32(16), stackPointerRegister);

    // Setup arg0
    move(callFrameRegister, MIPSRegisters::a0);

    // Call
    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, MIPSRegisters::a2);
    loadPtr(Address(MIPSRegisters::a2, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
    move(regT0, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.
    call(Address(regT2, executableOffsetToFunction));

    // Restore stack space
    addPtr(Imm32(16), stackPointerRegister);

    restoreReturnAddressBeforeReturn(regT3);

#elif ENABLE(JIT_OPTIMIZE_NATIVE_CALL)
#error "JIT_OPTIMIZE_NATIVE_CALL not yet supported on this platform."
#else
    UNUSED_PARAM(executableOffsetToFunction);
    breakpoint();
#endif

    // Check for an exception
    loadPtr(&(globalData->exception), regT2);
    Jump exceptionHandler = branchTestPtr(NonZero, regT2);

    // Return.
    ret();

    // Handle an exception
    exceptionHandler.link(this);

    // Grab the return address.
    preserveReturnAddressAfterCall(regT1);

    move(ImmPtr(&globalData->exceptionLocation), regT2);
    storePtr(regT1, regT2);
    poke(callFrameRegister, OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof(void*));

    // Set the return address.
    move(ImmPtr(FunctionPtr(ctiVMThrowTrampoline).value()), regT1);
    restoreReturnAddressBeforeReturn(regT1);

    ret();

    return nativeCallThunk;
}

JIT::CodePtr JIT::privateCompileCTINativeCall(PassRefPtr<ExecutablePool>, JSGlobalData* globalData, NativeFunction)
{
    return globalData->jitStubs->ctiNativeCall();
}

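// Copies src to dst, picking the cheapest strategy: constants are stored directly,
// moves touching the cached result register go through the get/put helpers so the
// register cache stays consistent, and everything else is a plain memory-to-memory
// copy via regT1.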
void JIT::emit_op_mov(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    if (m_codeBlock->isConstantRegisterIndex(src)) {
        storePtr(ImmPtr(JSValue::encode(getConstantOperand(src))), Address(callFrameRegister, dst * sizeof(Register)));
        if (dst == m_lastResultBytecodeRegister)
            killLastResultRegister();
    } else if ((src == m_lastResultBytecodeRegister) || (dst == m_lastResultBytecodeRegister)) {
        // If either the src or dst is the cached register, go through
        // the get/put registers to make sure we track this correctly.
        emitGetVirtualRegister(src, regT0);
        emitPutVirtualRegister(dst);
    } else {
        // Perform the copy via regT1; do not disturb any mapping in regT0.
        loadPtr(Address(callFrameRegister, src * sizeof(Register)), regT1);
        storePtr(regT1, Address(callFrameRegister, dst * sizeof(Register)));
    }
}

void JIT::emit_op_end(Instruction* currentInstruction)
{
    if (m_codeBlock->needsFullScopeChain())
        JITStubCall(this, cti_op_end).call();

    ASSERT(returnValueRegister != callFrameRegister);
    emitGetVirtualRegister(currentInstruction[1].u.operand, returnValueRegister);
    restoreReturnAddressBeforeReturn(Address(callFrameRegister, RegisterFile::ReturnPC * static_cast<int>(sizeof(Register))));
    ret();
}

void JIT::emit_op_jmp(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[1].u.operand;
    addJump(jump(), target);
    RECORD_JUMP_TARGET(target);
}

void JIT::emit_op_loop_if_lesseq(Instruction* currentInstruction)
{
    emitTimeoutCheck();

    unsigned op1 = currentInstruction[1].u.operand;
    unsigned op2 = currentInstruction[2].u.operand;
    unsigned target = currentInstruction[3].u.operand;
    if (isOperandConstantImmediateInt(op2)) {
        emitGetVirtualRegister(op1, regT0);
        emitJumpSlowCaseIfNotImmediateInteger(regT0);
        int32_t op2imm = getConstantOperandImmediateInt(op2);
        addJump(branch32(LessThanOrEqual, regT0, Imm32(op2imm)), target);
    } else {
        emitGetVirtualRegisters(op1, regT0, op2, regT1);
        emitJumpSlowCaseIfNotImmediateInteger(regT0);
        emitJumpSlowCaseIfNotImmediateInteger(regT1);
        addJump(branch32(LessThanOrEqual, regT0, regT1), target);
    }
}

void JIT::emit_op_new_object(Instruction* currentInstruction)
{
    JITStubCall(this, cti_op_new_object).call(currentInstruction[1].u.operand);
}

void JIT::emit_op_instanceof(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;
    unsigned baseVal = currentInstruction[3].u.operand;
    unsigned proto = currentInstruction[4].u.operand;

    // Load the operands (baseVal, proto, and value respectively) into registers.
    // We use regT0 for baseVal since we will be done with this first, and we can then use it for the result.
    emitGetVirtualRegister(value, regT2);
    emitGetVirtualRegister(baseVal, regT0);
    emitGetVirtualRegister(proto, regT1);

    // Check that baseVal & proto are cells.
    emitJumpSlowCaseIfNotJSCell(regT2, value);
    emitJumpSlowCaseIfNotJSCell(regT0, baseVal);
    emitJumpSlowCaseIfNotJSCell(regT1, proto);

    // Check that prototype is an object
    loadPtr(Address(regT1, OBJECT_OFFSETOF(JSCell, m_structure)), regT3);
    addSlowCase(branch8(NotEqual, Address(regT3, OBJECT_OFFSETOF(Structure, m_typeInfo.m_type)), Imm32(ObjectType)));

    // Check that baseVal 'ImplementsDefaultHasInstance'.
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT0);
    addSlowCase(branchTest8(Zero, Address(regT0, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(ImplementsDefaultHasInstance)));

    // Optimistically load the result true, and start looping.
    // Initially, regT1 still contains proto and regT2 still contains value.
    // As we loop regT2 will be updated with its prototype, recursively walking the prototype chain.
    move(ImmPtr(JSValue::encode(jsBoolean(true))), regT0);
    Label loop(this);

    // Load the prototype of the object in regT2.  If this is equal to regT1 - WIN!
    // Otherwise, check if we've hit null - if we have then drop out of the loop, if not go again.
    loadPtr(Address(regT2, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
    loadPtr(Address(regT2, OBJECT_OFFSETOF(Structure, m_prototype)), regT2);
    Jump isInstance = branchPtr(Equal, regT2, regT1);
    emitJumpIfJSCell(regT2).linkTo(loop, this);

    // We get here either by dropping out of the loop, or if value was not an Object.  Result is false.
    move(ImmPtr(JSValue::encode(jsBoolean(false))), regT0);

    // isInstance jumps right down to here, to skip setting the result to false (it has already set true).
    isInstance.link(this);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_call(Instruction* currentInstruction)
{
    compileOpCall(op_call, currentInstruction, m_callLinkInfoIndex++);
}

void JIT::emit_op_call_eval(Instruction* currentInstruction)
{
    compileOpCall(op_call_eval, currentInstruction, m_callLinkInfoIndex++);
}

void JIT::emit_op_call_varargs(Instruction* currentInstruction)
{
    compileOpCallVarargs(currentInstruction);
}

void JIT::emit_op_construct(Instruction* currentInstruction)
{
    compileOpCall(op_construct, currentInstruction, m_callLinkInfoIndex++);
}

void JIT::emit_op_get_global_var(Instruction* currentInstruction)
{
    JSVariableObject* globalObject = m_codeBlock->globalObject();
    move(ImmPtr(globalObject), regT0);
    emitGetVariableObjectRegister(regT0, currentInstruction[2].u.operand, regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_put_global_var(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT1);
    JSVariableObject* globalObject = m_codeBlock->globalObject();
    move(ImmPtr(globalObject), regT0);
    emitPutVariableObjectRegister(regT1, regT0, currentInstruction[1].u.operand);
}

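// Walks 'skip' links up the scope chain (allowing for a top-level activation that may
// not have been created yet) and reads the variable out of the resulting variable
// object's register storage.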
void JIT::emit_op_get_scoped_var(Instruction* currentInstruction)
{
    int skip = currentInstruction[3].u.operand;

    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT0);
    bool checkTopLevel = m_codeBlock->codeType() == FunctionCode && m_codeBlock->needsFullScopeChain();
    ASSERT(skip || !checkTopLevel);
    if (checkTopLevel && skip--) {
        Jump activationNotCreated;
        if (checkTopLevel)
            activationNotCreated = branchTestPtr(Zero, addressFor(m_codeBlock->activationRegister()));
        loadPtr(Address(regT0, OBJECT_OFFSETOF(ScopeChainNode, next)), regT0);
        activationNotCreated.link(this);
    }
    while (skip--)
        loadPtr(Address(regT0, OBJECT_OFFSETOF(ScopeChainNode, next)), regT0);

    loadPtr(Address(regT0, OBJECT_OFFSETOF(ScopeChainNode, object)), regT0);
    emitGetVariableObjectRegister(regT0, currentInstruction[2].u.operand, regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_put_scoped_var(Instruction* currentInstruction)
{
    int skip = currentInstruction[2].u.operand;

    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1);
    emitGetVirtualRegister(currentInstruction[3].u.operand, regT0);
    bool checkTopLevel = m_codeBlock->codeType() == FunctionCode && m_codeBlock->needsFullScopeChain();
    ASSERT(skip || !checkTopLevel);
    if (checkTopLevel && skip--) {
        Jump activationNotCreated;
        if (checkTopLevel)
            activationNotCreated = branchTestPtr(Zero, addressFor(m_codeBlock->activationRegister()));
        loadPtr(Address(regT1, OBJECT_OFFSETOF(ScopeChainNode, next)), regT1);
        activationNotCreated.link(this);
    }
    while (skip--)
        loadPtr(Address(regT1, OBJECT_OFFSETOF(ScopeChainNode, next)), regT1);

    loadPtr(Address(regT1, OBJECT_OFFSETOF(ScopeChainNode, object)), regT1);
    emitPutVariableObjectRegister(regT0, regT1, currentInstruction[1].u.operand);
}

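// Calls out to the stub if either the activation or the arguments object was actually
// created; when neither lazily-created object exists there is nothing to tear off.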
void JIT::emit_op_tear_off_activation(Instruction* currentInstruction)
{
    unsigned activation = currentInstruction[1].u.operand;
    unsigned arguments = currentInstruction[2].u.operand;
    Jump activationCreated = branchTestPtr(NonZero, addressFor(activation));
    Jump argumentsNotCreated = branchTestPtr(Zero, addressFor(arguments));
    activationCreated.link(this);
    JITStubCall stubCall(this, cti_op_tear_off_activation);
    stubCall.addArgument(activation, regT2);
    stubCall.addArgument(unmodifiedArgumentsRegister(arguments), regT2);
    stubCall.call();
    argumentsNotCreated.link(this);
}

void JIT::emit_op_tear_off_arguments(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;

    Jump argsNotCreated = branchTestPtr(Zero, Address(callFrameRegister, sizeof(Register) * (unmodifiedArgumentsRegister(dst))));
    JITStubCall stubCall(this, cti_op_tear_off_arguments);
    stubCall.addArgument(unmodifiedArgumentsRegister(dst), regT2);
    stubCall.call();
    argsNotCreated.link(this);
}

void JIT::emit_op_ret(Instruction* currentInstruction)
{
    // We could JIT generate the deref, only calling out to C when the refcount hits zero.
    if (m_codeBlock->needsFullScopeChain()) {
        Jump activationNotCreated = branchTestPtr(Zero, addressFor(m_codeBlock->activationRegister()));
        JITStubCall(this, cti_op_ret_scopeChain).call();
        activationNotCreated.link(this);
    }
    ASSERT(callFrameRegister != regT1);
    ASSERT(regT1 != returnValueRegister);
    ASSERT(returnValueRegister != callFrameRegister);

    // Return the result in %eax.
    emitGetVirtualRegister(currentInstruction[1].u.operand, returnValueRegister);

    // Grab the return address.
    emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT1);

    // Restore our caller's "r".
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);

    // Return.
    restoreReturnAddressBeforeReturn(regT1);
    ret();
}

void JIT::emit_op_ret_object_or_this(Instruction* currentInstruction)
{
    // We could JIT generate the deref, only calling out to C when the refcount hits zero.
    if (m_codeBlock->needsFullScopeChain()) {
        Jump activationNotCreated = branchTestPtr(Zero, addressFor(m_codeBlock->activationRegister()));
        JITStubCall(this, cti_op_ret_scopeChain).call();
        activationNotCreated.link(this);
    }

    ASSERT(callFrameRegister != regT1);
    ASSERT(regT1 != returnValueRegister);
    ASSERT(returnValueRegister != callFrameRegister);

    // Return the result in %eax.
    emitGetVirtualRegister(currentInstruction[1].u.operand, returnValueRegister);
    Jump notJSCell = emitJumpIfNotJSCell(returnValueRegister);
    loadPtr(Address(returnValueRegister, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
    Jump notObject = branch8(NotEqual, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo) + OBJECT_OFFSETOF(TypeInfo, m_type)), Imm32(ObjectType));

    // Grab the return address.
    emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT1);

    // Restore our caller's "r".
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);

    // Return.
    restoreReturnAddressBeforeReturn(regT1);
    ret();

    // Return 'this' in %eax.
    notJSCell.link(this);
    notObject.link(this);
    emitGetVirtualRegister(currentInstruction[2].u.operand, returnValueRegister);

    // Grab the return address.
    emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT1);

    // Restore our caller's "r".
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);

    // Return.
    restoreReturnAddressBeforeReturn(regT1);
    ret();
}

void JIT::emit_op_new_array(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_new_array);
    stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
    stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_resolve(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_resolve);
    stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_to_primitive(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);

    Jump isImm = emitJumpIfNotJSCell(regT0);
    addSlowCase(branchPtr(NotEqual, Address(regT0), ImmPtr(m_globalData->jsStringVPtr)));
    isImm.link(this);

    if (dst != src)
        emitPutVirtualRegister(dst);
}

void JIT::emit_op_strcat(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_strcat);
    stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
    stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_resolve_base(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, currentInstruction[3].u.operand ? cti_op_resolve_base_strict_put : cti_op_resolve_base);
    stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_ensure_property_exists(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_ensure_property_exists);
    stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
    stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_resolve_skip(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_resolve_skip);
    stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
    stubCall.call(currentInstruction[1].u.operand);
}

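// Inline cache for resolving a global variable: compare the global object's Structure
// against the cached one and, on a hit, load the value from the external property
// storage at the cached offset. A miss falls through to the slow case, which performs
// the full lookup and can refill the cached structure and offset.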
void JIT::emit_op_resolve_global(Instruction* currentInstruction, bool)
{
    // Fast case
    void* globalObject = m_codeBlock->globalObject();
    unsigned currentIndex = m_globalResolveInfoIndex++;
    void* structureAddress = &(m_codeBlock->globalResolveInfo(currentIndex).structure);
    void* offsetAddr = &(m_codeBlock->globalResolveInfo(currentIndex).offset);

    // Check Structure of global object
    move(ImmPtr(globalObject), regT0);
    loadPtr(structureAddress, regT1);
    addSlowCase(branchPtr(NotEqual, regT1, Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)))); // Structures don't match

    // Load cached property
    // Assume that the global object always uses external storage.
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSGlobalObject, m_externalStorage)), regT0);
    load32(offsetAddr, regT1);
    loadPtr(BaseIndex(regT0, regT1, ScalePtr), regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_resolve_global(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    Identifier* ident = &m_codeBlock->identifier(currentInstruction[2].u.operand);

    unsigned currentIndex = m_globalResolveInfoIndex++;

    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_resolve_global);
    stubCall.addArgument(ImmPtr(ident));
    stubCall.addArgument(Imm32(currentIndex));
    stubCall.addArgument(regT0);
    stubCall.call(dst);
}

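// Logical not via tag-bit twiddling: the first xor strips the bool tag, so for a real
// boolean only the payload bit can remain set; any other bit set sends us to the slow
// case. The second xor restores the tag with the payload bit inverted.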
void JIT::emit_op_not(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);
    xorPtr(Imm32(static_cast<int32_t>(JSImmediate::FullTagTypeBool)), regT0);
    addSlowCase(branchTestPtr(NonZero, regT0, Imm32(static_cast<int32_t>(~JSImmediate::ExtendedPayloadBitBoolValue))));
    xorPtr(Imm32(static_cast<int32_t>(JSImmediate::FullTagTypeBool | JSImmediate::ExtendedPayloadBitBoolValue)), regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

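// Branch if the operand is falsy, without a stub call where possible: immediate zero
// and false take the jump, other immediate integers and true fall through, and any
// remaining value (strings, objects, doubles) is handled by the slow case.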
void JIT::emit_op_jfalse(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[2].u.operand;
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);

    addJump(branchPtr(Equal, regT0, ImmPtr(JSValue::encode(jsNumber(0)))), target);
    Jump isNonZero = emitJumpIfImmediateInteger(regT0);

    addJump(branchPtr(Equal, regT0, ImmPtr(JSValue::encode(jsBoolean(false)))), target);
    addSlowCase(branchPtr(NotEqual, regT0, ImmPtr(JSValue::encode(jsBoolean(true)))));

    isNonZero.link(this);
    RECORD_JUMP_TARGET(target);
}

void JIT::emit_op_jeq_null(Instruction* currentInstruction)
{
    unsigned src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
    addJump(branchTest8(NonZero, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(MasqueradesAsUndefined)), target);
    Jump wasNotImmediate = jump();

    // Now handle the immediate cases - undefined & null
    isImmediate.link(this);
    andPtr(Imm32(~JSImmediate::ExtendedTagBitUndefined), regT0);
    addJump(branchPtr(Equal, regT0, ImmPtr(JSValue::encode(jsNull()))), target);

    wasNotImmediate.link(this);
    RECORD_JUMP_TARGET(target);
}

void JIT::emit_op_jneq_null(Instruction* currentInstruction)
{
    unsigned src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
    addJump(branchTest8(Zero, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(MasqueradesAsUndefined)), target);
    Jump wasNotImmediate = jump();

    // Now handle the immediate cases - undefined & null
    isImmediate.link(this);
    andPtr(Imm32(~JSImmediate::ExtendedTagBitUndefined), regT0);
    addJump(branchPtr(NotEqual, regT0, ImmPtr(JSValue::encode(jsNull()))), target);

    wasNotImmediate.link(this);
    RECORD_JUMP_TARGET(target);
}

void JIT::emit_op_jneq_ptr(Instruction* currentInstruction)
{
    unsigned src = currentInstruction[1].u.operand;
    JSCell* ptr = currentInstruction[2].u.jsCell;
    unsigned target = currentInstruction[3].u.operand;

    emitGetVirtualRegister(src, regT0);
    addJump(branchPtr(NotEqual, regT0, ImmPtr(JSValue::encode(JSValue(ptr)))), target);

    RECORD_JUMP_TARGET(target);
}

void JIT::emit_op_jsr(Instruction* currentInstruction)
{
    int retAddrDst = currentInstruction[1].u.operand;
    int target = currentInstruction[2].u.operand;
    DataLabelPtr storeLocation = storePtrWithPatch(ImmPtr(0), Address(callFrameRegister, sizeof(Register) * retAddrDst));
    addJump(jump(), target);
    m_jsrSites.append(JSRInfo(storeLocation, label()));
    killLastResultRegister();
    RECORD_JUMP_TARGET(target);
}

void JIT::emit_op_sret(Instruction* currentInstruction)
{
    jump(Address(callFrameRegister, sizeof(Register) * currentInstruction[1].u.operand));
    killLastResultRegister();
}

void JIT::emit_op_eq(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
    set32(Equal, regT1, regT0, regT0);
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_bitnot(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);
    emitJumpSlowCaseIfNotImmediateInteger(regT0);
    not32(regT0);
    emitFastArithIntToImmNoCheck(regT0, regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_resolve_with_base(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_resolve_with_base);
    stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[3].u.operand)));
    stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
    stubCall.call(currentInstruction[2].u.operand);
}

void JIT::emit_op_new_func_exp(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_new_func_exp);
    stubCall.addArgument(ImmPtr(m_codeBlock->functionExpr(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_jtrue(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[2].u.operand;
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);

    Jump isZero = branchPtr(Equal, regT0, ImmPtr(JSValue::encode(jsNumber(0))));
    addJump(emitJumpIfImmediateInteger(regT0), target);

    addJump(branchPtr(Equal, regT0, ImmPtr(JSValue::encode(jsBoolean(true)))), target);
    addSlowCase(branchPtr(NotEqual, regT0, ImmPtr(JSValue::encode(jsBoolean(false)))));

    isZero.link(this);
    RECORD_JUMP_TARGET(target);
}

void JIT::emit_op_neq(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
    set32(NotEqual, regT1, regT0, regT0);
    emitTagAsBoolImmediate(regT0);

    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_bitxor(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
    xorPtr(regT1, regT0);
    emitFastArithReTagImmediate(regT0, regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_bitor(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
    orPtr(regT1, regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_throw(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_throw);
    stubCall.addArgument(currentInstruction[1].u.operand, regT2);
    stubCall.call();
    ASSERT(regT0 == returnValueRegister);
#ifndef NDEBUG
    // cti_op_throw always changes its return address,
    // so this point in the code should never be reached.
    breakpoint();
#endif
}

void JIT::emit_op_get_pnames(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int i = currentInstruction[3].u.operand;
    int size = currentInstruction[4].u.operand;
    int breakTarget = currentInstruction[5].u.operand;

    JumpList isNotObject;

    emitGetVirtualRegister(base, regT0);
    if (!m_codeBlock->isKnownNotImmediate(base))
        isNotObject.append(emitJumpIfNotJSCell(regT0));
    if (base != m_codeBlock->thisRegister() || m_codeBlock->isStrictMode()) {
        loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
        isNotObject.append(branch8(NotEqual, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo.m_type)), Imm32(ObjectType)));
    }

    // We could inline the case where you have a valid cache, but
    // this call doesn't seem to be hot.
    Label isObject(this);
    JITStubCall getPnamesStubCall(this, cti_op_get_pnames);
    getPnamesStubCall.addArgument(regT0);
    getPnamesStubCall.call(dst);
    load32(Address(regT0, OBJECT_OFFSETOF(JSPropertyNameIterator, m_jsStringsSize)), regT3);
    store32(Imm32(0), addressFor(i));
    store32(regT3, addressFor(size));
    Jump end = jump();

    isNotObject.link(this);
    move(regT0, regT1);
    and32(Imm32(~JSImmediate::ExtendedTagBitUndefined), regT1);
    addJump(branch32(Equal, regT1, Imm32(JSImmediate::FullTagTypeNull)), breakTarget);

    JITStubCall toObjectStubCall(this, cti_to_object);
    toObjectStubCall.addArgument(regT0);
    toObjectStubCall.call(base);
    jump().linkTo(isObject, this);

    end.link(this);
}

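// One step of a for..in loop: load key i from the iterator's cached JSString vector,
// store it to dst and bump i, then validate the cache by checking the base's Structure
// and each Structure in the cached prototype chain. If the cache can't prove the key
// is still present, fall back to cti_has_property.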
void JIT::emit_op_next_pname(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int i = currentInstruction[3].u.operand;
    int size = currentInstruction[4].u.operand;
    int it = currentInstruction[5].u.operand;
    int target = currentInstruction[6].u.operand;

    JumpList callHasProperty;

    Label begin(this);
    load32(addressFor(i), regT0);
    Jump end = branch32(Equal, regT0, addressFor(size));

    // Grab key @ i
    loadPtr(addressFor(it), regT1);
    loadPtr(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_jsStrings)), regT2);

    loadPtr(BaseIndex(regT2, regT0, TimesEight), regT2);

    emitPutVirtualRegister(dst, regT2);

    // Increment i
    add32(Imm32(1), regT0);
    store32(regT0, addressFor(i));

    // Verify that i is valid:
    emitGetVirtualRegister(base, regT0);

    // Test base's structure
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
    callHasProperty.append(branchPtr(NotEqual, regT2, Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedStructure))));

    // Test base's prototype chain
    loadPtr(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedPrototypeChain)), regT3);
    loadPtr(Address(regT3, OBJECT_OFFSETOF(StructureChain, m_vector)), regT3);
    addJump(branchTestPtr(Zero, Address(regT3)), target);

    Label checkPrototype(this);
    loadPtr(Address(regT2, OBJECT_OFFSETOF(Structure, m_prototype)), regT2);
    callHasProperty.append(emitJumpIfNotJSCell(regT2));
    loadPtr(Address(regT2, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
    callHasProperty.append(branchPtr(NotEqual, regT2, Address(regT3)));
    addPtr(Imm32(sizeof(Structure*)), regT3);
    branchTestPtr(NonZero, Address(regT3)).linkTo(checkPrototype, this);

    // Continue loop.
    addJump(jump(), target);

    // Slow case: Ask the object if i is valid.
    callHasProperty.link(this);
    emitGetVirtualRegister(dst, regT1);
    JITStubCall stubCall(this, cti_has_property);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call();

    // Test for valid key.
    addJump(branchTest32(NonZero, regT0), target);
    jump().linkTo(begin, this);

    // End of loop.
    end.link(this);
}

void JIT::emit_op_push_scope(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_push_scope);
    stubCall.addArgument(currentInstruction[1].u.operand, regT2);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_pop_scope(Instruction*)
{
    JITStubCall(this, cti_op_pop_scope).call();
}

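// Shared fast path for op_stricteq / op_nstricteq. Unless both operands are cells or
// either is a number (the slow cases), strict equality on JSVALUE64 reduces to a direct
// comparison of the encoded values.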
void JIT::compileOpStrictEq(Instruction* currentInstruction, CompileOpStrictEqType type)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;
    unsigned src2 = currentInstruction[3].u.operand;

    emitGetVirtualRegisters(src1, regT0, src2, regT1);

    // Jump to a slow case if either operand is a number, or if both are JSCell*s.
    move(regT0, regT2);
    orPtr(regT1, regT2);
    addSlowCase(emitJumpIfJSCell(regT2));
    addSlowCase(emitJumpIfImmediateNumber(regT2));

    if (type == OpStrictEq)
        set32(Equal, regT1, regT0, regT0);
    else
        set32(NotEqual, regT1, regT0, regT0);
    emitTagAsBoolImmediate(regT0);

    emitPutVirtualRegister(dst);
}

void JIT::emit_op_stricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpStrictEq);
}

void JIT::emit_op_nstricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpNStrictEq);
}

void JIT::emit_op_to_jsnumber(Instruction* currentInstruction)
{
    int srcVReg = currentInstruction[2].u.operand;
    emitGetVirtualRegister(srcVReg, regT0);

    Jump wasImmediate = emitJumpIfImmediateInteger(regT0);

    emitJumpSlowCaseIfNotJSCell(regT0, srcVReg);
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
    addSlowCase(branch8(NotEqual, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo.m_type)), Imm32(NumberType)));

    wasImmediate.link(this);

    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_push_new_scope(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_push_new_scope);
    stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.addArgument(currentInstruction[3].u.operand, regT2);
    stubCall.call(currentInstruction[1].u.operand);
}

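// Entered with the handler's call frame in regT0: restore callFrameRegister, then move
// the pending exception out of JSGlobalData into the destination register and clear it.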
void JIT::emit_op_catch(Instruction* currentInstruction)
{
    killLastResultRegister(); // FIXME: Implicitly treat op_catch as a labeled statement, and remove this line of code.
    move(regT0, callFrameRegister);
    peek(regT3, OBJECT_OFFSETOF(struct JITStackFrame, globalData) / sizeof(void*));
    loadPtr(Address(regT3, OBJECT_OFFSETOF(JSGlobalData, exception)), regT0);
    storePtr(ImmPtr(JSValue::encode(JSValue())), Address(regT3, OBJECT_OFFSETOF(JSGlobalData, exception)));
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_jmp_scopes(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_jmp_scopes);
    stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
    stubCall.call();
    addJump(jump(), currentInstruction[2].u.operand);
    RECORD_JUMP_TARGET(currentInstruction[2].u.operand);
}

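// The switch opcodes dispatch through a stub that consults the CodeBlock's jump table;
// the JIT records a SwitchRecord so the table's machine-code offsets can be filled in
// at link time, then jumps to the address the stub returns in regT0.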
void JIT::emit_op_switch_imm(Instruction* currentInstruction)
{
    unsigned tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->immediateSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Immediate));
    jumpTable->ctiOffsets.grow(jumpTable->branchOffsets.size());

    JITStubCall stubCall(this, cti_op_switch_imm);
    stubCall.addArgument(scrutinee, regT2);
    stubCall.addArgument(Imm32(tableIndex));
    stubCall.call();
    jump(regT0);
}

void JIT::emit_op_switch_char(Instruction* currentInstruction)
{
    unsigned tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->characterSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Character));
    jumpTable->ctiOffsets.grow(jumpTable->branchOffsets.size());

    JITStubCall stubCall(this, cti_op_switch_char);
    stubCall.addArgument(scrutinee, regT2);
    stubCall.addArgument(Imm32(tableIndex));
    stubCall.call();
    jump(regT0);
}

void JIT::emit_op_switch_string(Instruction* currentInstruction)
{
    unsigned tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    StringJumpTable* jumpTable = &m_codeBlock->stringSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset));

    JITStubCall stubCall(this, cti_op_switch_string);
    stubCall.addArgument(scrutinee, regT2);
    stubCall.addArgument(Imm32(tableIndex));
    stubCall.call();
    jump(regT0);
}

void JIT::emit_op_new_error(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_new_error);
    stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
    stubCall.addArgument(ImmPtr(JSValue::encode(m_codeBlock->getConstant(currentInstruction[3].u.operand))));
    stubCall.addArgument(Imm32(m_bytecodeOffset));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_debug(Instruction* currentInstruction)
{
#if ENABLE(DEBUG_WITH_BREAKPOINT)
    UNUSED_PARAM(currentInstruction);
    breakpoint();
#else
    JITStubCall stubCall(this, cti_op_debug);
    stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
    stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
    stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
    stubCall.call();
#endif
}

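// Compares a value to null per the == operator: a cell is equal to null only if its
// Structure is flagged MasqueradesAsUndefined; an immediate is equal to null if, with
// the undefined tag bit masked off, it encodes null (which catches both null and
// undefined).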
void JIT::emit_op_eq_null(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src1, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
    setTest8(NonZero, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(MasqueradesAsUndefined), regT0);

    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    andPtr(Imm32(~JSImmediate::ExtendedTagBitUndefined), regT0);
    setPtr(Equal, regT0, Imm32(JSImmediate::FullTagTypeNull), regT0);

    wasNotImmediate.link(this);

    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_neq_null(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src1, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
    setTest8(Zero, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(MasqueradesAsUndefined), regT0);

    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    andPtr(Imm32(~JSImmediate::ExtendedTagBitUndefined), regT0);
    setPtr(NotEqual, regT0, Imm32(JSImmediate::FullTagTypeNull), regT0);

    wasNotImmediate.link(this);

    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_enter(Instruction*)
{
    // Even though CTI doesn't use them, we initialize our constant
    // registers to zap stale pointers, to avoid unnecessarily prolonging
    // object lifetime and increasing GC pressure.
    size_t count = m_codeBlock->m_numVars;
    for (size_t j = 0; j < count; ++j)
        emitInitRegister(j);
}

void JIT::emit_op_create_activation(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;

    Jump activationCreated = branchTestPtr(NonZero, Address(callFrameRegister, sizeof(Register) * dst));
    JITStubCall(this, cti_op_push_activation).call(currentInstruction[1].u.operand);
    emitPutVirtualRegister(dst);
    activationCreated.link(this);
}

void JIT::emit_op_create_arguments(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;

    Jump argsCreated = branchTestPtr(NonZero, Address(callFrameRegister, sizeof(Register) * dst));
    if (m_codeBlock->m_numParameters == 1)
        JITStubCall(this, cti_op_create_arguments_no_params).call();
    else
        JITStubCall(this, cti_op_create_arguments).call();
    emitPutVirtualRegister(dst);
    emitPutVirtualRegister(unmodifiedArgumentsRegister(dst));
    argsCreated.link(this);
}

void JIT::emit_op_init_lazy_reg(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;

    storePtr(ImmPtr(0), Address(callFrameRegister, sizeof(Register) * dst));
}

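// Fast path for converting 'this': values that are not cells, or whose Structure has
// the NeedsThisConversion flag, are punted to the slow case; ordinary objects pass
// through unchanged.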
void JIT::emit_op_convert_this(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);

    emitJumpSlowCaseIfNotJSCell(regT0);
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT1);
    addSlowCase(branchTest8(NonZero, Address(regT1, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(NeedsThisConversion)));
}

void JIT::emit_op_convert_this_strict(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
    Jump notNull = branchTestPtr(NonZero, regT0);
    move(ImmPtr(JSValue::encode(jsNull())), regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand, regT0);
    Jump setThis = jump();
    notNull.link(this);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT1);
    Jump notAnObject = branch8(NotEqual, Address(regT1, OBJECT_OFFSETOF(Structure, m_typeInfo.m_type)), Imm32(ObjectType));
    addSlowCase(branchTest8(NonZero, Address(regT1, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(NeedsThisConversion)));
    isImmediate.link(this);
    notAnObject.link(this);
    setThis.link(this);
}
1275
1276 void JIT::emit_op_get_callee(Instruction* currentInstruction)
1277 {
1278     unsigned result = currentInstruction[1].u.operand;
1279     emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, regT0);
1280     emitPutVirtualRegister(result);
1281 }
1282
1283 void JIT::emit_op_create_this(Instruction* currentInstruction)
1284 {
1285     JITStubCall stubCall(this, cti_op_create_this);
1286     stubCall.addArgument(currentInstruction[2].u.operand, regT1);
1287     stubCall.call(currentInstruction[1].u.operand);
1288 }
1289
1290 void JIT::emit_op_profile_will_call(Instruction* currentInstruction)
1291 {
1292     peek(regT1, OBJECT_OFFSETOF(JITStackFrame, enabledProfilerReference) / sizeof(void*));
1293     Jump noProfiler = branchTestPtr(Zero, Address(regT1));
1294
1295     JITStubCall stubCall(this, cti_op_profile_will_call);
1296     stubCall.addArgument(currentInstruction[1].u.operand, regT1);
1297     stubCall.call();
1298     noProfiler.link(this);
1299
1300 }
1301
1302 void JIT::emit_op_profile_did_call(Instruction* currentInstruction)
1303 {
1304     peek(regT1, OBJECT_OFFSETOF(JITStackFrame, enabledProfilerReference) / sizeof(void*));
1305     Jump noProfiler = branchTestPtr(Zero, Address(regT1));
1306
1307     JITStubCall stubCall(this, cti_op_profile_did_call);
1308     stubCall.addArgument(currentInstruction[1].u.operand, regT1);
1309     stubCall.call();
1310     noProfiler.link(this);
1311 }
1312
1313
1314 // Slow cases
1315
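// Each emitSlow_* handler below must link exactly the slow cases its fast
// path registered, in the order the addSlowCase calls were made: the iterator
// walks a single flat list shared by all opcodes.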
1316 void JIT::emitSlow_op_convert_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1317 {
1318     linkSlowCase(iter);
1319     linkSlowCase(iter);
1320     JITStubCall stubCall(this, cti_op_convert_this);
1321     stubCall.addArgument(regT0);
1322     stubCall.call(currentInstruction[1].u.operand);
1323 }
1324
1325 void JIT::emitSlow_op_convert_this_strict(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1326 {
1327     linkSlowCase(iter);
1328     JITStubCall stubCall(this, cti_op_convert_this_strict);
1329     stubCall.addArgument(regT0);
1330     stubCall.call(currentInstruction[1].u.operand);
1331 }
1332
1333 void JIT::emitSlow_op_to_primitive(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1334 {
1335     linkSlowCase(iter);
1336
1337     JITStubCall stubCall(this, cti_op_to_primitive);
1338     stubCall.addArgument(regT0);
1339     stubCall.call(currentInstruction[1].u.operand);
1340 }
1341
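// The constant-operand fast path registers one slow case (left operand not an
// immediate int); the generic path registers two, one per operand, which is
// why the branches below link different counts.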
1342 void JIT::emitSlow_op_loop_if_lesseq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1343 {
1344     unsigned op2 = currentInstruction[2].u.operand;
1345     unsigned target = currentInstruction[3].u.operand;
1346     if (isOperandConstantImmediateInt(op2)) {
1347         linkSlowCase(iter);
1348         JITStubCall stubCall(this, cti_op_loop_if_lesseq);
1349         stubCall.addArgument(regT0);
1350         stubCall.addArgument(currentInstruction[2].u.operand, regT2);
1351         stubCall.call();
1352         emitJumpSlowToHot(branchTest32(NonZero, regT0), target);
1353     } else {
1354         linkSlowCase(iter);
1355         linkSlowCase(iter);
1356         JITStubCall stubCall(this, cti_op_loop_if_lesseq);
1357         stubCall.addArgument(regT0);
1358         stubCall.addArgument(regT1);
1359         stubCall.call();
1360         emitJumpSlowToHot(branchTest32(NonZero, regT0), target);
1361     }
1362 }
1363
1364 void JIT::emitSlow_op_put_by_val(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1365 {
1366     unsigned base = currentInstruction[1].u.operand;
1367     unsigned property = currentInstruction[2].u.operand;
1368     unsigned value = currentInstruction[3].u.operand;
1369
1370     linkSlowCase(iter); // property int32 check
1371     linkSlowCaseIfNotJSCell(iter, base); // base cell check
1372     linkSlowCase(iter); // base not array check
1373     linkSlowCase(iter); // in vector check
1374
1375     JITStubCall stubPutByValCall(this, cti_op_put_by_val);
1376     stubPutByValCall.addArgument(regT0);
1377     stubPutByValCall.addArgument(property, regT2);
1378     stubPutByValCall.addArgument(value, regT2);
1379     stubPutByValCall.call();
1380 }
1381
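// The op_not fast path flips the bool tag bit before its type check can bail,
// so the xorPtr below restores the original operand before handing it to the
// stub.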
1382 void JIT::emitSlow_op_not(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1383 {
1384     linkSlowCase(iter);
1385     xorPtr(Imm32(static_cast<int32_t>(JSImmediate::FullTagTypeBool)), regT0);
1386     JITStubCall stubCall(this, cti_op_not);
1387     stubCall.addArgument(regT0);
1388     stubCall.call(currentInstruction[1].u.operand);
1389 }
1390
1391 void JIT::emitSlow_op_jfalse(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1392 {
1393     linkSlowCase(iter);
1394     JITStubCall stubCall(this, cti_op_jtrue);
1395     stubCall.addArgument(regT0);
1396     stubCall.call();
1397     emitJumpSlowToHot(branchTest32(Zero, regT0), currentInstruction[2].u.operand); // inverted: jfalse branches when cti_op_jtrue reports false
1398 }
1399
1400 void JIT::emitSlow_op_bitnot(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1401 {
1402     linkSlowCase(iter);
1403     JITStubCall stubCall(this, cti_op_bitnot);
1404     stubCall.addArgument(regT0);
1405     stubCall.call(currentInstruction[1].u.operand);
1406 }
1407
1408 void JIT::emitSlow_op_jtrue(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1409 {
1410     linkSlowCase(iter);
1411     JITStubCall stubCall(this, cti_op_jtrue);
1412     stubCall.addArgument(regT0);
1413     stubCall.call();
1414     emitJumpSlowToHot(branchTest32(NonZero, regT0), currentInstruction[2].u.operand);
1415 }
1416
1417 void JIT::emitSlow_op_bitxor(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1418 {
1419     linkSlowCase(iter);
1420     JITStubCall stubCall(this, cti_op_bitxor);
1421     stubCall.addArgument(regT0);
1422     stubCall.addArgument(regT1);
1423     stubCall.call(currentInstruction[1].u.operand);
1424 }
1425
1426 void JIT::emitSlow_op_bitor(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1427 {
1428     linkSlowCase(iter);
1429     JITStubCall stubCall(this, cti_op_bitor);
1430     stubCall.addArgument(regT0);
1431     stubCall.addArgument(regT1);
1432     stubCall.call(currentInstruction[1].u.operand);
1433 }
1434
1435 void JIT::emitSlow_op_eq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1436 {
1437     linkSlowCase(iter);
1438     JITStubCall stubCall(this, cti_op_eq);
1439     stubCall.addArgument(regT0);
1440     stubCall.addArgument(regT1);
1441     stubCall.call();
1442     emitTagAsBoolImmediate(regT0);
1443     emitPutVirtualRegister(currentInstruction[1].u.operand);
1444 }
1445
1446 void JIT::emitSlow_op_neq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1447 {
1448     linkSlowCase(iter);
1449     JITStubCall stubCall(this, cti_op_eq);
1450     stubCall.addArgument(regT0);
1451     stubCall.addArgument(regT1);
1452     stubCall.call();
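    // Reuse the eq stub and invert the low bit of its boolean result to get neq.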
1453     xor32(Imm32(0x1), regT0);
1454     emitTagAsBoolImmediate(regT0);
1455     emitPutVirtualRegister(currentInstruction[1].u.operand);
1456 }
1457
1458 void JIT::emitSlow_op_stricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1459 {
1460     linkSlowCase(iter);
1461     linkSlowCase(iter);
1462     JITStubCall stubCall(this, cti_op_stricteq);
1463     stubCall.addArgument(regT0);
1464     stubCall.addArgument(regT1);
1465     stubCall.call(currentInstruction[1].u.operand);
1466 }
1467
1468 void JIT::emitSlow_op_nstricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1469 {
1470     linkSlowCase(iter);
1471     linkSlowCase(iter);
1472     JITStubCall stubCall(this, cti_op_nstricteq);
1473     stubCall.addArgument(regT0);
1474     stubCall.addArgument(regT1);
1475     stubCall.call(currentInstruction[1].u.operand);
1476 }
1477
1478 void JIT::emitSlow_op_instanceof(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1479 {
1480     unsigned dst = currentInstruction[1].u.operand;
1481     unsigned value = currentInstruction[2].u.operand;
1482     unsigned baseVal = currentInstruction[3].u.operand;
1483     unsigned proto = currentInstruction[4].u.operand;
1484
1485     linkSlowCaseIfNotJSCell(iter, value);
1486     linkSlowCaseIfNotJSCell(iter, baseVal);
1487     linkSlowCaseIfNotJSCell(iter, proto);
1488     linkSlowCase(iter);
1489     linkSlowCase(iter);
1490     JITStubCall stubCall(this, cti_op_instanceof);
1491     stubCall.addArgument(value, regT2);
1492     stubCall.addArgument(baseVal, regT2);
1493     stubCall.addArgument(proto, regT2);
1494     stubCall.call(dst);
1495 }
1496
1497 void JIT::emitSlow_op_call(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1498 {
1499     compileOpCallSlowCase(currentInstruction, iter, m_callLinkInfoIndex++, op_call);
1500 }
1501
1502 void JIT::emitSlow_op_call_eval(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1503 {
1504     compileOpCallSlowCase(currentInstruction, iter, m_callLinkInfoIndex++, op_call_eval);
1505 }
1506
1507 void JIT::emitSlow_op_call_varargs(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1508 {
1509     compileOpCallVarargsSlowCase(currentInstruction, iter);
1510 }
1511
1512 void JIT::emitSlow_op_construct(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1513 {
1514     compileOpCallSlowCase(currentInstruction, iter, m_callLinkInfoIndex++, op_construct);
1515 }
1516
1517 void JIT::emitSlow_op_to_jsnumber(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1518 {
1519     linkSlowCaseIfNotJSCell(iter, currentInstruction[2].u.operand);
1520     linkSlowCase(iter);
1521
1522     JITStubCall stubCall(this, cti_op_to_jsnumber);
1523     stubCall.addArgument(regT0);
1524     stubCall.call(currentInstruction[1].u.operand);
1525 }
1526
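// Fast path for arguments.length, valid only while no arguments object has
// been created (a non-zero arguments register bails out). ArgumentCount
// includes the 'this' slot, hence the subtraction of one before re-tagging.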
1527 void JIT::emit_op_get_arguments_length(Instruction* currentInstruction)
1528 {
1529     int dst = currentInstruction[1].u.operand;
1530     int argumentsRegister = currentInstruction[2].u.operand;
1531     addSlowCase(branchTestPtr(NonZero, addressFor(argumentsRegister)));
1532     emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT0);
1533     sub32(Imm32(1), regT0);
1534     emitFastArithReTagImmediate(regT0, regT0);
1535     emitPutVirtualRegister(dst, regT0);
1536 }
1537
1538 void JIT::emitSlow_op_get_arguments_length(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1539 {
1540     linkSlowCase(iter);
1541     unsigned dst = currentInstruction[1].u.operand;
1542     unsigned base = currentInstruction[2].u.operand;
1543     Identifier* ident = &(m_codeBlock->identifier(currentInstruction[3].u.operand));
1544     
1545     emitGetVirtualRegister(base, regT0);
1546     JITStubCall stubCall(this, cti_op_get_by_id_generic);
1547     stubCall.addArgument(regT0);
1548     stubCall.addArgument(ImmPtr(ident));
1549     stubCall.call(dst);
1550 }
1551
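// The offset arithmetic below mirrors the register-file layout: the declared
// parameters sit just below the call-frame header, and when extra arguments
// were passed the full list sits a further ArgumentCount registers down. That
// summary is inferred from the offsets used here rather than stated anywhere.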
1552 void JIT::emit_op_get_argument_by_val(Instruction* currentInstruction)
1553 {
1554     int dst = currentInstruction[1].u.operand;
1555     int argumentsRegister = currentInstruction[2].u.operand;
1556     int property = currentInstruction[3].u.operand;
1557     addSlowCase(branchTestPtr(NonZero, addressFor(argumentsRegister)));
1558     emitGetVirtualRegister(property, regT1);
1559     addSlowCase(emitJumpIfNotImmediateInteger(regT1));
1560     add32(Imm32(1), regT1);
1561     // regT1 now contains the integer index of the argument we want, including the 'this' slot
1562     emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT2);
1563     addSlowCase(branch32(AboveOrEqual, regT1, regT2));
1564     
1565     Jump skipOutofLineParams;
1566     int numArgs = m_codeBlock->m_numParameters;
1567     if (numArgs) {
1568         Jump notInInPlaceArgs = branch32(AboveOrEqual, regT1, Imm32(numArgs));
1569         addPtr(Imm32(static_cast<unsigned>(-(RegisterFile::CallFrameHeaderSize + numArgs) * sizeof(Register))), callFrameRegister, regT0);
1570         loadPtr(BaseIndex(regT0, regT1, TimesEight, 0), regT0);
1571         skipOutofLineParams = jump();
1572         notInInPlaceArgs.link(this);
1573     }
1574     
1575     addPtr(Imm32(static_cast<unsigned>(-(RegisterFile::CallFrameHeaderSize + numArgs) * sizeof(Register))), callFrameRegister, regT0);
1576     mul32(Imm32(sizeof(Register)), regT2, regT2);
1577     subPtr(regT2, regT0);
1578     loadPtr(BaseIndex(regT0, regT1, TimesEight, 0), regT0);
1579     if (numArgs)
1580         skipOutofLineParams.link(this);
1581     emitPutVirtualRegister(dst, regT0);
1582 }
1583
1584 void JIT::emitSlow_op_get_argument_by_val(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1585 {
1586     unsigned dst = currentInstruction[1].u.operand;
1587     unsigned arguments = currentInstruction[2].u.operand;
1588     unsigned property = currentInstruction[3].u.operand;
1589     
1590     linkSlowCase(iter);
1591     Jump skipArgumentsCreation = jump();
1592     
1593     linkSlowCase(iter);
1594     linkSlowCase(iter);
1595     if (m_codeBlock->m_numParameters == 1)
1596         JITStubCall(this, cti_op_create_arguments_no_params).call();
1597     else
1598         JITStubCall(this, cti_op_create_arguments).call();
1599     emitPutVirtualRegister(arguments);
1600     emitPutVirtualRegister(unmodifiedArgumentsRegister(arguments));
1601     
1602     skipArgumentsCreation.link(this);
1603     JITStubCall stubCall(this, cti_op_get_by_val);
1604     stubCall.addArgument(arguments, regT2);
1605     stubCall.addArgument(property, regT2);
1606     stubCall.call(dst);
1607 }
1608
1609 #endif // USE(JSVALUE64)
1610
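// Dynamic global resolve: walk 'skip' scope-chain nodes, adding a structure
// check per node so that any intervening scope that is not a plain activation
// forces a full re-resolve, then fall into the cached global lookup.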
1611 void JIT::emit_op_resolve_global_dynamic(Instruction* currentInstruction)
1612 {
1613     int skip = currentInstruction[5].u.operand;
1614     
1615     emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT0);
1616     
1617     bool checkTopLevel = m_codeBlock->codeType() == FunctionCode && m_codeBlock->needsFullScopeChain();
1618     ASSERT(skip || !checkTopLevel);
1619     if (checkTopLevel && skip--) {
1620         Jump activationNotCreated;
1621         if (checkTopLevel)
1622             activationNotCreated = branchTestPtr(Zero, addressFor(m_codeBlock->activationRegister()));
1623         loadPtr(Address(regT0, OBJECT_OFFSETOF(ScopeChainNode, object)), regT1);
1624         addSlowCase(checkStructure(regT1, m_globalData->activationStructure.get()));
1625         loadPtr(Address(regT0, OBJECT_OFFSETOF(ScopeChainNode, next)), regT0);
1626         activationNotCreated.link(this);
1627     }
1628     while (skip--) {
1629         loadPtr(Address(regT0, OBJECT_OFFSETOF(ScopeChainNode, object)), regT1);
1630         addSlowCase(checkStructure(regT1, m_globalData->activationStructure.get()));
1631         loadPtr(Address(regT0, OBJECT_OFFSETOF(ScopeChainNode, next)), regT0);
1632     }
1633     emit_op_resolve_global(currentInstruction, true);
1634 }
1635
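// Two failure classes arrive here: one linkSlowCase per skipped scope-chain
// node whose structure check failed (handled by a full cti_op_resolve), then
// a final one for a miss in the global resolve cache.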
1636 void JIT::emitSlow_op_resolve_global_dynamic(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1637 {
1638     unsigned dst = currentInstruction[1].u.operand;
1639     Identifier* ident = &m_codeBlock->identifier(currentInstruction[2].u.operand);
1640     int skip = currentInstruction[5].u.operand;
1641     while (skip--)
1642         linkSlowCase(iter);
1643     JITStubCall resolveStubCall(this, cti_op_resolve);
1644     resolveStubCall.addArgument(ImmPtr(ident));
1645     resolveStubCall.call(dst);
1646     emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_resolve_global_dynamic));
1647     
1648     unsigned currentIndex = m_globalResolveInfoIndex++;
1649     
1650     linkSlowCase(iter); // We managed to skip all the nodes in the scope chain, but the cache missed.
1651     JITStubCall stubCall(this, cti_op_resolve_global);
1652     stubCall.addArgument(ImmPtr(ident));
1653     stubCall.addArgument(Imm32(currentIndex));
1654     stubCall.addArgument(regT0);
1655     stubCall.call(dst);
1656 }
1657
1658 void JIT::emit_op_new_regexp(Instruction* currentInstruction)
1659 {
1660     JITStubCall stubCall(this, cti_op_new_regexp);
1661     stubCall.addArgument(ImmPtr(m_codeBlock->regexp(currentInstruction[2].u.operand)));
1662     stubCall.call(currentInstruction[1].u.operand);
1663 }
1664
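// The inline copy is only attempted when the callee declares no formal
// parameters (expectedParams == 0); only then are the incoming arguments
// guaranteed to form one contiguous run that can be block-copied.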
1665 void JIT::emit_op_load_varargs(Instruction* currentInstruction)
1666 {
1667     int argCountDst = currentInstruction[1].u.operand;
1668     int argsOffset = currentInstruction[2].u.operand;
1669     int registerOffset = currentInstruction[3].u.operand;
1670     ASSERT(argsOffset <= registerOffset);
1671     
1672     int expectedParams = m_codeBlock->m_numParameters - 1;
1673     // Don't do inline copying if we aren't guaranteed to have a single stream
1674     // of arguments
1675     if (expectedParams) {
1676         JITStubCall stubCall(this, cti_op_load_varargs);
1677         stubCall.addArgument(Imm32(argsOffset));
1678         stubCall.call();
1679         // Stores a naked int32 in the register file.
1680         store32(returnValueRegister, Address(callFrameRegister, argCountDst * sizeof(Register)));
1681         return;
1682     }
1683
1684 #if USE(JSVALUE32_64)
1685     addSlowCase(branch32(NotEqual, tagFor(argsOffset), Imm32(JSValue::EmptyValueTag)));
1686 #else
1687     addSlowCase(branchTestPtr(NonZero, addressFor(argsOffset)));
1688 #endif
1689     // Load arg count into regT0
1690     emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT0);
1691     storePtr(regT0, addressFor(argCountDst));
1692     Jump endBranch = branch32(Equal, regT0, Imm32(1));
1693
1694     mul32(Imm32(sizeof(Register)), regT0, regT3);
1695     addPtr(Imm32(static_cast<unsigned>(sizeof(Register) - RegisterFile::CallFrameHeaderSize * sizeof(Register))), callFrameRegister, regT1);
1696     subPtr(regT3, regT1); // regT1 is now the start of the out of line arguments
1697     addPtr(Imm32(argsOffset * sizeof(Register)), callFrameRegister, regT2); // regT2 is the target buffer
1698     
1699     // Bounds check the register file
1700     addPtr(regT2, regT3);
1701     addPtr(Imm32((registerOffset - argsOffset) * sizeof(Register)), regT3);
1702     addSlowCase(branchPtr(Below, AbsoluteAddress(&m_globalData->interpreter->registerFile().m_end), regT3));
1703
1704     sub32(Imm32(1), regT0);
1705     Label loopStart = label();
1706     loadPtr(BaseIndex(regT1, regT0, TimesEight, static_cast<unsigned>(0 - 2 * sizeof(Register))), regT3);
1707     storePtr(regT3, BaseIndex(regT2, regT0, TimesEight, static_cast<unsigned>(0 - sizeof(Register))));
1708 #if USE(JSVALUE32_64)
1709     loadPtr(BaseIndex(regT1, regT0, TimesEight, static_cast<unsigned>(sizeof(void*) - 2 * sizeof(Register))), regT3);
1710     storePtr(regT3, BaseIndex(regT2, regT0, TimesEight, static_cast<unsigned>(sizeof(void*) - sizeof(Register))));
1711 #endif
1712     branchSubPtr(NonZero, Imm32(1), regT0).linkTo(loopStart, this);
1713     endBranch.link(this);
1714 }
1715
1716 void JIT::emitSlow_op_load_varargs(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1717 {
1718     int argCountDst = currentInstruction[1].u.operand;
1719     int argsOffset = currentInstruction[2].u.operand;
1720     int expectedParams = m_codeBlock->m_numParameters - 1;
1721     if (expectedParams)
1722         return;
1723     
1724     linkSlowCase(iter);
1725     linkSlowCase(iter);
1726     JITStubCall stubCall(this, cti_op_load_varargs);
1727     stubCall.addArgument(Imm32(argsOffset));
1728     stubCall.call();
1729     // Stores a naked int32 in the register file.
1730     store32(returnValueRegister, Address(callFrameRegister, argCountDst * sizeof(Register)));
1731 }
1732
1733 void JIT::emit_op_new_func(Instruction* currentInstruction)
1734 {
1735     Jump lazyJump;
1736     int dst = currentInstruction[1].u.operand;
1737     if (currentInstruction[3].u.operand) {
1738 #if USE(JSVALUE32_64)
1739         lazyJump = branch32(NotEqual, tagFor(dst), Imm32(JSValue::EmptyValueTag));
1740 #else
1741         lazyJump = branchTestPtr(NonZero, addressFor(dst));
1742 #endif
1743     }
1744     JITStubCall stubCall(this, cti_op_new_func);
1745     stubCall.addArgument(ImmPtr(m_codeBlock->functionDecl(currentInstruction[2].u.operand)));
1746     stubCall.call(currentInstruction[1].u.operand);
1747     if (currentInstruction[3].u.operand)
1748         lazyJump.link(this);
1749 }
1750
1751 // For both JSValue32_64 and JSValue32
1752 #if ENABLE(JIT_USE_SOFT_MODULO)
1753 #if CPU(ARM_TRADITIONAL)
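// Software integer modulo for ARM cores without a hardware divider. In rough
// C terms (an editorial sketch of the intent, not code from this file):
//
//   int softModulo(int a, int b) {
//       int r = abs(a) % abs(b);    // magnitude remainder via shift/subtract
//       return (a < 0) ? -r : r;    // remainder takes the dividend's sign
//   }
//
// regT2/regT3 hold the operand magnitudes and regT1 accumulates their sign
// bits for the fix-up at the end.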
1754 void JIT::softModulo()
1755 {
1756     push(regS0);
1757     push(regS1);
1758     push(regT1);
1759     push(regT3);
1760 #if USE(JSVALUE32_64)
1761     m_assembler.mov_r(regT3, regT2);
1762     m_assembler.mov_r(regT2, regT0);
1763 #else
1764     m_assembler.mov_r(regT3, m_assembler.asr(regT2, 1));
1765     m_assembler.mov_r(regT2, m_assembler.asr(regT0, 1));
1766 #endif
1767     m_assembler.mov_r(regT1, ARMAssembler::getOp2(0));
1768     
1769     m_assembler.teq_r(regT3, ARMAssembler::getOp2(0));
1770     m_assembler.rsb_r(regT3, regT3, ARMAssembler::getOp2(0), ARMAssembler::MI);
1771     m_assembler.eor_r(regT1, regT1, ARMAssembler::getOp2(1), ARMAssembler::MI);
1772     
1773     m_assembler.teq_r(regT2, ARMAssembler::getOp2(0));
1774     m_assembler.rsb_r(regT2, regT2, ARMAssembler::getOp2(0), ARMAssembler::MI);
1775     m_assembler.eor_r(regT1, regT1, ARMAssembler::getOp2(2), ARMAssembler::MI);
1776     
1777     Jump exitBranch = branch32(LessThan, regT2, regT3);
1778
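// Power-of-two fast case: when (divisor & (divisor - 1)) == 0, the remainder
// is simply dividend & (divisor - 1) and the Zero branch below exits early.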
1779     m_assembler.sub_r(regS1, regT3, ARMAssembler::getOp2(1));
1780     m_assembler.tst_r(regS1, regT3);
1781     m_assembler.and_r(regT2, regT2, regS1, ARMAssembler::EQ);
1782     m_assembler.and_r(regT0, regS1, regT3);
1783     Jump exitBranch2 = branchTest32(Zero, regT0);
1784     
1785     m_assembler.clz_r(regS1, regT2);
1786     m_assembler.clz_r(regS0, regT3);
1787     m_assembler.sub_r(regS0, regS0, regS1);
1788
1789     m_assembler.rsbs_r(regS0, regS0, ARMAssembler::getOp2(31));
1790
1791     m_assembler.mov_r(regS0, m_assembler.lsl(regS0, 1), ARMAssembler::NE);
1792
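// Computed jump into the unrolled loop: bump pc by a scaled amount to skip
// iterations that cannot contribute; the mov_r(regT0, regT0) nop appears to
// account for the ARM convention that pc reads two instructions ahead.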
1793     m_assembler.add_r(ARMRegisters::pc, ARMRegisters::pc, m_assembler.lsl(regS0, 2), ARMAssembler::NE);
1794     m_assembler.mov_r(regT0, regT0);
1795     
1796     for (int i = 31; i > 0; --i) {
1797         m_assembler.cmp_r(regT2, m_assembler.lsl(regT3, i));
1798         m_assembler.sub_r(regT2, regT2, m_assembler.lsl(regT3, i), ARMAssembler::CS);
1799     }
1800
1801     m_assembler.cmp_r(regT2, regT3);
1802     m_assembler.sub_r(regT2, regT2, regT3, ARMAssembler::CS);
1803     
1804     exitBranch.link(this);
1805     exitBranch2.link(this);
1806     
1807     m_assembler.teq_r(regT1, ARMAssembler::getOp2(0));
1808     m_assembler.rsb_r(regT2, regT2, ARMAssembler::getOp2(0), ARMAssembler::GT);
1809     
1810 #if USE(JSVALUE32_64)
1811     m_assembler.mov_r(regT0, regT2);
1812 #else
1813     m_assembler.mov_r(regT0, m_assembler.lsl(regT2, 1));
1814     m_assembler.eor_r(regT0, regT0, ARMAssembler::getOp2(1));
1815 #endif
1816     pop(regT3);
1817     pop(regT1);
1818     pop(regS1);
1819     pop(regS0);
1820     ret();
1821 }
1822 #else
1823 #error "JIT_USE_SOFT_MODULO not yet supported on this platform."
1824 #endif // CPU(ARM_TRADITIONAL)
1825 #endif
1826 } // namespace JSC
1827
1828 #endif // ENABLE(JIT)