// JavaScriptCore/jit/JITOpcodes32_64.cpp
/*
 * Copyright (C) 2009 Apple Inc. All rights reserved.
 * Copyright (C) 2010 Patrick Gansterer <paroga@paroga.com>
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"

#if ENABLE(JIT)
#if USE(JSVALUE32_64)
#include "JIT.h"

#include "JITInlineMethods.h"
#include "JITStubCall.h"
#include "JSArray.h"
#include "JSCell.h"
#include "JSFunction.h"
#include "JSPropertyNameIterator.h"
#include "LinkBuffer.h"

namespace JSC {

void JIT::privateCompileCTIMachineTrampolines(RefPtr<ExecutablePool>* executablePool, JSGlobalData* globalData, TrampolineStructure* trampolines)
{
#if ENABLE(JIT_USE_SOFT_MODULO)
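    // Trampoline implementing integer modulo in software, for configurations
    // where the JIT cannot rely on a hardware remainder instruction.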
    Label softModBegin = align();
    softModulo();
#endif
#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
    // (1) This function provides fast property access for string length
    Label stringLengthBegin = align();

    // regT0 holds payload, regT1 holds tag

    Jump string_failureCases1 = branch32(NotEqual, regT1, Imm32(JSValue::CellTag));
    Jump string_failureCases2 = branchPtr(NotEqual, Address(regT0), ImmPtr(m_globalData->jsStringVPtr));

    // Checks out okay! Get the length from the UString.
    load32(Address(regT0, OBJECT_OFFSETOF(JSString, m_length)), regT2);

    Jump string_failureCases3 = branch32(Above, regT2, Imm32(INT_MAX));
    move(regT2, regT0);
    move(Imm32(JSValue::Int32Tag), regT1);

    ret();
#endif

    JumpList callLinkFailures;
    // (2) Trampolines for the slow cases of op_call / op_call_eval / op_construct.
#if ENABLE(JIT_OPTIMIZE_CALL)
    // VirtualCallLink Trampoline
    // regT0 holds callee, regT1 holds argCount.  regT2 will hold the FunctionExecutable.
    Label virtualCallLinkBegin = align();
    compileOpCallInitializeCallFrame();
    preserveReturnAddressAfterCall(regT3);
    emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);
    restoreArgumentReference();
    Call callLazyLinkCall = call();
    callLinkFailures.append(branchTestPtr(Zero, regT0));
    restoreReturnAddressBeforeReturn(regT3);
    emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT1);
    jump(regT0);

    // VirtualConstructLink Trampoline
    // regT0 holds callee, regT1 holds argCount.  regT2 will hold the FunctionExecutable.
    Label virtualConstructLinkBegin = align();
    compileOpCallInitializeCallFrame();
    preserveReturnAddressAfterCall(regT3);
    emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);
    restoreArgumentReference();
    Call callLazyLinkConstruct = call();
    restoreReturnAddressBeforeReturn(regT3);
    callLinkFailures.append(branchTestPtr(Zero, regT0));
    emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT1);
    jump(regT0);

#endif // ENABLE(JIT_OPTIMIZE_CALL)

    // VirtualCall Trampoline
    // regT0 holds callee, regT1 holds argCount.  regT2 will hold the FunctionExecutable.
    Label virtualCallBegin = align();
    compileOpCallInitializeCallFrame();

    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);

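    // m_numParametersForCall is negative while the executable has no JIT code;
    // in that case, call out to a stub to compile it before dispatching.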
    Jump hasCodeBlock3 = branch32(GreaterThanOrEqual, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParametersForCall)), Imm32(0));
    preserveReturnAddressAfterCall(regT3);
    restoreArgumentReference();
    Call callCompileCall = call();
    callLinkFailures.append(branchTestPtr(Zero, regT0));
    emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT1);
    restoreReturnAddressBeforeReturn(regT3);
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
    hasCodeBlock3.link(this);

    loadPtr(Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_jitCodeForCallWithArityCheck)), regT0);
    jump(regT0);

    // VirtualConstruct Trampoline
    // regT0 holds callee, regT1 holds argCount.  regT2 will hold the FunctionExecutable.
    Label virtualConstructBegin = align();
    compileOpCallInitializeCallFrame();

    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);

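    // As above: a negative parameter count means there is no JIT code yet.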
    Jump hasCodeBlock4 = branch32(GreaterThanOrEqual, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParametersForConstruct)), Imm32(0));
    preserveReturnAddressAfterCall(regT3);
    restoreArgumentReference();
    Call callCompileConstruct = call();
    callLinkFailures.append(branchTestPtr(Zero, regT0));
    emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT1);
    restoreReturnAddressBeforeReturn(regT3);
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
    hasCodeBlock4.link(this);

    loadPtr(Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_jitCodeForConstructWithArityCheck)), regT0);
    jump(regT0);

    // If the parser fails we want to be able to keep going,
    // so we handle this as a parse failure.
    callLinkFailures.link(this);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT1);
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);
    restoreReturnAddressBeforeReturn(regT1);
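    // Record where the failure occurred, then rewrite the stub's stack slots so
    // that returning from here re-enters ctiVMThrowTrampoline with the caller's
    // call frame, unwinding through the normal throw machinery.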
    move(ImmPtr(&globalData->exceptionLocation), regT2);
    storePtr(regT1, regT2);
    poke(callFrameRegister, 1 + OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof(void*));
    poke(ImmPtr(FunctionPtr(ctiVMThrowTrampoline).value()));
    ret();

    // NativeCall Trampoline
    Label nativeCallThunk = privateCompileCTINativeCall(globalData);
    Label nativeConstructThunk = privateCompileCTINativeCall(globalData, true);

#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
    Call string_failureCases1Call = makeTailRecursiveCall(string_failureCases1);
    Call string_failureCases2Call = makeTailRecursiveCall(string_failureCases2);
    Call string_failureCases3Call = makeTailRecursiveCall(string_failureCases3);
#endif

    // All trampolines constructed! Copy the code, link up calls, and set the pointers on the Machine object.
    LinkBuffer patchBuffer(this, m_globalData->executableAllocator.poolForSize(m_assembler.size()), 0);

#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
    patchBuffer.link(string_failureCases1Call, FunctionPtr(cti_op_get_by_id_string_fail));
    patchBuffer.link(string_failureCases2Call, FunctionPtr(cti_op_get_by_id_string_fail));
    patchBuffer.link(string_failureCases3Call, FunctionPtr(cti_op_get_by_id_string_fail));
#endif
#if ENABLE(JIT_OPTIMIZE_CALL)
    patchBuffer.link(callLazyLinkCall, FunctionPtr(cti_vm_lazyLinkCall));
    patchBuffer.link(callLazyLinkConstruct, FunctionPtr(cti_vm_lazyLinkConstruct));
#endif
    patchBuffer.link(callCompileCall, FunctionPtr(cti_op_call_jitCompile));
    patchBuffer.link(callCompileConstruct, FunctionPtr(cti_op_construct_jitCompile));

    CodeRef finalCode = patchBuffer.finalizeCode();
    *executablePool = finalCode.m_executablePool;

    trampolines->ctiVirtualCall = patchBuffer.trampolineAt(virtualCallBegin);
    trampolines->ctiVirtualConstruct = patchBuffer.trampolineAt(virtualConstructBegin);
    trampolines->ctiNativeCall = patchBuffer.trampolineAt(nativeCallThunk);
    trampolines->ctiNativeConstruct = patchBuffer.trampolineAt(nativeConstructThunk);
#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
    trampolines->ctiStringLengthTrampoline = patchBuffer.trampolineAt(stringLengthBegin);
#endif
#if ENABLE(JIT_OPTIMIZE_CALL)
    trampolines->ctiVirtualCallLink = patchBuffer.trampolineAt(virtualCallLinkBegin);
    trampolines->ctiVirtualConstructLink = patchBuffer.trampolineAt(virtualConstructLinkBegin);
#endif
#if ENABLE(JIT_USE_SOFT_MODULO)
    trampolines->ctiSoftModulo = patchBuffer.trampolineAt(softModBegin);
#endif
}

JIT::Label JIT::privateCompileCTINativeCall(JSGlobalData* globalData, bool isConstruct)
{
    int executableOffsetToFunction = isConstruct ? OBJECT_OFFSETOF(NativeExecutable, m_constructor) : OBJECT_OFFSETOF(NativeExecutable, m_function);

    Label nativeCallThunk = align();

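    // Host functions have no CodeBlock; store a null pointer in the frame header.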
    emitPutImmediateToCallFrameHeader(0, RegisterFile::CodeBlock);

#if CPU(X86)
    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT0);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT0);
    emitPutToCallFrameHeader(regT1, RegisterFile::ScopeChain);

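    // On X86 the caller's call instruction left the return address on the stack;
    // peek it and record it as this frame's ReturnPC.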
    peek(regT1);
    emitPutToCallFrameHeader(regT1, RegisterFile::ReturnPC);

    // Calling convention:      f(ecx, edx, ...);
    // Host function signature: f(ExecState*);
    move(callFrameRegister, X86Registers::ecx);

    subPtr(Imm32(16 - sizeof(void*)), stackPointerRegister); // Align stack after call.

    // call the function
    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, regT1);
    loadPtr(Address(regT1, OBJECT_OFFSETOF(JSFunction, m_executable)), regT1);
    move(regT0, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.
    call(Address(regT1, executableOffsetToFunction));

    addPtr(Imm32(16 - sizeof(void*)), stackPointerRegister);

#elif CPU(ARM)
    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT2);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT2);
    emitPutToCallFrameHeader(regT1, RegisterFile::ScopeChain);

    preserveReturnAddressAfterCall(regT3); // Callee preserved
    emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);

    // Calling convention:      f(r0 == regT0, r1 == regT1, ...);
    // Host function signature: f(ExecState*);
    move(callFrameRegister, ARMRegisters::r0);

    // call the function
    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, ARMRegisters::r1);
    move(regT2, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.
    loadPtr(Address(ARMRegisters::r1, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
    call(Address(regT2, executableOffsetToFunction));

    restoreReturnAddressBeforeReturn(regT3);

#elif CPU(MIPS)
    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT0);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT0);
    emitPutToCallFrameHeader(regT1, RegisterFile::ScopeChain);

    preserveReturnAddressAfterCall(regT3); // Callee preserved
    emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);

    // Calling convention:      f(a0, a1, a2, a3);
    // Host function signature: f(ExecState*);

    // Allocate stack space for 16 bytes (8-byte aligned)
    // 16 bytes (unused) for 4 arguments
    subPtr(Imm32(16), stackPointerRegister);

    // Setup arg0
    move(callFrameRegister, MIPSRegisters::a0);

    // Call
    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, MIPSRegisters::a2);
    loadPtr(Address(MIPSRegisters::a2, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
    move(regT0, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.
    call(Address(regT2, executableOffsetToFunction));

    // Restore stack space
    addPtr(Imm32(16), stackPointerRegister);

    restoreReturnAddressBeforeReturn(regT3);

#elif ENABLE(JIT_OPTIMIZE_NATIVE_CALL)
#error "JIT_OPTIMIZE_NATIVE_CALL not yet supported on this platform."
#else
    UNUSED_PARAM(executableOffsetToFunction);
    breakpoint();
#endif // CPU(X86)

    // Check for an exception
    Jump sawException = branch32(NotEqual, AbsoluteAddress(reinterpret_cast<char*>(&globalData->exception) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), Imm32(JSValue::EmptyValueTag));

    // Return.
    ret();

    // Handle an exception
    sawException.link(this);

    // Grab the return address.
    preserveReturnAddressAfterCall(regT1);

    move(ImmPtr(&globalData->exceptionLocation), regT2);
    storePtr(regT1, regT2);
    poke(callFrameRegister, OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof(void*));

    // Set the return address.
    move(ImmPtr(FunctionPtr(ctiVMThrowTrampoline).value()), regT1);
    restoreReturnAddressBeforeReturn(regT1);

    ret();

    return nativeCallThunk;
}

JIT::CodePtr JIT::privateCompileCTINativeCall(PassRefPtr<ExecutablePool> executablePool, JSGlobalData* globalData, NativeFunction func)
{
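    // Unlike the generic thunk above, this variant is specialized for a single
    // NativeFunction: the call emitted below is linked directly to 'func' when
    // the code is finalized, instead of loading the target from the executable.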
    Call nativeCall;
    Label nativeCallThunk = align();

    emitPutImmediateToCallFrameHeader(0, RegisterFile::CodeBlock);

#if CPU(X86)
    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT0);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT0);
    emitPutToCallFrameHeader(regT1, RegisterFile::ScopeChain);

    peek(regT1);
    emitPutToCallFrameHeader(regT1, RegisterFile::ReturnPC);

    // Calling convention:      f(ecx, edx, ...);
    // Host function signature: f(ExecState*);
    move(callFrameRegister, X86Registers::ecx);

    subPtr(Imm32(16 - sizeof(void*)), stackPointerRegister); // Align stack after call.

    move(regT0, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.

    // call the function
    nativeCall = call();

    addPtr(Imm32(16 - sizeof(void*)), stackPointerRegister);

#elif CPU(ARM)
    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT2);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT2);
    emitPutToCallFrameHeader(regT1, RegisterFile::ScopeChain);

    preserveReturnAddressAfterCall(regT3); // Callee preserved
    emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);

    // Calling convention:      f(r0 == regT0, r1 == regT1, ...);
    // Host function signature: f(ExecState*);
    move(callFrameRegister, ARMRegisters::r0);

    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, ARMRegisters::r1);
    move(regT2, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.
    loadPtr(Address(ARMRegisters::r1, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);

    // call the function
    nativeCall = call();

    restoreReturnAddressBeforeReturn(regT3);

#elif CPU(MIPS)
    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT0);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT0);
    emitPutToCallFrameHeader(regT1, RegisterFile::ScopeChain);

    preserveReturnAddressAfterCall(regT3); // Callee preserved
    emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);

    // Calling convention:      f(a0, a1, a2, a3);
    // Host function signature: f(ExecState*);

    // Allocate stack space for 16 bytes (8-byte aligned)
    // 16 bytes (unused) for 4 arguments
    subPtr(Imm32(16), stackPointerRegister);

    // Setup arg0
    move(callFrameRegister, MIPSRegisters::a0);

    // Call
    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, MIPSRegisters::a2);
    loadPtr(Address(MIPSRegisters::a2, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
    move(regT0, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.

    // call the function
    nativeCall = call();

    // Restore stack space
    addPtr(Imm32(16), stackPointerRegister);

    restoreReturnAddressBeforeReturn(regT3);

#elif ENABLE(JIT_OPTIMIZE_NATIVE_CALL)
#error "JIT_OPTIMIZE_NATIVE_CALL not yet supported on this platform."
#else
    breakpoint();
#endif // CPU(X86)

    // Check for an exception
    Jump sawException = branch32(NotEqual, AbsoluteAddress(reinterpret_cast<char*>(&globalData->exception) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), Imm32(JSValue::EmptyValueTag));

    // Return.
    ret();

    // Handle an exception
    sawException.link(this);

    // Grab the return address.
    preserveReturnAddressAfterCall(regT1);

    move(ImmPtr(&globalData->exceptionLocation), regT2);
    storePtr(regT1, regT2);
    poke(callFrameRegister, OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof(void*));

    // Set the return address.
    move(ImmPtr(FunctionPtr(ctiVMThrowTrampoline).value()), regT1);
    restoreReturnAddressBeforeReturn(regT1);

    ret();

    // Thunk constructed! Copy the code, link up the call to the native function, and return the trampoline.
    LinkBuffer patchBuffer(this, executablePool, 0);

    patchBuffer.link(nativeCall, FunctionPtr(func));
    patchBuffer.finalizeCode();

    return patchBuffer.trampolineAt(nativeCallThunk);
}

void JIT::emit_op_mov(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src = currentInstruction[2].u.operand;

    if (m_codeBlock->isConstantRegisterIndex(src))
        emitStore(dst, getConstantOperand(src));
    else {
        emitLoad(src, regT1, regT0);
        emitStore(dst, regT1, regT0);
        map(m_bytecodeOffset + OPCODE_LENGTH(op_mov), dst, regT1, regT0);
    }
}

void JIT::emit_op_end(Instruction* currentInstruction)
{
    if (m_codeBlock->needsFullScopeChain())
        JITStubCall(this, cti_op_end).call();
    ASSERT(returnValueRegister != callFrameRegister);
    emitLoad(currentInstruction[1].u.operand, regT1, regT0);
    restoreReturnAddressBeforeReturn(Address(callFrameRegister, RegisterFile::ReturnPC * static_cast<int>(sizeof(Register))));
    ret();
}

void JIT::emit_op_jmp(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[1].u.operand;
    addJump(jump(), target);
}

void JIT::emit_op_loop_if_lesseq(Instruction* currentInstruction)
{
    unsigned op1 = currentInstruction[1].u.operand;
    unsigned op2 = currentInstruction[2].u.operand;
    unsigned target = currentInstruction[3].u.operand;

    emitTimeoutCheck();

    if (isOperandConstantImmediateInt(op1)) {
        emitLoad(op2, regT1, regT0);
        addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
        addJump(branch32(GreaterThanOrEqual, regT0, Imm32(getConstantOperand(op1).asInt32())), target);
        return;
    }

    if (isOperandConstantImmediateInt(op2)) {
        emitLoad(op1, regT1, regT0);
        addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
        addJump(branch32(LessThanOrEqual, regT0, Imm32(getConstantOperand(op2).asInt32())), target);
        return;
    }

    emitLoad2(op1, regT1, regT0, op2, regT3, regT2);
    addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
    addSlowCase(branch32(NotEqual, regT3, Imm32(JSValue::Int32Tag)));
    addJump(branch32(LessThanOrEqual, regT0, regT2), target);
}

void JIT::emitSlow_op_loop_if_lesseq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned op1 = currentInstruction[1].u.operand;
    unsigned op2 = currentInstruction[2].u.operand;
    unsigned target = currentInstruction[3].u.operand;

    if (!isOperandConstantImmediateInt(op1) && !isOperandConstantImmediateInt(op2))
        linkSlowCase(iter); // int32 check
    linkSlowCase(iter); // int32 check

    JITStubCall stubCall(this, cti_op_loop_if_lesseq);
    stubCall.addArgument(op1);
    stubCall.addArgument(op2);
    stubCall.call();
    emitJumpSlowToHot(branchTest32(NonZero, regT0), target);
}

void JIT::emit_op_new_object(Instruction* currentInstruction)
{
    JITStubCall(this, cti_op_new_object).call(currentInstruction[1].u.operand);
}

void JIT::emit_op_instanceof(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;
    unsigned baseVal = currentInstruction[3].u.operand;
    unsigned proto = currentInstruction[4].u.operand;

    // Load the operands into registers.
    // We use regT0 for baseVal since we will be done with this first, and we can then use it for the result.
    emitLoadPayload(value, regT2);
    emitLoadPayload(baseVal, regT0);
    emitLoadPayload(proto, regT1);

    // Check that value, baseVal, and proto are cells.
    emitJumpSlowCaseIfNotJSCell(value);
    emitJumpSlowCaseIfNotJSCell(baseVal);
    emitJumpSlowCaseIfNotJSCell(proto);

    // Check that prototype is an object
    loadPtr(Address(regT1, OBJECT_OFFSETOF(JSCell, m_structure)), regT3);
    addSlowCase(branch8(NotEqual, Address(regT3, OBJECT_OFFSETOF(Structure, m_typeInfo.m_type)), Imm32(ObjectType)));

    // Check that baseVal 'ImplementsDefaultHasInstance'.
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT0);
    addSlowCase(branchTest8(Zero, Address(regT0, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(ImplementsDefaultHasInstance)));

    // Optimistically load the result true, and start looping.
    // Initially, regT1 still contains proto and regT2 still contains value.
    // As we loop regT2 will be updated with its prototype, recursively walking the prototype chain.
    move(Imm32(JSValue::TrueTag), regT0);
    Label loop(this);

    // Load the prototype of the cell in regT2.  If this is equal to regT1 - WIN!
    // Otherwise, check if we've hit null - if we have then drop out of the loop, if not go again.
    loadPtr(Address(regT2, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
    load32(Address(regT2, OBJECT_OFFSETOF(Structure, m_prototype) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT2);
    Jump isInstance = branchPtr(Equal, regT2, regT1);
    branchTest32(NonZero, regT2).linkTo(loop, this);

    // We get here either by dropping out of the loop, or if value was not an Object.  Result is false.
    move(Imm32(JSValue::FalseTag), regT0);

    // isInstance jumps right down to here, to skip setting the result to false (it has already set true).
    isInstance.link(this);
    emitStoreBool(dst, regT0);
}

void JIT::emitSlow_op_instanceof(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;
    unsigned baseVal = currentInstruction[3].u.operand;
    unsigned proto = currentInstruction[4].u.operand;

    linkSlowCaseIfNotJSCell(iter, value);
    linkSlowCaseIfNotJSCell(iter, baseVal);
    linkSlowCaseIfNotJSCell(iter, proto);
    linkSlowCase(iter);
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_instanceof);
    stubCall.addArgument(value);
    stubCall.addArgument(baseVal);
    stubCall.addArgument(proto);
    stubCall.call(dst);
}

void JIT::emit_op_get_global_var(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    JSGlobalObject* globalObject = m_codeBlock->globalObject();
    ASSERT(globalObject->isGlobalObject());
    int index = currentInstruction[2].u.operand;

    loadPtr(&globalObject->d()->registers, regT2);

    emitLoad(index, regT1, regT0, regT2);
    emitStore(dst, regT1, regT0);
    map(m_bytecodeOffset + OPCODE_LENGTH(op_get_global_var), dst, regT1, regT0);
}

void JIT::emit_op_put_global_var(Instruction* currentInstruction)
{
    JSGlobalObject* globalObject = m_codeBlock->globalObject();
    ASSERT(globalObject->isGlobalObject());
    int index = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitLoad(value, regT1, regT0);

    loadPtr(&globalObject->d()->registers, regT2);
    emitStore(index, regT1, regT0, regT2);
    map(m_bytecodeOffset + OPCODE_LENGTH(op_put_global_var), value, regT1, regT0);
}

void JIT::emit_op_get_scoped_var(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int index = currentInstruction[2].u.operand;
    int skip = currentInstruction[3].u.operand;

    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT2);
    bool checkTopLevel = m_codeBlock->codeType() == FunctionCode && m_codeBlock->needsFullScopeChain();
    ASSERT(skip || !checkTopLevel);
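    // The first hop up the scope chain is the function's own activation, which is
    // only present on the chain if it has actually been created; walk past it
    // only in that case.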
    if (checkTopLevel && skip--) {
        Jump activationNotCreated;
        if (checkTopLevel)
            activationNotCreated = branch32(Equal, tagFor(m_codeBlock->activationRegister()), Imm32(JSValue::EmptyValueTag));
        loadPtr(Address(regT2, OBJECT_OFFSETOF(ScopeChainNode, next)), regT2);
        activationNotCreated.link(this);
    }
    while (skip--)
        loadPtr(Address(regT2, OBJECT_OFFSETOF(ScopeChainNode, next)), regT2);

    loadPtr(Address(regT2, OBJECT_OFFSETOF(ScopeChainNode, object)), regT2);
    loadPtr(Address(regT2, OBJECT_OFFSETOF(JSVariableObject, d)), regT2);
    loadPtr(Address(regT2, OBJECT_OFFSETOF(JSVariableObject::JSVariableObjectData, registers)), regT2);

    emitLoad(index, regT1, regT0, regT2);
    emitStore(dst, regT1, regT0);
    map(m_bytecodeOffset + OPCODE_LENGTH(op_get_scoped_var), dst, regT1, regT0);
}

void JIT::emit_op_put_scoped_var(Instruction* currentInstruction)
{
    int index = currentInstruction[1].u.operand;
    int skip = currentInstruction[2].u.operand;
    int value = currentInstruction[3].u.operand;

    emitLoad(value, regT1, regT0);

    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT2);
    bool checkTopLevel = m_codeBlock->codeType() == FunctionCode && m_codeBlock->needsFullScopeChain();
    ASSERT(skip || !checkTopLevel);
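    // Skip the (possibly not-yet-created) activation, as in op_get_scoped_var above.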
    if (checkTopLevel && skip--) {
        Jump activationNotCreated;
        if (checkTopLevel)
            activationNotCreated = branch32(Equal, tagFor(m_codeBlock->activationRegister()), Imm32(JSValue::EmptyValueTag));
        loadPtr(Address(regT2, OBJECT_OFFSETOF(ScopeChainNode, next)), regT2);
        activationNotCreated.link(this);
    }
    while (skip--)
        loadPtr(Address(regT2, OBJECT_OFFSETOF(ScopeChainNode, next)), regT2);

    loadPtr(Address(regT2, OBJECT_OFFSETOF(ScopeChainNode, object)), regT2);
    loadPtr(Address(regT2, OBJECT_OFFSETOF(JSVariableObject, d)), regT2);
    loadPtr(Address(regT2, OBJECT_OFFSETOF(JSVariableObject::JSVariableObjectData, registers)), regT2);

    emitStore(index, regT1, regT0, regT2);
    map(m_bytecodeOffset + OPCODE_LENGTH(op_put_scoped_var), value, regT1, regT0);
}

void JIT::emit_op_tear_off_activation(Instruction* currentInstruction)
{
    unsigned activation = currentInstruction[1].u.operand;
    unsigned arguments = currentInstruction[2].u.operand;
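    // There is only work to do if the activation or the arguments object was
    // actually created; if neither exists, skip the stub call entirely.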
    Jump activationCreated = branch32(NotEqual, tagFor(activation), Imm32(JSValue::EmptyValueTag));
    Jump argumentsNotCreated = branch32(Equal, tagFor(arguments), Imm32(JSValue::EmptyValueTag));
    activationCreated.link(this);
    JITStubCall stubCall(this, cti_op_tear_off_activation);
    stubCall.addArgument(currentInstruction[1].u.operand);
    stubCall.addArgument(unmodifiedArgumentsRegister(currentInstruction[2].u.operand));
    stubCall.call();
    argumentsNotCreated.link(this);
}

void JIT::emit_op_tear_off_arguments(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;

    Jump argsNotCreated = branch32(Equal, tagFor(unmodifiedArgumentsRegister(dst)), Imm32(JSValue::EmptyValueTag));
    JITStubCall stubCall(this, cti_op_tear_off_arguments);
    stubCall.addArgument(unmodifiedArgumentsRegister(dst));
    stubCall.call();
    argsNotCreated.link(this);
}

void JIT::emit_op_new_array(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_new_array);
    stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
    stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_resolve(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_resolve);
    stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_to_primitive(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);

    Jump isImm = branch32(NotEqual, regT1, Imm32(JSValue::CellTag));
    addSlowCase(branchPtr(NotEqual, Address(regT0), ImmPtr(m_globalData->jsStringVPtr)));
    isImm.link(this);

    if (dst != src)
        emitStore(dst, regT1, regT0);
    map(m_bytecodeOffset + OPCODE_LENGTH(op_to_primitive), dst, regT1, regT0);
}

void JIT::emitSlow_op_to_primitive(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;

    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_to_primitive);
    stubCall.addArgument(regT1, regT0);
    stubCall.call(dst);
}

void JIT::emit_op_strcat(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_strcat);
    stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
    stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_resolve_base(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, currentInstruction[3].u.operand ? cti_op_resolve_base_strict_put : cti_op_resolve_base);
    stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_ensure_property_exists(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_ensure_property_exists);
    stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
    stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_resolve_skip(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_resolve_skip);
    stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_resolve_global(Instruction* currentInstruction, bool dynamic)
{
    // FIXME: Optimize to use patching instead of so many memory accesses.

    unsigned dst = currentInstruction[1].u.operand;
    void* globalObject = m_codeBlock->globalObject();

    unsigned currentIndex = m_globalResolveInfoIndex++;
    void* structureAddress = &(m_codeBlock->globalResolveInfo(currentIndex).structure);
    void* offsetAddr = &(m_codeBlock->globalResolveInfo(currentIndex).offset);

    // Verify structure.
    move(ImmPtr(globalObject), regT0);
    loadPtr(structureAddress, regT1);
    addSlowCase(branchPtr(NotEqual, regT1, Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure))));

    // Load property.
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSGlobalObject, m_externalStorage)), regT2);
    load32(offsetAddr, regT3);
    load32(BaseIndex(regT2, regT3, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT0); // payload
    load32(BaseIndex(regT2, regT3, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT1); // tag
    emitStore(dst, regT1, regT0);
    map(m_bytecodeOffset + (dynamic ? OPCODE_LENGTH(op_resolve_global_dynamic) : OPCODE_LENGTH(op_resolve_global)), dst, regT1, regT0);
}

void JIT::emitSlow_op_resolve_global(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    Identifier* ident = &m_codeBlock->identifier(currentInstruction[2].u.operand);

    unsigned currentIndex = m_globalResolveInfoIndex++;

    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_resolve_global);
    stubCall.addArgument(ImmPtr(ident));
    stubCall.addArgument(Imm32(currentIndex));
    stubCall.call(dst);
}

void JIT::emit_op_not(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src = currentInstruction[2].u.operand;

    emitLoadTag(src, regT0);

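    // TrueTag == FalseTag | 1, so tag ^ FalseTag is 0 or 1 for a boolean; any
    // other bits set means the operand was not a boolean (slow case). XORing
    // with TrueTag then inverts the boolean and restores a valid tag.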
    xor32(Imm32(JSValue::FalseTag), regT0);
    addSlowCase(branchTest32(NonZero, regT0, Imm32(~1)));
    xor32(Imm32(JSValue::TrueTag), regT0);

    emitStoreBool(dst, regT0, (dst == src));
}

void JIT::emitSlow_op_not(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src = currentInstruction[2].u.operand;

    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_not);
    stubCall.addArgument(src);
    stubCall.call(dst);
}

void JIT::emit_op_jfalse(Instruction* currentInstruction)
{
    unsigned cond = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitLoad(cond, regT1, regT0);

    Jump isTrue = branch32(Equal, regT1, Imm32(JSValue::TrueTag));
    addJump(branch32(Equal, regT1, Imm32(JSValue::FalseTag)), target);

    Jump isNotInteger = branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag));
    Jump isTrue2 = branch32(NotEqual, regT0, Imm32(0));
    addJump(jump(), target);

    if (supportsFloatingPoint()) {
        isNotInteger.link(this);

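        // Only doubles carry tags at or below LowestTag; any other tag here is
        // neither int, boolean, nor double, so defer to the slow case.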
        addSlowCase(branch32(Above, regT1, Imm32(JSValue::LowestTag)));

        zeroDouble(fpRegT0);
        emitLoadDouble(cond, fpRegT1);
        addJump(branchDouble(DoubleEqualOrUnordered, fpRegT0, fpRegT1), target);
    } else
        addSlowCase(isNotInteger);

    isTrue.link(this);
    isTrue2.link(this);
}

void JIT::emitSlow_op_jfalse(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned cond = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_jtrue);
    stubCall.addArgument(cond);
    stubCall.call();
    emitJumpSlowToHot(branchTest32(Zero, regT0), target); // Inverted.
}

void JIT::emit_op_jtrue(Instruction* currentInstruction)
{
    unsigned cond = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitLoad(cond, regT1, regT0);

    Jump isFalse = branch32(Equal, regT1, Imm32(JSValue::FalseTag));
    addJump(branch32(Equal, regT1, Imm32(JSValue::TrueTag)), target);

    Jump isNotInteger = branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag));
    Jump isFalse2 = branch32(Equal, regT0, Imm32(0));
    addJump(jump(), target);

    if (supportsFloatingPoint()) {
        isNotInteger.link(this);

        addSlowCase(branch32(Above, regT1, Imm32(JSValue::LowestTag)));

        zeroDouble(fpRegT0);
        emitLoadDouble(cond, fpRegT1);
        addJump(branchDouble(DoubleNotEqual, fpRegT0, fpRegT1), target);
    } else
        addSlowCase(isNotInteger);

    isFalse.link(this);
    isFalse2.link(this);
}

void JIT::emitSlow_op_jtrue(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned cond = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_jtrue);
    stubCall.addArgument(cond);
    stubCall.call();
    emitJumpSlowToHot(branchTest32(NonZero, regT0), target);
}

void JIT::emit_op_jeq_null(Instruction* currentInstruction)
{
    unsigned src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);

    Jump isImmediate = branch32(NotEqual, regT1, Imm32(JSValue::CellTag));

    // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
    addJump(branchTest8(NonZero, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(MasqueradesAsUndefined)), target);

    Jump wasNotImmediate = jump();

    // Now handle the immediate cases - undefined & null
    isImmediate.link(this);

    ASSERT((JSValue::UndefinedTag + 1 == JSValue::NullTag) && !(JSValue::NullTag + 1));
    addJump(branch32(AboveOrEqual, regT1, Imm32(JSValue::UndefinedTag)), target);

    wasNotImmediate.link(this);
}

void JIT::emit_op_jneq_null(Instruction* currentInstruction)
{
    unsigned src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);

    Jump isImmediate = branch32(NotEqual, regT1, Imm32(JSValue::CellTag));

    // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
    addJump(branchTest8(Zero, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(MasqueradesAsUndefined)), target);

    Jump wasNotImmediate = jump();

    // Now handle the immediate cases - undefined & null
    isImmediate.link(this);

    ASSERT((JSValue::UndefinedTag + 1 == JSValue::NullTag) && !(JSValue::NullTag + 1));
    addJump(branch32(Below, regT1, Imm32(JSValue::UndefinedTag)), target);

    wasNotImmediate.link(this);
}

void JIT::emit_op_jneq_ptr(Instruction* currentInstruction)
{
    unsigned src = currentInstruction[1].u.operand;
    JSCell* ptr = currentInstruction[2].u.jsCell;
    unsigned target = currentInstruction[3].u.operand;

    emitLoad(src, regT1, regT0);
    addJump(branch32(NotEqual, regT1, Imm32(JSValue::CellTag)), target);
    addJump(branchPtr(NotEqual, regT0, ImmPtr(ptr)), target);
}

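// op_jsr and op_sret implement the subroutine used for finally blocks: jsr
// plants its return location in a virtual register and jumps to the target;
// sret later jumps back through that register.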
void JIT::emit_op_jsr(Instruction* currentInstruction)
{
    int retAddrDst = currentInstruction[1].u.operand;
    int target = currentInstruction[2].u.operand;
    DataLabelPtr storeLocation = storePtrWithPatch(ImmPtr(0), Address(callFrameRegister, sizeof(Register) * retAddrDst));
    addJump(jump(), target);
    m_jsrSites.append(JSRInfo(storeLocation, label()));
}

void JIT::emit_op_sret(Instruction* currentInstruction)
{
    jump(Address(callFrameRegister, sizeof(Register) * currentInstruction[1].u.operand));
}

void JIT::emit_op_eq(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;
    unsigned src2 = currentInstruction[3].u.operand;

    emitLoad2(src1, regT1, regT0, src2, regT3, regT2);
    addSlowCase(branch32(NotEqual, regT1, regT3));
    addSlowCase(branch32(Equal, regT1, Imm32(JSValue::CellTag)));
    addSlowCase(branch32(Below, regT1, Imm32(JSValue::LowestTag)));

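    // set8 leaves 0 or 1 in regT0; OR-ing in FalseTag converts this directly
    // into a FalseTag/TrueTag boolean tag (TrueTag == FalseTag | 1).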
    set8(Equal, regT0, regT2, regT0);
    or32(Imm32(JSValue::FalseTag), regT0);

    emitStoreBool(dst, regT0);
}

void JIT::emitSlow_op_eq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned op1 = currentInstruction[2].u.operand;
    unsigned op2 = currentInstruction[3].u.operand;

    JumpList storeResult;
    JumpList genericCase;

    genericCase.append(getSlowCase(iter)); // tags not equal

    linkSlowCase(iter); // tags equal and JSCell
    genericCase.append(branchPtr(NotEqual, Address(regT0), ImmPtr(m_globalData->jsStringVPtr)));
    genericCase.append(branchPtr(NotEqual, Address(regT2), ImmPtr(m_globalData->jsStringVPtr)));

    // String case.
    JITStubCall stubCallEqStrings(this, cti_op_eq_strings);
    stubCallEqStrings.addArgument(regT0);
    stubCallEqStrings.addArgument(regT2);
    stubCallEqStrings.call();
    storeResult.append(jump());

    // Generic case.
    genericCase.append(getSlowCase(iter)); // doubles
    genericCase.link(this);
    JITStubCall stubCallEq(this, cti_op_eq);
    stubCallEq.addArgument(op1);
    stubCallEq.addArgument(op2);
    stubCallEq.call(regT0);

    storeResult.link(this);
    or32(Imm32(JSValue::FalseTag), regT0);
    emitStoreBool(dst, regT0);
}

void JIT::emit_op_neq(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;
    unsigned src2 = currentInstruction[3].u.operand;

    emitLoad2(src1, regT1, regT0, src2, regT3, regT2);
    addSlowCase(branch32(NotEqual, regT1, regT3));
    addSlowCase(branch32(Equal, regT1, Imm32(JSValue::CellTag)));
    addSlowCase(branch32(Below, regT1, Imm32(JSValue::LowestTag)));

    set8(NotEqual, regT0, regT2, regT0);
    or32(Imm32(JSValue::FalseTag), regT0);

    emitStoreBool(dst, regT0);
}

void JIT::emitSlow_op_neq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;

    JumpList storeResult;
    JumpList genericCase;

    genericCase.append(getSlowCase(iter)); // tags not equal

    linkSlowCase(iter); // tags equal and JSCell
    genericCase.append(branchPtr(NotEqual, Address(regT0), ImmPtr(m_globalData->jsStringVPtr)));
    genericCase.append(branchPtr(NotEqual, Address(regT2), ImmPtr(m_globalData->jsStringVPtr)));

    // String case.
    JITStubCall stubCallEqStrings(this, cti_op_eq_strings);
    stubCallEqStrings.addArgument(regT0);
    stubCallEqStrings.addArgument(regT2);
    stubCallEqStrings.call(regT0);
    storeResult.append(jump());

    // Generic case.
    genericCase.append(getSlowCase(iter)); // doubles
    genericCase.link(this);
    JITStubCall stubCallEq(this, cti_op_eq);
    stubCallEq.addArgument(regT1, regT0);
    stubCallEq.addArgument(regT3, regT2);
    stubCallEq.call(regT0);

    storeResult.link(this);
    xor32(Imm32(0x1), regT0);
    or32(Imm32(JSValue::FalseTag), regT0);
    emitStoreBool(dst, regT0);
}

void JIT::compileOpStrictEq(Instruction* currentInstruction, CompileOpStrictEqType type)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;
    unsigned src2 = currentInstruction[3].u.operand;

    emitLoadTag(src1, regT0);
    emitLoadTag(src2, regT1);

    // Jump to a slow case if either operand is double, or if both operands are
    // cells and/or Int32s.
    move(regT0, regT2);
    and32(regT1, regT2);
    addSlowCase(branch32(Below, regT2, Imm32(JSValue::LowestTag)));
    addSlowCase(branch32(AboveOrEqual, regT2, Imm32(JSValue::CellTag)));

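    // For the tag combinations that reach this fast path, two values are
    // strictly equal exactly when their tags are equal, so comparing tags alone
    // is sufficient.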
    if (type == OpStrictEq)
        set8(Equal, regT0, regT1, regT0);
    else
        set8(NotEqual, regT0, regT1, regT0);

    or32(Imm32(JSValue::FalseTag), regT0);

    emitStoreBool(dst, regT0);
}

void JIT::emit_op_stricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpStrictEq);
}

void JIT::emitSlow_op_stricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;
    unsigned src2 = currentInstruction[3].u.operand;

    linkSlowCase(iter);
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_stricteq);
    stubCall.addArgument(src1);
    stubCall.addArgument(src2);
    stubCall.call(dst);
}

void JIT::emit_op_nstricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpNStrictEq);
}

void JIT::emitSlow_op_nstricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;
    unsigned src2 = currentInstruction[3].u.operand;

    linkSlowCase(iter);
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_nstricteq);
    stubCall.addArgument(src1);
    stubCall.addArgument(src2);
    stubCall.call(dst);
}

void JIT::emit_op_eq_null(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);
    Jump isImmediate = branch32(NotEqual, regT1, Imm32(JSValue::CellTag));

    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT1);
    setTest8(NonZero, Address(regT1, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(MasqueradesAsUndefined), regT1);

    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    set8(Equal, regT1, Imm32(JSValue::NullTag), regT2);
    set8(Equal, regT1, Imm32(JSValue::UndefinedTag), regT1);
    or32(regT2, regT1);

    wasNotImmediate.link(this);

    or32(Imm32(JSValue::FalseTag), regT1);

    emitStoreBool(dst, regT1);
}

void JIT::emit_op_neq_null(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);
    Jump isImmediate = branch32(NotEqual, regT1, Imm32(JSValue::CellTag));

    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT1);
    setTest8(Zero, Address(regT1, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(MasqueradesAsUndefined), regT1);

    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    set8(NotEqual, regT1, Imm32(JSValue::NullTag), regT2);
    set8(NotEqual, regT1, Imm32(JSValue::UndefinedTag), regT1);
    and32(regT2, regT1);

    wasNotImmediate.link(this);

    or32(Imm32(JSValue::FalseTag), regT1);

    emitStoreBool(dst, regT1);
}

void JIT::emit_op_resolve_with_base(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_resolve_with_base);
    stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[3].u.operand)));
    stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
    stubCall.call(currentInstruction[2].u.operand);
}

void JIT::emit_op_new_func_exp(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_new_func_exp);
    stubCall.addArgument(ImmPtr(m_codeBlock->functionExpr(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_throw(Instruction* currentInstruction)
{
    unsigned exception = currentInstruction[1].u.operand;
    JITStubCall stubCall(this, cti_op_throw);
    stubCall.addArgument(exception);
    stubCall.call();

#ifndef NDEBUG
    // cti_op_throw always changes its return address,
    // so this point in the code should never be reached.
    breakpoint();
#endif
}

void JIT::emit_op_get_pnames(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int i = currentInstruction[3].u.operand;
    int size = currentInstruction[4].u.operand;
    int breakTarget = currentInstruction[5].u.operand;

    JumpList isNotObject;

    emitLoad(base, regT1, regT0);
    if (!m_codeBlock->isKnownNotImmediate(base))
        isNotObject.append(branch32(NotEqual, regT1, Imm32(JSValue::CellTag)));
    if (base != m_codeBlock->thisRegister() || m_codeBlock->isStrictMode()) {
        loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
        isNotObject.append(branch8(NotEqual, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo.m_type)), Imm32(ObjectType)));
    }

    // We could inline the case where you have a valid cache, but
    // this call doesn't seem to be hot.
    Label isObject(this);
    JITStubCall getPnamesStubCall(this, cti_op_get_pnames);
    getPnamesStubCall.addArgument(regT0);
    getPnamesStubCall.call(dst);
    load32(Address(regT0, OBJECT_OFFSETOF(JSPropertyNameIterator, m_jsStringsSize)), regT3);
    store32(Imm32(0), addressFor(i));
    store32(regT3, addressFor(size));
    Jump end = jump();

    isNotObject.link(this);
    addJump(branch32(Equal, regT1, Imm32(JSValue::NullTag)), breakTarget);
    addJump(branch32(Equal, regT1, Imm32(JSValue::UndefinedTag)), breakTarget);
    JITStubCall toObjectStubCall(this, cti_to_object);
    toObjectStubCall.addArgument(regT1, regT0);
    toObjectStubCall.call(base);
    jump().linkTo(isObject, this);

    end.link(this);
}

void JIT::emit_op_next_pname(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int i = currentInstruction[3].u.operand;
    int size = currentInstruction[4].u.operand;
    int it = currentInstruction[5].u.operand;
    int target = currentInstruction[6].u.operand;

    JumpList callHasProperty;

    Label begin(this);
    load32(addressFor(i), regT0);
    Jump end = branch32(Equal, regT0, addressFor(size));

    // Grab key @ i
    loadPtr(addressFor(it), regT1);
    loadPtr(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_jsStrings)), regT2);
    load32(BaseIndex(regT2, regT0, TimesEight), regT2);
    store32(Imm32(JSValue::CellTag), tagFor(dst));
    store32(regT2, payloadFor(dst));

    // Increment i
    add32(Imm32(1), regT0);
    store32(regT0, addressFor(i));

    // Verify that i is valid:
    loadPtr(addressFor(base), regT0);

    // Test base's structure
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
    callHasProperty.append(branchPtr(NotEqual, regT2, Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedStructure))));

    // Test base's prototype chain
    loadPtr(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedPrototypeChain)), regT3);
    loadPtr(Address(regT3, OBJECT_OFFSETOF(StructureChain, m_vector)), regT3);
1311     addJump(branchTestPtr(Zero, Address(regT3)), target);
1312
1313     Label checkPrototype(this);
1314     callHasProperty.append(branch32(Equal, Address(regT2, OBJECT_OFFSETOF(Structure, m_prototype) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), Imm32(JSValue::NullTag)));
1315     loadPtr(Address(regT2, OBJECT_OFFSETOF(Structure, m_prototype) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT2);
1316     loadPtr(Address(regT2, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
1317     callHasProperty.append(branchPtr(NotEqual, regT2, Address(regT3)));
1318     addPtr(Imm32(sizeof(Structure*)), regT3);
1319     branchTestPtr(NonZero, Address(regT3)).linkTo(checkPrototype, this);
1320
1321     // Continue loop.
1322     addJump(jump(), target);
1323
1324     // Slow case: Ask the object if i is valid.
1325     callHasProperty.link(this);
1326     loadPtr(addressFor(dst), regT1);
1327     JITStubCall stubCall(this, cti_has_property);
1328     stubCall.addArgument(regT0);
1329     stubCall.addArgument(regT1);
1330     stubCall.call();
1331
1332     // Test for valid key.
1333     addJump(branchTest32(NonZero, regT0), target);
1334     jump().linkTo(begin, this);
1335
1336     // End of loop.
1337     end.link(this);
1338 }
1339
1340 void JIT::emit_op_push_scope(Instruction* currentInstruction)
1341 {
1342     JITStubCall stubCall(this, cti_op_push_scope);
1343     stubCall.addArgument(currentInstruction[1].u.operand);
1344     stubCall.call(currentInstruction[1].u.operand);
1345 }
1346
1347 void JIT::emit_op_pop_scope(Instruction*)
1348 {
1349     JITStubCall(this, cti_op_pop_scope).call();
1350 }
1351
1352 void JIT::emit_op_to_jsnumber(Instruction* currentInstruction)
1353 {
1354     int dst = currentInstruction[1].u.operand;
1355     int src = currentInstruction[2].u.operand;
1356
1357     emitLoad(src, regT1, regT0);
1358
1359     Jump isInt32 = branch32(Equal, regT1, Imm32(JSValue::Int32Tag));
1360     addSlowCase(branch32(AboveOrEqual, regT1, Imm32(JSValue::EmptyValueTag)));
1361     isInt32.link(this);
1362
1363     if (src != dst)
1364         emitStore(dst, regT1, regT0);
1365     map(m_bytecodeOffset + OPCODE_LENGTH(op_to_jsnumber), dst, regT1, regT0);
1366 }
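// Tag trick used above (explanatory note): in the 32-bit value representation
// every non-double tag (Int32, Boolean, Null, Undefined, Cell, Empty) sits at
// the top of the unsigned range, at or above EmptyValueTag, while the high
// word of a stored double always falls below it. After the Int32 tag is
// handled separately, one unsigned compare classifies the value:
//
//     if (tag != Int32Tag && tag >= EmptyValueTag)
//         goto slowPath;    // not an int32 and not a double: needs ToNumber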
1367
1368 void JIT::emitSlow_op_to_jsnumber(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1369 {
1370     int dst = currentInstruction[1].u.operand;
1371
1372     linkSlowCase(iter);
1373
1374     JITStubCall stubCall(this, cti_op_to_jsnumber);
1375     stubCall.addArgument(regT1, regT0);
1376     stubCall.call(dst);
1377 }
1378
1379 void JIT::emit_op_push_new_scope(Instruction* currentInstruction)
1380 {
1381     JITStubCall stubCall(this, cti_op_push_new_scope);
1382     stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
1383     stubCall.addArgument(currentInstruction[3].u.operand);
1384     stubCall.call(currentInstruction[1].u.operand);
1385 }
1386
1387 void JIT::emit_op_catch(Instruction* currentInstruction)
1388 {
1389     // cti_op_throw returns the callFrame for the handler.
1390     move(regT0, callFrameRegister);
1391
1392     // Now store the exception returned by cti_op_throw.
1393     loadPtr(Address(stackPointerRegister, OBJECT_OFFSETOF(struct JITStackFrame, globalData)), regT3);
1394     load32(Address(regT3, OBJECT_OFFSETOF(JSGlobalData, exception) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT0);
1395     load32(Address(regT3, OBJECT_OFFSETOF(JSGlobalData, exception) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT1);
1396     store32(Imm32(JSValue().payload()), Address(regT3, OBJECT_OFFSETOF(JSGlobalData, exception) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)));
1397     store32(Imm32(JSValue().tag()), Address(regT3, OBJECT_OFFSETOF(JSGlobalData, exception) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)));
1398
1399     unsigned exception = currentInstruction[1].u.operand;
1400     emitStore(exception, regT1, regT0);
1401     map(m_bytecodeOffset + OPCODE_LENGTH(op_catch), exception, regT1, regT0);
1402 }
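// Roughly what the catch sequence above does (illustrative pseudocode only):
//
//     callFrame = handlerCallFrame;               // returned in regT0 by cti_op_throw
//     exceptionValue = globalData->exception;     // read tag and payload halves
//     globalData->exception = JSValue();          // clear it so it is not re-seen
//     r[exception] = exceptionValue;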
1403
1404 void JIT::emit_op_jmp_scopes(Instruction* currentInstruction)
1405 {
1406     JITStubCall stubCall(this, cti_op_jmp_scopes);
1407     stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
1408     stubCall.call();
1409     addJump(jump(), currentInstruction[2].u.operand);
1410 }
1411
1412 void JIT::emit_op_switch_imm(Instruction* currentInstruction)
1413 {
1414     unsigned tableIndex = currentInstruction[1].u.operand;
1415     unsigned defaultOffset = currentInstruction[2].u.operand;
1416     unsigned scrutinee = currentInstruction[3].u.operand;
1417
1418     // Create the jump table for this switch's destinations and record the switch statement.
1419     SimpleJumpTable* jumpTable = &m_codeBlock->immediateSwitchJumpTable(tableIndex);
1420     m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Immediate));
1421     jumpTable->ctiOffsets.grow(jumpTable->branchOffsets.size());
1422
1423     JITStubCall stubCall(this, cti_op_switch_imm);
1424     stubCall.addArgument(scrutinee);
1425     stubCall.addArgument(Imm32(tableIndex));
1426     stubCall.call();
1427     jump(regT0);
1428 }
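// The same pattern is used by op_switch_char and op_switch_string below: the
// jump table is appended to m_switches so its ctiOffsets can be filled in at
// link time, the stub maps the scrutinee to a machine-code destination at
// runtime, and that destination comes back in regT0 for an indirect jump.
// Conceptually (illustrative):
//
//     void* dest = cti_op_switch_imm(scrutinee, tableIndex);  // case label or default
//     goto *dest;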
1429
1430 void JIT::emit_op_switch_char(Instruction* currentInstruction)
1431 {
1432     unsigned tableIndex = currentInstruction[1].u.operand;
1433     unsigned defaultOffset = currentInstruction[2].u.operand;
1434     unsigned scrutinee = currentInstruction[3].u.operand;
1435
1436     // Create the jump table for this switch's destinations and record the switch statement.
1437     SimpleJumpTable* jumpTable = &m_codeBlock->characterSwitchJumpTable(tableIndex);
1438     m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Character));
1439     jumpTable->ctiOffsets.grow(jumpTable->branchOffsets.size());
1440
1441     JITStubCall stubCall(this, cti_op_switch_char);
1442     stubCall.addArgument(scrutinee);
1443     stubCall.addArgument(Imm32(tableIndex));
1444     stubCall.call();
1445     jump(regT0);
1446 }
1447
1448 void JIT::emit_op_switch_string(Instruction* currentInstruction)
1449 {
1450     unsigned tableIndex = currentInstruction[1].u.operand;
1451     unsigned defaultOffset = currentInstruction[2].u.operand;
1452     unsigned scrutinee = currentInstruction[3].u.operand;
1453
1454     // Create the jump table for this switch's destinations and record the switch statement.
1455     StringJumpTable* jumpTable = &m_codeBlock->stringSwitchJumpTable(tableIndex);
1456     m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset));
1457
1458     JITStubCall stubCall(this, cti_op_switch_string);
1459     stubCall.addArgument(scrutinee);
1460     stubCall.addArgument(Imm32(tableIndex));
1461     stubCall.call();
1462     jump(regT0);
1463 }
1464
1465 void JIT::emit_op_new_error(Instruction* currentInstruction)
1466 {
1467     unsigned dst = currentInstruction[1].u.operand;
1468     unsigned type = currentInstruction[2].u.operand;
1469     unsigned message = currentInstruction[3].u.operand;
1470
1471     JITStubCall stubCall(this, cti_op_new_error);
1472     stubCall.addArgument(Imm32(type));
1473     stubCall.addArgument(m_codeBlock->getConstant(message));
1474     stubCall.addArgument(Imm32(m_bytecodeOffset));
1475     stubCall.call(dst);
1476 }
1477
1478 void JIT::emit_op_debug(Instruction* currentInstruction)
1479 {
1480 #if ENABLE(DEBUG_WITH_BREAKPOINT)
1481     UNUSED_PARAM(currentInstruction);
1482     breakpoint();
1483 #else
1484     JITStubCall stubCall(this, cti_op_debug);
1485     stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
1486     stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
1487     stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
1488     stubCall.call();
1489 #endif
1490 }
1491
1492
1493 void JIT::emit_op_enter(Instruction*)
1494 {
1495     // Even though JIT code doesn't use them, we initialize our local
1496     // variable registers to zap stale pointers, to avoid unnecessarily
1497     // prolonging object lifetime and increasing GC pressure.
1498     for (int i = 0; i < m_codeBlock->m_numVars; ++i)
1499         emitStore(i, jsUndefined());
1500 }
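// Explanatory note: the register file is visible to the collector, so a slot
// still holding a cell pointer from a previous call would keep that object
// alive. Storing jsUndefined() (a non-cell tag) makes the slot inert:
//
//     for (int i = 0; i < m_numVars; ++i)
//         r[i] = jsUndefined();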
1501
1502 void JIT::emit_op_create_activation(Instruction* currentInstruction)
1503 {
1504     unsigned activation = currentInstruction[1].u.operand;
1505     
1506     Jump activationCreated = branch32(NotEqual, tagFor(activation), Imm32(JSValue::EmptyValueTag));
1507     JITStubCall(this, cti_op_push_activation).call(activation);
1508     activationCreated.link(this);
1509 }
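// Lazy-initialization pattern (also used by op_create_arguments below): the
// slot starts out holding the empty value, so a non-empty tag means the
// activation already exists and the stub call is skipped. Roughly:
//
//     if (tagFor(activation) == EmptyValueTag)
//         r[activation] = cti_op_push_activation();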
1510
1511 void JIT::emit_op_create_arguments(Instruction* currentInstruction)
1512 {
1513     unsigned dst = currentInstruction[1].u.operand;
1514
1515     Jump argsCreated = branch32(NotEqual, tagFor(dst), Imm32(JSValue::EmptyValueTag));
1516
1517     if (m_codeBlock->m_numParameters == 1)
1518         JITStubCall(this, cti_op_create_arguments_no_params).call();
1519     else
1520         JITStubCall(this, cti_op_create_arguments).call();
1521
1522     emitStore(dst, regT1, regT0);
1523     emitStore(unmodifiedArgumentsRegister(dst), regT1, regT0);
1524
1525     argsCreated.link(this);
1526 }
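// Explanatory note: regT1:regT0 hold the arguments object returned by the
// stub. It is stored twice, once in dst and once in the shadow slot named by
// unmodifiedArgumentsRegister(dst), so the original object can still be found
// (e.g. for tear-off on function exit) even if the 'arguments' local is later
// reassigned.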
1527
1528 void JIT::emit_op_init_lazy_reg(Instruction* currentInstruction)
1529 {
1530     unsigned dst = currentInstruction[1].u.operand;
1531
1532     emitStore(dst, JSValue());
1533 }
1534
1535 void JIT::emit_op_get_callee(Instruction* currentInstruction)
1536 {
1537     int dst = currentInstruction[1].u.operand;
1538     emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, regT0);
1539     emitStoreCell(dst, regT0);
1540 }
1541
1542 void JIT::emit_op_create_this(Instruction* currentInstruction)
1543 {
1544     unsigned protoRegister = currentInstruction[2].u.operand;
1545     emitLoad(protoRegister, regT1, regT0);
1546     JITStubCall stubCall(this, cti_op_create_this);
1547     stubCall.addArgument(regT1, regT0);
1548     stubCall.call(currentInstruction[1].u.operand);
1549 }
1550
1551 void JIT::emit_op_convert_this(Instruction* currentInstruction)
1552 {
1553     unsigned thisRegister = currentInstruction[1].u.operand;
1554
1555     emitLoad(thisRegister, regT1, regT0);
1556
1557     addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::CellTag)));
1558
1559     loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
1560     addSlowCase(branchTest8(NonZero, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(NeedsThisConversion)));
1561
1562     map(m_bytecodeOffset + OPCODE_LENGTH(op_convert_this), thisRegister, regT1, regT0);
1563 }
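// Fast path in sum (illustrative): a cell whose Structure does not set
// NeedsThisConversion can be used as 'this' directly; everything else falls
// to the slow case, which performs the usual ToObject-style wrapping:
//
//     if (!value.isCell() || value.asCell()->structure()->typeInfo().needsThisConversion())
//         value = cti_op_convert_this(value);   // slow path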
1564
1565 void JIT::emit_op_convert_this_strict(Instruction* currentInstruction)
1566 {
1567     unsigned thisRegister = currentInstruction[1].u.operand;
1568     
1569     emitLoad(thisRegister, regT1, regT0);
1570     
1571     Jump notNull = branch32(NotEqual, regT1, Imm32(JSValue::EmptyValueTag));
1572     emitStore(thisRegister, jsNull());
1573     Jump setThis = jump();
1574     notNull.link(this);
1575     Jump isImmediate = branch32(NotEqual, regT1, Imm32(JSValue::CellTag));
1576     loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
1577     Jump notAnObject = branch8(NotEqual, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo.m_type)), Imm32(ObjectType));
1578     addSlowCase(branchTest8(NonZero, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(NeedsThisConversion)));
1579     isImmediate.link(this);
1580     notAnObject.link(this);
1581     setThis.link(this);
1582     map(m_bytecodeOffset + OPCODE_LENGTH(op_convert_this_strict), thisRegister, regT1, regT0);
1583 }
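// In strict mode 'this' is never wrapped: an empty (not supplied) 'this'
// becomes null, immediates and non-object cells pass through unchanged, and
// only object cells whose Structure demands conversion take the slow path.
// Roughly (illustrative; isObject and needsThisConversion are shorthand for
// the Structure checks above):
//
//     if (value.isEmpty())
//         value = jsNull();
//     else if (value.isCell() && isObject(value) && needsThisConversion(value))
//         value = cti_op_convert_this_strict(value);   // slow path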
1584
1585 void JIT::emitSlow_op_convert_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1586 {
1587     unsigned thisRegister = currentInstruction[1].u.operand;
1588
1589     linkSlowCase(iter);
1590     linkSlowCase(iter);
1591
1592     JITStubCall stubCall(this, cti_op_convert_this);
1593     stubCall.addArgument(regT1, regT0);
1594     stubCall.call(thisRegister);
1595 }
1596
1597 void JIT::emitSlow_op_convert_this_strict(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1598 {
1599     unsigned thisRegister = currentInstruction[1].u.operand;
1600     
1601     linkSlowCase(iter);
1602     
1603     JITStubCall stubCall(this, cti_op_convert_this_strict);
1604     stubCall.addArgument(regT1, regT0);
1605     stubCall.call(thisRegister);
1606 }
1607
1608 void JIT::emit_op_profile_will_call(Instruction* currentInstruction)
1609 {
1610     peek(regT2, OBJECT_OFFSETOF(JITStackFrame, enabledProfilerReference) / sizeof(void*));
1611     Jump noProfiler = branchTestPtr(Zero, Address(regT2));
1612
1613     JITStubCall stubCall(this, cti_op_profile_will_call);
1614     stubCall.addArgument(currentInstruction[1].u.operand);
1615     stubCall.call();
1616     noProfiler.link(this);
1617 }
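// Both profiler opcodes test the enabled-profiler slot in the JITStackFrame
// before paying for a stub call, i.e. roughly (illustrative):
//
//     if (*stackFrame.enabledProfilerReference)
//         cti_op_profile_will_call(function);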
1618
1619 void JIT::emit_op_profile_did_call(Instruction* currentInstruction)
1620 {
1621     peek(regT2, OBJECT_OFFSETOF(JITStackFrame, enabledProfilerReference) / sizeof(void*));
1622     Jump noProfiler = branchTestPtr(Zero, Address(regT2));
1623
1624     JITStubCall stubCall(this, cti_op_profile_did_call);
1625     stubCall.addArgument(currentInstruction[1].u.operand);
1626     stubCall.call();
1627     noProfiler.link(this);
1628 }
1629
1630 void JIT::emit_op_get_arguments_length(Instruction* currentInstruction)
1631 {
1632     int dst = currentInstruction[1].u.operand;
1633     int argumentsRegister = currentInstruction[2].u.operand;
1634     addSlowCase(branch32(NotEqual, tagFor(argumentsRegister), Imm32(JSValue::EmptyValueTag)));
1635     emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT0);
1636     sub32(Imm32(1), regT0);
1637     emitStoreInt32(dst, regT0);
1638 }
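// ArgumentCount in the call frame header counts the implicit 'this' slot,
// hence the sub32 above. The fast path only applies while no arguments object
// has been materialized (the register still holds the empty value); otherwise
// the slow case below performs a generic 'length' lookup. Roughly:
//
//     if (tagFor(argumentsRegister) != EmptyValueTag)
//         goto slowPath;                          // real object: generic get_by_id
//     dst = jsNumber(callFrame->argumentCount() - 1);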
1639
1640 void JIT::emitSlow_op_get_arguments_length(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1641 {
1642     linkSlowCase(iter);
1643     int dst = currentInstruction[1].u.operand;
1644     int base = currentInstruction[2].u.operand;
1645     int ident = currentInstruction[3].u.operand;
1646     
1647     JITStubCall stubCall(this, cti_op_get_by_id_generic);
1648     stubCall.addArgument(base);
1649     stubCall.addArgument(ImmPtr(&(m_codeBlock->identifier(ident))));
1650     stubCall.call(dst);
1651 }
1652
1653 void JIT::emit_op_get_argument_by_val(Instruction* currentInstruction)
1654 {
1655     int dst = currentInstruction[1].u.operand;
1656     int argumentsRegister = currentInstruction[2].u.operand;
1657     int property = currentInstruction[3].u.operand;
1658     addSlowCase(branch32(NotEqual, tagFor(argumentsRegister), Imm32(JSValue::EmptyValueTag)));
1659     emitLoad(property, regT1, regT2);
1660     addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
1661     add32(Imm32(1), regT2);
1662     // regT2 now contains the integer index of the argument we want; the add32 above accounts for the implicit 'this' slot
1663     emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT3);
1664     addSlowCase(branch32(AboveOrEqual, regT2, regT3));
1665     
1666     Jump skipOutofLineParams;
1667     int numArgs = m_codeBlock->m_numParameters;
1668     if (numArgs) {
1669         Jump notInInPlaceArgs = branch32(AboveOrEqual, regT2, Imm32(numArgs));
1670         addPtr(Imm32(static_cast<unsigned>(-(RegisterFile::CallFrameHeaderSize + numArgs) * sizeof(Register))), callFrameRegister, regT1);
1671         loadPtr(BaseIndex(regT1, regT2, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT0);
1672         loadPtr(BaseIndex(regT1, regT2, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT1);
1673         skipOutofLineParams = jump();
1674         notInInPlaceArgs.link(this);
1675     }
1676
1677     addPtr(Imm32(static_cast<unsigned>(-(RegisterFile::CallFrameHeaderSize + numArgs) * sizeof(Register))), callFrameRegister, regT1);
1678     mul32(Imm32(sizeof(Register)), regT3, regT3);
1679     subPtr(regT3, regT1);
1680     loadPtr(BaseIndex(regT1, regT2, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT0);
1681     loadPtr(BaseIndex(regT1, regT2, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT1);
1682     if (numArgs)
1683         skipOutofLineParams.link(this);
1684     emitStore(dst, regT1, regT0);
1685 }
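// Layout assumed by the fast path above (explanatory sketch, derived from the
// address arithmetic): arguments that fit the declared parameter count live
// just below the call frame header, while the arguments of an over-applied
// call were also copied out-of-line a further argumentCount registers down.
// With index = property + 1 (counting 'this'):
//
//     if (index < numParameters)      // in-place parameter slot
//         dst = callFrame[index - CallFrameHeaderSize - numParameters];
//     else                            // out-of-line copy
//         dst = callFrame[index - CallFrameHeaderSize - numParameters - argumentCount];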
1686
1687 void JIT::emitSlow_op_get_argument_by_val(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1688 {
1689     unsigned dst = currentInstruction[1].u.operand;
1690     unsigned arguments = currentInstruction[2].u.operand;
1691     unsigned property = currentInstruction[3].u.operand;
1692
1693     linkSlowCase(iter);
1694     Jump skipArgumentsCreation = jump();
1695
1696     linkSlowCase(iter);
1697     linkSlowCase(iter);
1698     if (m_codeBlock->m_numParameters == 1)
1699         JITStubCall(this, cti_op_create_arguments_no_params).call();
1700     else
1701         JITStubCall(this, cti_op_create_arguments).call();
1702     
1703     emitStore(arguments, regT1, regT0);
1704     emitStore(unmodifiedArgumentsRegister(arguments), regT1, regT0);
1705     
1706     skipArgumentsCreation.link(this);
1707     JITStubCall stubCall(this, cti_op_get_by_val);
1708     stubCall.addArgument(arguments);
1709     stubCall.addArgument(property);
1710     stubCall.call(dst);
1711 }
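// Note on the slow path above: it has three entry points. The first
// linkSlowCase (arguments object already materialized) jumps straight to the
// generic get_by_val; the other two (non-int32 index, index out of range)
// first create the arguments object, store it in both the named slot and the
// unmodified shadow slot, and then fall into the same generic lookup.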
1712
1713 } // namespace JSC
1714
1715 #endif // USE(JSVALUE32_64)
1716 #endif // ENABLE(JIT)