jneq_ptr shouldn't have a pointer
Source/JavaScriptCore/jit/JITOpcodes32_64.cpp
/*
 * Copyright (C) 2009 Apple Inc. All rights reserved.
 * Copyright (C) 2010 Patrick Gansterer <paroga@paroga.com>
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"

#if ENABLE(JIT)
#if USE(JSVALUE32_64)
#include "JIT.h"

#include "JITInlineMethods.h"
#include "JITStubCall.h"
#include "JSArray.h"
#include "JSCell.h"
#include "JSFunction.h"
#include "JSPropertyNameIterator.h"
#include "LinkBuffer.h"

namespace JSC {

PassRefPtr<ExecutableMemoryHandle> JIT::privateCompileCTIMachineTrampolines(JSGlobalData* globalData, TrampolineStructure* trampolines)
{
    // (1) This function provides fast property access for string length
    Label stringLengthBegin = align();

    // regT0 holds payload, regT1 holds tag
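    // (Reminder: in the JSVALUE32_64 encoding a JSValue is two 32-bit words,
    // a tag and a payload. The tag is CellTag, Int32Tag, BooleanTag, etc.;
    // anything below LowestTag is the high word of a double, and the payload
    // holds the cell pointer, integer or boolean.)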

    Jump string_failureCases1 = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));
    Jump string_failureCases2 = branchPtr(NotEqual, Address(regT0, JSCell::structureOffset()), TrustedImmPtr(globalData->stringStructure.get()));

    // Checks out okay! - get the length from the UString.
    load32(Address(regT0, OBJECT_OFFSETOF(JSString, m_length)), regT2);

    Jump string_failureCases3 = branch32(Above, regT2, TrustedImm32(INT_MAX));
    move(regT2, regT0);
    move(TrustedImm32(JSValue::Int32Tag), regT1);

    ret();

    JumpList callSlowCase;
    JumpList constructSlowCase;

    // VirtualCallLink Trampoline
    // regT1, regT0 hold the callee; callFrame is moved and partially initialized.
    Label virtualCallLinkBegin = align();
    callSlowCase.append(branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag)));
    callSlowCase.append(emitJumpIfNotType(regT0, regT1, JSFunctionType));

    // Finish canonical initialization before JS function call.
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_scope)), regT1);
    emitPutCellToCallFrameHeader(regT1, RegisterFile::ScopeChain);

    // Also initialize ReturnPC for use by lazy linking and exceptions.
    preserveReturnAddressAfterCall(regT3);
    emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);

    storePtr(callFrameRegister, &m_globalData->topCallFrame);
    restoreArgumentReference();
    Call callLazyLinkCall = call();
    restoreReturnAddressBeforeReturn(regT3);
    jump(regT0);
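    // (Note: cti_vm_lazyLinkCall, linked up below, compiles and links the
    // callee as needed and returns its entry point; the call() above leaves
    // that entry point in regT0 for the tail jump.)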

    // VirtualConstructLink Trampoline
    // regT1, regT0 hold the callee; callFrame is moved and partially initialized.
    Label virtualConstructLinkBegin = align();
    constructSlowCase.append(branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag)));
    constructSlowCase.append(emitJumpIfNotType(regT0, regT1, JSFunctionType));

    // Finish canonical initialization before JS function call.
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_scope)), regT1);
    emitPutCellToCallFrameHeader(regT1, RegisterFile::ScopeChain);

    // Also initialize ReturnPC for use by lazy linking and exceptions.
    preserveReturnAddressAfterCall(regT3);
    emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);

    storePtr(callFrameRegister, &m_globalData->topCallFrame);
    restoreArgumentReference();
    Call callLazyLinkConstruct = call();
    restoreReturnAddressBeforeReturn(regT3);
    jump(regT0);

    // VirtualCall Trampoline
    // regT1, regT0 hold the callee; regT2 will hold the FunctionExecutable.
    Label virtualCallBegin = align();
    callSlowCase.append(branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag)));
    callSlowCase.append(emitJumpIfNotType(regT0, regT1, JSFunctionType));

    // Finish canonical initialization before JS function call.
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_scope)), regT1);
    emitPutCellToCallFrameHeader(regT1, RegisterFile::ScopeChain);

    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
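    // (Judging by this check, m_numParametersForCall stays negative until the
    // executable has been compiled, so the signed >= 0 compare below doubles
    // as a "has a code block" test.)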
    Jump hasCodeBlock1 = branch32(GreaterThanOrEqual, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParametersForCall)), TrustedImm32(0));
    preserveReturnAddressAfterCall(regT3);

    storePtr(callFrameRegister, &m_globalData->topCallFrame);
    restoreArgumentReference();
    Call callCompileCall = call();
    restoreReturnAddressBeforeReturn(regT3);
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);

    hasCodeBlock1.link(this);
    loadPtr(Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_jitCodeForCallWithArityCheck)), regT0);
    jump(regT0);

    // VirtualConstruct Trampoline
    // regT1, regT0 hold the callee; regT2 will hold the FunctionExecutable.
    Label virtualConstructBegin = align();
    constructSlowCase.append(branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag)));
    constructSlowCase.append(emitJumpIfNotType(regT0, regT1, JSFunctionType));

    // Finish canonical initialization before JS function call.
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_scope)), regT1);
    emitPutCellToCallFrameHeader(regT1, RegisterFile::ScopeChain);

    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
    Jump hasCodeBlock2 = branch32(GreaterThanOrEqual, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParametersForConstruct)), TrustedImm32(0));
    preserveReturnAddressAfterCall(regT3);

    storePtr(callFrameRegister, &m_globalData->topCallFrame);
    restoreArgumentReference();
    Call callCompileConstruct = call();
    restoreReturnAddressBeforeReturn(regT3);
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);

    hasCodeBlock2.link(this);
    loadPtr(Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_jitCodeForConstructWithArityCheck)), regT0);
    jump(regT0);

    callSlowCase.link(this);
    // Finish canonical initialization before JS function call.
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT2);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT2, regT2);
    emitPutCellToCallFrameHeader(regT2, RegisterFile::ScopeChain);

    // Also initialize ReturnPC and CodeBlock, like a JS function would.
    preserveReturnAddressAfterCall(regT3);
    emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);
    emitPutImmediateToCallFrameHeader(0, RegisterFile::CodeBlock);

    storePtr(callFrameRegister, &m_globalData->topCallFrame);
    restoreArgumentReference();
    Call callCallNotJSFunction = call();
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);
    restoreReturnAddressBeforeReturn(regT3);
    ret();

    constructSlowCase.link(this);
    // Finish canonical initialization before JS function call.
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT2);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT2, regT2);
    emitPutCellToCallFrameHeader(regT2, RegisterFile::ScopeChain);

    // Also initialize ReturnPC and CodeBlock, like a JS function would.
    preserveReturnAddressAfterCall(regT3);
    emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);
    emitPutImmediateToCallFrameHeader(0, RegisterFile::CodeBlock);

    storePtr(callFrameRegister, &m_globalData->topCallFrame);
    restoreArgumentReference();
    Call callConstructNotJSFunction = call();
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);
    restoreReturnAddressBeforeReturn(regT3);
    ret();

    // NativeCall Trampoline
    Label nativeCallThunk = privateCompileCTINativeCall(globalData);
    Label nativeConstructThunk = privateCompileCTINativeCall(globalData, true);

    Call string_failureCases1Call = makeTailRecursiveCall(string_failureCases1);
    Call string_failureCases2Call = makeTailRecursiveCall(string_failureCases2);
    Call string_failureCases3Call = makeTailRecursiveCall(string_failureCases3);

    // All trampolines constructed! Copy the code, link up calls, and set the pointers on the Machine object.
    LinkBuffer patchBuffer(*m_globalData, this, GLOBAL_THUNK_ID);

    patchBuffer.link(string_failureCases1Call, FunctionPtr(cti_op_get_by_id_string_fail));
    patchBuffer.link(string_failureCases2Call, FunctionPtr(cti_op_get_by_id_string_fail));
    patchBuffer.link(string_failureCases3Call, FunctionPtr(cti_op_get_by_id_string_fail));
    patchBuffer.link(callLazyLinkCall, FunctionPtr(cti_vm_lazyLinkCall));
    patchBuffer.link(callLazyLinkConstruct, FunctionPtr(cti_vm_lazyLinkConstruct));
    patchBuffer.link(callCompileCall, FunctionPtr(cti_op_call_jitCompile));
    patchBuffer.link(callCompileConstruct, FunctionPtr(cti_op_construct_jitCompile));
    patchBuffer.link(callCallNotJSFunction, FunctionPtr(cti_op_call_NotJSFunction));
    patchBuffer.link(callConstructNotJSFunction, FunctionPtr(cti_op_construct_NotJSConstruct));

    CodeRef finalCode = FINALIZE_CODE(patchBuffer, ("JIT CTI machine trampolines"));
    RefPtr<ExecutableMemoryHandle> executableMemory = finalCode.executableMemory();

    trampolines->ctiVirtualCallLink = patchBuffer.trampolineAt(virtualCallLinkBegin);
    trampolines->ctiVirtualConstructLink = patchBuffer.trampolineAt(virtualConstructLinkBegin);
    trampolines->ctiVirtualCall = patchBuffer.trampolineAt(virtualCallBegin);
    trampolines->ctiVirtualConstruct = patchBuffer.trampolineAt(virtualConstructBegin);
    trampolines->ctiNativeCall = patchBuffer.trampolineAt(nativeCallThunk);
    trampolines->ctiNativeConstruct = patchBuffer.trampolineAt(nativeConstructThunk);
    trampolines->ctiStringLengthTrampoline = patchBuffer.trampolineAt(stringLengthBegin);

    return executableMemory.release();
}

JIT::Label JIT::privateCompileCTINativeCall(JSGlobalData* globalData, bool isConstruct)
{
    int executableOffsetToFunction = isConstruct ? OBJECT_OFFSETOF(NativeExecutable, m_constructor) : OBJECT_OFFSETOF(NativeExecutable, m_function);

    Label nativeCallThunk = align();

    emitPutImmediateToCallFrameHeader(0, RegisterFile::CodeBlock);
    storePtr(callFrameRegister, &m_globalData->topCallFrame);

#if CPU(X86)
    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT0);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT0);
    emitPutCellToCallFrameHeader(regT1, RegisterFile::ScopeChain);

    peek(regT1);
    emitPutToCallFrameHeader(regT1, RegisterFile::ReturnPC);

    // Calling convention:      f(ecx, edx, ...);
    // Host function signature: f(ExecState*);
    move(callFrameRegister, X86Registers::ecx);

    subPtr(TrustedImm32(16 - sizeof(void*)), stackPointerRegister); // Align stack after call.

    // call the function
    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, regT1);
    loadPtr(Address(regT1, OBJECT_OFFSETOF(JSFunction, m_executable)), regT1);
    move(regT0, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.
    call(Address(regT1, executableOffsetToFunction));

    addPtr(TrustedImm32(16 - sizeof(void*)), stackPointerRegister);

#elif CPU(ARM)
    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT2);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT2);
    emitPutCellToCallFrameHeader(regT1, RegisterFile::ScopeChain);

    preserveReturnAddressAfterCall(regT3); // Callee preserved
    emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);

    // Calling convention:      f(r0 == regT0, r1 == regT1, ...);
    // Host function signature: f(ExecState*);
    move(callFrameRegister, ARMRegisters::r0);

    // call the function
    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, ARMRegisters::r1);
    move(regT2, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.
    loadPtr(Address(ARMRegisters::r1, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
    call(Address(regT2, executableOffsetToFunction));

    restoreReturnAddressBeforeReturn(regT3);
#elif CPU(SH4)
    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT2);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT2);
    emitPutCellToCallFrameHeader(regT1, RegisterFile::ScopeChain);

    preserveReturnAddressAfterCall(regT3); // Callee preserved
    emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);

    // Calling convention: f(r0 == regT4, r1 == regT5, ...);
    // Host function signature: f(ExecState*);
    move(callFrameRegister, regT4);

    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, regT5);
    move(regT2, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.
    loadPtr(Address(regT5, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);

    call(Address(regT2, executableOffsetToFunction), regT0);
    restoreReturnAddressBeforeReturn(regT3);
#elif CPU(MIPS)
    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT0);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT0);
    emitPutCellToCallFrameHeader(regT1, RegisterFile::ScopeChain);

    preserveReturnAddressAfterCall(regT3); // Callee preserved
    emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);

    // Calling convention:      f(a0, a1, a2, a3);
    // Host function signature: f(ExecState*);

    // Allocate stack space for 16 bytes (8-byte aligned)
    // 16 bytes (unused) for 4 arguments
    subPtr(TrustedImm32(16), stackPointerRegister);

    // Setup arg0
    move(callFrameRegister, MIPSRegisters::a0);

    // Call
    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, MIPSRegisters::a2);
    loadPtr(Address(MIPSRegisters::a2, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
    move(regT0, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.
    call(Address(regT2, executableOffsetToFunction));

    // Restore stack space
    addPtr(TrustedImm32(16), stackPointerRegister);

    restoreReturnAddressBeforeReturn(regT3);

#else
#error "JIT not supported on this platform."
    UNUSED_PARAM(executableOffsetToFunction);
    breakpoint();
#endif // CPU(X86)

    // Check for an exception
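    // (A stub signals failure by storing into globalData->exception; comparing
    // just the tag word against EmptyValueTag detects a pending exception
    // without loading the full 64-bit value.)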
    Jump sawException = branch32(NotEqual, AbsoluteAddress(reinterpret_cast<char*>(&globalData->exception) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), TrustedImm32(JSValue::EmptyValueTag));

    // Return.
    ret();

    // Handle an exception
    sawException.link(this);

    // Grab the return address.
    preserveReturnAddressAfterCall(regT1);

    move(TrustedImmPtr(&globalData->exceptionLocation), regT2);
    storePtr(regT1, regT2);
    poke(callFrameRegister, OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof(void*));

    storePtr(callFrameRegister, &m_globalData->topCallFrame);
    // Set the return address.
    move(TrustedImmPtr(FunctionPtr(ctiVMThrowTrampoline).value()), regT1);
    restoreReturnAddressBeforeReturn(regT1);

    ret();

    return nativeCallThunk;
}

JIT::CodeRef JIT::privateCompileCTINativeCall(JSGlobalData* globalData, NativeFunction func)
{
    Call nativeCall;

    emitPutImmediateToCallFrameHeader(0, RegisterFile::CodeBlock);
    storePtr(callFrameRegister, &m_globalData->topCallFrame);

#if CPU(X86)
    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT0);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT0);
    emitPutCellToCallFrameHeader(regT1, RegisterFile::ScopeChain);

    peek(regT1);
    emitPutToCallFrameHeader(regT1, RegisterFile::ReturnPC);

    // Calling convention:      f(ecx, edx, ...);
    // Host function signature: f(ExecState*);
    move(callFrameRegister, X86Registers::ecx);

    subPtr(TrustedImm32(16 - sizeof(void*)), stackPointerRegister); // Align stack after call.

    move(regT0, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.

    // call the function
    nativeCall = call();

    addPtr(TrustedImm32(16 - sizeof(void*)), stackPointerRegister);

#elif CPU(ARM)
    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT2);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT2);
    emitPutCellToCallFrameHeader(regT1, RegisterFile::ScopeChain);

    preserveReturnAddressAfterCall(regT3); // Callee preserved
    emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);

    // Calling convention:      f(r0 == regT0, r1 == regT1, ...);
    // Host function signature: f(ExecState*);
    move(callFrameRegister, ARMRegisters::r0);

    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, ARMRegisters::r1);
    move(regT2, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.
    loadPtr(Address(ARMRegisters::r1, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);

    // call the function
    nativeCall = call();

    restoreReturnAddressBeforeReturn(regT3);

#elif CPU(MIPS)
    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT0);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT0);
    emitPutCellToCallFrameHeader(regT1, RegisterFile::ScopeChain);

    preserveReturnAddressAfterCall(regT3); // Callee preserved
    emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);

    // Calling convention:      f(a0, a1, a2, a3);
    // Host function signature: f(ExecState*);

    // Allocate stack space for 16 bytes (8-byte aligned)
    // 16 bytes (unused) for 4 arguments
    subPtr(TrustedImm32(16), stackPointerRegister);

    // Setup arg0
    move(callFrameRegister, MIPSRegisters::a0);

    // Call
    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, MIPSRegisters::a2);
    loadPtr(Address(MIPSRegisters::a2, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
    move(regT0, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.

    // call the function
    nativeCall = call();

    // Restore stack space
    addPtr(TrustedImm32(16), stackPointerRegister);

    restoreReturnAddressBeforeReturn(regT3);
#elif CPU(SH4)
    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT2);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT2);
    emitPutCellToCallFrameHeader(regT1, RegisterFile::ScopeChain);

    preserveReturnAddressAfterCall(regT3); // Callee preserved
    emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);

    // Calling convention: f(r0 == regT4, r1 == regT5, ...);
    // Host function signature: f(ExecState*);
    move(callFrameRegister, regT4);

    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, regT5);
    move(regT2, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.
    loadPtr(Address(regT5, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);

    // call the function
    nativeCall = call();

    restoreReturnAddressBeforeReturn(regT3);
#else
#error "JIT not supported on this platform."
    breakpoint();
#endif // CPU(X86)

    // Check for an exception
    Jump sawException = branch32(NotEqual, AbsoluteAddress(reinterpret_cast<char*>(&globalData->exception) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), TrustedImm32(JSValue::EmptyValueTag));

    // Return.
    ret();

    // Handle an exception
    sawException.link(this);

    // Grab the return address.
    preserveReturnAddressAfterCall(regT1);

    move(TrustedImmPtr(&globalData->exceptionLocation), regT2);
    storePtr(regT1, regT2);
    poke(callFrameRegister, OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof(void*));

    storePtr(callFrameRegister, &m_globalData->topCallFrame);
    // Set the return address.
    move(TrustedImmPtr(FunctionPtr(ctiVMThrowTrampoline).value()), regT1);
    restoreReturnAddressBeforeReturn(regT1);

    ret();

    // Native call thunk constructed! Copy the code and link up the call.
    LinkBuffer patchBuffer(*m_globalData, this, GLOBAL_THUNK_ID);

    patchBuffer.link(nativeCall, FunctionPtr(func));
    return FINALIZE_CODE(patchBuffer, ("JIT CTI native call"));
}

void JIT::emit_op_mov(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src = currentInstruction[2].u.operand;

    if (m_codeBlock->isConstantRegisterIndex(src))
        emitStore(dst, getConstantOperand(src));
    else {
        emitLoad(src, regT1, regT0);
        emitStore(dst, regT1, regT0);
        map(m_bytecodeOffset + OPCODE_LENGTH(op_mov), dst, regT1, regT0);
    }
}

void JIT::emit_op_end(Instruction* currentInstruction)
{
    ASSERT(returnValueRegister != callFrameRegister);
    emitLoad(currentInstruction[1].u.operand, regT1, regT0);
    restoreReturnAddressBeforeReturn(Address(callFrameRegister, RegisterFile::ReturnPC * static_cast<int>(sizeof(Register))));
    ret();
}

void JIT::emit_op_jmp(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[1].u.operand;
    addJump(jump(), target);
}

void JIT::emit_op_new_object(Instruction* currentInstruction)
{
    emitAllocateJSFinalObject(TrustedImmPtr(m_codeBlock->globalObject()->emptyObjectStructure()), regT0, regT1);

    emitStoreCell(currentInstruction[1].u.operand, regT0);
}

void JIT::emitSlow_op_new_object(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall(this, cti_op_new_object).call(currentInstruction[1].u.operand);
}

void JIT::emit_op_check_has_instance(Instruction* currentInstruction)
{
    unsigned baseVal = currentInstruction[3].u.operand;

    emitLoadPayload(baseVal, regT0);

    // Check that baseVal is a cell.
    emitJumpSlowCaseIfNotJSCell(baseVal);

    // Check that baseVal 'ImplementsHasInstance'.
    loadPtr(Address(regT0, JSCell::structureOffset()), regT0);
    addSlowCase(branchTest8(Zero, Address(regT0, Structure::typeInfoFlagsOffset()), TrustedImm32(ImplementsDefaultHasInstance)));
}

void JIT::emit_op_instanceof(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;
    unsigned proto = currentInstruction[3].u.operand;

    // Load the operands into registers.
    // regT0 is kept free here, so that it can be used for the result.
    emitLoadPayload(value, regT2);
    emitLoadPayload(proto, regT1);

    // Check that value and proto are cells. baseVal must be a cell - this is checked by op_check_has_instance.
    emitJumpSlowCaseIfNotJSCell(value);
    emitJumpSlowCaseIfNotJSCell(proto);

    // Check that prototype is an object
    loadPtr(Address(regT1, JSCell::structureOffset()), regT3);
    addSlowCase(emitJumpIfNotObject(regT3));

    // Optimistically load the result true, and start looping.
    // Initially, regT1 still contains proto and regT2 still contains value.
    // As we loop regT2 will be updated with its prototype, recursively walking the prototype chain.
    move(TrustedImm32(1), regT0);
    Label loop(this);

    // Load the prototype of the cell in regT2.  If this is equal to regT1 - WIN!
    // Otherwise, check if we've hit null - if we have then drop out of the loop, if not go again.
    loadPtr(Address(regT2, JSCell::structureOffset()), regT2);
    load32(Address(regT2, Structure::prototypeOffset() + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT2);
    Jump isInstance = branchPtr(Equal, regT2, regT1);
    branchTest32(NonZero, regT2).linkTo(loop, this);

    // We get here either by dropping out of the loop, or if value was not an Object.  Result is false.
    move(TrustedImm32(0), regT0);

    // isInstance jumps right down to here, to skip setting the result to false (it has already set true).
    isInstance.link(this);
    emitStoreBool(dst, regT0);
}

void JIT::emitSlow_op_check_has_instance(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;
    unsigned baseVal = currentInstruction[3].u.operand;

    linkSlowCaseIfNotJSCell(iter, baseVal);
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_check_has_instance);
    stubCall.addArgument(value);
    stubCall.addArgument(baseVal);
    stubCall.call(dst);

    emitJumpSlowToHot(jump(), currentInstruction[4].u.operand);
}

void JIT::emitSlow_op_instanceof(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;
    unsigned proto = currentInstruction[3].u.operand;

    linkSlowCaseIfNotJSCell(iter, value);
    linkSlowCaseIfNotJSCell(iter, proto);
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_instanceof);
    stubCall.addArgument(value);
    stubCall.addArgument(proto);
    stubCall.call(dst);
}

void JIT::emit_op_is_undefined(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;

    emitLoad(value, regT1, regT0);
    Jump isCell = branch32(Equal, regT1, TrustedImm32(JSValue::CellTag));

    compare32(Equal, regT1, TrustedImm32(JSValue::UndefinedTag), regT0);
    Jump done = jump();

    isCell.link(this);
    loadPtr(Address(regT0, JSCell::structureOffset()), regT1);
    Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT1, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    move(TrustedImm32(0), regT0);
    Jump notMasqueradesAsUndefined = jump();

    isMasqueradesAsUndefined.link(this);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    loadPtr(Address(regT1, Structure::globalObjectOffset()), regT1);
    compare32(Equal, regT0, regT1, regT0);

    notMasqueradesAsUndefined.link(this);
    done.link(this);
    emitStoreBool(dst, regT0);
}

void JIT::emit_op_is_boolean(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;

    emitLoadTag(value, regT0);
    compare32(Equal, regT0, TrustedImm32(JSValue::BooleanTag), regT0);
    emitStoreBool(dst, regT0);
}

void JIT::emit_op_is_number(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;

    emitLoadTag(value, regT0);
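    // The add-then-compare below exploits the tag layout: adding 1 wraps
    // Int32Tag (0xffffffff) around to 0, and any double tag (everything below
    // LowestTag) stays below LowestTag + 1, so a single unsigned Below compare
    // tests "int32 or double".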
    add32(TrustedImm32(1), regT0);
    compare32(Below, regT0, TrustedImm32(JSValue::LowestTag + 1), regT0);
    emitStoreBool(dst, regT0);
}

void JIT::emit_op_is_string(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;

    emitLoad(value, regT1, regT0);
    Jump isNotCell = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));

    loadPtr(Address(regT0, JSCell::structureOffset()), regT1);
    compare8(Equal, Address(regT1, Structure::typeInfoTypeOffset()), TrustedImm32(StringType), regT0);
    Jump done = jump();

    isNotCell.link(this);
    move(TrustedImm32(0), regT0);

    done.link(this);
    emitStoreBool(dst, regT0);
}

void JIT::emit_op_tear_off_activation(Instruction* currentInstruction)
{
    unsigned activation = currentInstruction[1].u.operand;
    Jump activationNotCreated = branch32(Equal, tagFor(activation), TrustedImm32(JSValue::EmptyValueTag));
    JITStubCall stubCall(this, cti_op_tear_off_activation);
    stubCall.addArgument(activation);
    stubCall.call();
    activationNotCreated.link(this);
}

void JIT::emit_op_tear_off_arguments(Instruction* currentInstruction)
{
    int arguments = currentInstruction[1].u.operand;
    int activation = currentInstruction[2].u.operand;

    Jump argsNotCreated = branch32(Equal, tagFor(unmodifiedArgumentsRegister(arguments)), TrustedImm32(JSValue::EmptyValueTag));
    JITStubCall stubCall(this, cti_op_tear_off_arguments);
    stubCall.addArgument(unmodifiedArgumentsRegister(arguments));
    stubCall.addArgument(activation);
    stubCall.call();
    argsNotCreated.link(this);
}

void JIT::emit_op_resolve(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_resolve);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.callWithValueProfiling(currentInstruction[1].u.operand);
}

void JIT::emit_op_to_primitive(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);

    Jump isImm = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));
    addSlowCase(branchPtr(NotEqual, Address(regT0, JSCell::structureOffset()), TrustedImmPtr(m_globalData->stringStructure.get())));
    isImm.link(this);

    if (dst != src)
        emitStore(dst, regT1, regT0);
    map(m_bytecodeOffset + OPCODE_LENGTH(op_to_primitive), dst, regT1, regT0);
}

void JIT::emitSlow_op_to_primitive(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;

    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_to_primitive);
    stubCall.addArgument(regT1, regT0);
    stubCall.call(dst);
}

void JIT::emit_op_strcat(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_strcat);
    stubCall.addArgument(TrustedImm32(currentInstruction[2].u.operand));
    stubCall.addArgument(TrustedImm32(currentInstruction[3].u.operand));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_resolve_base(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, currentInstruction[3].u.operand ? cti_op_resolve_base_strict_put : cti_op_resolve_base);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.callWithValueProfiling(currentInstruction[1].u.operand);
}

void JIT::emit_op_ensure_property_exists(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_ensure_property_exists);
    stubCall.addArgument(TrustedImm32(currentInstruction[1].u.operand));
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_resolve_skip(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_resolve_skip);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.addArgument(TrustedImm32(currentInstruction[3].u.operand));
    stubCall.callWithValueProfiling(currentInstruction[1].u.operand);
}

void JIT::emit_op_resolve_global(Instruction* currentInstruction, bool dynamic)
{
    // FIXME: Optimize to use patching instead of so many memory accesses.

    unsigned dst = currentInstruction[1].u.operand;
    void* globalObject = m_codeBlock->globalObject();

    unsigned currentIndex = m_globalResolveInfoIndex++;
    GlobalResolveInfo* resolveInfoAddress = &m_codeBlock->globalResolveInfo(currentIndex);

    // Verify structure.
    move(TrustedImmPtr(globalObject), regT2);
    move(TrustedImmPtr(resolveInfoAddress), regT3);
    loadPtr(Address(regT3, OBJECT_OFFSETOF(GlobalResolveInfo, structure)), regT1);
    addSlowCase(branchPtr(NotEqual, regT1, Address(regT2, JSCell::structureOffset())));

    // Load property.
    load32(Address(regT3, OBJECT_OFFSETOF(GlobalResolveInfo, offset)), regT3);
    compileGetDirectOffset(regT2, regT1, regT0, regT3, KnownNotFinal);
    emitValueProfilingSite();
    emitStore(dst, regT1, regT0);
    map(m_bytecodeOffset + (dynamic ? OPCODE_LENGTH(op_resolve_global_dynamic) : OPCODE_LENGTH(op_resolve_global)), dst, regT1, regT0);
}

void JIT::emitSlow_op_resolve_global(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    Identifier* ident = &m_codeBlock->identifier(currentInstruction[2].u.operand);

    unsigned currentIndex = m_globalResolveInfoIndex++;

    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_resolve_global);
    stubCall.addArgument(TrustedImmPtr(ident));
    stubCall.addArgument(TrustedImm32(currentIndex));
    stubCall.callWithValueProfiling(dst);
}

void JIT::emit_op_not(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);
    addSlowCase(branch32(NotEqual, regT1, TrustedImm32(JSValue::BooleanTag)));
    xor32(TrustedImm32(1), regT0);

    emitStoreBool(dst, regT0, (dst == src));
}

void JIT::emitSlow_op_not(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src = currentInstruction[2].u.operand;

    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_not);
    stubCall.addArgument(src);
    stubCall.call(dst);
}

void JIT::emit_op_jfalse(Instruction* currentInstruction)
{
    unsigned cond = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitLoad(cond, regT1, regT0);

    ASSERT((JSValue::BooleanTag + 1 == JSValue::Int32Tag) && !(JSValue::Int32Tag + 1));
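    // Given the assertion above (Int32Tag == -1, with BooleanTag immediately
    // below it), one unsigned Below compare against BooleanTag sends every
    // tag except boolean and int32 to the slow case; for both of those the
    // payload in regT0 is the truth value.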
    addSlowCase(branch32(Below, regT1, TrustedImm32(JSValue::BooleanTag)));
    addJump(branchTest32(Zero, regT0), target);
}

void JIT::emitSlow_op_jfalse(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned cond = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    linkSlowCase(iter);

    if (supportsFloatingPoint()) {
        // regT1 contains the tag from the hot path.
        Jump notNumber = branch32(Above, regT1, TrustedImm32(JSValue::LowestTag));

        emitLoadDouble(cond, fpRegT0);
        emitJumpSlowToHot(branchDoubleZeroOrNaN(fpRegT0, fpRegT1), target);
        emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_jfalse));

        notNumber.link(this);
    }

    JITStubCall stubCall(this, cti_op_jtrue);
    stubCall.addArgument(cond);
    stubCall.call();
    emitJumpSlowToHot(branchTest32(Zero, regT0), target); // Inverted.
}

void JIT::emit_op_jtrue(Instruction* currentInstruction)
{
    unsigned cond = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitLoad(cond, regT1, regT0);

    ASSERT((JSValue::BooleanTag + 1 == JSValue::Int32Tag) && !(JSValue::Int32Tag + 1));
    addSlowCase(branch32(Below, regT1, TrustedImm32(JSValue::BooleanTag)));
    addJump(branchTest32(NonZero, regT0), target);
}

void JIT::emitSlow_op_jtrue(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned cond = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    linkSlowCase(iter);

    if (supportsFloatingPoint()) {
        // regT1 contains the tag from the hot path.
        Jump notNumber = branch32(Above, regT1, TrustedImm32(JSValue::LowestTag));

        emitLoadDouble(cond, fpRegT0);
        emitJumpSlowToHot(branchDoubleNonZero(fpRegT0, fpRegT1), target);
        emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_jtrue));

        notNumber.link(this);
    }

    JITStubCall stubCall(this, cti_op_jtrue);
    stubCall.addArgument(cond);
    stubCall.call();
    emitJumpSlowToHot(branchTest32(NonZero, regT0), target);
}

void JIT::emit_op_jeq_null(Instruction* currentInstruction)
{
    unsigned src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);

    Jump isImmediate = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));

    // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    Jump isNotMasqueradesAsUndefined = branchTest8(Zero, Address(regT2, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    addJump(branchPtr(Equal, Address(regT2, Structure::globalObjectOffset()), regT0), target);
    Jump masqueradesGlobalObjectIsForeign = jump();

    // Now handle the immediate cases - undefined & null
    isImmediate.link(this);
    ASSERT((JSValue::UndefinedTag + 1 == JSValue::NullTag) && (JSValue::NullTag & 0x1));
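    // Per the assertion above, or-ing in the low bit maps UndefinedTag onto
    // NullTag, so a single compare catches both undefined and null.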
    or32(TrustedImm32(1), regT1);
    addJump(branch32(Equal, regT1, TrustedImm32(JSValue::NullTag)), target);

    isNotMasqueradesAsUndefined.link(this);
    masqueradesGlobalObjectIsForeign.link(this);
}

void JIT::emit_op_jneq_null(Instruction* currentInstruction)
{
    unsigned src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);

    Jump isImmediate = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));

    // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    addJump(branchTest8(Zero, Address(regT2, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined)), target);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    addJump(branchPtr(NotEqual, Address(regT2, Structure::globalObjectOffset()), regT0), target);
    Jump wasNotImmediate = jump();

    // Now handle the immediate cases - undefined & null
    isImmediate.link(this);

    ASSERT((JSValue::UndefinedTag + 1 == JSValue::NullTag) && (JSValue::NullTag & 0x1));
    or32(TrustedImm32(1), regT1);
    addJump(branch32(NotEqual, regT1, TrustedImm32(JSValue::NullTag)), target);

    wasNotImmediate.link(this);
}

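// As the change title says, op_jneq_ptr no longer carries a raw pointer: the
// instruction stores a Special::Pointer index, and actualPointerFor() resolves
// that index to the actual cell for this code block before we compare.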
void JIT::emit_op_jneq_ptr(Instruction* currentInstruction)
{
    unsigned src = currentInstruction[1].u.operand;
    Special::Pointer ptr = currentInstruction[2].u.specialPointer;
    unsigned target = currentInstruction[3].u.operand;

    emitLoad(src, regT1, regT0);
    addJump(branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag)), target);
    addJump(branchPtr(NotEqual, regT0, TrustedImmPtr(actualPointerFor(m_codeBlock, ptr))), target);
}

void JIT::emit_op_eq(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;
    unsigned src2 = currentInstruction[3].u.operand;

    emitLoad2(src1, regT1, regT0, src2, regT3, regT2);
    addSlowCase(branch32(NotEqual, regT1, regT3));
    addSlowCase(branch32(Equal, regT1, TrustedImm32(JSValue::CellTag)));
    addSlowCase(branch32(Below, regT1, TrustedImm32(JSValue::LowestTag)));

    compare32(Equal, regT0, regT2, regT0);

    emitStoreBool(dst, regT0);
}

void JIT::emitSlow_op_eq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned op1 = currentInstruction[2].u.operand;
    unsigned op2 = currentInstruction[3].u.operand;

    JumpList storeResult;
    JumpList genericCase;

    genericCase.append(getSlowCase(iter)); // tags not equal

    linkSlowCase(iter); // tags equal and JSCell
    genericCase.append(branchPtr(NotEqual, Address(regT0, JSCell::structureOffset()), TrustedImmPtr(m_globalData->stringStructure.get())));
    genericCase.append(branchPtr(NotEqual, Address(regT2, JSCell::structureOffset()), TrustedImmPtr(m_globalData->stringStructure.get())));

    // String case.
    JITStubCall stubCallEqStrings(this, cti_op_eq_strings);
    stubCallEqStrings.addArgument(regT0);
    stubCallEqStrings.addArgument(regT2);
    stubCallEqStrings.call();
    storeResult.append(jump());

    // Generic case.
    genericCase.append(getSlowCase(iter)); // doubles
    genericCase.link(this);
    JITStubCall stubCallEq(this, cti_op_eq);
    stubCallEq.addArgument(op1);
    stubCallEq.addArgument(op2);
    stubCallEq.call(regT0);

    storeResult.link(this);
    emitStoreBool(dst, regT0);
}

void JIT::emit_op_neq(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;
    unsigned src2 = currentInstruction[3].u.operand;

    emitLoad2(src1, regT1, regT0, src2, regT3, regT2);
    addSlowCase(branch32(NotEqual, regT1, regT3));
    addSlowCase(branch32(Equal, regT1, TrustedImm32(JSValue::CellTag)));
    addSlowCase(branch32(Below, regT1, TrustedImm32(JSValue::LowestTag)));

    compare32(NotEqual, regT0, regT2, regT0);

    emitStoreBool(dst, regT0);
}

void JIT::emitSlow_op_neq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;

    JumpList storeResult;
    JumpList genericCase;

    genericCase.append(getSlowCase(iter)); // tags not equal

    linkSlowCase(iter); // tags equal and JSCell
    genericCase.append(branchPtr(NotEqual, Address(regT0, JSCell::structureOffset()), TrustedImmPtr(m_globalData->stringStructure.get())));
    genericCase.append(branchPtr(NotEqual, Address(regT2, JSCell::structureOffset()), TrustedImmPtr(m_globalData->stringStructure.get())));

    // String case.
    JITStubCall stubCallEqStrings(this, cti_op_eq_strings);
    stubCallEqStrings.addArgument(regT0);
    stubCallEqStrings.addArgument(regT2);
    stubCallEqStrings.call(regT0);
    storeResult.append(jump());

    // Generic case.
    genericCase.append(getSlowCase(iter)); // doubles
    genericCase.link(this);
    JITStubCall stubCallEq(this, cti_op_eq);
    stubCallEq.addArgument(regT1, regT0);
    stubCallEq.addArgument(regT3, regT2);
    stubCallEq.call(regT0);

    storeResult.link(this);
    xor32(TrustedImm32(0x1), regT0);
    emitStoreBool(dst, regT0);
}

void JIT::compileOpStrictEq(Instruction* currentInstruction, CompileOpStrictEqType type)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;
    unsigned src2 = currentInstruction[3].u.operand;

    emitLoad2(src1, regT1, regT0, src2, regT3, regT2);

    // Bail if the tags differ, or are double.
    addSlowCase(branch32(NotEqual, regT1, regT3));
    addSlowCase(branch32(Below, regT1, TrustedImm32(JSValue::LowestTag)));

    // Jump to a slow case if both are strings.
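    // (Equal payloads mean the very same JSString cell; two distinct string
    // cells can still be equal by value, so the fast payload compare is only
    // safe when at most one operand is a string.)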
    Jump notCell = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));
    Jump firstNotString = branchPtr(NotEqual, Address(regT0, JSCell::structureOffset()), TrustedImmPtr(m_globalData->stringStructure.get()));
    addSlowCase(branchPtr(Equal, Address(regT2, JSCell::structureOffset()), TrustedImmPtr(m_globalData->stringStructure.get())));
    notCell.link(this);
    firstNotString.link(this);

    // Simply compare the payloads.
    if (type == OpStrictEq)
        compare32(Equal, regT0, regT2, regT0);
    else
        compare32(NotEqual, regT0, regT2, regT0);

    emitStoreBool(dst, regT0);
}

void JIT::emit_op_stricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpStrictEq);
}

void JIT::emitSlow_op_stricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;
    unsigned src2 = currentInstruction[3].u.operand;

    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_stricteq);
    stubCall.addArgument(src1);
    stubCall.addArgument(src2);
    stubCall.call(dst);
}

void JIT::emit_op_nstricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpNStrictEq);
}

void JIT::emitSlow_op_nstricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;
    unsigned src2 = currentInstruction[3].u.operand;

    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_nstricteq);
    stubCall.addArgument(src1);
    stubCall.addArgument(src2);
    stubCall.call(dst);
}

void JIT::emit_op_eq_null(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);
    Jump isImmediate = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));

    // Load the structure into regT2 (not regT1, which is about to be clobbered
    // and is still needed by the masquerades path below).
    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT2, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    move(TrustedImm32(0), regT1);
    Jump wasNotMasqueradesAsUndefined = jump();

    isMasqueradesAsUndefined.link(this);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    loadPtr(Address(regT2, Structure::globalObjectOffset()), regT2);
    compare32(Equal, regT0, regT2, regT1);
    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    compare32(Equal, regT1, TrustedImm32(JSValue::NullTag), regT2);
    compare32(Equal, regT1, TrustedImm32(JSValue::UndefinedTag), regT1);
    or32(regT2, regT1);

    wasNotImmediate.link(this);
    wasNotMasqueradesAsUndefined.link(this);

    emitStoreBool(dst, regT1);
}

void JIT::emit_op_neq_null(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);
    Jump isImmediate = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));

    // As in emit_op_eq_null, the structure goes in regT2 so it survives the
    // move into regT1 below.
    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT2, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    move(TrustedImm32(1), regT1);
    Jump wasNotMasqueradesAsUndefined = jump();

    isMasqueradesAsUndefined.link(this);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    loadPtr(Address(regT2, Structure::globalObjectOffset()), regT2);
    compare32(NotEqual, regT0, regT2, regT1);
    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    compare32(NotEqual, regT1, TrustedImm32(JSValue::NullTag), regT2);
    compare32(NotEqual, regT1, TrustedImm32(JSValue::UndefinedTag), regT1);
    and32(regT2, regT1);

    wasNotImmediate.link(this);
    wasNotMasqueradesAsUndefined.link(this);

    emitStoreBool(dst, regT1);
}

void JIT::emit_op_resolve_with_base(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_resolve_with_base);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[3].u.operand)));
    stubCall.addArgument(TrustedImm32(currentInstruction[1].u.operand));
    stubCall.callWithValueProfiling(currentInstruction[2].u.operand);
}

void JIT::emit_op_resolve_with_this(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_resolve_with_this);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[3].u.operand)));
    stubCall.addArgument(TrustedImm32(currentInstruction[1].u.operand));
    stubCall.callWithValueProfiling(currentInstruction[2].u.operand);
}

void JIT::emit_op_throw(Instruction* currentInstruction)
{
    unsigned exception = currentInstruction[1].u.operand;
    JITStubCall stubCall(this, cti_op_throw);
    stubCall.addArgument(exception);
    stubCall.call();

#ifndef NDEBUG
    // cti_op_throw always changes its return address,
    // so this point in the code should never be reached.
    breakpoint();
#endif
}

void JIT::emit_op_get_pnames(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int i = currentInstruction[3].u.operand;
    int size = currentInstruction[4].u.operand;
    int breakTarget = currentInstruction[5].u.operand;

    JumpList isNotObject;

    emitLoad(base, regT1, regT0);
    if (!m_codeBlock->isKnownNotImmediate(base))
        isNotObject.append(branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag)));
    if (base != m_codeBlock->thisRegister() || m_codeBlock->isStrictMode()) {
        loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
        isNotObject.append(emitJumpIfNotObject(regT2));
    }

    // We could inline the case where you have a valid cache, but
    // this call doesn't seem to be hot.
    Label isObject(this);
    JITStubCall getPnamesStubCall(this, cti_op_get_pnames);
    getPnamesStubCall.addArgument(regT0);
    getPnamesStubCall.call(dst);
    load32(Address(regT0, OBJECT_OFFSETOF(JSPropertyNameIterator, m_jsStringsSize)), regT3);
    store32(TrustedImm32(JSValue::Int32Tag), intTagFor(i));
    store32(TrustedImm32(0), intPayloadFor(i));
    store32(TrustedImm32(JSValue::Int32Tag), intTagFor(size));
    store32(regT3, payloadFor(size));
    Jump end = jump();

    isNotObject.link(this);
    addJump(branch32(Equal, regT1, TrustedImm32(JSValue::NullTag)), breakTarget);
    addJump(branch32(Equal, regT1, TrustedImm32(JSValue::UndefinedTag)), breakTarget);
    JITStubCall toObjectStubCall(this, cti_to_object);
    toObjectStubCall.addArgument(regT1, regT0);
    toObjectStubCall.call(base);
    jump().linkTo(isObject, this);

    end.link(this);
}

void JIT::emit_op_next_pname(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int i = currentInstruction[3].u.operand;
    int size = currentInstruction[4].u.operand;
    int it = currentInstruction[5].u.operand;
    int target = currentInstruction[6].u.operand;

    JumpList callHasProperty;

    Label begin(this);
    load32(intPayloadFor(i), regT0);
    Jump end = branch32(Equal, regT0, intPayloadFor(size));

    // Grab key @ i
    loadPtr(payloadFor(it), regT1);
    loadPtr(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_jsStrings)), regT2);
    load32(BaseIndex(regT2, regT0, TimesEight), regT2);
    store32(TrustedImm32(JSValue::CellTag), tagFor(dst));
    store32(regT2, payloadFor(dst));

    // Increment i
    add32(TrustedImm32(1), regT0);
    store32(regT0, intPayloadFor(i));

    // Verify that i is valid:
    loadPtr(payloadFor(base), regT0);

    // Test base's structure
    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    callHasProperty.append(branchPtr(NotEqual, regT2, Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedStructure))));

    // Test base's prototype chain
    loadPtr(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedPrototypeChain)), regT3);
1324     loadPtr(Address(regT3, OBJECT_OFFSETOF(StructureChain, m_vector)), regT3);
1325     addJump(branchTestPtr(Zero, Address(regT3)), target);
1326
1327     Label checkPrototype(this);
1328     callHasProperty.append(branch32(Equal, Address(regT2, Structure::prototypeOffset() + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), TrustedImm32(JSValue::NullTag)));
1329     loadPtr(Address(regT2, Structure::prototypeOffset() + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT2);
1330     loadPtr(Address(regT2, JSCell::structureOffset()), regT2);
1331     callHasProperty.append(branchPtr(NotEqual, regT2, Address(regT3)));
1332     addPtr(TrustedImm32(sizeof(Structure*)), regT3);
1333     branchTestPtr(NonZero, Address(regT3)).linkTo(checkPrototype, this);
1334
1335     // Continue loop.
1336     addJump(jump(), target);
1337
1338     // Slow case: Ask the object if i is valid.
1339     callHasProperty.link(this);
1340     loadPtr(addressFor(dst), regT1);
1341     JITStubCall stubCall(this, cti_has_property);
1342     stubCall.addArgument(regT0);
1343     stubCall.addArgument(regT1);
1344     stubCall.call();
1345
1346     // Test for valid key.
1347     addJump(branchTest32(NonZero, regT0), target);
1348     jump().linkTo(begin, this);
1349
1350     // End of loop.
1351     end.link(this);
1352 }
1353
void JIT::emit_op_push_with_scope(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_push_with_scope);
    stubCall.addArgument(currentInstruction[1].u.operand);
    stubCall.call();
}

void JIT::emit_op_pop_scope(Instruction*)
{
    JITStubCall(this, cti_op_pop_scope).call();
}

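// op_to_jsnumber passes int32 and double values through unchanged. In the
// JSVALUE32_64 encoding all non-double tags are clustered at the top of the
// tag space, so any tag below JSValue::LowestTag is the high half of a
// double; a value that is neither Int32Tag nor below LowestTag takes the
// slow path.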
void JIT::emit_op_to_jsnumber(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);

    Jump isInt32 = branch32(Equal, regT1, TrustedImm32(JSValue::Int32Tag));
    addSlowCase(branch32(AboveOrEqual, regT1, TrustedImm32(JSValue::LowestTag)));
    isInt32.link(this);

    if (src != dst)
        emitStore(dst, regT1, regT0);
    map(m_bytecodeOffset + OPCODE_LENGTH(op_to_jsnumber), dst, regT1, regT0);
}

void JIT::emitSlow_op_to_jsnumber(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;

    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_to_jsnumber);
    stubCall.addArgument(regT1, regT0);
    stubCall.call(dst);
}

void JIT::emit_op_push_name_scope(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_push_name_scope);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[1].u.operand)));
    stubCall.addArgument(currentInstruction[2].u.operand);
    stubCall.addArgument(TrustedImm32(currentInstruction[3].u.operand));
    stubCall.call();
}

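// op_catch begins a catch handler. The exception machinery returns with regT0
// holding the CallFrame of the handler, and the thrown value parked in
// JSGlobalData::exception; that slot is cleared below so the exception is not
// observed twice and the caught object is not kept alive by it.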
void JIT::emit_op_catch(Instruction* currentInstruction)
{
    // cti_op_throw returns the callFrame for the handler.
    move(regT0, callFrameRegister);

    // Now store the exception returned by cti_op_throw.
    loadPtr(Address(stackPointerRegister, OBJECT_OFFSETOF(struct JITStackFrame, globalData)), regT3);
    load32(Address(regT3, OBJECT_OFFSETOF(JSGlobalData, exception) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT0);
    load32(Address(regT3, OBJECT_OFFSETOF(JSGlobalData, exception) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT1);
    store32(TrustedImm32(JSValue().payload()), Address(regT3, OBJECT_OFFSETOF(JSGlobalData, exception) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)));
    store32(TrustedImm32(JSValue().tag()), Address(regT3, OBJECT_OFFSETOF(JSGlobalData, exception) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)));

    unsigned exception = currentInstruction[1].u.operand;
    emitStore(exception, regT1, regT0);
    map(m_bytecodeOffset + OPCODE_LENGTH(op_catch), exception, regT1, regT0);
}

void JIT::emit_op_jmp_scopes(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_jmp_scopes);
    stubCall.addArgument(TrustedImm32(currentInstruction[1].u.operand));
    stubCall.call();
    addJump(jump(), currentInstruction[2].u.operand);
}

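// The three switch opcodes share one pattern: record a SwitchRecord so the
// jump table's ctiOffsets can be filled in with machine-code addresses at
// link time (ctiOffsets is grown to stay index-compatible with
// branchOffsets), then call a stub that maps the scrutinee to its target
// address, which is returned in regT0 for an indirect jump.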
void JIT::emit_op_switch_imm(Instruction* currentInstruction)
{
    unsigned tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // Create jump table for switch destinations, track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->immediateSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Immediate));
    jumpTable->ctiOffsets.grow(jumpTable->branchOffsets.size());

    JITStubCall stubCall(this, cti_op_switch_imm);
    stubCall.addArgument(scrutinee);
    stubCall.addArgument(TrustedImm32(tableIndex));
    stubCall.call();
    jump(regT0);
}

void JIT::emit_op_switch_char(Instruction* currentInstruction)
{
    unsigned tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // Create jump table for switch destinations, track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->characterSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Character));
    jumpTable->ctiOffsets.grow(jumpTable->branchOffsets.size());

    JITStubCall stubCall(this, cti_op_switch_char);
    stubCall.addArgument(scrutinee);
    stubCall.addArgument(TrustedImm32(tableIndex));
    stubCall.call();
    jump(regT0);
}

void JIT::emit_op_switch_string(Instruction* currentInstruction)
{
    unsigned tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // Create jump table for switch destinations, track this switch statement.
    StringJumpTable* jumpTable = &m_codeBlock->stringSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset));

    JITStubCall stubCall(this, cti_op_switch_string);
    stubCall.addArgument(scrutinee);
    stubCall.addArgument(TrustedImm32(tableIndex));
    stubCall.call();
    jump(regT0);
}

void JIT::emit_op_throw_reference_error(Instruction* currentInstruction)
{
    unsigned message = currentInstruction[1].u.operand;

    JITStubCall stubCall(this, cti_op_throw_reference_error);
    stubCall.addArgument(m_codeBlock->getConstant(message));
    stubCall.call();
}

void JIT::emit_op_debug(Instruction* currentInstruction)
{
#if ENABLE(DEBUG_WITH_BREAKPOINT)
    UNUSED_PARAM(currentInstruction);
    breakpoint();
#else
    JITStubCall stubCall(this, cti_op_debug);
    stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
    stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
    stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
    stubCall.addArgument(Imm32(currentInstruction[4].u.operand));
    stubCall.call();
#endif
}

void JIT::emit_op_enter(Instruction*)
{
    emitOptimizationCheck(EnterOptimizationCheck);

    // Even though JIT code doesn't use them, we initialize our local
    // variable registers to zap stale pointers, to avoid unnecessarily
    // prolonging object lifetime and increasing GC pressure.
    for (int i = 0; i < m_codeBlock->m_numVars; ++i)
        emitStore(i, jsUndefined());
}

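// The opcodes below share a lazy-initialization convention: a register whose
// tag is still EmptyValueTag has not been materialized yet. op_init_lazy_reg
// arms that sentinel by storing the empty JSValue(), and the create opcodes
// test the tag first so the allocation happens at most once.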
void JIT::emit_op_create_activation(Instruction* currentInstruction)
{
    unsigned activation = currentInstruction[1].u.operand;

    Jump activationCreated = branch32(NotEqual, tagFor(activation), TrustedImm32(JSValue::EmptyValueTag));
    JITStubCall(this, cti_op_push_activation).call(activation);
    activationCreated.link(this);
}

void JIT::emit_op_create_arguments(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;

    Jump argsCreated = branch32(NotEqual, tagFor(dst), TrustedImm32(JSValue::EmptyValueTag));

    JITStubCall(this, cti_op_create_arguments).call();
    emitStore(dst, regT1, regT0);
    emitStore(unmodifiedArgumentsRegister(dst), regT1, regT0);

    argsCreated.link(this);
}

void JIT::emit_op_init_lazy_reg(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;

    emitStore(dst, JSValue());
}

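// op_create_this allocates the |this| object for a constructor call. The
// callee's cached inheritorID is the Structure that new instances of the
// function should get; a null cache (or a failed inline allocation) defers
// to cti_op_create_this, hence the two slow-case links below.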
void JIT::emit_op_create_this(Instruction* currentInstruction)
{
    emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, regT0);
    loadPtr(Address(regT0, JSFunction::offsetOfCachedInheritorID()), regT2);
    addSlowCase(branchTestPtr(Zero, regT2));

    // Now regT2 contains the inheritorID, which is the structure that the
    // newly allocated object will have.

    emitAllocateJSFinalObject(regT2, regT0, regT1);
    emitStoreCell(currentInstruction[1].u.operand, regT0);
}

void JIT::emitSlow_op_create_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter); // doesn't have an inheritor ID
    linkSlowCase(iter); // allocation failed
    JITStubCall stubCall(this, cti_op_create_this);
    stubCall.call(currentInstruction[1].u.operand);
}

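// op_convert_this normalizes |this| for non-strict functions. Cells other
// than strings pass through unchanged; the moves guarded by
// shouldEmitProfiling() merely stage a representative tag/payload pair in
// regT1/regT0 so emitValueProfilingSite() records a plausible value on each
// path. The slow path maps undefined (and, via the stub, null) to the global
// this object, and boxes any other primitive.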
void JIT::emit_op_convert_this(Instruction* currentInstruction)
{
    unsigned thisRegister = currentInstruction[1].u.operand;

    emitLoad(thisRegister, regT3, regT2);

    addSlowCase(branch32(NotEqual, regT3, TrustedImm32(JSValue::CellTag)));
    if (shouldEmitProfiling()) {
        loadPtr(Address(regT2, JSCell::structureOffset()), regT0);
        move(regT3, regT1);
        emitValueProfilingSite();
    }
    addSlowCase(branchPtr(Equal, Address(regT2, JSCell::structureOffset()), TrustedImmPtr(m_globalData->stringStructure.get())));
}

void JIT::emitSlow_op_convert_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    void* globalThis = m_codeBlock->globalObject()->globalThis();
    unsigned thisRegister = currentInstruction[1].u.operand;

    linkSlowCase(iter);
    if (shouldEmitProfiling()) {
        move(TrustedImm32(JSValue::UndefinedTag), regT1);
        move(TrustedImm32(0), regT0);
    }
    Jump isNotUndefined = branch32(NotEqual, regT3, TrustedImm32(JSValue::UndefinedTag));
    emitValueProfilingSite();
    move(TrustedImmPtr(globalThis), regT0);
    move(TrustedImm32(JSValue::CellTag), regT1);
    emitStore(thisRegister, regT1, regT0);
    emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_convert_this));

    linkSlowCase(iter);
    if (shouldEmitProfiling()) {
        move(TrustedImm32(JSValue::CellTag), regT1);
        move(TrustedImmPtr(m_globalData->stringStructure.get()), regT0);
    }
    isNotUndefined.link(this);
    emitValueProfilingSite();
    JITStubCall stubCall(this, cti_op_convert_this);
    stubCall.addArgument(regT3, regT2);
    stubCall.call(thisRegister);
}

void JIT::emit_op_profile_will_call(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_profile_will_call);
    stubCall.addArgument(currentInstruction[1].u.operand);
    stubCall.call();
}

void JIT::emit_op_profile_did_call(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_profile_did_call);
    stubCall.addArgument(currentInstruction[1].u.operand);
    stubCall.call();
}

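// op_get_arguments_length reads the argument count straight off the call
// frame, minus one to exclude |this|. The fast path is only sound while no
// arguments object has been materialized (tag still EmptyValueTag); once one
// exists it may have been mutated, so we fall back to a generic "length"
// lookup on it.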
void JIT::emit_op_get_arguments_length(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int argumentsRegister = currentInstruction[2].u.operand;
    addSlowCase(branch32(NotEqual, tagFor(argumentsRegister), TrustedImm32(JSValue::EmptyValueTag)));
    load32(payloadFor(RegisterFile::ArgumentCount), regT0);
    sub32(TrustedImm32(1), regT0);
    emitStoreInt32(dst, regT0);
}

void JIT::emitSlow_op_get_arguments_length(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int ident = currentInstruction[3].u.operand;

    JITStubCall stubCall(this, cti_op_get_by_id_generic);
    stubCall.addArgument(base);
    stubCall.addArgument(TrustedImmPtr(&(m_codeBlock->identifier(ident))));
    stubCall.call(dst);
}

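// op_get_argument_by_val indexes the caller's arguments without materializing
// an arguments object: property + 1 skips the |this| slot, the index is
// bounds-checked against ArgumentCount, and the value is loaded from the call
// frame using a negated, Register-scaled index relative to
// thisArgumentOffset(). If an arguments object already exists, or the index
// misses, the slow path (creating the arguments object first if needed) uses
// the generic get_by_val stub.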
void JIT::emit_op_get_argument_by_val(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int argumentsRegister = currentInstruction[2].u.operand;
    int property = currentInstruction[3].u.operand;
    addSlowCase(branch32(NotEqual, tagFor(argumentsRegister), TrustedImm32(JSValue::EmptyValueTag)));
    emitLoad(property, regT1, regT2);
    addSlowCase(branch32(NotEqual, regT1, TrustedImm32(JSValue::Int32Tag)));
    add32(TrustedImm32(1), regT2);
    // regT2 now contains the integer index of the argument we want, including this
    load32(payloadFor(RegisterFile::ArgumentCount), regT3);
    addSlowCase(branch32(AboveOrEqual, regT2, regT3));

    neg32(regT2);
    loadPtr(BaseIndex(callFrameRegister, regT2, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.payload) + CallFrame::thisArgumentOffset() * static_cast<int>(sizeof(Register))), regT0);
    loadPtr(BaseIndex(callFrameRegister, regT2, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.tag) + CallFrame::thisArgumentOffset() * static_cast<int>(sizeof(Register))), regT1);
    emitValueProfilingSite();
    emitStore(dst, regT1, regT0);
}

void JIT::emitSlow_op_get_argument_by_val(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned arguments = currentInstruction[2].u.operand;
    unsigned property = currentInstruction[3].u.operand;

    linkSlowCase(iter);
    Jump skipArgumentsCreation = jump();

    linkSlowCase(iter);
    linkSlowCase(iter);
    JITStubCall(this, cti_op_create_arguments).call();
    emitStore(arguments, regT1, regT0);
    emitStore(unmodifiedArgumentsRegister(arguments), regT1, regT0);

    skipArgumentsCreation.link(this);
    JITStubCall stubCall(this, cti_op_get_by_val);
    stubCall.addArgument(arguments);
    stubCall.addArgument(property);
    stubCall.callWithValueProfiling(dst);
}

} // namespace JSC

#endif // USE(JSVALUE32_64)
#endif // ENABLE(JIT)