// Source/JavaScriptCore/jit/JITOpcodes32_64.cpp
/*
 * Copyright (C) 2009, 2012, 2013, 2014 Apple Inc. All rights reserved.
 * Copyright (C) 2010 Patrick Gansterer <paroga@paroga.com>
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"

#if ENABLE(JIT)
#if USE(JSVALUE32_64)
#include "JIT.h"

#include "CCallHelpers.h"
#include "Debugger.h"
#include "JITInlines.h"
#include "JSArray.h"
#include "JSCell.h"
#include "JSEnvironmentRecord.h"
#include "JSFunction.h"
#include "JSPropertyNameEnumerator.h"
#include "LinkBuffer.h"
#include "MaxFrameExtentForSlowPathCall.h"
#include "RepatchBuffer.h"
#include "SlowPathCall.h"
#include "VirtualRegister.h"

namespace JSC {

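// privateCompileCTINativeCall builds the thunk JIT code uses to call a host
// (native) function: it sets up the callee frame, moves the ExecState* into
// place per the target ABI, makes the call, and checks for a pending
// exception on the way out.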
JIT::CodeRef JIT::privateCompileCTINativeCall(VM* vm, NativeFunction func)
{
    Call nativeCall;

    emitFunctionPrologue();
    emitPutImmediateToCallFrameHeader(0, JSStack::CodeBlock);
    storePtr(callFrameRegister, &m_vm->topCallFrame);

#if CPU(X86)
    // Load caller frame's scope chain into this callframe so that whatever we call can
    // get to its global data.
    emitGetCallerFrameFromCallFrameHeaderPtr(regT0);
    emitGetFromCallFrameHeaderPtr(JSStack::ScopeChain, regT1, regT0);
    emitPutCellToCallFrameHeader(regT1, JSStack::ScopeChain);

    // Calling convention:      f(ecx, edx, ...);
    // Host function signature: f(ExecState*);
    move(callFrameRegister, X86Registers::ecx);

    subPtr(TrustedImm32(8), stackPointerRegister); // Align stack for call.
    storePtr(X86Registers::ecx, Address(stackPointerRegister));

    // Call the function.
    nativeCall = call();

    addPtr(TrustedImm32(8), stackPointerRegister);

#elif CPU(ARM) || CPU(SH4) || CPU(MIPS)
    // Load caller frame's scope chain into this callframe so that whatever we call can get to its global data.
    emitGetCallerFrameFromCallFrameHeaderPtr(regT2);
    emitGetFromCallFrameHeaderPtr(JSStack::ScopeChain, regT1, regT2);
    emitPutCellToCallFrameHeader(regT1, JSStack::ScopeChain);

#if CPU(MIPS)
    // Allocate stack space for (unused) 16 bytes (8-byte aligned) for 4 arguments.
    subPtr(TrustedImm32(16), stackPointerRegister);
#endif

    // Calling convention is f(argumentGPR0, argumentGPR1, ...).
    // Host function signature is f(ExecState*).
    move(callFrameRegister, argumentGPR0);

    emitGetFromCallFrameHeaderPtr(JSStack::Callee, argumentGPR1);
    loadPtr(Address(argumentGPR1, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);

    // Call the function.
    nativeCall = call();

#if CPU(MIPS)
    // Restore stack space.
    addPtr(TrustedImm32(16), stackPointerRegister);
#endif

    restoreReturnAddressBeforeReturn(regT3);
#else
#error "JIT not supported on this platform."
    abortWithReason(JITNotSupported);
#endif // CPU(X86)

    // Check for an exception.
    Jump sawException = branch32(NotEqual, AbsoluteAddress(reinterpret_cast<char*>(vm->addressOfException()) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), TrustedImm32(JSValue::EmptyValueTag));

    emitFunctionEpilogue();
    // Return.
    ret();

    // Handle an exception.
    sawException.link(this);

    storePtr(callFrameRegister, &m_vm->topCallFrame);

#if CPU(X86)
    addPtr(TrustedImm32(-4), stackPointerRegister);
    loadPtr(Address(callFrameRegister), X86Registers::ecx);
    push(X86Registers::ecx);
#else
    loadPtr(Address(callFrameRegister), argumentGPR0);
#endif
    move(TrustedImmPtr(FunctionPtr(operationVMHandleException).value()), regT3);
    call(regT3);

#if CPU(X86)
    addPtr(TrustedImm32(8), stackPointerRegister);
#endif

    jumpToExceptionHandler();

    // All trampolines constructed! Copy the code, link up calls, and set the pointers on the Machine object.
    LinkBuffer patchBuffer(*m_vm, *this, GLOBAL_THUNK_ID);

    patchBuffer.link(nativeCall, FunctionPtr(func));
    return FINALIZE_CODE(patchBuffer, ("JIT CTI native call"));
}

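// In the JSVALUE32_64 encoding a JSValue is a 32-bit tag plus a 32-bit
// payload. By convention throughout this file, emitLoad() puts the tag in
// regT1 and the payload in regT0 (and emitLoad2() uses regT3/regT2 for the
// second operand).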
void JIT::emit_op_mov(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    if (m_codeBlock->isConstantRegisterIndex(src))
        emitStore(dst, getConstantOperand(src));
    else {
        emitLoad(src, regT1, regT0);
        emitStore(dst, regT1, regT0);
    }
}

void JIT::emit_op_captured_mov(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);
    emitNotifyWrite(regT1, regT0, regT2, currentInstruction[3].u.watchpointSet);
    emitStore(dst, regT1, regT0);
}

void JIT::emit_op_end(Instruction* currentInstruction)
{
    ASSERT(returnValueGPR != callFrameRegister);
    emitLoad(currentInstruction[1].u.operand, regT1, regT0);
    emitFunctionEpilogue();
    ret();
}

void JIT::emit_op_jmp(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[1].u.operand;
    addJump(jump(), target);
}

void JIT::emit_op_new_object(Instruction* currentInstruction)
{
    Structure* structure = currentInstruction[3].u.objectAllocationProfile->structure();
    size_t allocationSize = JSFinalObject::allocationSize(structure->inlineCapacity());
    MarkedAllocator* allocator = &m_vm->heap.allocatorForObjectWithoutDestructor(allocationSize);

    RegisterID resultReg = regT0;
    RegisterID allocatorReg = regT1;
    RegisterID scratchReg = regT2;

    move(TrustedImmPtr(allocator), allocatorReg);
    emitAllocateJSObject(allocatorReg, TrustedImmPtr(structure), resultReg, scratchReg);
    emitStoreCell(currentInstruction[1].u.operand, resultReg);
}

void JIT::emitSlow_op_new_object(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    int dst = currentInstruction[1].u.operand;
    Structure* structure = currentInstruction[3].u.objectAllocationProfile->structure();
    callOperation(operationNewObject, structure);
    emitStoreCell(dst, returnValueGPR);
}

void JIT::emit_op_check_has_instance(Instruction* currentInstruction)
{
    int baseVal = currentInstruction[3].u.operand;

    emitLoadPayload(baseVal, regT0);

    // Check that baseVal is a cell.
    emitJumpSlowCaseIfNotJSCell(baseVal);

    // Check that baseVal 'ImplementsHasInstance'.
    addSlowCase(branchTest8(Zero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(ImplementsDefaultHasInstance)));
}

void JIT::emit_op_instanceof(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;
    int proto = currentInstruction[3].u.operand;

    // Load the operands into registers.
    // regT0 is deliberately left free: once the checks below pass it will hold the result.
    emitLoadPayload(value, regT2);
    emitLoadPayload(proto, regT1);

    // Check that value and proto are cells.  baseVal must be a cell - this is checked by op_check_has_instance.
    emitJumpSlowCaseIfNotJSCell(value);
    emitJumpSlowCaseIfNotJSCell(proto);

    // Check that prototype is an object.
    addSlowCase(emitJumpIfCellNotObject(regT1));

    // Optimistically load the result true, and start looping.
    // Initially, regT1 still contains proto and regT2 still contains value.
    // As we loop regT2 will be updated with its prototype, recursively walking the prototype chain.
    move(TrustedImm32(1), regT0);
    Label loop(this);

    // Load the prototype of the cell in regT2.  If this is equal to regT1 - WIN!
    // Otherwise, check if we've hit null - if we have then drop out of the loop, if not go again.
    loadPtr(Address(regT2, JSCell::structureIDOffset()), regT2);
    load32(Address(regT2, Structure::prototypeOffset() + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT2);
    Jump isInstance = branchPtr(Equal, regT2, regT1);
    branchTest32(NonZero, regT2).linkTo(loop, this);

    // We get here either by dropping out of the loop, or if value was not an Object.  Result is false.
    move(TrustedImm32(0), regT0);

    // isInstance jumps right down to here, to skip setting the result to false (it has already set true).
    isInstance.link(this);
    emitStoreBool(dst, regT0);
}

void JIT::emitSlow_op_check_has_instance(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;
    int baseVal = currentInstruction[3].u.operand;

    linkSlowCaseIfNotJSCell(iter, baseVal);
    linkSlowCase(iter);

    emitLoad(value, regT1, regT0);
    emitLoad(baseVal, regT3, regT2);
    callOperation(operationCheckHasInstance, dst, regT1, regT0, regT3, regT2);

    emitJumpSlowToHot(jump(), currentInstruction[4].u.operand);
}

void JIT::emitSlow_op_instanceof(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;
    int proto = currentInstruction[3].u.operand;

    linkSlowCaseIfNotJSCell(iter, value);
    linkSlowCaseIfNotJSCell(iter, proto);
    linkSlowCase(iter);

    emitLoad(value, regT1, regT0);
    emitLoad(proto, regT3, regT2);
    callOperation(operationInstanceOf, dst, regT1, regT0, regT3, regT2);
}

void JIT::emit_op_is_undefined(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitLoad(value, regT1, regT0);
    Jump isCell = branch32(Equal, regT1, TrustedImm32(JSValue::CellTag));

    compare32(Equal, regT1, TrustedImm32(JSValue::UndefinedTag), regT0);
    Jump done = jump();

    isCell.link(this);
    Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    move(TrustedImm32(0), regT0);
    Jump notMasqueradesAsUndefined = jump();

    isMasqueradesAsUndefined.link(this);
    loadPtr(Address(regT0, JSCell::structureIDOffset()), regT1);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    loadPtr(Address(regT1, Structure::globalObjectOffset()), regT1);
    compare32(Equal, regT0, regT1, regT0);

    notMasqueradesAsUndefined.link(this);
    done.link(this);
    emitStoreBool(dst, regT0);
}

void JIT::emit_op_is_boolean(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitLoadTag(value, regT0);
    compare32(Equal, regT0, TrustedImm32(JSValue::BooleanTag), regT0);
    emitStoreBool(dst, regT0);
}

void JIT::emit_op_is_number(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitLoadTag(value, regT0);
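    // A value is a number if its tag is Int32Tag or numerically below
    // LowestTag (tags below LowestTag are the high word of a double).
    // Adding 1 wraps Int32Tag (0xffffffff) around to 0, so a single unsigned
    // below-comparison against LowestTag + 1 catches both cases.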
    add32(TrustedImm32(1), regT0);
    compare32(Below, regT0, TrustedImm32(JSValue::LowestTag + 1), regT0);
    emitStoreBool(dst, regT0);
}

void JIT::emit_op_is_string(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitLoad(value, regT1, regT0);
    Jump isNotCell = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));

    compare8(Equal, Address(regT0, JSCell::typeInfoTypeOffset()), TrustedImm32(StringType), regT0);
    Jump done = jump();

    isNotCell.link(this);
    move(TrustedImm32(0), regT0);

    done.link(this);
    emitStoreBool(dst, regT0);
}

void JIT::emit_op_tear_off_activation(Instruction* currentInstruction)
{
    int activation = currentInstruction[1].u.operand;
    Jump activationNotCreated = branch32(Equal, tagFor(activation), TrustedImm32(JSValue::EmptyValueTag));
    emitLoadPayload(activation, regT0);
    callOperation(operationTearOffActivation, regT0);
    activationNotCreated.link(this);
}

void JIT::emit_op_tear_off_arguments(Instruction* currentInstruction)
{
    VirtualRegister arguments = VirtualRegister(currentInstruction[1].u.operand);
    int activation = currentInstruction[2].u.operand;

    Jump argsNotCreated = branch32(Equal, tagFor(unmodifiedArgumentsRegister(arguments).offset()), TrustedImm32(JSValue::EmptyValueTag));
    emitLoadPayload(unmodifiedArgumentsRegister(arguments).offset(), regT0);
    emitLoadPayload(activation, regT1);
    callOperation(operationTearOffArguments, regT0, regT1);
    argsNotCreated.link(this);
}

void JIT::emit_op_to_primitive(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);

    Jump isImm = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));
    addSlowCase(branchPtr(NotEqual, Address(regT0, JSCell::structureIDOffset()), TrustedImmPtr(m_vm->stringStructure.get())));
    isImm.link(this);

    if (dst != src)
        emitStore(dst, regT1, regT0);
}

void JIT::emitSlow_op_to_primitive(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_to_primitive);
    slowPathCall.call();
}

void JIT::emit_op_strcat(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_strcat);
    slowPathCall.call();
}

void JIT::emit_op_not(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);
    addSlowCase(branch32(NotEqual, regT1, TrustedImm32(JSValue::BooleanTag)));
    xor32(TrustedImm32(1), regT0);

    emitStoreBool(dst, regT0, (dst == src));
}

void JIT::emitSlow_op_not(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_not);
    slowPathCall.call();
}

void JIT::emit_op_jfalse(Instruction* currentInstruction)
{
    int cond = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitLoad(cond, regT1, regT0);

    ASSERT((JSValue::BooleanTag + 1 == JSValue::Int32Tag) && !(JSValue::Int32Tag + 1));
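    // Given the tag ordering asserted above, a single unsigned below-check
    // against BooleanTag rejects everything that is not a boolean or an
    // int32; for both of those the payload is directly testable as the
    // condition.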
    addSlowCase(branch32(Below, regT1, TrustedImm32(JSValue::BooleanTag)));
    addJump(branchTest32(Zero, regT0), target);
}

void JIT::emitSlow_op_jfalse(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int cond = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    linkSlowCase(iter);

    if (supportsFloatingPoint()) {
        // regT1 contains the tag from the hot path.
        Jump notNumber = branch32(Above, regT1, TrustedImm32(JSValue::LowestTag));

        emitLoadDouble(cond, fpRegT0);
        emitJumpSlowToHot(branchDoubleZeroOrNaN(fpRegT0, fpRegT1), target);
        emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_jfalse));

        notNumber.link(this);
    }

    callOperation(operationConvertJSValueToBoolean, regT1, regT0);
    emitJumpSlowToHot(branchTest32(Zero, returnValueGPR), target); // Inverted.
}

void JIT::emit_op_jtrue(Instruction* currentInstruction)
{
    int cond = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitLoad(cond, regT1, regT0);

    ASSERT((JSValue::BooleanTag + 1 == JSValue::Int32Tag) && !(JSValue::Int32Tag + 1));
    addSlowCase(branch32(Below, regT1, TrustedImm32(JSValue::BooleanTag)));
    addJump(branchTest32(NonZero, regT0), target);
}

void JIT::emitSlow_op_jtrue(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int cond = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    linkSlowCase(iter);

    if (supportsFloatingPoint()) {
        // regT1 contains the tag from the hot path.
        Jump notNumber = branch32(Above, regT1, TrustedImm32(JSValue::LowestTag));

        emitLoadDouble(cond, fpRegT0);
        emitJumpSlowToHot(branchDoubleNonZero(fpRegT0, fpRegT1), target);
        emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_jtrue));

        notNumber.link(this);
    }

    callOperation(operationConvertJSValueToBoolean, regT1, regT0);
    emitJumpSlowToHot(branchTest32(NonZero, returnValueGPR), target);
}

void JIT::emit_op_jeq_null(Instruction* currentInstruction)
{
    int src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);

    Jump isImmediate = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));

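    // A cell compares equal to null only if it masquerades as undefined, and
    // then only when observed from within its own global object.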
    Jump isNotMasqueradesAsUndefined = branchTest8(Zero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    loadPtr(Address(regT0, JSCell::structureIDOffset()), regT2);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    addJump(branchPtr(Equal, Address(regT2, Structure::globalObjectOffset()), regT0), target);
    Jump masqueradesGlobalObjectIsForeign = jump();

    // Now handle the immediate cases - undefined & null
    isImmediate.link(this);
    ASSERT((JSValue::UndefinedTag + 1 == JSValue::NullTag) && (JSValue::NullTag & 0x1));
    or32(TrustedImm32(1), regT1);
    addJump(branch32(Equal, regT1, TrustedImm32(JSValue::NullTag)), target);

    isNotMasqueradesAsUndefined.link(this);
    masqueradesGlobalObjectIsForeign.link(this);
}

void JIT::emit_op_jneq_null(Instruction* currentInstruction)
{
    int src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);

    Jump isImmediate = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));

    addJump(branchTest8(Zero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined)), target);
    loadPtr(Address(regT0, JSCell::structureIDOffset()), regT2);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    addJump(branchPtr(NotEqual, Address(regT2, Structure::globalObjectOffset()), regT0), target);
    Jump wasNotImmediate = jump();

    // Now handle the immediate cases - undefined & null
    isImmediate.link(this);

    ASSERT((JSValue::UndefinedTag + 1 == JSValue::NullTag) && (JSValue::NullTag & 0x1));
    or32(TrustedImm32(1), regT1);
    addJump(branch32(NotEqual, regT1, TrustedImm32(JSValue::NullTag)), target);

    wasNotImmediate.link(this);
}

void JIT::emit_op_jneq_ptr(Instruction* currentInstruction)
{
    int src = currentInstruction[1].u.operand;
    Special::Pointer ptr = currentInstruction[2].u.specialPointer;
    unsigned target = currentInstruction[3].u.operand;

    emitLoad(src, regT1, regT0);
    addJump(branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag)), target);
    addJump(branchPtr(NotEqual, regT0, TrustedImmPtr(actualPointerFor(m_codeBlock, ptr))), target);
}

void JIT::emit_op_eq(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src1 = currentInstruction[2].u.operand;
    int src2 = currentInstruction[3].u.operand;

    emitLoad2(src1, regT1, regT0, src2, regT3, regT2);
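    // The fast path only handles operands with identical non-cell, non-double
    // tags; the three slow cases below (tags differ, both cells, doubles) are
    // picked apart again in emitSlow_op_eq.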
    addSlowCase(branch32(NotEqual, regT1, regT3));
    addSlowCase(branch32(Equal, regT1, TrustedImm32(JSValue::CellTag)));
    addSlowCase(branch32(Below, regT1, TrustedImm32(JSValue::LowestTag)));

    compare32(Equal, regT0, regT2, regT0);

    emitStoreBool(dst, regT0);
}

void JIT::emitSlow_op_eq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;
    int op1 = currentInstruction[2].u.operand;
    int op2 = currentInstruction[3].u.operand;

    JumpList storeResult;
    JumpList genericCase;

    genericCase.append(getSlowCase(iter)); // tags not equal

    linkSlowCase(iter); // tags equal and JSCell
    genericCase.append(branchPtr(NotEqual, Address(regT0, JSCell::structureIDOffset()), TrustedImmPtr(m_vm->stringStructure.get())));
    genericCase.append(branchPtr(NotEqual, Address(regT2, JSCell::structureIDOffset()), TrustedImmPtr(m_vm->stringStructure.get())));

    // String case.
    callOperation(operationCompareStringEq, regT0, regT2);
    storeResult.append(jump());

    // Generic case.
    genericCase.append(getSlowCase(iter)); // doubles
    genericCase.link(this);
    emitLoad(op1, regT1, regT0);
    emitLoad(op2, regT3, regT2);
    callOperation(operationCompareEq, regT1, regT0, regT3, regT2);

    storeResult.link(this);
    emitStoreBool(dst, returnValueGPR);
}

void JIT::emit_op_neq(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src1 = currentInstruction[2].u.operand;
    int src2 = currentInstruction[3].u.operand;

    emitLoad2(src1, regT1, regT0, src2, regT3, regT2);
    addSlowCase(branch32(NotEqual, regT1, regT3));
    addSlowCase(branch32(Equal, regT1, TrustedImm32(JSValue::CellTag)));
    addSlowCase(branch32(Below, regT1, TrustedImm32(JSValue::LowestTag)));

    compare32(NotEqual, regT0, regT2, regT0);

    emitStoreBool(dst, regT0);
}

void JIT::emitSlow_op_neq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;

    JumpList storeResult;
    JumpList genericCase;

    genericCase.append(getSlowCase(iter)); // tags not equal

    linkSlowCase(iter); // tags equal and JSCell
    genericCase.append(branchPtr(NotEqual, Address(regT0, JSCell::structureIDOffset()), TrustedImmPtr(m_vm->stringStructure.get())));
    genericCase.append(branchPtr(NotEqual, Address(regT2, JSCell::structureIDOffset()), TrustedImmPtr(m_vm->stringStructure.get())));

    // String case.
    callOperation(operationCompareStringEq, regT0, regT2);
    storeResult.append(jump());

    // Generic case.
    genericCase.append(getSlowCase(iter)); // doubles
    genericCase.link(this);
    callOperation(operationCompareEq, regT1, regT0, regT3, regT2);

    storeResult.link(this);
    xor32(TrustedImm32(0x1), returnValueGPR);
    emitStoreBool(dst, returnValueGPR);
}

void JIT::compileOpStrictEq(Instruction* currentInstruction, CompileOpStrictEqType type)
{
    int dst = currentInstruction[1].u.operand;
    int src1 = currentInstruction[2].u.operand;
    int src2 = currentInstruction[3].u.operand;

    emitLoad2(src1, regT1, regT0, src2, regT3, regT2);

    // Bail if the tags differ, or are double.
    addSlowCase(branch32(NotEqual, regT1, regT3));
    addSlowCase(branch32(Below, regT1, TrustedImm32(JSValue::LowestTag)));

    // Jump to a slow case if both are strings.
    Jump notCell = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));
    Jump firstNotString = branchPtr(NotEqual, Address(regT0, JSCell::structureIDOffset()), TrustedImmPtr(m_vm->stringStructure.get()));
    addSlowCase(branchPtr(Equal, Address(regT2, JSCell::structureIDOffset()), TrustedImmPtr(m_vm->stringStructure.get())));
    notCell.link(this);
    firstNotString.link(this);

    // Simply compare the payloads.
    if (type == OpStrictEq)
        compare32(Equal, regT0, regT2, regT0);
    else
        compare32(NotEqual, regT0, regT2, regT0);

    emitStoreBool(dst, regT0);
}

void JIT::emit_op_stricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpStrictEq);
}

void JIT::emitSlow_op_stricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_stricteq);
    slowPathCall.call();
}

void JIT::emit_op_nstricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpNStrictEq);
}

void JIT::emitSlow_op_nstricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_nstricteq);
    slowPathCall.call();
}

void JIT::emit_op_eq_null(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);
    Jump isImmediate = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));

    Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    move(TrustedImm32(0), regT1);
    Jump wasNotMasqueradesAsUndefined = jump();

    isMasqueradesAsUndefined.link(this);
    loadPtr(Address(regT0, JSCell::structureIDOffset()), regT2);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    loadPtr(Address(regT2, Structure::globalObjectOffset()), regT2);
    compare32(Equal, regT0, regT2, regT1);
    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    compare32(Equal, regT1, TrustedImm32(JSValue::NullTag), regT2);
    compare32(Equal, regT1, TrustedImm32(JSValue::UndefinedTag), regT1);
    or32(regT2, regT1);

    wasNotImmediate.link(this);
    wasNotMasqueradesAsUndefined.link(this);

    emitStoreBool(dst, regT1);
}

void JIT::emit_op_neq_null(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);
    Jump isImmediate = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));

    Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    move(TrustedImm32(1), regT1);
    Jump wasNotMasqueradesAsUndefined = jump();

    isMasqueradesAsUndefined.link(this);
    loadPtr(Address(regT0, JSCell::structureIDOffset()), regT2);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    loadPtr(Address(regT2, Structure::globalObjectOffset()), regT2);
    compare32(NotEqual, regT0, regT2, regT1);
    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    compare32(NotEqual, regT1, TrustedImm32(JSValue::NullTag), regT2);
    compare32(NotEqual, regT1, TrustedImm32(JSValue::UndefinedTag), regT1);
    and32(regT2, regT1);

    wasNotImmediate.link(this);
    wasNotMasqueradesAsUndefined.link(this);

    emitStoreBool(dst, regT1);
}

void JIT::emit_op_throw(Instruction* currentInstruction)
{
    ASSERT(regT0 == returnValueGPR);
    emitLoad(currentInstruction[1].u.operand, regT1, regT0);
    callOperationNoExceptionCheck(operationThrow, regT1, regT0);
    jumpToExceptionHandler();
}

void JIT::emit_op_push_with_scope(Instruction* currentInstruction)
{
    emitLoad(currentInstruction[1].u.operand, regT1, regT0);
    callOperation(operationPushWithScope, regT1, regT0);
}

void JIT::emit_op_pop_scope(Instruction*)
{
    callOperation(operationPopScope);
}

void JIT::emit_op_to_number(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);

    Jump isInt32 = branch32(Equal, regT1, TrustedImm32(JSValue::Int32Tag));
    addSlowCase(branch32(AboveOrEqual, regT1, TrustedImm32(JSValue::LowestTag)));
    isInt32.link(this);

    if (src != dst)
        emitStore(dst, regT1, regT0);
}

void JIT::emitSlow_op_to_number(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_to_number);
    slowPathCall.call();
}

void JIT::emit_op_push_name_scope(Instruction* currentInstruction)
{
    emitLoad(currentInstruction[2].u.operand, regT1, regT0);
    callOperation(operationPushNameScope, &m_codeBlock->identifier(currentInstruction[1].u.operand), regT1, regT0, currentInstruction[3].u.operand);
}

void JIT::emit_op_catch(Instruction* currentInstruction)
{
    move(TrustedImmPtr(m_vm), regT3);
    // operationThrow returns the callFrame for the handler.
    load32(Address(regT3, VM::callFrameForThrowOffset()), callFrameRegister);
    load32(Address(regT3, VM::vmEntryFrameForThrowOffset()), regT0);
    store32(regT0, Address(regT3, VM::topVMEntryFrameOffset()));

    addPtr(TrustedImm32(stackPointerOffsetFor(codeBlock()) * sizeof(Register)), callFrameRegister, stackPointerRegister);

    // Now store the exception returned by operationThrow.
    load32(Address(regT3, VM::exceptionOffset() + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT0);
    load32(Address(regT3, VM::exceptionOffset() + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT1);
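    // Clear vm->exception now that it has been consumed; JSValue() is the
    // empty value, whose tag is EmptyValueTag.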
    store32(TrustedImm32(JSValue().payload()), Address(regT3, VM::exceptionOffset() + OBJECT_OFFSETOF(JSValue, u.asBits.payload)));
    store32(TrustedImm32(JSValue().tag()), Address(regT3, VM::exceptionOffset() + OBJECT_OFFSETOF(JSValue, u.asBits.tag)));

    unsigned exception = currentInstruction[1].u.operand;
    emitStore(exception, regT1, regT0);
}

void JIT::emit_op_switch_imm(Instruction* currentInstruction)
{
    size_t tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->switchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Immediate));
    jumpTable->ensureCTITable();

    emitLoad(scrutinee, regT1, regT0);
    callOperation(operationSwitchImmWithUnknownKeyType, regT1, regT0, tableIndex);
    jump(returnValueGPR);
}

void JIT::emit_op_switch_char(Instruction* currentInstruction)
{
    size_t tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->switchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Character));
    jumpTable->ensureCTITable();

    emitLoad(scrutinee, regT1, regT0);
    callOperation(operationSwitchCharWithUnknownKeyType, regT1, regT0, tableIndex);
    jump(returnValueGPR);
}

void JIT::emit_op_switch_string(Instruction* currentInstruction)
{
    size_t tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    StringJumpTable* jumpTable = &m_codeBlock->stringSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset));

    emitLoad(scrutinee, regT1, regT0);
    callOperation(operationSwitchStringWithUnknownKeyType, regT1, regT0, tableIndex);
    jump(returnValueGPR);
}

void JIT::emit_op_throw_static_error(Instruction* currentInstruction)
{
    emitLoad(m_codeBlock->getConstant(currentInstruction[1].u.operand), regT1, regT0);
    callOperation(operationThrowStaticError, regT1, regT0, currentInstruction[2].u.operand);
}

void JIT::emit_op_debug(Instruction* currentInstruction)
{
    load32(codeBlock()->debuggerRequestsAddress(), regT0);
    Jump noDebuggerRequests = branchTest32(Zero, regT0);
    callOperation(operationDebug, currentInstruction[1].u.operand);
    noDebuggerRequests.link(this);
}

void JIT::emit_op_enter(Instruction* currentInstruction)
{
    emitEnterOptimizationCheck();

    // Even though JIT code doesn't use them, we initialize our constant
    // registers to zap stale pointers, to avoid unnecessarily prolonging
    // object lifetime and increasing GC pressure.
    for (int i = 0; i < m_codeBlock->m_numVars; ++i)
        emitStore(virtualRegisterForLocal(i).offset(), jsUndefined());

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_enter);
    slowPathCall.call();
}

void JIT::emit_op_create_activation(Instruction* currentInstruction)
{
    int activation = currentInstruction[1].u.operand;

    callOperation(operationCreateActivation, 0);
    emitStoreCell(activation, returnValueGPR);
}

void JIT::emit_op_create_arguments(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;

    Jump argsCreated = branch32(NotEqual, tagFor(dst), TrustedImm32(JSValue::EmptyValueTag));
    callOperation(operationCreateArguments);
    emitStoreCell(dst, returnValueGPR);
    emitStoreCell(unmodifiedArgumentsRegister(VirtualRegister(dst)).offset(), returnValueGPR);
    argsCreated.link(this);
}

void JIT::emit_op_init_lazy_reg(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;

    emitStore(dst, JSValue());
}

void JIT::emit_op_get_callee(Instruction* currentInstruction)
{
    int result = currentInstruction[1].u.operand;
    WriteBarrierBase<JSCell>* cachedFunction = &currentInstruction[2].u.jsCell;
    emitGetFromCallFrameHeaderPtr(JSStack::Callee, regT0);

    loadPtr(cachedFunction, regT2);
    addSlowCase(branchPtr(NotEqual, regT0, regT2));

    move(TrustedImm32(JSValue::CellTag), regT1);
    emitStore(result, regT1, regT0);
}

void JIT::emitSlow_op_get_callee(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_get_callee);
    slowPathCall.call();
}

void JIT::emit_op_create_this(Instruction* currentInstruction)
{
    int callee = currentInstruction[2].u.operand;
    RegisterID calleeReg = regT0;
    RegisterID resultReg = regT0;
    RegisterID allocatorReg = regT1;
    RegisterID structureReg = regT2;
    RegisterID scratchReg = regT3;

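    // The callee's ObjectAllocationProfile caches the allocator and Structure
    // to use for |this|. A null allocator means the profile has not been
    // filled in yet, so fall through to the slow path.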
    emitLoadPayload(callee, calleeReg);
    loadPtr(Address(calleeReg, JSFunction::offsetOfAllocationProfile() + ObjectAllocationProfile::offsetOfAllocator()), allocatorReg);
    loadPtr(Address(calleeReg, JSFunction::offsetOfAllocationProfile() + ObjectAllocationProfile::offsetOfStructure()), structureReg);
    addSlowCase(branchTestPtr(Zero, allocatorReg));

    emitAllocateJSObject(allocatorReg, structureReg, resultReg, scratchReg);
    emitStoreCell(currentInstruction[1].u.operand, resultReg);
}

void JIT::emitSlow_op_create_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter); // doesn't have an allocation profile
    linkSlowCase(iter); // allocation failed

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_create_this);
    slowPathCall.call();
}

void JIT::emit_op_to_this(Instruction* currentInstruction)
{
    WriteBarrierBase<Structure>* cachedStructure = &currentInstruction[2].u.structure;
    int thisRegister = currentInstruction[1].u.operand;

    emitLoad(thisRegister, regT3, regT2);

    addSlowCase(branch32(NotEqual, regT3, TrustedImm32(JSValue::CellTag)));
    addSlowCase(branch8(NotEqual, Address(regT2, JSCell::typeInfoTypeOffset()), TrustedImm32(FinalObjectType)));
    loadPtr(Address(regT2, JSCell::structureIDOffset()), regT0);
    loadPtr(cachedStructure, regT2);
    addSlowCase(branchPtr(NotEqual, regT0, regT2));
}

void JIT::emitSlow_op_to_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_to_this);
    slowPathCall.call();
}

void JIT::emit_op_profile_will_call(Instruction* currentInstruction)
{
    load32(m_vm->enabledProfilerAddress(), regT0);
    Jump profilerDone = branchTestPtr(Zero, regT0);
    emitLoad(currentInstruction[1].u.operand, regT1, regT0);
    callOperation(operationProfileWillCall, regT1, regT0);
    profilerDone.link(this);
}

void JIT::emit_op_profile_did_call(Instruction* currentInstruction)
{
    load32(m_vm->enabledProfilerAddress(), regT0);
    Jump profilerDone = branchTestPtr(Zero, regT0);
    emitLoad(currentInstruction[1].u.operand, regT1, regT0);
    callOperation(operationProfileDidCall, regT1, regT0);
    profilerDone.link(this);
}

void JIT::emit_op_get_arguments_length(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int argumentsRegister = currentInstruction[2].u.operand;
    addSlowCase(branch32(NotEqual, tagFor(argumentsRegister), TrustedImm32(JSValue::EmptyValueTag)));
    load32(payloadFor(JSStack::ArgumentCount), regT0);
    sub32(TrustedImm32(1), regT0);
    emitStoreInt32(dst, regT0);
}

void JIT::emitSlow_op_get_arguments_length(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    callOperation(operationGetArgumentsLength, dst, base);
}

void JIT::emit_op_get_argument_by_val(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int argumentsRegister = currentInstruction[2].u.operand;
    int property = currentInstruction[3].u.operand;
    addSlowCase(branch32(NotEqual, tagFor(argumentsRegister), TrustedImm32(JSValue::EmptyValueTag)));
    emitLoad(property, regT1, regT2);
    addSlowCase(branch32(NotEqual, regT1, TrustedImm32(JSValue::Int32Tag)));
    add32(TrustedImm32(1), regT2);
    // regT2 now contains the integer index of the argument we want, including this
    load32(payloadFor(JSStack::ArgumentCount), regT3);
    addSlowCase(branch32(AboveOrEqual, regT2, regT3));

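    // Arguments live in the caller's frame right after |this|; index them as
    // Register-sized (8-byte) slots starting at thisArgumentOffset.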
    loadPtr(BaseIndex(callFrameRegister, regT2, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.payload) + CallFrame::thisArgumentOffset() * static_cast<int>(sizeof(Register))), regT0);
    loadPtr(BaseIndex(callFrameRegister, regT2, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.tag) + CallFrame::thisArgumentOffset() * static_cast<int>(sizeof(Register))), regT1);
    emitValueProfilingSite();
    emitStore(dst, regT1, regT0);
}

void JIT::emitSlow_op_get_argument_by_val(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;
    int arguments = currentInstruction[2].u.operand;
    int property = currentInstruction[3].u.operand;

    linkSlowCase(iter);
    Jump skipArgumentsCreation = jump();

    linkSlowCase(iter);
    linkSlowCase(iter);

    callOperation(operationCreateArguments);
    emitStoreCell(arguments, returnValueGPR);
    emitStoreCell(unmodifiedArgumentsRegister(VirtualRegister(arguments)).offset(), returnValueGPR);

    skipArgumentsCreation.link(this);
    emitLoad(arguments, regT1, regT0);
    emitLoad(property, regT3, regT2);
    callOperation(WithProfile, operationGetByValGeneric, dst, regT1, regT0, regT3, regT2);
}

void JIT::emit_op_get_enumerable_length(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_get_enumerable_length);
    slowPathCall.call();
}

void JIT::emit_op_has_structure_property(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int enumerator = currentInstruction[4].u.operand;

    emitLoadPayload(base, regT0);
    emitJumpSlowCaseIfNotJSCell(base);

    emitLoadPayload(enumerator, regT1);

    load32(Address(regT0, JSCell::structureIDOffset()), regT0);
    addSlowCase(branch32(NotEqual, regT0, Address(regT1, JSPropertyNameEnumerator::cachedStructureIDOffset())));

    move(TrustedImm32(1), regT0);
    emitStoreBool(dst, regT0);
}

void JIT::emitSlow_op_has_structure_property(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_has_structure_property);
    slowPathCall.call();
}

void JIT::emit_op_has_generic_property(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_has_generic_property);
    slowPathCall.call();
}

void JIT::privateCompileHasIndexedProperty(ByValInfo* byValInfo, ReturnAddressPtr returnAddress, JITArrayMode arrayMode)
{
    Instruction* currentInstruction = m_codeBlock->instructions().begin() + byValInfo->bytecodeIndex;

    PatchableJump badType;

    // FIXME: Add support for other types like TypedArrays and Arguments.
    // See https://bugs.webkit.org/show_bug.cgi?id=135033 and https://bugs.webkit.org/show_bug.cgi?id=135034.
    JumpList slowCases = emitLoadForArrayMode(currentInstruction, arrayMode, badType);
    move(TrustedImm32(1), regT0);
    Jump done = jump();

    LinkBuffer patchBuffer(*m_vm, *this, m_codeBlock);

    patchBuffer.link(badType, CodeLocationLabel(MacroAssemblerCodePtr::createFromExecutableAddress(returnAddress.value())).labelAtOffset(byValInfo->returnAddressToSlowPath));
    patchBuffer.link(slowCases, CodeLocationLabel(MacroAssemblerCodePtr::createFromExecutableAddress(returnAddress.value())).labelAtOffset(byValInfo->returnAddressToSlowPath));

    patchBuffer.link(done, byValInfo->badTypeJump.labelAtOffset(byValInfo->badTypeJumpToDone));

    byValInfo->stubRoutine = FINALIZE_CODE_FOR_STUB(
        m_codeBlock, patchBuffer,
        ("Baseline has_indexed_property stub for %s, return point %p", toCString(*m_codeBlock).data(), returnAddress.value()));

    RepatchBuffer repatchBuffer(m_codeBlock);
    repatchBuffer.relink(byValInfo->badTypeJump, CodeLocationLabel(byValInfo->stubRoutine->code().code()));
    repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(operationHasIndexedPropertyGeneric));
}

void JIT::emit_op_has_indexed_property(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int property = currentInstruction[3].u.operand;
    ArrayProfile* profile = currentInstruction[4].u.arrayProfile;

    emitLoadPayload(base, regT0);
    emitJumpSlowCaseIfNotJSCell(base);

    emitLoadPayload(property, regT1);

    // This is technically incorrect - we're zero-extending an int32. On the hot path this doesn't matter.
    // We check the value as if it were a uint32 against the m_vectorLength - which will always fail if
    // the number was signed since m_vectorLength is always less than intmax (since the total allocation
    // size is always less than 4Gb). As such zero extending will have been correct (and extending the value
    // to 64 bits is necessary since it's used in the address calculation). We zero extend rather than sign
    // extending since it makes it easier to re-tag the value in the slow case.
    zeroExtend32ToPtr(regT1, regT1);

    emitArrayProfilingSiteWithCell(regT0, regT2, profile);
    and32(TrustedImm32(IndexingShapeMask), regT2);

    JITArrayMode mode = chooseArrayMode(profile);
    PatchableJump badType;

    // FIXME: Add support for other types like TypedArrays and Arguments.
    // See https://bugs.webkit.org/show_bug.cgi?id=135033 and https://bugs.webkit.org/show_bug.cgi?id=135034.
    JumpList slowCases = emitLoadForArrayMode(currentInstruction, mode, badType);
    move(TrustedImm32(1), regT0);

    addSlowCase(badType);
    addSlowCase(slowCases);

    Label done = label();

    emitStoreBool(dst, regT0);

    m_byValCompilationInfo.append(ByValCompilationInfo(m_bytecodeOffset, badType, mode, done));
}

void JIT::emitSlow_op_has_indexed_property(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int property = currentInstruction[3].u.operand;
    ArrayProfile* profile = currentInstruction[4].u.arrayProfile;

    linkSlowCaseIfNotJSCell(iter, base); // base cell check
    linkSlowCase(iter); // base array check

    Jump skipProfiling = jump();

    linkSlowCase(iter); // vector length check
    linkSlowCase(iter); // empty value

    emitArrayProfileOutOfBoundsSpecialCase(profile);

    skipProfiling.link(this);

    Label slowPath = label();

    emitLoad(base, regT1, regT0);
    emitLoad(property, regT3, regT2);
    Call call = callOperation(operationHasIndexedPropertyDefault, dst, regT1, regT0, regT3, regT2);

    m_byValCompilationInfo[m_byValInstructionIndex].slowPathTarget = slowPath;
    m_byValCompilationInfo[m_byValInstructionIndex].returnAddress = call;
    m_byValInstructionIndex++;
}

void JIT::emit_op_get_direct_pname(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int index = currentInstruction[4].u.operand;
    int enumerator = currentInstruction[5].u.operand;

    // Check that base is a cell.
    emitLoadPayload(base, regT0);
    emitJumpSlowCaseIfNotJSCell(base);

    // Check the structure.
    emitLoadPayload(enumerator, regT1);
    load32(Address(regT0, JSCell::structureIDOffset()), regT2);
    addSlowCase(branch32(NotEqual, regT2, Address(regT1, JSPropertyNameEnumerator::cachedStructureIDOffset())));

    // Compute the offset.
    emitLoadPayload(index, regT2);
    // If index is less than the enumerator's cached inline capacity, then it's an inline access.
    Jump outOfLineAccess = branch32(AboveOrEqual, regT2, Address(regT1, JSPropertyNameEnumerator::cachedInlineCapacityOffset()));
    addPtr(TrustedImm32(JSObject::offsetOfInlineStorage()), regT0);
    load32(BaseIndex(regT0, regT2, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT1);
    load32(BaseIndex(regT0, regT2, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT0);

    Jump done = jump();

    // Otherwise it's out of line.
    outOfLineAccess.link(this);
    loadPtr(Address(regT0, JSObject::butterflyOffset()), regT0);
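    // Out-of-line properties are stored at negative offsets from the
    // butterfly, so rebase the index past the inline capacity and negate it.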
    sub32(Address(regT1, JSPropertyNameEnumerator::cachedInlineCapacityOffset()), regT2);
    neg32(regT2);
    int32_t offsetOfFirstProperty = static_cast<int32_t>(offsetInButterfly(firstOutOfLineOffset)) * sizeof(EncodedJSValue);
    load32(BaseIndex(regT0, regT2, TimesEight, offsetOfFirstProperty + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT1);
    load32(BaseIndex(regT0, regT2, TimesEight, offsetOfFirstProperty + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT0);

    done.link(this);
    emitValueProfilingSite();
    emitStore(dst, regT1, regT0);
}

void JIT::emitSlow_op_get_direct_pname(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int base = currentInstruction[2].u.operand;
    linkSlowCaseIfNotJSCell(iter, base);
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_get_direct_pname);
    slowPathCall.call();
}

void JIT::emit_op_get_structure_property_enumerator(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_get_structure_property_enumerator);
    slowPathCall.call();
}

void JIT::emit_op_get_generic_property_enumerator(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_get_generic_property_enumerator);
    slowPathCall.call();
}

void JIT::emit_op_next_enumerator_pname(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int enumerator = currentInstruction[2].u.operand;
    int index = currentInstruction[3].u.operand;

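    // If the index is past the cached property names, return null to signal
    // the end of enumeration; otherwise return the cached name string.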
    emitLoadPayload(index, regT0);
    emitLoadPayload(enumerator, regT1);
    Jump inBounds = branch32(Below, regT0, Address(regT1, JSPropertyNameEnumerator::cachedPropertyNamesLengthOffset()));

    move(TrustedImm32(JSValue::NullTag), regT2);
    move(TrustedImm32(0), regT0);

    Jump done = jump();
    inBounds.link(this);

    loadPtr(Address(regT1, JSPropertyNameEnumerator::cachedPropertyNamesVectorOffset()), regT1);
    loadPtr(BaseIndex(regT1, regT0, timesPtr()), regT0);
    move(TrustedImm32(JSValue::CellTag), regT2);

    done.link(this);
    emitStore(dst, regT2, regT0);
}

void JIT::emit_op_to_index_string(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_to_index_string);
    slowPathCall.call();
}

void JIT::emit_op_profile_type(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_profile_type);
    slowPathCall.call();
}

} // namespace JSC

#endif // USE(JSVALUE32_64)
#endif // ENABLE(JIT)