/*
 * Copyright (C) 2009, 2012 Apple Inc. All rights reserved.
 * Copyright (C) 2010 Patrick Gansterer <paroga@paroga.com>
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#if ENABLE(JIT)
#include "JIT.h"

#include "Arguments.h"
#include "CopiedSpaceInlines.h"
#include "Heap.h"
#include "JITInlines.h"
#include "JITStubCall.h"
#include "JSArray.h"
#include "JSCell.h"
#include "JSFunction.h"
#include "JSPropertyNameIterator.h"
#include "LinkBuffer.h"
#include "SlowPathCall.h"

namespace JSC {

#if USE(JSVALUE64)

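// A note on the JSVALUE64 encoding assumed by the fast paths below (see
// JSValue.h): every number carries bits from tagTypeNumberRegister (integers
// have all of them set, doubles at least one); the booleans encode as the
// immediates ValueFalse and ValueTrue, which differ only in their low bit;
// and undefined and null differ only in TagBitUndefined. Several opcodes
// exploit these invariants with plain bit tests instead of full type checks.
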
JIT::CodeRef JIT::privateCompileCTINativeCall(VM* vm, NativeFunction)
{
    return vm->getCTIStub(nativeCallGenerator);
}

void JIT::emit_op_mov(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    if (canBeOptimizedOrInlined()) {
        // Use the simpler approach, since the DFG thinks that the last result register
        // is always set to the destination on every operation.
        emitGetVirtualRegister(src, regT0);
        emitPutVirtualRegister(dst);
    } else {
        if (m_codeBlock->isConstantRegisterIndex(src)) {
            if (!getConstantOperand(src).isNumber())
                store64(TrustedImm64(JSValue::encode(getConstantOperand(src))), Address(callFrameRegister, dst * sizeof(Register)));
            else
                store64(Imm64(JSValue::encode(getConstantOperand(src))), Address(callFrameRegister, dst * sizeof(Register)));
            if (dst == m_lastResultBytecodeRegister)
                killLastResultRegister();
        } else if ((src == m_lastResultBytecodeRegister) || (dst == m_lastResultBytecodeRegister)) {
            // If either the src or dst is the cached register, go through
            // get/put registers to make sure we track this correctly.
            emitGetVirtualRegister(src, regT0);
            emitPutVirtualRegister(dst);
        } else {
            // Perform the copy via regT1; do not disturb any mapping in regT0.
            load64(Address(callFrameRegister, src * sizeof(Register)), regT1);
            store64(regT1, Address(callFrameRegister, dst * sizeof(Register)));
        }
    }
}

void JIT::emit_op_end(Instruction* currentInstruction)
{
    RELEASE_ASSERT(returnValueRegister != callFrameRegister);
    emitGetVirtualRegister(currentInstruction[1].u.operand, returnValueRegister);
    restoreReturnAddressBeforeReturn(Address(callFrameRegister, JSStack::ReturnPC * static_cast<int>(sizeof(Register))));
    ret();
}

void JIT::emit_op_jmp(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[1].u.operand;
    addJump(jump(), target);
}

void JIT::emit_op_new_object(Instruction* currentInstruction)
{
    Structure* structure = currentInstruction[3].u.objectAllocationProfile->structure();
    size_t allocationSize = JSFinalObject::allocationSize(structure->inlineCapacity());
    MarkedAllocator* allocator = &m_vm->heap.allocatorForObjectWithoutDestructor(allocationSize);

    RegisterID resultReg = regT0;
    RegisterID allocatorReg = regT1;
    RegisterID scratchReg = regT2;

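    // Inline the common case: bump-allocate straight off the MarkedAllocator's
    // free list. emitAllocateJSObject() registers a slow case that is taken
    // when the free list is empty, falling back to cti_op_new_object below.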
    move(TrustedImmPtr(allocator), allocatorReg);
    emitAllocateJSObject(allocatorReg, TrustedImmPtr(structure), resultReg, scratchReg);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_new_object(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_new_object);
    stubCall.addArgument(TrustedImmPtr(currentInstruction[3].u.objectAllocationProfile->structure()));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_check_has_instance(Instruction* currentInstruction)
{
    int baseVal = currentInstruction[3].u.operand;

    emitGetVirtualRegister(baseVal, regT0);

    // Check that baseVal is a cell.
    emitJumpSlowCaseIfNotJSCell(regT0, baseVal);

    // Check that baseVal has the ImplementsDefaultHasInstance flag set.
    loadPtr(Address(regT0, JSCell::structureOffset()), regT0);
    addSlowCase(branchTest8(Zero, Address(regT0, Structure::typeInfoFlagsOffset()), TrustedImm32(ImplementsDefaultHasInstance)));
}

void JIT::emit_op_instanceof(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;
    int proto = currentInstruction[3].u.operand;

    // Load the operands value and proto into registers; regT0 is kept free for the result.
    emitGetVirtualRegister(value, regT2);
    emitGetVirtualRegister(proto, regT1);

    // Check that value and proto are cells. baseVal must be a cell - this is checked by op_check_has_instance.
    emitJumpSlowCaseIfNotJSCell(regT2, value);
    emitJumpSlowCaseIfNotJSCell(regT1, proto);

    // Check that proto is an object.
    loadPtr(Address(regT1, JSCell::structureOffset()), regT3);
    addSlowCase(emitJumpIfNotObject(regT3));

    // Optimistically load the result true, and start looping.
    // Initially, regT1 still contains proto and regT2 still contains value.
    // As we loop regT2 will be updated with its prototype, recursively walking the prototype chain.
    move(TrustedImm64(JSValue::encode(jsBoolean(true))), regT0);
    Label loop(this);

    // Load the prototype of the object in regT2. If this is equal to regT1 - WIN!
    // Otherwise, check if we've hit null - if we have then drop out of the loop, if not go again.
    loadPtr(Address(regT2, JSCell::structureOffset()), regT2);
    load64(Address(regT2, Structure::prototypeOffset()), regT2);
    Jump isInstance = branchPtr(Equal, regT2, regT1);
    emitJumpIfJSCell(regT2).linkTo(loop, this);

    // We get here either by dropping out of the loop, or if value was not an Object. Result is false.
    move(TrustedImm64(JSValue::encode(jsBoolean(false))), regT0);

    // isInstance jumps right down to here, to skip setting the result to false (it has already set true).
    isInstance.link(this);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_is_undefined(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
    Jump isCell = emitJumpIfJSCell(regT0);

    compare64(Equal, regT0, TrustedImm32(ValueUndefined), regT0);
    Jump done = jump();

    isCell.link(this);
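    // A cell only compares equal to undefined if its structure has the
    // MasqueradesAsUndefined flag set (e.g. document.all) and it belongs to
    // the current global object; masquerading is not observable across frames.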
    loadPtr(Address(regT0, JSCell::structureOffset()), regT1);
    Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT1, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    move(TrustedImm32(0), regT0);
    Jump notMasqueradesAsUndefined = jump();

    isMasqueradesAsUndefined.link(this);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    loadPtr(Address(regT1, Structure::globalObjectOffset()), regT1);
    comparePtr(Equal, regT0, regT1, regT0);

    notMasqueradesAsUndefined.link(this);
    done.link(this);
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_is_boolean(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
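    // ValueFalse and ValueTrue differ only in their low bit, so XORing with
    // ValueFalse maps false to 0 and true to 1. The input was a boolean iff
    // every bit other than the low bit is now clear.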
    xor64(TrustedImm32(static_cast<int32_t>(ValueFalse)), regT0);
    test64(Zero, regT0, TrustedImm32(static_cast<int32_t>(~1)), regT0);
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_is_number(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
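    // Numbers are exactly the values with any of the tagTypeNumberRegister
    // bits set: integers have all of them, doubles at least one, and all
    // other values none.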
    test64(NonZero, regT0, tagTypeNumberRegister, regT0);
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_is_string(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
    Jump isNotCell = emitJumpIfNotJSCell(regT0);

    loadPtr(Address(regT0, JSCell::structureOffset()), regT1);
    compare8(Equal, Address(regT1, Structure::typeInfoTypeOffset()), TrustedImm32(StringType), regT0);
    emitTagAsBoolImmediate(regT0);
    Jump done = jump();

    isNotCell.link(this);
    move(TrustedImm32(ValueFalse), regT0);

    done.link(this);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_tear_off_activation(Instruction* currentInstruction)
{
    int activation = currentInstruction[1].u.operand;
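    // If no activation was ever created for this frame, its register still
    // holds the empty value and there is nothing to tear off.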
    Jump activationNotCreated = branchTest64(Zero, addressFor(activation));
    JITStubCall stubCall(this, cti_op_tear_off_activation);
    stubCall.addArgument(activation, regT2);
    stubCall.call();
    activationNotCreated.link(this);
}

void JIT::emit_op_tear_off_arguments(Instruction* currentInstruction)
{
    int arguments = currentInstruction[1].u.operand;
    int activation = currentInstruction[2].u.operand;

    Jump argsNotCreated = branchTest64(Zero, Address(callFrameRegister, sizeof(Register) * (unmodifiedArgumentsRegister(arguments))));
    JITStubCall stubCall(this, cti_op_tear_off_arguments);
    stubCall.addArgument(unmodifiedArgumentsRegister(arguments), regT2);
    stubCall.addArgument(activation, regT2);
    stubCall.call();
    argsNotCreated.link(this);
}

void JIT::emit_op_ret(Instruction* currentInstruction)
{
    ASSERT(callFrameRegister != regT1);
    ASSERT(regT1 != returnValueRegister);
    ASSERT(returnValueRegister != callFrameRegister);

    // Return the result in returnValueRegister.
    emitGetVirtualRegister(currentInstruction[1].u.operand, returnValueRegister);

    // Grab the return address.
    emitGetFromCallFrameHeaderPtr(JSStack::ReturnPC, regT1);

    // Restore our caller's call frame.
    emitGetFromCallFrameHeaderPtr(JSStack::CallerFrame, callFrameRegister);

    // Return.
    restoreReturnAddressBeforeReturn(regT1);
    ret();
}

void JIT::emit_op_ret_object_or_this(Instruction* currentInstruction)
{
    ASSERT(callFrameRegister != regT1);
    ASSERT(regT1 != returnValueRegister);
    ASSERT(returnValueRegister != callFrameRegister);

    // Return the result in returnValueRegister if it is an object.
    emitGetVirtualRegister(currentInstruction[1].u.operand, returnValueRegister);
    Jump notJSCell = emitJumpIfNotJSCell(returnValueRegister);
    loadPtr(Address(returnValueRegister, JSCell::structureOffset()), regT2);
    Jump notObject = emitJumpIfNotObject(regT2);

    // Grab the return address.
    emitGetFromCallFrameHeaderPtr(JSStack::ReturnPC, regT1);

    // Restore our caller's call frame.
    emitGetFromCallFrameHeaderPtr(JSStack::CallerFrame, callFrameRegister);

    // Return.
    restoreReturnAddressBeforeReturn(regT1);
    ret();

    // Otherwise, return 'this' instead.
    notJSCell.link(this);
    notObject.link(this);
    emitGetVirtualRegister(currentInstruction[2].u.operand, returnValueRegister);

    // Grab the return address.
    emitGetFromCallFrameHeaderPtr(JSStack::ReturnPC, regT1);

    // Restore our caller's call frame.
    emitGetFromCallFrameHeaderPtr(JSStack::CallerFrame, callFrameRegister);

    // Return.
    restoreReturnAddressBeforeReturn(regT1);
    ret();
}

void JIT::emit_op_to_primitive(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);

    Jump isImm = emitJumpIfNotJSCell(regT0);
    addSlowCase(branchPtr(NotEqual, Address(regT0, JSCell::structureOffset()), TrustedImmPtr(m_vm->stringStructure.get())));
    isImm.link(this);

    if (dst != src)
        emitPutVirtualRegister(dst);
}

void JIT::emit_op_strcat(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_strcat);
    slowPathCall.call();
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
}

void JIT::emit_op_not(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);

    // Invert against JSValue(false); if the value was tagged as a boolean, then all bits will be
    // clear other than the low bit (which will be 0 or 1 for false or true inputs respectively).
    // Then invert against JSValue(true), which will add the tag back in, and flip the low bit.
    xor64(TrustedImm32(static_cast<int32_t>(ValueFalse)), regT0);
    addSlowCase(branchTestPtr(NonZero, regT0, TrustedImm32(static_cast<int32_t>(~1))));
    xor64(TrustedImm32(static_cast<int32_t>(ValueTrue)), regT0);

    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_jfalse(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[2].u.operand;
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);

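    // Fast paths: the integer zero and false jump to the target; any other
    // integer falls through, as does true (after a check). Everything else
    // (doubles, cells, undefined, null) takes the slow case.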
    addJump(branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsNumber(0)))), target);
    Jump isNonZero = emitJumpIfImmediateInteger(regT0);

    addJump(branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsBoolean(false)))), target);
    addSlowCase(branch64(NotEqual, regT0, TrustedImm64(JSValue::encode(jsBoolean(true)))));

    isNonZero.link(this);
}

void JIT::emit_op_jeq_null(Instruction* currentInstruction)
{
    int src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    Jump isNotMasqueradesAsUndefined = branchTest8(Zero, Address(regT2, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    addJump(branchPtr(Equal, Address(regT2, Structure::globalObjectOffset()), regT0), target);
    Jump masqueradesGlobalObjectIsForeign = jump();

    // Now handle the immediate cases - undefined & null.
    isImmediate.link(this);
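    // undefined and null differ only in TagBitUndefined, so masking that bit
    // off maps both (and nothing else) onto the encoding of null.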
    and64(TrustedImm32(~TagBitUndefined), regT0);
    addJump(branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsNull()))), target);

    isNotMasqueradesAsUndefined.link(this);
    masqueradesGlobalObjectIsForeign.link(this);
}

void JIT::emit_op_jneq_null(Instruction* currentInstruction)
{
    int src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    addJump(branchTest8(Zero, Address(regT2, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined)), target);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    addJump(branchPtr(NotEqual, Address(regT2, Structure::globalObjectOffset()), regT0), target);
    Jump wasNotImmediate = jump();

    // Now handle the immediate cases - undefined & null.
    isImmediate.link(this);
    and64(TrustedImm32(~TagBitUndefined), regT0);
    addJump(branch64(NotEqual, regT0, TrustedImm64(JSValue::encode(jsNull()))), target);

    wasNotImmediate.link(this);
}

void JIT::emit_op_jneq_ptr(Instruction* currentInstruction)
{
    int src = currentInstruction[1].u.operand;
    Special::Pointer ptr = currentInstruction[2].u.specialPointer;
    unsigned target = currentInstruction[3].u.operand;

    emitGetVirtualRegister(src, regT0);
    addJump(branchPtr(NotEqual, regT0, TrustedImmPtr(actualPointerFor(m_codeBlock, ptr))), target);
}

void JIT::emit_op_eq(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
    compare32(Equal, regT1, regT0, regT0);
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_jtrue(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[2].u.operand;
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);

    Jump isZero = branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsNumber(0))));
    addJump(emitJumpIfImmediateInteger(regT0), target);

    addJump(branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsBoolean(true)))), target);
    addSlowCase(branch64(NotEqual, regT0, TrustedImm64(JSValue::encode(jsBoolean(false)))));

    isZero.link(this);
}

void JIT::emit_op_neq(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
    compare32(NotEqual, regT1, regT0, regT0);
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_bitxor(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
    xor64(regT1, regT0);
    emitFastArithReTagImmediate(regT0, regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_bitor(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
    or64(regT1, regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_throw(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_throw);
    stubCall.addArgument(currentInstruction[1].u.operand, regT2);
    stubCall.call();
    ASSERT(regT0 == returnValueRegister);
#ifndef NDEBUG
    // cti_op_throw always changes its return address,
    // so this point in the code should never be reached.
    breakpoint();
#endif
}

void JIT::emit_op_get_pnames(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int i = currentInstruction[3].u.operand;
    int size = currentInstruction[4].u.operand;
    int breakTarget = currentInstruction[5].u.operand;

    JumpList isNotObject;

    emitGetVirtualRegister(base, regT0);
    if (!m_codeBlock->isKnownNotImmediate(base))
        isNotObject.append(emitJumpIfNotJSCell(regT0));
    if (base != m_codeBlock->thisRegister() || m_codeBlock->isStrictMode()) {
        loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
        isNotObject.append(emitJumpIfNotObject(regT2));
    }

    // We could inline the case where you have a valid cache, but
    // this call doesn't seem to be hot.
    Label isObject(this);
    JITStubCall getPnamesStubCall(this, cti_op_get_pnames);
    getPnamesStubCall.addArgument(regT0);
    getPnamesStubCall.call(dst);
    load32(Address(regT0, OBJECT_OFFSETOF(JSPropertyNameIterator, m_jsStringsSize)), regT3);
    store64(tagTypeNumberRegister, addressFor(i));
    store32(TrustedImm32(Int32Tag), intTagFor(size));
    store32(regT3, intPayloadFor(size));
    Jump end = jump();

    isNotObject.link(this);
    move(regT0, regT1);
    and32(TrustedImm32(~TagBitUndefined), regT1);
    addJump(branch32(Equal, regT1, TrustedImm32(ValueNull)), breakTarget);

    JITStubCall toObjectStubCall(this, cti_to_object);
    toObjectStubCall.addArgument(regT0);
    toObjectStubCall.call(base);
    jump().linkTo(isObject, this);

    end.link(this);
}

void JIT::emit_op_next_pname(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int i = currentInstruction[3].u.operand;
    int size = currentInstruction[4].u.operand;
    int it = currentInstruction[5].u.operand;
    int target = currentInstruction[6].u.operand;

    JumpList callHasProperty;

    Label begin(this);
    load32(intPayloadFor(i), regT0);
    Jump end = branch32(Equal, regT0, intPayloadFor(size));

    // Grab key @ i
    loadPtr(addressFor(it), regT1);
    loadPtr(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_jsStrings)), regT2);

    load64(BaseIndex(regT2, regT0, TimesEight), regT2);

    emitPutVirtualRegister(dst, regT2);

    // Increment i
    add32(TrustedImm32(1), regT0);
    store32(regT0, intPayloadFor(i));

    // Verify that i is valid:
    emitGetVirtualRegister(base, regT0);

    // Test base's structure
    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    callHasProperty.append(branchPtr(NotEqual, regT2, Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedStructure))));

    // Test base's prototype chain
    loadPtr(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedPrototypeChain)), regT3);
    loadPtr(Address(regT3, OBJECT_OFFSETOF(StructureChain, m_vector)), regT3);
    addJump(branchTestPtr(Zero, Address(regT3)), target);

    Label checkPrototype(this);
    load64(Address(regT2, Structure::prototypeOffset()), regT2);
    callHasProperty.append(emitJumpIfNotJSCell(regT2));
    loadPtr(Address(regT2, JSCell::structureOffset()), regT2);
    callHasProperty.append(branchPtr(NotEqual, regT2, Address(regT3)));
    addPtr(TrustedImm32(sizeof(Structure*)), regT3);
    branchTestPtr(NonZero, Address(regT3)).linkTo(checkPrototype, this);

    // Continue loop.
    addJump(jump(), target);

    // Slow case: Ask the object if i is valid.
    callHasProperty.link(this);
    emitGetVirtualRegister(dst, regT1);
    JITStubCall stubCall(this, cti_has_property);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call();

    // Test for valid key.
    addJump(branchTest32(NonZero, regT0), target);
    jump().linkTo(begin, this);

    // End of loop.
    end.link(this);
}

void JIT::emit_op_push_with_scope(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_push_with_scope);
    stubCall.addArgument(currentInstruction[1].u.operand, regT2);
    stubCall.call();
}

void JIT::emit_op_pop_scope(Instruction*)
{
    JITStubCall(this, cti_op_pop_scope).call();
}

void JIT::compileOpStrictEq(Instruction* currentInstruction, CompileOpStrictEqType type)
{
    int dst = currentInstruction[1].u.operand;
    int src1 = currentInstruction[2].u.operand;
    int src2 = currentInstruction[3].u.operand;

    emitGetVirtualRegisters(src1, regT0, src2, regT1);

    // Jump slow if both are cells (to cover strings).
    move(regT0, regT2);
    or64(regT1, regT2);
    addSlowCase(emitJumpIfJSCell(regT2));

    // Jump slow if either is a double. First test if it's an integer, which is fine, and then test
    // if it's a double.
    Jump leftOK = emitJumpIfImmediateInteger(regT0);
    addSlowCase(emitJumpIfImmediateNumber(regT0));
    leftOK.link(this);
    Jump rightOK = emitJumpIfImmediateInteger(regT1);
    addSlowCase(emitJumpIfImmediateNumber(regT1));
    rightOK.link(this);

    if (type == OpStrictEq)
        compare64(Equal, regT1, regT0, regT0);
    else
        compare64(NotEqual, regT1, regT0, regT0);
    emitTagAsBoolImmediate(regT0);

    emitPutVirtualRegister(dst);
}

void JIT::emit_op_stricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpStrictEq);
}

void JIT::emit_op_nstricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpNStrictEq);
}

void JIT::emit_op_to_number(Instruction* currentInstruction)
{
    int srcVReg = currentInstruction[2].u.operand;
    emitGetVirtualRegister(srcVReg, regT0);

    addSlowCase(emitJumpIfNotImmediateNumber(regT0));

    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_push_name_scope(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_push_name_scope);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[1].u.operand)));
    stubCall.addArgument(currentInstruction[2].u.operand, regT2);
    stubCall.addArgument(TrustedImm32(currentInstruction[3].u.operand));
    stubCall.call();
}

void JIT::emit_op_catch(Instruction* currentInstruction)
{
    killLastResultRegister(); // FIXME: Implicitly treat op_catch as a labeled statement, and remove this line of code.
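    // On entry regT0 holds the call frame of the handler, as set up by the
    // exception-throwing machinery; adopt it as our call frame.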
    move(regT0, callFrameRegister);
    peek(regT3, OBJECT_OFFSETOF(struct JITStackFrame, vm) / sizeof(void*));
    load64(Address(regT3, VM::exceptionOffset()), regT0);
    store64(TrustedImm64(JSValue::encode(JSValue())), Address(regT3, VM::exceptionOffset()));
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_switch_imm(Instruction* currentInstruction)
{
    unsigned tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->switchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Immediate));
    jumpTable->ctiOffsets.grow(jumpTable->branchOffsets.size());

    JITStubCall stubCall(this, cti_op_switch_imm);
    stubCall.addArgument(scrutinee, regT2);
    stubCall.addArgument(TrustedImm32(tableIndex));
    stubCall.call();
    jump(regT0);
}

void JIT::emit_op_switch_char(Instruction* currentInstruction)
{
    unsigned tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->switchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Character));
    jumpTable->ctiOffsets.grow(jumpTable->branchOffsets.size());

    JITStubCall stubCall(this, cti_op_switch_char);
    stubCall.addArgument(scrutinee, regT2);
    stubCall.addArgument(TrustedImm32(tableIndex));
    stubCall.call();
    jump(regT0);
}

void JIT::emit_op_switch_string(Instruction* currentInstruction)
{
    unsigned tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    StringJumpTable* jumpTable = &m_codeBlock->stringSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset));

    JITStubCall stubCall(this, cti_op_switch_string);
    stubCall.addArgument(scrutinee, regT2);
    stubCall.addArgument(TrustedImm32(tableIndex));
    stubCall.call();
    jump(regT0);
}

void JIT::emit_op_throw_static_error(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_throw_static_error);
    if (!m_codeBlock->getConstant(currentInstruction[1].u.operand).isNumber())
        stubCall.addArgument(TrustedImm64(JSValue::encode(m_codeBlock->getConstant(currentInstruction[1].u.operand))));
    else
        stubCall.addArgument(Imm64(JSValue::encode(m_codeBlock->getConstant(currentInstruction[1].u.operand))));
    stubCall.addArgument(TrustedImm32(currentInstruction[2].u.operand));
    stubCall.call();
}

void JIT::emit_op_debug(Instruction* currentInstruction)
{
#if ENABLE(DEBUG_WITH_BREAKPOINT)
    UNUSED_PARAM(currentInstruction);
    breakpoint();
#else
    JITStubCall stubCall(this, cti_op_debug);
    stubCall.addArgument(TrustedImm32(currentInstruction[1].u.operand));
    stubCall.call();
#endif
}

void JIT::emit_op_eq_null(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src1 = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src1, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT2, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    move(TrustedImm32(0), regT0);
    Jump wasNotMasqueradesAsUndefined = jump();

    isMasqueradesAsUndefined.link(this);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    loadPtr(Address(regT2, Structure::globalObjectOffset()), regT2);
    comparePtr(Equal, regT0, regT2, regT0);
    Jump wasNotImmediate = jump();

    isImmediate.link(this);

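    // As in op_jeq_null above, masking off TagBitUndefined folds undefined
    // onto null, so a single comparison covers both immediates.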
    and64(TrustedImm32(~TagBitUndefined), regT0);
    compare64(Equal, regT0, TrustedImm32(ValueNull), regT0);

    wasNotImmediate.link(this);
    wasNotMasqueradesAsUndefined.link(this);

    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_neq_null(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src1 = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src1, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT2, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    move(TrustedImm32(1), regT0);
    Jump wasNotMasqueradesAsUndefined = jump();

    isMasqueradesAsUndefined.link(this);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    loadPtr(Address(regT2, Structure::globalObjectOffset()), regT2);
    comparePtr(NotEqual, regT0, regT2, regT0);
    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    and64(TrustedImm32(~TagBitUndefined), regT0);
    compare64(NotEqual, regT0, TrustedImm32(ValueNull), regT0);

    wasNotImmediate.link(this);
    wasNotMasqueradesAsUndefined.link(this);

    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_enter(Instruction*)
{
    emitEnterOptimizationCheck();

    // Even though CTI doesn't use them, we initialize our constant
    // registers to zap stale pointers, to avoid unnecessarily prolonging
    // object lifetime and increasing GC pressure.
    size_t count = m_codeBlock->m_numVars;
    for (size_t j = 0; j < count; ++j)
        emitInitRegister(localToOperand(j));
}

void JIT::emit_op_create_activation(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;

    Jump activationCreated = branchTest64(NonZero, Address(callFrameRegister, sizeof(Register) * dst));
    JITStubCall(this, cti_op_push_activation).call(dst);
    activationCreated.link(this);
}

void JIT::emit_op_create_arguments(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;

    Jump argsCreated = branchTest64(NonZero, Address(callFrameRegister, sizeof(Register) * dst));
    JITStubCall(this, cti_op_create_arguments).call();
    emitPutVirtualRegister(dst);
    emitPutVirtualRegister(unmodifiedArgumentsRegister(dst));
    argsCreated.link(this);
}

void JIT::emit_op_init_lazy_reg(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;

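    // Lazily-initialized registers (e.g. the arguments object) start out
    // holding the empty value, which encodes as zero; storing 0 marks the
    // register as "not created yet".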
    store64(TrustedImm64((int64_t)0), Address(callFrameRegister, sizeof(Register) * dst));
}

void JIT::emit_op_to_this(Instruction* currentInstruction)
{
    WriteBarrierBase<Structure>* cachedStructure = &currentInstruction[2].u.structure;
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT1);

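    // to_this is a no-op for a final object whose structure matches the
    // cached one; primitives, non-final objects, and structure mismatches
    // all take the slow path.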
    emitJumpSlowCaseIfNotJSCell(regT1);
    loadPtr(Address(regT1, JSCell::structureOffset()), regT0);

    addSlowCase(branch8(NotEqual, Address(regT0, Structure::typeInfoTypeOffset()), TrustedImm32(FinalObjectType)));
    loadPtr(cachedStructure, regT2);
    addSlowCase(branchPtr(NotEqual, regT0, regT2));
}

void JIT::emit_op_get_callee(Instruction* currentInstruction)
{
    int result = currentInstruction[1].u.operand;
    WriteBarrierBase<JSCell>* cachedFunction = &currentInstruction[2].u.jsCell;
    emitGetFromCallFrameHeaderPtr(JSStack::Callee, regT0);

    loadPtr(cachedFunction, regT2);
    addSlowCase(branchPtr(NotEqual, regT0, regT2));

    emitPutVirtualRegister(result);
}

void JIT::emitSlow_op_get_callee(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_get_callee);
    slowPathCall.call();
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
}

void JIT::emit_op_create_this(Instruction* currentInstruction)
{
    int callee = currentInstruction[2].u.operand;
    RegisterID calleeReg = regT0;
    RegisterID resultReg = regT0;
    RegisterID allocatorReg = regT1;
    RegisterID structureReg = regT2;
    RegisterID scratchReg = regT3;

    emitGetVirtualRegister(callee, calleeReg);
    loadPtr(Address(calleeReg, JSFunction::offsetOfAllocationProfile() + ObjectAllocationProfile::offsetOfAllocator()), allocatorReg);
    loadPtr(Address(calleeReg, JSFunction::offsetOfAllocationProfile() + ObjectAllocationProfile::offsetOfStructure()), structureReg);
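    // A null allocator means the callee's allocation profile has not been
    // initialized yet; fall back to the slow path.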
    addSlowCase(branchTestPtr(Zero, allocatorReg));

    emitAllocateJSObject(allocatorReg, structureReg, resultReg, scratchReg);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_create_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter); // doesn't have an allocation profile
    linkSlowCase(iter); // allocation failed

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_create_this);
    slowPathCall.call();
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
}

void JIT::emit_op_profile_will_call(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_profile_will_call);
    stubCall.addArgument(currentInstruction[1].u.operand, regT1);
    stubCall.call();
}

void JIT::emit_op_profile_did_call(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_profile_did_call);
    stubCall.addArgument(currentInstruction[1].u.operand, regT1);
    stubCall.call();
}


// Slow cases

void JIT::emitSlow_op_to_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_to_this);
    slowPathCall.call();
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
}

void JIT::emitSlow_op_to_primitive(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_to_primitive);
    slowPathCall.call();
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
}

void JIT::emitSlow_op_not(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_not);
    slowPathCall.call();
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
}

void JIT::emitSlow_op_jfalse(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_jtrue);
    stubCall.addArgument(regT0);
    stubCall.call();
    emitJumpSlowToHot(branchTest32(Zero, regT0), currentInstruction[2].u.operand); // inverted!
}

void JIT::emitSlow_op_jtrue(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_jtrue);
    stubCall.addArgument(regT0);
    stubCall.call();
    emitJumpSlowToHot(branchTest32(NonZero, regT0), currentInstruction[2].u.operand);
}

void JIT::emitSlow_op_bitxor(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_bitxor);
    slowPathCall.call();
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
}

void JIT::emitSlow_op_bitor(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_bitor);
    slowPathCall.call();
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
}

void JIT::emitSlow_op_eq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_eq);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call();
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_neq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_eq);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call();
    xor32(TrustedImm32(0x1), regT0);
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_stricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_stricteq);
    slowPathCall.call();
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
}

void JIT::emitSlow_op_nstricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_nstricteq);
    slowPathCall.call();
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
}

void JIT::emitSlow_op_check_has_instance(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;
    int baseVal = currentInstruction[3].u.operand;

    linkSlowCaseIfNotJSCell(iter, baseVal);
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_check_has_instance);
    stubCall.addArgument(value, regT2);
    stubCall.addArgument(baseVal, regT2);
    stubCall.call(dst);

    emitJumpSlowToHot(jump(), currentInstruction[4].u.operand);
}

void JIT::emitSlow_op_instanceof(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;
    int proto = currentInstruction[3].u.operand;

    linkSlowCaseIfNotJSCell(iter, value);
    linkSlowCaseIfNotJSCell(iter, proto);
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_instanceof);
    stubCall.addArgument(value, regT2);
    stubCall.addArgument(proto, regT2);
    stubCall.call(dst);
}

void JIT::emitSlow_op_to_number(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_to_number);
    slowPathCall.call();
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
}

void JIT::emit_op_get_arguments_length(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int argumentsRegister = currentInstruction[2].u.operand;
    addSlowCase(branchTest64(NonZero, addressFor(argumentsRegister)));
    emitGetFromCallFrameHeader32(JSStack::ArgumentCount, regT0);
    sub32(TrustedImm32(1), regT0);
    emitFastArithReTagImmediate(regT0, regT0);
    emitPutVirtualRegister(dst, regT0);
}

void JIT::emitSlow_op_get_arguments_length(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    const Identifier* ident = &(m_codeBlock->identifier(currentInstruction[3].u.operand));

    emitGetVirtualRegister(base, regT0);
    JITStubCall stubCall(this, cti_op_get_by_id_generic);
    stubCall.addArgument(regT0);
    stubCall.addArgument(TrustedImmPtr(ident));
    stubCall.call(dst);
}

void JIT::emit_op_get_argument_by_val(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int argumentsRegister = currentInstruction[2].u.operand;
    int property = currentInstruction[3].u.operand;
    addSlowCase(branchTest64(NonZero, addressFor(argumentsRegister)));
    emitGetVirtualRegister(property, regT1);
    addSlowCase(emitJumpIfNotImmediateInteger(regT1));
    add32(TrustedImm32(1), regT1);
    // regT1 now contains the integer index of the argument we want, including this
    emitGetFromCallFrameHeader32(JSStack::ArgumentCount, regT2);
    addSlowCase(branch32(AboveOrEqual, regT1, regT2));

    signExtend32ToPtr(regT1, regT1);
    load64(BaseIndex(callFrameRegister, regT1, TimesEight, CallFrame::thisArgumentOffset() * static_cast<int>(sizeof(Register))), regT0);
    emitValueProfilingSite(regT4);
    emitPutVirtualRegister(dst, regT0);
}

void JIT::emitSlow_op_get_argument_by_val(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;
    int arguments = currentInstruction[2].u.operand;
    int property = currentInstruction[3].u.operand;

    linkSlowCase(iter);
    Jump skipArgumentsCreation = jump();

    linkSlowCase(iter);
    linkSlowCase(iter);
    JITStubCall(this, cti_op_create_arguments).call();
    emitPutVirtualRegister(arguments);
    emitPutVirtualRegister(unmodifiedArgumentsRegister(arguments));

    skipArgumentsCreation.link(this);
    JITStubCall stubCall(this, cti_op_get_by_val_generic);
    stubCall.addArgument(arguments, regT2);
    stubCall.addArgument(property, regT2);
    stubCall.callWithValueProfiling(dst);
}

#endif // USE(JSVALUE64)

void JIT::emit_op_loop_hint(Instruction*)
{
    // Emit the JIT optimization check:
    if (canBeOptimized()) {
        if (Options::enableOSREntryInLoops()) {
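            // The execution counter counts up toward zero from a negative
            // threshold; once this add makes it non-negative we take the slow
            // case, which calls cti_optimize to consider OSR entry.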
            addSlowCase(branchAdd32(PositiveOrZero, TrustedImm32(Options::executionCounterIncrementForLoop()),
                AbsoluteAddress(m_codeBlock->addressOfJITExecuteCounter())));
        } else {
            // Add with saturation.
            move(TrustedImmPtr(m_codeBlock->addressOfJITExecuteCounter()), regT3);
            load32(regT3, regT2);
            Jump dontAdd = branch32(
                GreaterThan, regT2,
                TrustedImm32(std::numeric_limits<int32_t>::max() - Options::executionCounterIncrementForLoop()));
            add32(TrustedImm32(Options::executionCounterIncrementForLoop()), regT2);
            store32(regT2, regT3);
            dontAdd.link(this);
        }
    }

    // Emit the watchdog timer check:
    if (m_vm->watchdog.isEnabled())
        addSlowCase(branchTest8(NonZero, AbsoluteAddress(m_vm->watchdog.timerDidFireAddress())));
}

void JIT::emitSlow_op_loop_hint(Instruction*, Vector<SlowCaseEntry>::iterator& iter)
{
#if ENABLE(DFG_JIT)
    // Emit the slow path for the JIT optimization check:
    if (canBeOptimized() && Options::enableOSREntryInLoops()) {
        linkSlowCase(iter);

        JITStubCall stubCall(this, cti_optimize);
        stubCall.addArgument(TrustedImm32(m_bytecodeOffset));
        stubCall.call();

        emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_loop_hint));
    }
#endif

    // Emit the slow path of the watchdog timer check:
    if (m_vm->watchdog.isEnabled()) {
        linkSlowCase(iter);

        JITStubCall stubCall(this, cti_handle_watchdog_timer);
        stubCall.call();

        emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_loop_hint));
    }
}

void JIT::emit_op_new_regexp(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_new_regexp);
    stubCall.addArgument(TrustedImmPtr(m_codeBlock->regexp(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_new_func(Instruction* currentInstruction)
{
    Jump lazyJump;
    int dst = currentInstruction[1].u.operand;
    if (currentInstruction[3].u.operand) {
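        // Operand 3 marks a declaration whose register is initialized lazily;
        // if the register is already populated, the function has already been
        // created and the stub call below can be skipped.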
#if USE(JSVALUE32_64)
        lazyJump = branch32(NotEqual, tagFor(dst), TrustedImm32(JSValue::EmptyValueTag));
#else
        lazyJump = branchTest64(NonZero, addressFor(dst));
#endif
    }

    JITStubCall stubCall(this, cti_op_new_func);
    stubCall.addArgument(TrustedImmPtr(m_codeBlock->functionDecl(currentInstruction[2].u.operand)));
    stubCall.call(dst);

    if (currentInstruction[3].u.operand) {
#if USE(JSVALUE32_64)
        unmap();
#else
        killLastResultRegister();
#endif
        lazyJump.link(this);
    }
}

void JIT::emit_op_new_func_exp(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_new_func_exp);
    stubCall.addArgument(TrustedImmPtr(m_codeBlock->functionExpr(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_new_array(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_new_array);
    stubCall.addArgument(TrustedImm32(currentInstruction[2].u.operand));
    stubCall.addArgument(TrustedImm32(currentInstruction[3].u.operand));
    stubCall.addArgument(TrustedImmPtr(currentInstruction[4].u.arrayAllocationProfile));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_new_array_with_size(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_new_array_with_size);
#if USE(JSVALUE64)
    stubCall.addArgument(currentInstruction[2].u.operand, regT2);
#else
    stubCall.addArgument(currentInstruction[2].u.operand);
#endif
    stubCall.addArgument(TrustedImmPtr(currentInstruction[3].u.arrayAllocationProfile));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_new_array_buffer(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_new_array_buffer);
    stubCall.addArgument(TrustedImm32(currentInstruction[2].u.operand));
    stubCall.addArgument(TrustedImm32(currentInstruction[3].u.operand));
    stubCall.addArgument(TrustedImmPtr(currentInstruction[4].u.arrayAllocationProfile));
    stubCall.call(currentInstruction[1].u.operand);
}

} // namespace JSC

#endif // ENABLE(JIT)