Source/JavaScriptCore/jit/JITOpcodes.cpp
/*
 * Copyright (C) 2009, 2012 Apple Inc. All rights reserved.
 * Copyright (C) 2010 Patrick Gansterer <paroga@paroga.com>
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#if ENABLE(JIT)
#include "JIT.h"

#include "Arguments.h"
#include "CopiedSpaceInlines.h"
#include "Heap.h"
#include "JITInlines.h"
#include "JITStubCall.h"
#include "JSArray.h"
#include "JSCell.h"
#include "JSFunction.h"
#include "JSPropertyNameIterator.h"
#include "LinkBuffer.h"
#include "SlowPathCall.h"
#include "VirtualRegister.h"

namespace JSC {

#if USE(JSVALUE64)

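// A quick orientation for the 64-bit value representation this file relies on
// (the authoritative definitions live in JSCJSValue.h): values are NaN-encoded,
// so every number carries bits of TagTypeNumber (0xFFFF000000000000) in its top
// 16 bits, cells have the top 16 bits clear, and the remaining immediates use
// low tag bits: ValueFalse = 0x06, ValueTrue = 0x07, ValueUndefined = 0x0A,
// ValueNull = 0x02.
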
JIT::CodeRef JIT::privateCompileCTINativeCall(VM* vm, NativeFunction)
{
    return vm->getCTIStub(nativeCallGenerator);
}

void JIT::emit_op_mov(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    if (canBeOptimizedOrInlined()) {
        // Use the simpler approach, since the DFG assumes that the last result register
        // is always set to the destination on every operation.
        emitGetVirtualRegister(src, regT0);
        emitPutVirtualRegister(dst);
    } else {
        if (m_codeBlock->isConstantRegisterIndex(src)) {
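            // Imm64 (rather than TrustedImm64) allows the assembler to blind
            // the constant as a JIT-spraying hardening measure; number
            // constants are attacker-influenced, so only non-numbers are
            // emitted verbatim as trusted immediates.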
            if (!getConstantOperand(src).isNumber())
                store64(TrustedImm64(JSValue::encode(getConstantOperand(src))), Address(callFrameRegister, dst * sizeof(Register)));
            else
                store64(Imm64(JSValue::encode(getConstantOperand(src))), Address(callFrameRegister, dst * sizeof(Register)));
            if (dst == m_lastResultBytecodeRegister)
                killLastResultRegister();
        } else if ((src == m_lastResultBytecodeRegister) || (dst == m_lastResultBytecodeRegister)) {
            // If either the src or dst is the cached register, go through the
            // get/put register helpers to make sure we track this correctly.
            emitGetVirtualRegister(src, regT0);
            emitPutVirtualRegister(dst);
        } else {
            // Perform the copy via regT1; do not disturb any mapping in regT0.
            load64(Address(callFrameRegister, src * sizeof(Register)), regT1);
            store64(regT1, Address(callFrameRegister, dst * sizeof(Register)));
        }
    }
}

void JIT::emit_op_end(Instruction* currentInstruction)
{
    RELEASE_ASSERT(returnValueRegister != callFrameRegister);
    emitGetVirtualRegister(currentInstruction[1].u.operand, returnValueRegister);
    restoreReturnAddressBeforeReturn(Address(callFrameRegister, JSStack::ReturnPC * static_cast<int>(sizeof(Register))));
    ret();
}

void JIT::emit_op_jmp(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[1].u.operand;
    addJump(jump(), target);
}

void JIT::emit_op_new_object(Instruction* currentInstruction)
{
    Structure* structure = currentInstruction[3].u.objectAllocationProfile->structure();
    size_t allocationSize = JSFinalObject::allocationSize(structure->inlineCapacity());
    MarkedAllocator* allocator = &m_vm->heap.allocatorForObjectWithoutDestructor(allocationSize);

    RegisterID resultReg = regT0;
    RegisterID allocatorReg = regT1;
    RegisterID scratchReg = regT2;

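    // Inline allocation: emitAllocateJSObject pops a free cell off the
    // MarkedAllocator's free list and installs the structure. If the free
    // list is empty it registers a slow case, which falls back to
    // cti_op_new_object below.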
    move(TrustedImmPtr(allocator), allocatorReg);
    emitAllocateJSObject(allocatorReg, TrustedImmPtr(structure), resultReg, scratchReg);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_new_object(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_new_object);
    stubCall.addArgument(TrustedImmPtr(currentInstruction[3].u.objectAllocationProfile->structure()));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_check_has_instance(Instruction* currentInstruction)
{
    int baseVal = currentInstruction[3].u.operand;

    emitGetVirtualRegister(baseVal, regT0);

    // Check that baseVal is a cell.
    emitJumpSlowCaseIfNotJSCell(regT0, baseVal);

    // Check that baseVal 'ImplementsHasInstance'.
    loadPtr(Address(regT0, JSCell::structureOffset()), regT0);
    addSlowCase(branchTest8(Zero, Address(regT0, Structure::typeInfoFlagsOffset()), TrustedImm32(ImplementsDefaultHasInstance)));
}

void JIT::emit_op_instanceof(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;
    int proto = currentInstruction[3].u.operand;

    // Load the operands value and proto into registers; regT0 is kept free
    // for the result.
    emitGetVirtualRegister(value, regT2);
    emitGetVirtualRegister(proto, regT1);

    // Check that value and proto are cells. baseVal must be a cell - it was
    // already checked by op_check_has_instance.
    emitJumpSlowCaseIfNotJSCell(regT2, value);
    emitJumpSlowCaseIfNotJSCell(regT1, proto);

    // Check that proto is an object.
    loadPtr(Address(regT1, JSCell::structureOffset()), regT3);
    addSlowCase(emitJumpIfNotObject(regT3));

    // Optimistically load the result true, and start looping.
    // Initially, regT1 still contains proto and regT2 still contains value.
    // As we loop, regT2 will be updated with its prototype, recursively walking the prototype chain.
    move(TrustedImm64(JSValue::encode(jsBoolean(true))), regT0);
    Label loop(this);

    // Load the prototype of the object in regT2. If this is equal to regT1 - WIN!
    // Otherwise, check if we've hit null - if we have, drop out of the loop; if not, go again.
    loadPtr(Address(regT2, JSCell::structureOffset()), regT2);
    load64(Address(regT2, Structure::prototypeOffset()), regT2);
    Jump isInstance = branchPtr(Equal, regT2, regT1);
    emitJumpIfJSCell(regT2).linkTo(loop, this);

    // We get here either by dropping out of the loop, or if value was not an Object. Result is false.
    move(TrustedImm64(JSValue::encode(jsBoolean(false))), regT0);

    // isInstance jumps right down to here, to skip setting the result to false (it has already set true).
    isInstance.link(this);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_is_undefined(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
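    // Non-cells are undefined iff they compare equal to ValueUndefined.
    // Cells count as undefined only if their structure has the
    // MasqueradesAsUndefined flag set and the cell's global object is this
    // code block's global object (e.g. document.all observed from its own
    // frame).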
    Jump isCell = emitJumpIfJSCell(regT0);

    compare64(Equal, regT0, TrustedImm32(ValueUndefined), regT0);
    Jump done = jump();

    isCell.link(this);
    loadPtr(Address(regT0, JSCell::structureOffset()), regT1);
    Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT1, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    move(TrustedImm32(0), regT0);
    Jump notMasqueradesAsUndefined = jump();

    isMasqueradesAsUndefined.link(this);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    loadPtr(Address(regT1, Structure::globalObjectOffset()), regT1);
    comparePtr(Equal, regT0, regT1, regT0);

    notMasqueradesAsUndefined.link(this);
    done.link(this);
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_is_boolean(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
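    // Booleans are encoded as ValueFalse (0x06) and ValueTrue (0x07).
    // XORing with ValueFalse maps them to 0 and 1; any value with a bit set
    // outside the low bit after the XOR was therefore not a boolean.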
    xor64(TrustedImm32(static_cast<int32_t>(ValueFalse)), regT0);
    test64(Zero, regT0, TrustedImm32(static_cast<int32_t>(~1)), regT0);
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_is_number(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
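    // Every number, int32 or double, has at least one of the top 16 bits of
    // TagTypeNumber set, so a non-zero mask against tagTypeNumberRegister
    // identifies numbers without branching.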
    test64(NonZero, regT0, tagTypeNumberRegister, regT0);
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_is_string(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
    Jump isNotCell = emitJumpIfNotJSCell(regT0);

    loadPtr(Address(regT0, JSCell::structureOffset()), regT1);
    compare8(Equal, Address(regT1, Structure::typeInfoTypeOffset()), TrustedImm32(StringType), regT0);
    emitTagAsBoolImmediate(regT0);
    Jump done = jump();

    isNotCell.link(this);
    move(TrustedImm32(ValueFalse), regT0);

    done.link(this);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_tear_off_activation(Instruction* currentInstruction)
{
    int activation = currentInstruction[1].u.operand;
    Jump activationNotCreated = branchTest64(Zero, addressFor(activation));
    JITStubCall stubCall(this, cti_op_tear_off_activation);
    stubCall.addArgument(activation, regT2);
    stubCall.call();
    activationNotCreated.link(this);
}

void JIT::emit_op_tear_off_arguments(Instruction* currentInstruction)
{
    int arguments = currentInstruction[1].u.operand;
    int activation = currentInstruction[2].u.operand;

    Jump argsNotCreated = branchTest64(Zero, Address(callFrameRegister, sizeof(Register) * (unmodifiedArgumentsRegister(VirtualRegister(arguments)).offset())));
    JITStubCall stubCall(this, cti_op_tear_off_arguments);
    stubCall.addArgument(unmodifiedArgumentsRegister(VirtualRegister(arguments)).offset(), regT2);
    stubCall.addArgument(activation, regT2);
    stubCall.call();
    argsNotCreated.link(this);
}

void JIT::emit_op_ret(Instruction* currentInstruction)
{
    ASSERT(callFrameRegister != regT1);
    ASSERT(regT1 != returnValueRegister);
    ASSERT(returnValueRegister != callFrameRegister);

    // Return the result in %eax.
    emitGetVirtualRegister(currentInstruction[1].u.operand, returnValueRegister);

    // Grab the return address.
    emitGetFromCallFrameHeaderPtr(JSStack::ReturnPC, regT1);

    // Restore our caller's "r".
    emitGetFromCallFrameHeaderPtr(JSStack::CallerFrame, callFrameRegister);

    // Return.
    restoreReturnAddressBeforeReturn(regT1);
    ret();
}

void JIT::emit_op_ret_object_or_this(Instruction* currentInstruction)
{
    ASSERT(callFrameRegister != regT1);
    ASSERT(regT1 != returnValueRegister);
    ASSERT(returnValueRegister != callFrameRegister);

    // Return the result in %eax.
    emitGetVirtualRegister(currentInstruction[1].u.operand, returnValueRegister);
    Jump notJSCell = emitJumpIfNotJSCell(returnValueRegister);
    loadPtr(Address(returnValueRegister, JSCell::structureOffset()), regT2);
    Jump notObject = emitJumpIfNotObject(regT2);

    // Grab the return address.
    emitGetFromCallFrameHeaderPtr(JSStack::ReturnPC, regT1);

    // Restore our caller's "r".
    emitGetFromCallFrameHeaderPtr(JSStack::CallerFrame, callFrameRegister);

    // Return.
    restoreReturnAddressBeforeReturn(regT1);
    ret();

    // Return 'this' in %eax.
    notJSCell.link(this);
    notObject.link(this);
    emitGetVirtualRegister(currentInstruction[2].u.operand, returnValueRegister);

    // Grab the return address.
    emitGetFromCallFrameHeaderPtr(JSStack::ReturnPC, regT1);

    // Restore our caller's "r".
    emitGetFromCallFrameHeaderPtr(JSStack::CallerFrame, callFrameRegister);

    // Return.
    restoreReturnAddressBeforeReturn(regT1);
    ret();
}

void JIT::emit_op_to_primitive(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);

    Jump isImm = emitJumpIfNotJSCell(regT0);
    addSlowCase(branchPtr(NotEqual, Address(regT0, JSCell::structureOffset()), TrustedImmPtr(m_vm->stringStructure.get())));
    isImm.link(this);

    if (dst != src)
        emitPutVirtualRegister(dst);
}

void JIT::emit_op_strcat(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_strcat);
    slowPathCall.call();
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
}

void JIT::emit_op_not(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);

    // Invert against JSValue(false); if the value was tagged as a boolean, then all bits will be
    // clear other than the low bit (which will be 0 or 1 for false or true inputs respectively).
    // Then invert against JSValue(true), which will add the tag back in, and flip the low bit.
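    // Worked example: false (0x06) ^ ValueFalse = 0x00, then ^ ValueTrue
    // gives 0x07 (true); true (0x07) ^ ValueFalse = 0x01, then ^ ValueTrue
    // gives 0x06 (false). Any other input leaves bits outside the low bit
    // set after the first XOR and takes the slow case.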
    xor64(TrustedImm32(static_cast<int32_t>(ValueFalse)), regT0);
    addSlowCase(branchTestPtr(NonZero, regT0, TrustedImm32(static_cast<int32_t>(~1))));
    xor64(TrustedImm32(static_cast<int32_t>(ValueTrue)), regT0);

    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_jfalse(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[2].u.operand;
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);

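    // Fast paths: integer zero jumps, any other integer falls through;
    // boolean false jumps, boolean true falls through. Everything else
    // (doubles, cells, null/undefined) takes the slow case.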
    addJump(branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsNumber(0)))), target);
    Jump isNonZero = emitJumpIfImmediateInteger(regT0);

    addJump(branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsBoolean(false)))), target);
    addSlowCase(branch64(NotEqual, regT0, TrustedImm64(JSValue::encode(jsBoolean(true)))));

    isNonZero.link(this);
}

void JIT::emit_op_jeq_null(Instruction* currentInstruction)
{
    int src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    Jump isNotMasqueradesAsUndefined = branchTest8(Zero, Address(regT2, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    addJump(branchPtr(Equal, Address(regT2, Structure::globalObjectOffset()), regT0), target);
    Jump masqueradesGlobalObjectIsForeign = jump();

    // Now handle the immediate cases - undefined & null.
    isImmediate.link(this);
    and64(TrustedImm32(~TagBitUndefined), regT0);
    addJump(branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsNull()))), target);

    isNotMasqueradesAsUndefined.link(this);
    masqueradesGlobalObjectIsForeign.link(this);
}

void JIT::emit_op_jneq_null(Instruction* currentInstruction)
{
    int src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    addJump(branchTest8(Zero, Address(regT2, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined)), target);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    addJump(branchPtr(NotEqual, Address(regT2, Structure::globalObjectOffset()), regT0), target);
    Jump wasNotImmediate = jump();

    // Now handle the immediate cases - undefined & null.
    isImmediate.link(this);
    and64(TrustedImm32(~TagBitUndefined), regT0);
    addJump(branch64(NotEqual, regT0, TrustedImm64(JSValue::encode(jsNull()))), target);

    wasNotImmediate.link(this);
}

void JIT::emit_op_jneq_ptr(Instruction* currentInstruction)
{
    int src = currentInstruction[1].u.operand;
    Special::Pointer ptr = currentInstruction[2].u.specialPointer;
    unsigned target = currentInstruction[3].u.operand;

    emitGetVirtualRegister(src, regT0);
    addJump(branchPtr(NotEqual, regT0, TrustedImmPtr(actualPointerFor(m_codeBlock, ptr))), target);
}

void JIT::emit_op_eq(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
    compare32(Equal, regT1, regT0, regT0);
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_jtrue(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[2].u.operand;
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);

    Jump isZero = branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsNumber(0))));
    addJump(emitJumpIfImmediateInteger(regT0), target);

    addJump(branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsBoolean(true)))), target);
    addSlowCase(branch64(NotEqual, regT0, TrustedImm64(JSValue::encode(jsBoolean(false)))));

    isZero.link(this);
}

void JIT::emit_op_neq(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
    compare32(NotEqual, regT1, regT0, regT0);
    emitTagAsBoolImmediate(regT0);

    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_bitxor(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
    xor64(regT1, regT0);
    emitFastArithReTagImmediate(regT0, regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_bitor(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
    or64(regT1, regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_throw(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_throw);
    stubCall.addArgument(currentInstruction[1].u.operand, regT2);
    stubCall.call();
    ASSERT(regT0 == returnValueRegister);
#ifndef NDEBUG
    // cti_op_throw always changes its return address,
    // so this point in the code should never be reached.
    breakpoint();
#endif
}

void JIT::emit_op_get_pnames(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int i = currentInstruction[3].u.operand;
    int size = currentInstruction[4].u.operand;
    int breakTarget = currentInstruction[5].u.operand;

    JumpList isNotObject;

    emitGetVirtualRegister(base, regT0);
    if (!m_codeBlock->isKnownNotImmediate(base))
        isNotObject.append(emitJumpIfNotJSCell(regT0));
    if (base != m_codeBlock->thisRegister().offset() || m_codeBlock->isStrictMode()) {
        loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
        isNotObject.append(emitJumpIfNotObject(regT2));
    }

    // We could inline the case where you have a valid cache, but
    // this call doesn't seem to be hot.
    Label isObject(this);
    JITStubCall getPnamesStubCall(this, cti_op_get_pnames);
    getPnamesStubCall.addArgument(regT0);
    getPnamesStubCall.call(dst);
    load32(Address(regT0, OBJECT_OFFSETOF(JSPropertyNameIterator, m_jsStringsSize)), regT3);
    store64(tagTypeNumberRegister, addressFor(i));
    store32(TrustedImm32(Int32Tag), intTagFor(size));
    store32(regT3, intPayloadFor(size));
    Jump end = jump();

    isNotObject.link(this);
    move(regT0, regT1);
    and32(TrustedImm32(~TagBitUndefined), regT1);
    addJump(branch32(Equal, regT1, TrustedImm32(ValueNull)), breakTarget);

    JITStubCall toObjectStubCall(this, cti_to_object);
    toObjectStubCall.addArgument(regT0);
    toObjectStubCall.call(base);
    jump().linkTo(isObject, this);

    end.link(this);
}

void JIT::emit_op_next_pname(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int i = currentInstruction[3].u.operand;
    int size = currentInstruction[4].u.operand;
    int it = currentInstruction[5].u.operand;
    int target = currentInstruction[6].u.operand;

    JumpList callHasProperty;

    Label begin(this);
    load32(intPayloadFor(i), regT0);
    Jump end = branch32(Equal, regT0, intPayloadFor(size));

    // Grab key @ i
    loadPtr(addressFor(it), regT1);
    loadPtr(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_jsStrings)), regT2);

    load64(BaseIndex(regT2, regT0, TimesEight), regT2);

    emitPutVirtualRegister(dst, regT2);

    // Increment i
    add32(TrustedImm32(1), regT0);
    store32(regT0, intPayloadFor(i));

    // Verify that i is valid:
    emitGetVirtualRegister(base, regT0);

    // Test base's structure
    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    callHasProperty.append(branchPtr(NotEqual, regT2, Address(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedStructure)))));

    // Test base's prototype chain
    loadPtr(Address(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedPrototypeChain))), regT3);
    loadPtr(Address(regT3, OBJECT_OFFSETOF(StructureChain, m_vector)), regT3);
    addJump(branchTestPtr(Zero, Address(regT3)), target);

    Label checkPrototype(this);
    load64(Address(regT2, Structure::prototypeOffset()), regT2);
    callHasProperty.append(emitJumpIfNotJSCell(regT2));
    loadPtr(Address(regT2, JSCell::structureOffset()), regT2);
    callHasProperty.append(branchPtr(NotEqual, regT2, Address(regT3)));
    addPtr(TrustedImm32(sizeof(Structure*)), regT3);
    branchTestPtr(NonZero, Address(regT3)).linkTo(checkPrototype, this);

    // Continue loop.
    addJump(jump(), target);

    // Slow case: Ask the object if i is valid.
    callHasProperty.link(this);
    emitGetVirtualRegister(dst, regT1);
    JITStubCall stubCall(this, cti_has_property);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call();

    // Test for valid key.
    addJump(branchTest32(NonZero, regT0), target);
    jump().linkTo(begin, this);

    // End of loop.
    end.link(this);
}

void JIT::emit_op_push_with_scope(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_push_with_scope);
    stubCall.addArgument(currentInstruction[1].u.operand, regT2);
    stubCall.call();
}

void JIT::emit_op_pop_scope(Instruction*)
{
    JITStubCall(this, cti_op_pop_scope).call();
}

void JIT::compileOpStrictEq(Instruction* currentInstruction, CompileOpStrictEqType type)
{
    int dst = currentInstruction[1].u.operand;
    int src1 = currentInstruction[2].u.operand;
    int src2 = currentInstruction[3].u.operand;

    emitGetVirtualRegisters(src1, regT0, src2, regT1);

    // Jump slow if both are cells (to cover strings).
    move(regT0, regT2);
    or64(regT1, regT2);
    addSlowCase(emitJumpIfJSCell(regT2));

    // Jump slow if either is a double. First test if it's an integer, which is fine, and then test
    // if it's a double.
    Jump leftOK = emitJumpIfImmediateInteger(regT0);
    addSlowCase(emitJumpIfImmediateNumber(regT0));
    leftOK.link(this);
    Jump rightOK = emitJumpIfImmediateInteger(regT1);
    addSlowCase(emitJumpIfImmediateNumber(regT1));
    rightOK.link(this);

    if (type == OpStrictEq)
        compare64(Equal, regT1, regT0, regT0);
    else
        compare64(NotEqual, regT1, regT0, regT0);
    emitTagAsBoolImmediate(regT0);

    emitPutVirtualRegister(dst);
}

void JIT::emit_op_stricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpStrictEq);
}

void JIT::emit_op_nstricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpNStrictEq);
}

void JIT::emit_op_to_number(Instruction* currentInstruction)
{
    int srcVReg = currentInstruction[2].u.operand;
    emitGetVirtualRegister(srcVReg, regT0);

    addSlowCase(emitJumpIfNotImmediateNumber(regT0));

    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_push_name_scope(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_push_name_scope);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[1].u.operand)));
    stubCall.addArgument(currentInstruction[2].u.operand, regT2);
    stubCall.addArgument(TrustedImm32(currentInstruction[3].u.operand));
    stubCall.call();
}

void JIT::emit_op_catch(Instruction* currentInstruction)
{
    killLastResultRegister(); // FIXME: Implicitly treat op_catch as a labeled statement, and remove this line of code.
    move(regT0, callFrameRegister);
    peek(regT3, OBJECT_OFFSETOF(struct JITStackFrame, vm) / sizeof(void*));
    load64(Address(regT3, VM::exceptionOffset()), regT0);
    store64(TrustedImm64(JSValue::encode(JSValue())), Address(regT3, VM::exceptionOffset()));
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_switch_imm(Instruction* currentInstruction)
{
    unsigned tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // Create the jump table for switch destinations, and track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->switchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Immediate));
    jumpTable->ctiOffsets.grow(jumpTable->branchOffsets.size());

    JITStubCall stubCall(this, cti_op_switch_imm);
    stubCall.addArgument(scrutinee, regT2);
    stubCall.addArgument(TrustedImm32(tableIndex));
    stubCall.call();
    jump(regT0);
}

void JIT::emit_op_switch_char(Instruction* currentInstruction)
{
    unsigned tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // Create the jump table for switch destinations, and track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->switchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Character));
    jumpTable->ctiOffsets.grow(jumpTable->branchOffsets.size());

    JITStubCall stubCall(this, cti_op_switch_char);
    stubCall.addArgument(scrutinee, regT2);
    stubCall.addArgument(TrustedImm32(tableIndex));
    stubCall.call();
    jump(regT0);
}

void JIT::emit_op_switch_string(Instruction* currentInstruction)
{
    unsigned tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // Create the jump table for switch destinations, and track this switch statement.
    StringJumpTable* jumpTable = &m_codeBlock->stringSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset));

    JITStubCall stubCall(this, cti_op_switch_string);
    stubCall.addArgument(scrutinee, regT2);
    stubCall.addArgument(TrustedImm32(tableIndex));
    stubCall.call();
    jump(regT0);
}

void JIT::emit_op_throw_static_error(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_throw_static_error);
    if (!m_codeBlock->getConstant(currentInstruction[1].u.operand).isNumber())
        stubCall.addArgument(TrustedImm64(JSValue::encode(m_codeBlock->getConstant(currentInstruction[1].u.operand))));
    else
        stubCall.addArgument(Imm64(JSValue::encode(m_codeBlock->getConstant(currentInstruction[1].u.operand))));
    stubCall.addArgument(TrustedImm32(currentInstruction[2].u.operand));
    stubCall.call();
}

void JIT::emit_op_debug(Instruction* currentInstruction)
{
#if ENABLE(DEBUG_WITH_BREAKPOINT)
    UNUSED_PARAM(currentInstruction);
    breakpoint();
#else
    JITStubCall stubCall(this, cti_op_debug);
    stubCall.addArgument(TrustedImm32(currentInstruction[1].u.operand));
    stubCall.call();
#endif
}

void JIT::emit_op_eq_null(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src1 = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src1, regT0);
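    // Three cases: an immediate equals null iff it is null or undefined
    // (the ~TagBitUndefined mask below folds undefined onto null); an
    // ordinary cell is never equal to null; a MasqueradesAsUndefined cell
    // compares equal only when observed from its own global object.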
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT2, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    move(TrustedImm32(0), regT0);
    Jump wasNotMasqueradesAsUndefined = jump();

    isMasqueradesAsUndefined.link(this);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    loadPtr(Address(regT2, Structure::globalObjectOffset()), regT2);
    comparePtr(Equal, regT0, regT2, regT0);
    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    and64(TrustedImm32(~TagBitUndefined), regT0);
    compare64(Equal, regT0, TrustedImm32(ValueNull), regT0);

    wasNotImmediate.link(this);
    wasNotMasqueradesAsUndefined.link(this);

    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_neq_null(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src1 = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src1, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT2, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    move(TrustedImm32(1), regT0);
    Jump wasNotMasqueradesAsUndefined = jump();

    isMasqueradesAsUndefined.link(this);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    loadPtr(Address(regT2, Structure::globalObjectOffset()), regT2);
    comparePtr(NotEqual, regT0, regT2, regT0);
    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    and64(TrustedImm32(~TagBitUndefined), regT0);
    compare64(NotEqual, regT0, TrustedImm32(ValueNull), regT0);

    wasNotImmediate.link(this);
    wasNotMasqueradesAsUndefined.link(this);

    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_enter(Instruction*)
{
    emitEnterOptimizationCheck();

    // Even though CTI doesn't use them, we initialize our constant
    // registers to zap stale pointers, to avoid unnecessarily prolonging
    // object lifetime and increasing GC pressure.
    size_t count = m_codeBlock->m_numVars;
    for (size_t j = 0; j < count; ++j)
        emitInitRegister(virtualRegisterForLocal(j).offset());
}

void JIT::emit_op_create_activation(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;

    Jump activationCreated = branchTest64(NonZero, Address(callFrameRegister, sizeof(Register) * dst));
    JITStubCall(this, cti_op_push_activation).call(dst);
    activationCreated.link(this);
}

void JIT::emit_op_create_arguments(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;

    Jump argsCreated = branchTest64(NonZero, Address(callFrameRegister, sizeof(Register) * dst));
    JITStubCall(this, cti_op_create_arguments).call();
    emitPutVirtualRegister(dst);
    emitPutVirtualRegister(unmodifiedArgumentsRegister(VirtualRegister(dst)));
    argsCreated.link(this);
}

void JIT::emit_op_init_lazy_reg(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;

    store64(TrustedImm64((int64_t)0), Address(callFrameRegister, sizeof(Register) * dst));
}

void JIT::emit_op_to_this(Instruction* currentInstruction)
{
    WriteBarrierBase<Structure>* cachedStructure = &currentInstruction[2].u.structure;
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT1);

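    // Fast path: |this| must already be a final JS object whose structure
    // matches the structure cached in the instruction stream. Primitives,
    // non-final objects, and structure mismatches all take the slow path,
    // which can also update the cached structure.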
    emitJumpSlowCaseIfNotJSCell(regT1);
    loadPtr(Address(regT1, JSCell::structureOffset()), regT0);

    addSlowCase(branch8(NotEqual, Address(regT0, Structure::typeInfoTypeOffset()), TrustedImm32(FinalObjectType)));
    loadPtr(cachedStructure, regT2);
    addSlowCase(branchPtr(NotEqual, regT0, regT2));
}

void JIT::emit_op_get_callee(Instruction* currentInstruction)
{
    int result = currentInstruction[1].u.operand;
    WriteBarrierBase<JSCell>* cachedFunction = &currentInstruction[2].u.jsCell;
    emitGetFromCallFrameHeaderPtr(JSStack::Callee, regT0);

    loadPtr(cachedFunction, regT2);
    addSlowCase(branchPtr(NotEqual, regT0, regT2));

    emitPutVirtualRegister(result);
}

void JIT::emitSlow_op_get_callee(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_get_callee);
    slowPathCall.call();
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
}

void JIT::emit_op_create_this(Instruction* currentInstruction)
{
    int callee = currentInstruction[2].u.operand;
    RegisterID calleeReg = regT0;
    RegisterID resultReg = regT0;
    RegisterID allocatorReg = regT1;
    RegisterID structureReg = regT2;
    RegisterID scratchReg = regT3;

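    // The callee's ObjectAllocationProfile caches the MarkedAllocator and
    // Structure to use for |new|. A null allocator means the profile has not
    // been filled in yet and is the first slow case below; allocation
    // failure is the second.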
    emitGetVirtualRegister(callee, calleeReg);
    loadPtr(Address(calleeReg, JSFunction::offsetOfAllocationProfile() + ObjectAllocationProfile::offsetOfAllocator()), allocatorReg);
    loadPtr(Address(calleeReg, JSFunction::offsetOfAllocationProfile() + ObjectAllocationProfile::offsetOfStructure()), structureReg);
    addSlowCase(branchTestPtr(Zero, allocatorReg));

    emitAllocateJSObject(allocatorReg, structureReg, resultReg, scratchReg);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_create_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter); // doesn't have an allocation profile
    linkSlowCase(iter); // allocation failed

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_create_this);
    slowPathCall.call();
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
}

void JIT::emit_op_profile_will_call(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_profile_will_call);
    stubCall.addArgument(currentInstruction[1].u.operand, regT1);
    stubCall.call();
}

void JIT::emit_op_profile_did_call(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_profile_did_call);
    stubCall.addArgument(currentInstruction[1].u.operand, regT1);
    stubCall.call();
}


// Slow cases

void JIT::emitSlow_op_to_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_to_this);
    slowPathCall.call();
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
}

void JIT::emitSlow_op_to_primitive(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_to_primitive);
    slowPathCall.call();
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
}

void JIT::emitSlow_op_not(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_not);
    slowPathCall.call();
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
}

void JIT::emitSlow_op_jfalse(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_jtrue);
    stubCall.addArgument(regT0);
    stubCall.call();
    emitJumpSlowToHot(branchTest32(Zero, regT0), currentInstruction[2].u.operand); // inverted!
}

void JIT::emitSlow_op_jtrue(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_jtrue);
    stubCall.addArgument(regT0);
    stubCall.call();
    emitJumpSlowToHot(branchTest32(NonZero, regT0), currentInstruction[2].u.operand);
}

void JIT::emitSlow_op_bitxor(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_bitxor);
    slowPathCall.call();
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
}

void JIT::emitSlow_op_bitor(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_bitor);
    slowPathCall.call();
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
}

void JIT::emitSlow_op_eq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_eq);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call();
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_neq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_eq);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call();
    xor32(TrustedImm32(0x1), regT0);
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_stricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_stricteq);
    slowPathCall.call();
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
}

void JIT::emitSlow_op_nstricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_nstricteq);
    slowPathCall.call();
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
}

void JIT::emitSlow_op_check_has_instance(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;
    int baseVal = currentInstruction[3].u.operand;

    linkSlowCaseIfNotJSCell(iter, baseVal);
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_check_has_instance);
    stubCall.addArgument(value, regT2);
    stubCall.addArgument(baseVal, regT2);
    stubCall.call(dst);

    emitJumpSlowToHot(jump(), currentInstruction[4].u.operand);
}

void JIT::emitSlow_op_instanceof(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;
    int proto = currentInstruction[3].u.operand;

    linkSlowCaseIfNotJSCell(iter, value);
    linkSlowCaseIfNotJSCell(iter, proto);
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_instanceof);
    stubCall.addArgument(value, regT2);
    stubCall.addArgument(proto, regT2);
    stubCall.call(dst);
}

void JIT::emitSlow_op_to_number(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_to_number);
    slowPathCall.call();
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
}

void JIT::emit_op_get_arguments_length(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int argumentsRegister = currentInstruction[2].u.operand;
    addSlowCase(branchTest64(NonZero, addressFor(argumentsRegister)));
    emitGetFromCallFrameHeader32(JSStack::ArgumentCount, regT0);
    sub32(TrustedImm32(1), regT0);
    emitFastArithReTagImmediate(regT0, regT0);
    emitPutVirtualRegister(dst, regT0);
}

void JIT::emitSlow_op_get_arguments_length(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    const Identifier* ident = &(m_codeBlock->identifier(currentInstruction[3].u.operand));

    emitGetVirtualRegister(base, regT0);
    JITStubCall stubCall(this, cti_op_get_by_id_generic);
    stubCall.addArgument(regT0);
    stubCall.addArgument(TrustedImmPtr(ident));
    stubCall.call(dst);
}

void JIT::emit_op_get_argument_by_val(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int argumentsRegister = currentInstruction[2].u.operand;
    int property = currentInstruction[3].u.operand;
    addSlowCase(branchTest64(NonZero, addressFor(argumentsRegister)));
    emitGetVirtualRegister(property, regT1);
    addSlowCase(emitJumpIfNotImmediateInteger(regT1));
    add32(TrustedImm32(1), regT1);
    // regT1 now contains the integer index of the argument we want, including this.
    emitGetFromCallFrameHeader32(JSStack::ArgumentCount, regT2);
    addSlowCase(branch32(AboveOrEqual, regT1, regT2));

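    // The index in regT1 already includes |this|, so the argument can be
    // loaded directly from the call frame starting at thisArgumentOffset().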
    signExtend32ToPtr(regT1, regT1);
    load64(BaseIndex(callFrameRegister, regT1, TimesEight, CallFrame::thisArgumentOffset() * static_cast<int>(sizeof(Register))), regT0);
    emitValueProfilingSite(regT4);
    emitPutVirtualRegister(dst, regT0);
}

void JIT::emitSlow_op_get_argument_by_val(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;
    int arguments = currentInstruction[2].u.operand;
    int property = currentInstruction[3].u.operand;

    linkSlowCase(iter);
    Jump skipArgumentsCreation = jump();

    linkSlowCase(iter);
    linkSlowCase(iter);
    JITStubCall(this, cti_op_create_arguments).call();
    emitPutVirtualRegister(arguments);
    emitPutVirtualRegister(unmodifiedArgumentsRegister(VirtualRegister(arguments)));

    skipArgumentsCreation.link(this);
    JITStubCall stubCall(this, cti_op_get_by_val_generic);
    stubCall.addArgument(arguments, regT2);
    stubCall.addArgument(property, regT2);
    stubCall.callWithValueProfiling(dst);
}

#endif // USE(JSVALUE64)

void JIT::emit_op_loop_hint(Instruction*)
{
    // Emit the JIT optimization check:
    if (canBeOptimized()) {
        if (Options::enableOSREntryInLoops()) {
            addSlowCase(branchAdd32(PositiveOrZero, TrustedImm32(Options::executionCounterIncrementForLoop()),
                AbsoluteAddress(m_codeBlock->addressOfJITExecuteCounter())));
        } else {
            // Add with saturation.
            move(TrustedImmPtr(m_codeBlock->addressOfJITExecuteCounter()), regT3);
            load32(regT3, regT2);
            Jump dontAdd = branch32(
                GreaterThan, regT2,
                TrustedImm32(std::numeric_limits<int32_t>::max() - Options::executionCounterIncrementForLoop()));
            add32(TrustedImm32(Options::executionCounterIncrementForLoop()), regT2);
            store32(regT2, regT3);
            dontAdd.link(this);
        }
    }

    // Emit the watchdog timer check:
    if (m_vm->watchdog.isEnabled())
        addSlowCase(branchTest8(NonZero, AbsoluteAddress(m_vm->watchdog.timerDidFireAddress())));
}

void JIT::emitSlow_op_loop_hint(Instruction*, Vector<SlowCaseEntry>::iterator& iter)
{
#if ENABLE(DFG_JIT)
    // Emit the slow path for the JIT optimization check:
    if (canBeOptimized() && Options::enableOSREntryInLoops()) {
        linkSlowCase(iter);

        JITStubCall stubCall(this, cti_optimize);
        stubCall.addArgument(TrustedImm32(m_bytecodeOffset));
        stubCall.call();

        emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_loop_hint));
    }
#endif

    // Emit the slow path of the watchdog timer check:
    if (m_vm->watchdog.isEnabled()) {
        linkSlowCase(iter);

        JITStubCall stubCall(this, cti_handle_watchdog_timer);
        stubCall.call();

        emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_loop_hint));
    }
}

void JIT::emit_op_new_regexp(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_new_regexp);
    stubCall.addArgument(TrustedImmPtr(m_codeBlock->regexp(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_new_func(Instruction* currentInstruction)
{
    Jump lazyJump;
    int dst = currentInstruction[1].u.operand;
    if (currentInstruction[3].u.operand) {
#if USE(JSVALUE32_64)
        lazyJump = branch32(NotEqual, tagFor(dst), TrustedImm32(JSValue::EmptyValueTag));
#else
        lazyJump = branchTest64(NonZero, addressFor(dst));
#endif
    }

    JITStubCall stubCall(this, cti_op_new_func);
    stubCall.addArgument(TrustedImmPtr(m_codeBlock->functionDecl(currentInstruction[2].u.operand)));
    stubCall.call(dst);

    if (currentInstruction[3].u.operand) {
#if USE(JSVALUE32_64)
        unmap();
#else
        killLastResultRegister();
#endif
        lazyJump.link(this);
    }
}

void JIT::emit_op_new_func_exp(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_new_func_exp);
    stubCall.addArgument(TrustedImmPtr(m_codeBlock->functionExpr(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_new_array(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_new_array);
    stubCall.addArgument(TrustedImm32(currentInstruction[2].u.operand));
    stubCall.addArgument(TrustedImm32(currentInstruction[3].u.operand));
    stubCall.addArgument(TrustedImmPtr(currentInstruction[4].u.arrayAllocationProfile));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_new_array_with_size(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_new_array_with_size);
#if USE(JSVALUE64)
    stubCall.addArgument(currentInstruction[2].u.operand, regT2);
#else
    stubCall.addArgument(currentInstruction[2].u.operand);
#endif
    stubCall.addArgument(TrustedImmPtr(currentInstruction[3].u.arrayAllocationProfile));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_new_array_buffer(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_new_array_buffer);
    stubCall.addArgument(TrustedImm32(currentInstruction[2].u.operand));
    stubCall.addArgument(TrustedImm32(currentInstruction[3].u.operand));
    stubCall.addArgument(TrustedImmPtr(currentInstruction[4].u.arrayAllocationProfile));
    stubCall.call(currentInstruction[1].u.operand);
}

} // namespace JSC

#endif // ENABLE(JIT)