Renaming SpecInt32, SpecInt52, MachineInt to SpecInt32Only, SpecInt52Only, AnyInt.
[WebKit-https.git] / Source / JavaScriptCore / jit / JITOpcodes32_64.cpp
1 /*
2  * Copyright (C) 2009, 2012-2016 Apple Inc. All rights reserved.
3  * Copyright (C) 2010 Patrick Gansterer <paroga@paroga.com>
4  *
5  * Redistribution and use in source and binary forms, with or without
6  * modification, are permitted provided that the following conditions
7  * are met:
8  * 1. Redistributions of source code must retain the above copyright
9  *    notice, this list of conditions and the following disclaimer.
10  * 2. Redistributions in binary form must reproduce the above copyright
11  *    notice, this list of conditions and the following disclaimer in the
12  *    documentation and/or other materials provided with the distribution.
13  *
14  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
15  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
16  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
17  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
18  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
19  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
20  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
21  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
22  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
23  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
24  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
25  */
26
27 #include "config.h"
28
29 #if ENABLE(JIT)
30 #if USE(JSVALUE32_64)
31 #include "JIT.h"
32
33 #include "CCallHelpers.h"
34 #include "Debugger.h"
35 #include "Exception.h"
36 #include "JITInlines.h"
37 #include "JSArray.h"
38 #include "JSCell.h"
39 #include "JSEnvironmentRecord.h"
40 #include "JSFunction.h"
41 #include "JSPropertyNameEnumerator.h"
42 #include "LinkBuffer.h"
43 #include "MaxFrameExtentForSlowPathCall.h"
44 #include "SlowPathCall.h"
45 #include "TypeProfilerLog.h"
46 #include "VirtualRegister.h"
47
48 namespace JSC {
49
// Builds the generic thunk used to call a host (native C/C++) function from
// JIT code: set up the call frame, perform the platform-specific call, check
// for a pending VM exception, and either return normally or hand off to the
// exception handler. Returns the finalized code ref; `func` is linked in as
// the native call target.
JIT::CodeRef JIT::privateCompileCTINativeCall(VM* vm, NativeFunction func)
{
    // FIXME: This should be able to log ShadowChicken prologue packets.
    // https://bugs.webkit.org/show_bug.cgi?id=155689
    
    Call nativeCall;

    emitFunctionPrologue();
    emitPutToCallFrameHeader(0, JSStack::CodeBlock);
    // Publish the frame so the VM can walk the stack from native code.
    storePtr(callFrameRegister, &m_vm->topCallFrame);

#if CPU(X86)
    // Calling convention:      f(ecx, edx, ...);
    // Host function signature: f(ExecState*);
    move(callFrameRegister, X86Registers::ecx);

    subPtr(TrustedImm32(8), stackPointerRegister); // Align stack for call.
    storePtr(X86Registers::ecx, Address(stackPointerRegister));

    // call the function
    nativeCall = call();

    addPtr(TrustedImm32(8), stackPointerRegister);

#elif CPU(ARM) || CPU(SH4) || CPU(MIPS)
#if CPU(MIPS)
    // Allocate stack space for (unused) 16 bytes (8-byte aligned) for 4 arguments.
    subPtr(TrustedImm32(16), stackPointerRegister);
#endif

    // Calling convention is f(argumentGPR0, argumentGPR1, ...).
    // Host function signature is f(ExecState*).
    move(callFrameRegister, argumentGPR0);

    emitGetFromCallFrameHeaderPtr(JSStack::Callee, argumentGPR1);
    loadPtr(Address(argumentGPR1, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);

    // call the function
    nativeCall = call();

#if CPU(MIPS)
    // Restore stack space
    addPtr(TrustedImm32(16), stackPointerRegister);
#endif

    restoreReturnAddressBeforeReturn(regT3);
#else
#error "JIT not supported on this platform."
    abortWithReason(JITNotSupported);
#endif // CPU(X86)

    // Check for an exception
    Jump sawException = branch32(NotEqual, AbsoluteAddress(vm->addressOfException()), TrustedImm32(0));

    emitFunctionEpilogue();
    // Return.
    ret();

    // Handle an exception
    sawException.link(this);

    storePtr(callFrameRegister, &m_vm->topCallFrame);

#if CPU(X86)
    // NOTE(review): -4 adjustment plus the push keeps the stack aligned for the
    // cdecl call below — confirm against the x86 ABI expectations of this port.
    addPtr(TrustedImm32(-4), stackPointerRegister);
    move(callFrameRegister, X86Registers::ecx);
    push(X86Registers::ecx);
#else
    move(callFrameRegister, argumentGPR0);
#endif
    move(TrustedImmPtr(FunctionPtr(operationVMHandleException).value()), regT3);
    call(regT3);

#if CPU(X86)
    addPtr(TrustedImm32(8), stackPointerRegister);
#endif

    jumpToExceptionHandler();

    // All trampolines constructed! copy the code, link up calls, and set the pointers on the Machine object.
    LinkBuffer patchBuffer(*m_vm, *this, GLOBAL_THUNK_ID);

    patchBuffer.link(nativeCall, FunctionPtr(func));
    return FINALIZE_CODE(patchBuffer, ("JIT CTI native call"));
}
135
136 void JIT::emit_op_mov(Instruction* currentInstruction)
137 {
138     int dst = currentInstruction[1].u.operand;
139     int src = currentInstruction[2].u.operand;
140     
141     if (m_codeBlock->isConstantRegisterIndex(src))
142         emitStore(dst, getConstantOperand(src));
143     else {
144         emitLoad(src, regT1, regT0);
145         emitStore(dst, regT1, regT0);
146     }
147 }
148
// end: return from the current function. Loads the return value into
// (regT1, returnValueGPR), restores callee saves, and emits the epilogue/ret.
void JIT::emit_op_end(Instruction* currentInstruction)
{
    // emitLoad would be wrong if the payload register aliased the frame pointer.
    ASSERT(returnValueGPR != callFrameRegister);
    emitLoad(currentInstruction[1].u.operand, regT1, returnValueGPR);
    emitRestoreCalleeSaves();
    emitFunctionEpilogue();
    ret();
}
157
158 void JIT::emit_op_jmp(Instruction* currentInstruction)
159 {
160     unsigned target = currentInstruction[1].u.operand;
161     addJump(jump(), target);
162 }
163
// new_object: inline-allocate a JSFinalObject with the structure recorded in
// the allocation profile. Allocation failure presumably takes a slow case
// registered inside emitAllocateJSObject (handled by emitSlow_op_new_object)
// — confirm in JITInlines.
void JIT::emit_op_new_object(Instruction* currentInstruction)
{
    Structure* structure = currentInstruction[3].u.objectAllocationProfile->structure();
    size_t allocationSize = JSFinalObject::allocationSize(structure->inlineCapacity());
    MarkedAllocator* allocator = &m_vm->heap.allocatorForObjectWithoutDestructor(allocationSize);

    RegisterID resultReg = returnValueGPR;
    RegisterID allocatorReg = regT1;
    RegisterID scratchReg = regT3;

    move(TrustedImmPtr(allocator), allocatorReg);
    emitAllocateJSObject(allocatorReg, TrustedImmPtr(structure), resultReg, scratchReg);
    emitStoreCell(currentInstruction[1].u.operand, resultReg);
}
178
// Slow path for new_object: the inline allocation failed, so allocate through
// operationNewObject with the same profiled structure.
void JIT::emitSlow_op_new_object(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    int dst = currentInstruction[1].u.operand;
    Structure* structure = currentInstruction[3].u.objectAllocationProfile->structure();
    callOperation(operationNewObject, structure);
    emitStoreCell(dst, returnValueGPR);
}
187
// overrides_has_instance dst, constructor, hasInstanceValue:
// dst = false only when hasInstanceValue is the default
// Function.prototype[Symbol.hasInstance] AND the constructor implements
// default hasInstance behavior; every other combination stores true.
void JIT::emit_op_overrides_has_instance(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int constructor = currentInstruction[2].u.operand;
    int hasInstanceValue = currentInstruction[3].u.operand;

    emitLoadPayload(hasInstanceValue, regT0);
    // We don't jump if we know what Symbol.hasInstance would do.
    Jump hasInstanceValueNotCell = emitJumpIfNotJSCell(hasInstanceValue);
    Jump customhasInstanceValue = branchPtr(NotEqual, regT0, TrustedImmPtr(m_codeBlock->globalObject()->functionProtoHasInstanceSymbolFunction()));

    // We know that constructor is an object from the way bytecode is emitted for instanceof expressions.
    emitLoadPayload(constructor, regT0);

    // Check that constructor 'ImplementsDefaultHasInstance' i.e. the object is not a C-API user nor a bound function.
    // test8(Zero, ...) sets regT0 to 1 exactly when the flag bit is clear.
    test8(Zero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(ImplementsDefaultHasInstance), regT0);
    Jump done = jump();

    // A non-cell or custom hasInstance value always counts as overriding.
    hasInstanceValueNotCell.link(this);
    customhasInstanceValue.link(this);
    move(TrustedImm32(1), regT0);

    done.link(this);
    emitStoreBool(dst, regT0);

}
214
// instanceof dst, value, proto: walk value's prototype chain looking for
// proto. Non-cell operands, a non-object prototype, and proxies bail to the
// slow path (linked in emitSlow_op_instanceof, in this order).
void JIT::emit_op_instanceof(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;
    int proto = currentInstruction[3].u.operand;

    // Load the operands into registers.
    // We use regT0 for baseVal since we will be done with this first, and we can then use it for the result.
    emitLoadPayload(value, regT2);
    emitLoadPayload(proto, regT1);

    // Check that proto are cells. baseVal must be a cell - this is checked by the get_by_id for Symbol.hasInstance.
    emitJumpSlowCaseIfNotJSCell(value);
    emitJumpSlowCaseIfNotJSCell(proto);
    
    // Check that prototype is an object
    addSlowCase(emitJumpIfCellNotObject(regT1));

    // Optimistically load the result true, and start looping.
    // Initially, regT1 still contains proto and regT2 still contains value.
    // As we loop regT2 will be updated with its prototype, recursively walking the prototype chain.
    move(TrustedImm32(1), regT0);
    Label loop(this);

    // Proxies have custom [[GetPrototypeOf]]; let the slow path handle them.
    addSlowCase(branch8(Equal, Address(regT2, JSCell::typeInfoTypeOffset()), TrustedImm32(ProxyObjectType)));

    // Load the prototype of the cell in regT2.  If this is equal to regT1 - WIN!
    // Otherwise, check if we've hit null - if we have then drop out of the loop, if not go again.
    loadPtr(Address(regT2, JSCell::structureIDOffset()), regT2);
    load32(Address(regT2, Structure::prototypeOffset() + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT2);
    Jump isInstance = branchPtr(Equal, regT2, regT1);
    // A zero payload terminates the chain (end of prototypes).
    branchTest32(NonZero, regT2).linkTo(loop, this);

    // We get here either by dropping out of the loop, or if value was not an Object.  Result is false.
    move(TrustedImm32(0), regT0);

    // isInstance jumps right down to here, to skip setting the result to false (it has already set true).
    isInstance.link(this);
    emitStoreBool(dst, regT0);
}
255
256 void JIT::emit_op_instanceof_custom(Instruction*)
257 {
258     // This always goes to slow path since we expect it to be rare.
259     addSlowCase(jump());
260 }
261
// Slow path for instanceof. The link order mirrors the addSlowCase order in
// emit_op_instanceof: value not cell, proto not cell, proto not object,
// proxy encountered while walking the chain.
void JIT::emitSlow_op_instanceof(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;
    int proto = currentInstruction[3].u.operand;

    linkSlowCaseIfNotJSCell(iter, value);
    linkSlowCaseIfNotJSCell(iter, proto);
    linkSlowCase(iter); // proto is not an object
    linkSlowCase(iter); // proxy in the prototype chain

    emitLoad(value, regT1, regT0);
    emitLoad(proto, regT3, regT2);
    callOperation(operationInstanceOf, dst, regT1, regT0, regT3, regT2);
}
277
// Slow path for instanceof_custom (always taken; the fast path is just a
// jump). Calls out with value, the constructor cell, and the hasInstance
// value, then stores the boolean result.
void JIT::emitSlow_op_instanceof_custom(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;
    int constructor = currentInstruction[3].u.operand;
    int hasInstanceValue = currentInstruction[4].u.operand;

    linkSlowCase(iter);

    emitLoad(value, regT1, regT0);
    // Constructor is known to be a cell, so only its payload is needed.
    emitLoadPayload(constructor, regT2);
    emitLoad(hasInstanceValue, regT4, regT3);
    callOperation(operationInstanceOfCustom, regT1, regT0, regT2, regT4, regT3);
    emitStoreBool(dst, returnValueGPR);
}
293
// is_undefined dst, value: dst = (value === undefined), including cells that
// masquerade as undefined when observed from this code block's global object.
void JIT::emit_op_is_undefined(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;
    
    emitLoad(value, regT1, regT0);
    Jump isCell = branch32(Equal, regT1, TrustedImm32(JSValue::CellTag));

    // Non-cell: true exactly when the tag is UndefinedTag.
    compare32(Equal, regT1, TrustedImm32(JSValue::UndefinedTag), regT0);
    Jump done = jump();
    
    isCell.link(this);
    Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    // Ordinary cells are never undefined.
    move(TrustedImm32(0), regT0);
    Jump notMasqueradesAsUndefined = jump();
    
    isMasqueradesAsUndefined.link(this);
    // Masquerading cells read as undefined only from their own global object.
    loadPtr(Address(regT0, JSCell::structureIDOffset()), regT1);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    loadPtr(Address(regT1, Structure::globalObjectOffset()), regT1);
    compare32(Equal, regT0, regT1, regT0);

    notMasqueradesAsUndefined.link(this);
    done.link(this);
    emitStoreBool(dst, regT0);
}
320
321 void JIT::emit_op_is_boolean(Instruction* currentInstruction)
322 {
323     int dst = currentInstruction[1].u.operand;
324     int value = currentInstruction[2].u.operand;
325     
326     emitLoadTag(value, regT0);
327     compare32(Equal, regT0, TrustedImm32(JSValue::BooleanTag), regT0);
328     emitStoreBool(dst, regT0);
329 }
330
// is_number dst, value: dst = value is an int32 or a double.
void JIT::emit_op_is_number(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;
    
    emitLoadTag(value, regT0);
    // Int32Tag is -1 (see the ASSERTs in emit_op_jfalse/jtrue), so adding 1
    // wraps it to 0; the unsigned Below then accepts Int32Tag plus every tag
    // <= LowestTag, i.e. the double encodings for live values.
    add32(TrustedImm32(1), regT0);
    compare32(Below, regT0, TrustedImm32(JSValue::LowestTag + 1), regT0);
    emitStoreBool(dst, regT0);
}
341
// is_string dst, value: dst = value is a cell whose type is StringType.
void JIT::emit_op_is_string(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;
    
    emitLoad(value, regT1, regT0);
    Jump isNotCell = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));
    
    compare8(Equal, Address(regT0, JSCell::typeInfoTypeOffset()), TrustedImm32(StringType), regT0);
    Jump done = jump();
    
    // Non-cells are never strings.
    isNotCell.link(this);
    move(TrustedImm32(0), regT0);
    
    done.link(this);
    emitStoreBool(dst, regT0);
}
359
// is_object dst, value: dst = value is a cell whose type is >= ObjectType
// (strings/symbols sit below ObjectType in the type ordering).
void JIT::emit_op_is_object(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitLoad(value, regT1, regT0);
    Jump isNotCell = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));

    compare8(AboveOrEqual, Address(regT0, JSCell::typeInfoTypeOffset()), TrustedImm32(ObjectType), regT0);
    Jump done = jump();

    // Non-cells are never objects.
    isNotCell.link(this);
    move(TrustedImm32(0), regT0);

    done.link(this);
    emitStoreBool(dst, regT0);
}
377
// to_primitive dst, src: non-object values are already primitive and are
// copied through unchanged; objects take the slow path.
void JIT::emit_op_to_primitive(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);

    Jump isImm = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));
    addSlowCase(emitJumpIfCellObject(regT0));
    isImm.link(this);

    // In-place conversion (dst == src) needs no store on the fast path.
    if (dst != src)
        emitStore(dst, regT1, regT0);
}
392
393 void JIT::emitSlow_op_to_primitive(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
394 {
395     linkSlowCase(iter);
396
397     JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_to_primitive);
398     slowPathCall.call();
399 }
400
// set_function_name func, name: assign a computed name to a function cell via
// operationSetFunctionName. The function operand is known to be a cell, so
// only its payload is loaded.
void JIT::emit_op_set_function_name(Instruction* currentInstruction)
{
    int func = currentInstruction[1].u.operand;
    int name = currentInstruction[2].u.operand;
    emitLoadPayload(func, regT1);
    emitLoad(name, regT3, regT2);
    callOperation(operationSetFunctionName, regT1, regT3, regT2);
}
409
410 void JIT::emit_op_strcat(Instruction* currentInstruction)
411 {
412     JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_strcat);
413     slowPathCall.call();
414 }
415
416 void JIT::emit_op_not(Instruction* currentInstruction)
417 {
418     int dst = currentInstruction[1].u.operand;
419     int src = currentInstruction[2].u.operand;
420
421     emitLoadTag(src, regT0);
422
423     emitLoad(src, regT1, regT0);
424     addSlowCase(branch32(NotEqual, regT1, TrustedImm32(JSValue::BooleanTag)));
425     xor32(TrustedImm32(1), regT0);
426
427     emitStoreBool(dst, regT0, (dst == src));
428 }
429
430 void JIT::emitSlow_op_not(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
431 {
432     linkSlowCase(iter);
433
434     JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_not);
435     slowPathCall.call();
436 }
437
// jfalse cond, target: branch when cond is falsy. Fast path handles booleans
// and int32s (the two largest tag values, per the ASSERT); doubles and cells
// fall to the slow path.
void JIT::emit_op_jfalse(Instruction* currentInstruction)
{
    int cond = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitLoad(cond, regT1, regT0);

    // Boolean and Int32 are the top two tag values, so one unsigned compare
    // rejects everything else.
    ASSERT((JSValue::BooleanTag + 1 == JSValue::Int32Tag) && !(JSValue::Int32Tag + 1));
    addSlowCase(branch32(Below, regT1, TrustedImm32(JSValue::BooleanTag)));
    // Both booleans and int32s are falsy exactly when the payload is zero.
    addJump(branchTest32(Zero, regT0), target);
}
449
// Slow path for jfalse: handle doubles inline when FP is available (falsy =
// zero or NaN), otherwise convert the value to boolean via a C call.
void JIT::emitSlow_op_jfalse(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int cond = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    linkSlowCase(iter);

    if (supportsFloatingPoint()) {
        // regT1 contains the tag from the hot path.
        Jump notNumber = branch32(Above, regT1, TrustedImm32(JSValue::LowestTag));

        emitLoadDouble(cond, fpRegT0);
        emitJumpSlowToHot(branchDoubleZeroOrNaN(fpRegT0, fpRegT1), target);
        // Truthy double: resume after the jfalse opcode.
        emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_jfalse));

        notNumber.link(this);
    }

    callOperation(operationConvertJSValueToBoolean, regT1, regT0);
    emitJumpSlowToHot(branchTest32(Zero, returnValueGPR), target); // Inverted.
}
471
// jtrue cond, target: branch when cond is truthy. Mirror image of
// emit_op_jfalse — fast path covers booleans and int32s only.
void JIT::emit_op_jtrue(Instruction* currentInstruction)
{
    int cond = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitLoad(cond, regT1, regT0);

    // Boolean and Int32 are the top two tag values, so one unsigned compare
    // rejects everything else.
    ASSERT((JSValue::BooleanTag + 1 == JSValue::Int32Tag) && !(JSValue::Int32Tag + 1));
    addSlowCase(branch32(Below, regT1, TrustedImm32(JSValue::BooleanTag)));
    addJump(branchTest32(NonZero, regT0), target);
}
483
// Slow path for jtrue: doubles are truthy when non-zero and not NaN; anything
// else is converted to boolean via a C call.
void JIT::emitSlow_op_jtrue(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int cond = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    linkSlowCase(iter);

    if (supportsFloatingPoint()) {
        // regT1 contains the tag from the hot path.
        Jump notNumber = branch32(Above, regT1, TrustedImm32(JSValue::LowestTag));

        emitLoadDouble(cond, fpRegT0);
        emitJumpSlowToHot(branchDoubleNonZero(fpRegT0, fpRegT1), target);
        // Falsy double: resume after the jtrue opcode.
        emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_jtrue));

        notNumber.link(this);
    }

    callOperation(operationConvertJSValueToBoolean, regT1, regT0);
    emitJumpSlowToHot(branchTest32(NonZero, returnValueGPR), target);
}
505
// jeq_null src, target: branch when src == null (loose equality): undefined,
// null, or a cell masquerading as undefined from this global object.
void JIT::emit_op_jeq_null(Instruction* currentInstruction)
{
    int src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);

    Jump isImmediate = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));

    // Cells equal null only if they masquerade as undefined AND belong to the
    // same global object as this code block.
    Jump isNotMasqueradesAsUndefined = branchTest8(Zero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    loadPtr(Address(regT0, JSCell::structureIDOffset()), regT2);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    addJump(branchPtr(Equal, Address(regT2, Structure::globalObjectOffset()), regT0), target);
    Jump masqueradesGlobalObjectIsForeign = jump();

    // Now handle the immediate cases - undefined & null
    isImmediate.link(this);
    // Or-ing with 1 folds UndefinedTag and NullTag (adjacent values, NullTag
    // odd) into a single compare.
    ASSERT((JSValue::UndefinedTag + 1 == JSValue::NullTag) && (JSValue::NullTag & 0x1));
    or32(TrustedImm32(1), regT1);
    addJump(branch32(Equal, regT1, TrustedImm32(JSValue::NullTag)), target);

    isNotMasqueradesAsUndefined.link(this);
    masqueradesGlobalObjectIsForeign.link(this);
}
530
// jneq_null src, target: branch when src != null (loose inequality). Inverse
// of emit_op_jeq_null.
void JIT::emit_op_jneq_null(Instruction* currentInstruction)
{
    int src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);

    Jump isImmediate = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));

    // Ordinary cells (not masquerading) are never null -> take the branch.
    addJump(branchTest8(Zero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined)), target);
    // Masquerading cells are non-null when observed from a foreign global.
    loadPtr(Address(regT0, JSCell::structureIDOffset()), regT2);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    addJump(branchPtr(NotEqual, Address(regT2, Structure::globalObjectOffset()), regT0), target);
    Jump wasNotImmediate = jump();

    // Now handle the immediate cases - undefined & null
    isImmediate.link(this);

    // Or-ing with 1 folds UndefinedTag and NullTag into a single compare.
    ASSERT((JSValue::UndefinedTag + 1 == JSValue::NullTag) && (JSValue::NullTag & 0x1));
    or32(TrustedImm32(1), regT1);
    addJump(branch32(NotEqual, regT1, TrustedImm32(JSValue::NullTag)), target);

    wasNotImmediate.link(this);
}
555
// jneq_ptr src, ptr, target: branch unless src is exactly the given special
// cell pointer (wrong tag or wrong payload both take the branch).
void JIT::emit_op_jneq_ptr(Instruction* currentInstruction)
{
    int src = currentInstruction[1].u.operand;
    Special::Pointer ptr = currentInstruction[2].u.specialPointer;
    unsigned target = currentInstruction[3].u.operand;

    emitLoad(src, regT1, regT0);
    addJump(branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag)), target);
    addJump(branchPtr(NotEqual, regT0, TrustedImmPtr(actualPointerFor(m_codeBlock, ptr))), target);
}
566
// eq dst, src1, src2: loose equality fast path — only handles operands with
// identical non-cell, non-double tags, where payload equality suffices. Slow
// cases (in order): tags differ, both cells, double tags.
void JIT::emit_op_eq(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src1 = currentInstruction[2].u.operand;
    int src2 = currentInstruction[3].u.operand;

    emitLoad2(src1, regT1, regT0, src2, regT3, regT2);
    addSlowCase(branch32(NotEqual, regT1, regT3));
    addSlowCase(branch32(Equal, regT1, TrustedImm32(JSValue::CellTag)));
    addSlowCase(branch32(Below, regT1, TrustedImm32(JSValue::LowestTag)));

    compare32(Equal, regT0, regT2, regT0);

    emitStoreBool(dst, regT0);
}
582
// Slow path for eq: string-vs-string gets a dedicated comparison; all other
// slow combinations (mismatched tags, non-string cells, doubles) go through
// the generic operationCompareEq.
void JIT::emitSlow_op_eq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;
    int op1 = currentInstruction[2].u.operand;
    int op2 = currentInstruction[3].u.operand;

    JumpList storeResult;
    JumpList genericCase;

    genericCase.append(getSlowCase(iter)); // tags not equal

    linkSlowCase(iter); // tags equal and JSCell
    // Payloads from the fast path are still in regT0/regT2; both must be
    // strings to use the specialized comparison.
    genericCase.append(branchPtr(NotEqual, Address(regT0, JSCell::structureIDOffset()), TrustedImmPtr(m_vm->stringStructure.get())));
    genericCase.append(branchPtr(NotEqual, Address(regT2, JSCell::structureIDOffset()), TrustedImmPtr(m_vm->stringStructure.get())));

    // String case.
    callOperation(operationCompareStringEq, regT0, regT2);
    storeResult.append(jump());

    // Generic case.
    genericCase.append(getSlowCase(iter)); // doubles
    genericCase.link(this);
    // Reload both operands: the string checks above may have clobbered them.
    emitLoad(op1, regT1, regT0);
    emitLoad(op2, regT3, regT2);
    callOperation(operationCompareEq, regT1, regT0, regT3, regT2);

    storeResult.link(this);
    emitStoreBool(dst, returnValueGPR);
}
612
// neq dst, src1, src2: loose inequality fast path; identical structure to
// emit_op_eq with the final compare inverted.
void JIT::emit_op_neq(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src1 = currentInstruction[2].u.operand;
    int src2 = currentInstruction[3].u.operand;

    emitLoad2(src1, regT1, regT0, src2, regT3, regT2);
    addSlowCase(branch32(NotEqual, regT1, regT3));
    addSlowCase(branch32(Equal, regT1, TrustedImm32(JSValue::CellTag)));
    addSlowCase(branch32(Below, regT1, TrustedImm32(JSValue::LowestTag)));

    compare32(NotEqual, regT0, regT2, regT0);

    emitStoreBool(dst, regT0);
}
628
// Slow path for neq: computes equality like emitSlow_op_eq, then inverts the
// result before storing. Note the generic case reuses the operand registers
// still live from the fast path instead of reloading.
void JIT::emitSlow_op_neq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;

    JumpList storeResult;
    JumpList genericCase;

    genericCase.append(getSlowCase(iter)); // tags not equal

    linkSlowCase(iter); // tags equal and JSCell
    genericCase.append(branchPtr(NotEqual, Address(regT0, JSCell::structureIDOffset()), TrustedImmPtr(m_vm->stringStructure.get())));
    genericCase.append(branchPtr(NotEqual, Address(regT2, JSCell::structureIDOffset()), TrustedImmPtr(m_vm->stringStructure.get())));

    // String case.
    callOperation(operationCompareStringEq, regT0, regT2);
    storeResult.append(jump());

    // Generic case.
    genericCase.append(getSlowCase(iter)); // doubles
    genericCase.link(this);
    callOperation(operationCompareEq, regT1, regT0, regT3, regT2);

    storeResult.link(this);
    // Invert the "equal" answer to get "not equal".
    xor32(TrustedImm32(0x1), returnValueGPR);
    emitStoreBool(dst, returnValueGPR);
}
655
// Shared fast path for stricteq/nstricteq: handles same-tag, non-double
// operands that are not both strings/symbols, where a payload compare is
// sufficient. Slow cases (in order): tags differ, double tags, both
// non-object cells.
void JIT::compileOpStrictEq(Instruction* currentInstruction, CompileOpStrictEqType type)
{
    int dst = currentInstruction[1].u.operand;
    int src1 = currentInstruction[2].u.operand;
    int src2 = currentInstruction[3].u.operand;

    emitLoad2(src1, regT1, regT0, src2, regT3, regT2);

    // Bail if the tags differ, or are double.
    addSlowCase(branch32(NotEqual, regT1, regT3));
    addSlowCase(branch32(Below, regT1, TrustedImm32(JSValue::LowestTag)));

    // Jump to a slow case if both are strings or symbols (non object).
    Jump notCell = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));
    Jump firstIsObject = emitJumpIfCellObject(regT0);
    addSlowCase(emitJumpIfCellNotObject(regT2));
    notCell.link(this);
    firstIsObject.link(this);

    // Simply compare the payloads.
    if (type == OpStrictEq)
        compare32(Equal, regT0, regT2, regT0);
    else
        compare32(NotEqual, regT0, regT2, regT0);

    emitStoreBool(dst, regT0);
}
683
// stricteq: shares its fast path with nstricteq via compileOpStrictEq.
void JIT::emit_op_stricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpStrictEq);
}
688
// Slow path for stricteq. The three links match the addSlowCase order in
// compileOpStrictEq: tags differ, double tags, both non-object cells.
void JIT::emitSlow_op_stricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_stricteq);
    slowPathCall.call();
}
698
// nstricteq: shares its fast path with stricteq via compileOpStrictEq.
void JIT::emit_op_nstricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpNStrictEq);
}
703
// Slow path for nstricteq. The three links match the addSlowCase order in
// compileOpStrictEq: tags differ, double tags, both non-object cells.
void JIT::emitSlow_op_nstricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_nstricteq);
    slowPathCall.call();
}
713
// eq_null dst, src: dst = (src == null), i.e. src is null, undefined, or a
// cell masquerading as undefined from this global object.
void JIT::emit_op_eq_null(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);
    Jump isImmediate = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));

    Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    // Ordinary cells are never == null.
    move(TrustedImm32(0), regT1);
    Jump wasNotMasqueradesAsUndefined = jump();

    isMasqueradesAsUndefined.link(this);
    // Masquerading cells count as null only from their own global object.
    loadPtr(Address(regT0, JSCell::structureIDOffset()), regT2);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    loadPtr(Address(regT2, Structure::globalObjectOffset()), regT2);
    compare32(Equal, regT0, regT2, regT1);
    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    // Immediate: true when the tag is NullTag or UndefinedTag.
    compare32(Equal, regT1, TrustedImm32(JSValue::NullTag), regT2);
    compare32(Equal, regT1, TrustedImm32(JSValue::UndefinedTag), regT1);
    or32(regT2, regT1);

    wasNotImmediate.link(this);
    wasNotMasqueradesAsUndefined.link(this);

    emitStoreBool(dst, regT1);
}
744
// neq_null dst, src: dst = (src != null). Inverse of emit_op_eq_null.
void JIT::emit_op_neq_null(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);
    Jump isImmediate = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));

    Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    // Ordinary cells are always != null.
    move(TrustedImm32(1), regT1);
    Jump wasNotMasqueradesAsUndefined = jump();

    isMasqueradesAsUndefined.link(this);
    // Masquerading cells are != null when seen from a foreign global object.
    loadPtr(Address(regT0, JSCell::structureIDOffset()), regT2);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    loadPtr(Address(regT2, Structure::globalObjectOffset()), regT2);
    compare32(NotEqual, regT0, regT2, regT1);
    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    // Immediate: true when the tag is neither NullTag nor UndefinedTag.
    compare32(NotEqual, regT1, TrustedImm32(JSValue::NullTag), regT2);
    compare32(NotEqual, regT1, TrustedImm32(JSValue::UndefinedTag), regT1);
    and32(regT2, regT1);

    wasNotImmediate.link(this);
    wasNotMasqueradesAsUndefined.link(this);

    emitStoreBool(dst, regT1);
}
775
// throw value: spill callee saves for the unwinder, call operationThrow with
// the thrown value, then jump to the exception handler. Does not return.
void JIT::emit_op_throw(Instruction* currentInstruction)
{
    ASSERT(regT0 == returnValueGPR);
    copyCalleeSavesToVMCalleeSavesBuffer();
    emitLoad(currentInstruction[1].u.operand, regT1, regT0);
    // No exception check: we are already raising one.
    callOperationNoExceptionCheck(operationThrow, regT1, regT0);
    jumpToExceptionHandler();
}
784
785 void JIT::emit_op_push_with_scope(Instruction* currentInstruction)
786 {
787     JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_push_with_scope);
788     slowPathCall.call();
789 }
790
// to_number dst, src: int32s and doubles pass through unchanged; everything
// else takes the slow path.
void JIT::emit_op_to_number(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);

    Jump isInt32 = branch32(Equal, regT1, TrustedImm32(JSValue::Int32Tag));
    // Tags >= LowestTag (and not Int32Tag) are non-numeric -> slow path;
    // double tags sit below LowestTag and fall through.
    addSlowCase(branch32(AboveOrEqual, regT1, TrustedImm32(JSValue::LowestTag)));
    isInt32.link(this);

    // In-place conversion needs no store on the fast path.
    if (src != dst)
        emitStore(dst, regT1, regT0);
}
805
806 void JIT::emitSlow_op_to_number(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
807 {
808     linkSlowCase(iter);
809
810     JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_to_number);
811     slowPathCall.call();
812 }
813
// to_string dst, src: values that are already JSStrings pass through; both a
// non-cell and a non-string cell take the slow path (two slow cases).
void JIT::emit_op_to_string(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);

    addSlowCase(branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag)));
    addSlowCase(branch8(NotEqual, Address(regT0, JSCell::typeInfoTypeOffset()), TrustedImm32(StringType)));

    // In-place conversion needs no store on the fast path.
    if (src != dst)
        emitStore(dst, regT1, regT0);
}
827
// Slow path for to_string: links both fast-path bail-outs and runs the
// generic conversion.
void JIT::emitSlow_op_to_string(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter); // Not JSCell.
    linkSlowCase(iter); // Not JSString.

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_to_string);
    slowPathCall.call();
}
836
// op_catch: the entry point of an exception handler. Re-establishes the frame
// and stack pointer, checks catchability, then stores the Exception object
// (operand 1) and the thrown JSValue it wraps (operand 2).
void JIT::emit_op_catch(Instruction* currentInstruction)
{
    // The unwinder left our callee-save registers in the VM's buffer.
    restoreCalleeSavesFromVMCalleeSavesBuffer();

    move(TrustedImmPtr(m_vm), regT3);
    // The throw machinery recorded the handler's frame in VM::callFrameForCatch;
    // adopt it as our call frame and clear the field.
    load32(Address(regT3, VM::callFrameForCatchOffset()), callFrameRegister);
    storePtr(TrustedImmPtr(nullptr), Address(regT3, VM::callFrameForCatchOffset()));

    // Recompute this code block's stack pointer relative to the restored frame.
    addPtr(TrustedImm32(stackPointerOffsetFor(codeBlock()) * sizeof(Register)), callFrameRegister, stackPointerRegister);

    // Uncatchable exceptions (e.g. termination requests) must keep unwinding.
    callOperationNoExceptionCheck(operationCheckIfExceptionIsUncatchableAndNotifyProfiler);
    Jump isCatchableException = branchTest32(Zero, returnValueGPR);
    jumpToExceptionHandler();
    isCatchableException.link(this);

    // regT3 may have been clobbered by the call above; reload the VM pointer.
    move(TrustedImmPtr(m_vm), regT3);

    // Fetch the pending Exception* cell from the VM and clear the pending slot.
    load32(Address(regT3, VM::exceptionOffset()), regT2);
    move(TrustedImm32(JSValue::CellTag), regT1);

    store32(TrustedImm32(0), Address(regT3, VM::exceptionOffset()));

    // Operand 1 receives the Exception object itself (always a cell).
    unsigned exception = currentInstruction[1].u.operand;
    emitStore(exception, regT1, regT2);

    // Operand 2 receives the thrown JSValue carried inside the Exception.
    load32(Address(regT2, Exception::valueOffset() + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT0);
    load32(Address(regT2, Exception::valueOffset() + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT1);

    unsigned thrownValue = currentInstruction[2].u.operand;
    emitStore(thrownValue, regT1, regT0);
}
870
871 void JIT::emit_op_assert(Instruction* currentInstruction)
872 {
873     JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_assert);
874     slowPathCall.call();
875 }
876
877 void JIT::emit_op_create_lexical_environment(Instruction* currentInstruction)
878 {
879     JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_create_lexical_environment);
880     slowPathCall.call();
881 }
882
883 void JIT::emit_op_get_parent_scope(Instruction* currentInstruction)
884 {
885     int currentScope = currentInstruction[2].u.operand;
886     emitLoadPayload(currentScope, regT0);
887     loadPtr(Address(regT0, JSScope::offsetOfNext()), regT0);
888     emitStoreCell(currentInstruction[1].u.operand, regT0);
889 }
890
891 void JIT::emit_op_switch_imm(Instruction* currentInstruction)
892 {
893     size_t tableIndex = currentInstruction[1].u.operand;
894     unsigned defaultOffset = currentInstruction[2].u.operand;
895     unsigned scrutinee = currentInstruction[3].u.operand;
896
897     // create jump table for switch destinations, track this switch statement.
898     SimpleJumpTable* jumpTable = &m_codeBlock->switchJumpTable(tableIndex);
899     m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Immediate));
900     jumpTable->ensureCTITable();
901
902     emitLoad(scrutinee, regT1, regT0);
903     callOperation(operationSwitchImmWithUnknownKeyType, regT1, regT0, tableIndex);
904     jump(returnValueGPR);
905 }
906
907 void JIT::emit_op_switch_char(Instruction* currentInstruction)
908 {
909     size_t tableIndex = currentInstruction[1].u.operand;
910     unsigned defaultOffset = currentInstruction[2].u.operand;
911     unsigned scrutinee = currentInstruction[3].u.operand;
912
913     // create jump table for switch destinations, track this switch statement.
914     SimpleJumpTable* jumpTable = &m_codeBlock->switchJumpTable(tableIndex);
915     m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Character));
916     jumpTable->ensureCTITable();
917
918     emitLoad(scrutinee, regT1, regT0);
919     callOperation(operationSwitchCharWithUnknownKeyType, regT1, regT0, tableIndex);
920     jump(returnValueGPR);
921 }
922
923 void JIT::emit_op_switch_string(Instruction* currentInstruction)
924 {
925     size_t tableIndex = currentInstruction[1].u.operand;
926     unsigned defaultOffset = currentInstruction[2].u.operand;
927     unsigned scrutinee = currentInstruction[3].u.operand;
928
929     // create jump table for switch destinations, track this switch statement.
930     StringJumpTable* jumpTable = &m_codeBlock->stringSwitchJumpTable(tableIndex);
931     m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset));
932
933     emitLoad(scrutinee, regT1, regT0);
934     callOperation(operationSwitchStringWithUnknownKeyType, regT1, regT0, tableIndex);
935     jump(returnValueGPR);
936 }
937
938 void JIT::emit_op_throw_static_error(Instruction* currentInstruction)
939 {
940     emitLoad(m_codeBlock->getConstant(currentInstruction[1].u.operand), regT1, regT0);
941     callOperation(operationThrowStaticError, regT1, regT0, currentInstruction[2].u.operand);
942 }
943
944 void JIT::emit_op_debug(Instruction* currentInstruction)
945 {
946     load32(codeBlock()->debuggerRequestsAddress(), regT0);
947     Jump noDebuggerRequests = branchTest32(Zero, regT0);
948     callOperation(operationDebug, currentInstruction[1].u.operand);
949     noDebuggerRequests.link(this);
950 }
951
952
953 void JIT::emit_op_enter(Instruction* currentInstruction)
954 {
955     emitEnterOptimizationCheck();
956     
957     // Even though JIT code doesn't use them, we initialize our constant
958     // registers to zap stale pointers, to avoid unnecessarily prolonging
959     // object lifetime and increasing GC pressure.
960     for (int i = 0; i < m_codeBlock->m_numVars; ++i)
961         emitStore(virtualRegisterForLocal(i).offset(), jsUndefined());
962
963     JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_enter);
964     slowPathCall.call();
965 }
966
967 void JIT::emit_op_get_scope(Instruction* currentInstruction)
968 {
969     int dst = currentInstruction[1].u.operand;
970     emitGetFromCallFrameHeaderPtr(JSStack::Callee, regT0);
971     loadPtr(Address(regT0, JSFunction::offsetOfScopeChain()), regT0);
972     emitStoreCell(dst, regT0);
973 }
974
// op_create_this fast path: allocates the |this| object for a constructor
// call using the callee's cached object-allocation profile. Slow path is
// taken when the callee isn't a JSFunction, has no rare data or allocation
// profile, doesn't match the cached callee, or allocation fails.
void JIT::emit_op_create_this(Instruction* currentInstruction)
{
    int callee = currentInstruction[2].u.operand;
    WriteBarrierBase<JSCell>* cachedFunction = &currentInstruction[4].u.jsCell;
    RegisterID calleeReg = regT0;
    RegisterID rareDataReg = regT4;
    RegisterID resultReg = regT0; // Aliases calleeReg; callee is dead by allocation time.
    RegisterID allocatorReg = regT1;
    RegisterID structureReg = regT2;
    RegisterID cachedFunctionReg = regT4; // Aliases rareDataReg; rare data is dead by then.
    RegisterID scratchReg = regT3;

    emitLoadPayload(callee, calleeReg);
    addSlowCase(branch8(NotEqual, Address(calleeReg, JSCell::typeInfoTypeOffset()), TrustedImm32(JSFunctionType)));
    loadPtr(Address(calleeReg, JSFunction::offsetOfRareData()), rareDataReg);
    addSlowCase(branchTestPtr(Zero, rareDataReg));
    loadPtr(Address(rareDataReg, FunctionRareData::offsetOfObjectAllocationProfile() + ObjectAllocationProfile::offsetOfAllocator()), allocatorReg);
    loadPtr(Address(rareDataReg, FunctionRareData::offsetOfObjectAllocationProfile() + ObjectAllocationProfile::offsetOfStructure()), structureReg);
    addSlowCase(branchTestPtr(Zero, allocatorReg));

    // Guard against the cached callee changing. The seenMultipleCalleeObjects()
    // sentinel means this site is already known polymorphic; skip the guard.
    loadPtr(cachedFunction, cachedFunctionReg);
    Jump hasSeenMultipleCallees = branchPtr(Equal, cachedFunctionReg, TrustedImmPtr(JSCell::seenMultipleCalleeObjects()));
    addSlowCase(branchPtr(NotEqual, calleeReg, cachedFunctionReg));
    hasSeenMultipleCallees.link(this);

    // emitAllocateJSObject adds its own slow case for allocation failure.
    emitAllocateJSObject(allocatorReg, structureReg, resultReg, scratchReg);
    emitStoreCell(currentInstruction[1].u.operand, resultReg);
}
1003
1004 void JIT::emitSlow_op_create_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1005 {
1006     linkSlowCase(iter); // Callee::m_type != JSFunctionType.
1007     linkSlowCase(iter); // doesn't have rare data
1008     linkSlowCase(iter); // doesn't have an allocation profile
1009     linkSlowCase(iter); // allocation failed
1010     linkSlowCase(iter); // cached function didn't match
1011
1012     JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_create_this);
1013     slowPathCall.call();
1014 }
1015
// op_to_this fast path: |this| must be a final object whose Structure matches
// the Structure cached at this bytecode site. Non-cells, non-final-objects,
// and structure mismatches take the slow path.
void JIT::emit_op_to_this(Instruction* currentInstruction)
{
    WriteBarrierBase<Structure>* cachedStructure = &currentInstruction[2].u.structure;
    int thisRegister = currentInstruction[1].u.operand;

    emitLoad(thisRegister, regT3, regT2);

    addSlowCase(branch32(NotEqual, regT3, TrustedImm32(JSValue::CellTag)));
    addSlowCase(branch8(NotEqual, Address(regT2, JSCell::typeInfoTypeOffset()), TrustedImm32(FinalObjectType)));
    loadPtr(Address(regT2, JSCell::structureIDOffset()), regT0);
    loadPtr(cachedStructure, regT2);
    addSlowCase(branchPtr(NotEqual, regT0, regT2));
}
1029
// Slow path for op_to_this: one of the three fast-path guards failed.
void JIT::emitSlow_op_to_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter); // Not a cell.
    linkSlowCase(iter); // Not a final object.
    linkSlowCase(iter); // Structure didn't match the cached one.
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_to_this);
    slowPathCall.call();
}
1038
1039 void JIT::emit_op_check_tdz(Instruction* currentInstruction)
1040 {
1041     emitLoadTag(currentInstruction[1].u.operand, regT0);
1042     addSlowCase(branch32(Equal, regT0, TrustedImm32(JSValue::EmptyValueTag)));
1043 }
1044
1045 void JIT::emitSlow_op_check_tdz(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1046 {
1047     linkSlowCase(iter);
1048     JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_throw_tdz_error);
1049     slowPathCall.call();
1050 }
1051
1052 void JIT::emit_op_profile_will_call(Instruction* currentInstruction)
1053 {
1054     load32(m_vm->enabledProfilerAddress(), regT0);
1055     Jump profilerDone = branchTestPtr(Zero, regT0);
1056     emitLoad(currentInstruction[1].u.operand, regT1, regT0);
1057     callOperation(operationProfileWillCall, regT1, regT0);
1058     profilerDone.link(this);
1059 }
1060
1061 void JIT::emit_op_profile_did_call(Instruction* currentInstruction)
1062 {
1063     load32(m_vm->enabledProfilerAddress(), regT0);
1064     Jump profilerDone = branchTestPtr(Zero, regT0);
1065     emitLoad(currentInstruction[1].u.operand, regT1, regT0);
1066     callOperation(operationProfileDidCall, regT1, regT0);
1067     profilerDone.link(this);
1068 }
1069
// op_has_structure_property fast path: if the base cell still has the
// StructureID cached by the property-name enumerator, the property is
// guaranteed present, so store true. Non-cells and structure mismatches take
// the slow path.
void JIT::emit_op_has_structure_property(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int enumerator = currentInstruction[4].u.operand;

    emitLoadPayload(base, regT0);
    emitJumpSlowCaseIfNotJSCell(base);

    emitLoadPayload(enumerator, regT1);

    load32(Address(regT0, JSCell::structureIDOffset()), regT0);
    addSlowCase(branch32(NotEqual, regT0, Address(regT1, JSPropertyNameEnumerator::cachedStructureIDOffset())));
    
    move(TrustedImm32(1), regT0);
    emitStoreBool(dst, regT0);
}
1087
// Generates a by-val stub for op_has_indexed_property specialized to the
// observed array mode, then patches the original site's bad-type jump and its
// slow-path call to route through the new stub.
void JIT::privateCompileHasIndexedProperty(ByValInfo* byValInfo, ReturnAddressPtr returnAddress, JITArrayMode arrayMode)
{
    Instruction* currentInstruction = m_codeBlock->instructions().begin() + byValInfo->bytecodeIndex;
    
    PatchableJump badType;
    
    // FIXME: Add support for other types like TypedArrays and Arguments.
    // See https://bugs.webkit.org/show_bug.cgi?id=135033 and https://bugs.webkit.org/show_bug.cgi?id=135034.
    JumpList slowCases = emitLoadForArrayMode(currentInstruction, arrayMode, badType);
    // Load succeeded: the indexed property exists.
    move(TrustedImm32(1), regT0);
    Jump done = jump();

    LinkBuffer patchBuffer(*m_vm, *this, m_codeBlock);
    
    // Wrong type or a slow case sends control back to the original slow path.
    patchBuffer.link(badType, CodeLocationLabel(MacroAssemblerCodePtr::createFromExecutableAddress(returnAddress.value())).labelAtOffset(byValInfo->returnAddressToSlowPath));
    patchBuffer.link(slowCases, CodeLocationLabel(MacroAssemblerCodePtr::createFromExecutableAddress(returnAddress.value())).labelAtOffset(byValInfo->returnAddressToSlowPath));
    
    // Success rejoins the original fast path after the result store.
    patchBuffer.link(done, byValInfo->badTypeJump.labelAtOffset(byValInfo->badTypeJumpToDone));
    
    byValInfo->stubRoutine = FINALIZE_CODE_FOR_STUB(
        m_codeBlock, patchBuffer,
        ("Baseline has_indexed_property stub for %s, return point %p", toCString(*m_codeBlock).data(), returnAddress.value()));
    
    // Route the original site's bad-type jump into the stub, and repoint the
    // slow-path call at the generic (non-patching) operation.
    MacroAssembler::repatchJump(byValInfo->badTypeJump, CodeLocationLabel(byValInfo->stubRoutine->code().code()));
    MacroAssembler::repatchCall(CodeLocationCall(MacroAssemblerCodePtr(returnAddress)), FunctionPtr(operationHasIndexedPropertyGeneric));
}
1114
// op_has_indexed_property fast path: checks for an own indexed property by
// loading directly from the storage for the profiled indexing shape. The site
// is patchable: privateCompileHasIndexedProperty can later swap in a stub for
// a different array mode.
void JIT::emit_op_has_indexed_property(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int property = currentInstruction[3].u.operand;
    ArrayProfile* profile = currentInstruction[4].u.arrayProfile;
    ByValInfo* byValInfo = m_codeBlock->addByValInfo();
    
    emitLoadPayload(base, regT0);
    emitJumpSlowCaseIfNotJSCell(base);

    emitLoadPayload(property, regT1);

    // This is technically incorrect - we're zero-extending an int32. On the hot path this doesn't matter.
    // We check the value as if it was a uint32 against the m_vectorLength - which will always fail if
    // number was signed since m_vectorLength is always less than intmax (since the total allocation
    // size is always less than 4Gb). As such zero extending will have been correct (and extending the value
    // to 64-bits is necessary since it's used in the address calculation. We zero extend rather than sign
    // extending since it makes it easier to re-tag the value in the slow case.
    zeroExtend32ToPtr(regT1, regT1);

    emitArrayProfilingSiteWithCell(regT0, regT2, profile);
    and32(TrustedImm32(IndexingShapeMask), regT2);

    JITArrayMode mode = chooseArrayMode(profile);
    PatchableJump badType;

    // FIXME: Add support for other types like TypedArrays and Arguments.
    // See https://bugs.webkit.org/show_bug.cgi?id=135033 and https://bugs.webkit.org/show_bug.cgi?id=135034.
    JumpList slowCases = emitLoadForArrayMode(currentInstruction, mode, badType);
    // Load succeeded and found a non-empty slot: the property exists.
    move(TrustedImm32(1), regT0);

    addSlowCase(badType);
    addSlowCase(slowCases);
    
    Label done = label();
    
    emitStoreBool(dst, regT0);

    Label nextHotPath = label();
    
    // Record everything the by-val stub generator needs to patch this site.
    m_byValCompilationInfo.append(ByValCompilationInfo(byValInfo, m_bytecodeOffset, PatchableJump(), badType, mode, profile, done, nextHotPath));
}
1158
// Slow path for op_has_indexed_property: calls the runtime and records the
// slow-path label and return address so the site can later be repatched.
void JIT::emitSlow_op_has_indexed_property(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int property = currentInstruction[3].u.operand;
    ByValInfo* byValInfo = m_byValCompilationInfo[m_byValInstructionIndex].byValInfo;
    
    linkSlowCaseIfNotJSCell(iter, base); // base cell check
    linkSlowCase(iter); // base array check
    linkSlowCase(iter); // vector length check
    linkSlowCase(iter); // empty value

    Label slowPath = label();
    
    emitLoad(base, regT1, regT0);
    emitLoad(property, regT3, regT2);
    Call call = callOperation(operationHasIndexedPropertyDefault, dst, regT1, regT0, regT3, regT2, byValInfo);

    // Remember where the slow path starts and where the call returns, so
    // privateCompileHasIndexedProperty can link/patch this site.
    m_byValCompilationInfo[m_byValInstructionIndex].slowPathTarget = slowPath;
    m_byValCompilationInfo[m_byValInstructionIndex].returnAddress = call;
    m_byValInstructionIndex++;
}
1181
// op_get_direct_pname fast path: loads an own property by the enumerator's
// property index, provided the base still has the enumerator's cached
// structure. Handles both inline and out-of-line (butterfly) storage.
void JIT::emit_op_get_direct_pname(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int index = currentInstruction[4].u.operand;
    int enumerator = currentInstruction[5].u.operand;

    // Check that base is a cell
    emitLoadPayload(base, regT0);
    emitJumpSlowCaseIfNotJSCell(base);

    // Check the structure
    emitLoadPayload(enumerator, regT1);
    load32(Address(regT0, JSCell::structureIDOffset()), regT2);
    addSlowCase(branch32(NotEqual, regT2, Address(regT1, JSPropertyNameEnumerator::cachedStructureIDOffset())));

    // Compute the offset
    emitLoadPayload(index, regT2);
    // If index is less than the enumerator's cached inline storage, then it's an inline access
    Jump outOfLineAccess = branch32(AboveOrEqual, regT2, Address(regT1, JSPropertyNameEnumerator::cachedInlineCapacityOffset()));
    addPtr(TrustedImm32(JSObject::offsetOfInlineStorage()), regT0);
    load32(BaseIndex(regT0, regT2, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT1);
    load32(BaseIndex(regT0, regT2, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT0);
    
    Jump done = jump();

    // Otherwise it's out of line: rebase the index past the inline capacity
    // and index backwards from the butterfly's first out-of-line property.
    outOfLineAccess.link(this);
    loadPtr(Address(regT0, JSObject::butterflyOffset()), regT0);
    sub32(Address(regT1, JSPropertyNameEnumerator::cachedInlineCapacityOffset()), regT2);
    neg32(regT2);
    int32_t offsetOfFirstProperty = static_cast<int32_t>(offsetInButterfly(firstOutOfLineOffset)) * sizeof(EncodedJSValue);
    load32(BaseIndex(regT0, regT2, TimesEight, offsetOfFirstProperty + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT1);
    load32(BaseIndex(regT0, regT2, TimesEight, offsetOfFirstProperty + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT0);
    
    done.link(this);
    emitValueProfilingSite();
    emitStore(dst, regT1, regT0);
}
1221
// Slow path for op_get_direct_pname: base wasn't a cell, or its structure
// didn't match the enumerator's cached structure.
void JIT::emitSlow_op_get_direct_pname(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int base = currentInstruction[2].u.operand;
    linkSlowCaseIfNotJSCell(iter, base); // Not a cell.
    linkSlowCase(iter); // Structure mismatch.

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_get_direct_pname);
    slowPathCall.call();
}
1231
1232 void JIT::emit_op_enumerator_structure_pname(Instruction* currentInstruction)
1233 {
1234     int dst = currentInstruction[1].u.operand;
1235     int enumerator = currentInstruction[2].u.operand;
1236     int index = currentInstruction[3].u.operand;
1237
1238     emitLoadPayload(index, regT0);
1239     emitLoadPayload(enumerator, regT1);
1240     Jump inBounds = branch32(Below, regT0, Address(regT1, JSPropertyNameEnumerator::endStructurePropertyIndexOffset()));
1241
1242     move(TrustedImm32(JSValue::NullTag), regT2);
1243     move(TrustedImm32(0), regT0);
1244
1245     Jump done = jump();
1246     inBounds.link(this);
1247
1248     loadPtr(Address(regT1, JSPropertyNameEnumerator::cachedPropertyNamesVectorOffset()), regT1);
1249     loadPtr(BaseIndex(regT1, regT0, timesPtr()), regT0);
1250     move(TrustedImm32(JSValue::CellTag), regT2);
1251
1252     done.link(this);
1253     emitStore(dst, regT2, regT0);
1254 }
1255
1256 void JIT::emit_op_enumerator_generic_pname(Instruction* currentInstruction)
1257 {
1258     int dst = currentInstruction[1].u.operand;
1259     int enumerator = currentInstruction[2].u.operand;
1260     int index = currentInstruction[3].u.operand;
1261
1262     emitLoadPayload(index, regT0);
1263     emitLoadPayload(enumerator, regT1);
1264     Jump inBounds = branch32(Below, regT0, Address(regT1, JSPropertyNameEnumerator::endGenericPropertyIndexOffset()));
1265
1266     move(TrustedImm32(JSValue::NullTag), regT2);
1267     move(TrustedImm32(0), regT0);
1268
1269     Jump done = jump();
1270     inBounds.link(this);
1271
1272     loadPtr(Address(regT1, JSPropertyNameEnumerator::cachedPropertyNamesVectorOffset()), regT1);
1273     loadPtr(BaseIndex(regT1, regT0, timesPtr()), regT0);
1274     move(TrustedImm32(JSValue::CellTag), regT2);
1275     
1276     done.link(this);
1277     emitStore(dst, regT2, regT0);
1278 }
1279
// op_profile_type: append the operand's value (and structure, if a cell) to
// the type profiler log. Skips the log write when the value's tag matches the
// type last seen at this location, and flushes the log via a runtime call
// when the log fills up.
void JIT::emit_op_profile_type(Instruction* currentInstruction)
{
    TypeLocation* cachedTypeLocation = currentInstruction[2].u.location;
    int valueToProfile = currentInstruction[1].u.operand;

    // Load payload in T0. Load tag in T3.
    emitLoadPayload(valueToProfile, regT0);
    emitLoadTag(valueToProfile, regT3);

    JumpList jumpToEnd;

    // Don't log the empty value (e.g. an uninitialized binding).
    jumpToEnd.append(branch32(Equal, regT3, TrustedImm32(JSValue::EmptyValueTag)));

    // Compile in a predictive type check, if possible, to see if we can skip writing to the log.
    // These typechecks are inlined to match those of the 32-bit JSValue type checks.
    if (cachedTypeLocation->m_lastSeenType == TypeUndefined)
        jumpToEnd.append(branch32(Equal, regT3, TrustedImm32(JSValue::UndefinedTag)));
    else if (cachedTypeLocation->m_lastSeenType == TypeNull)
        jumpToEnd.append(branch32(Equal, regT3, TrustedImm32(JSValue::NullTag)));
    else if (cachedTypeLocation->m_lastSeenType == TypeBoolean)
        jumpToEnd.append(branch32(Equal, regT3, TrustedImm32(JSValue::BooleanTag)));
    else if (cachedTypeLocation->m_lastSeenType == TypeAnyInt)
        jumpToEnd.append(branch32(Equal, regT3, TrustedImm32(JSValue::Int32Tag)));
    else if (cachedTypeLocation->m_lastSeenType == TypeNumber) {
        // A number is either a double (tag below LowestTag) or an int32.
        jumpToEnd.append(branch32(Below, regT3, TrustedImm32(JSValue::LowestTag)));
        jumpToEnd.append(branch32(Equal, regT3, TrustedImm32(JSValue::Int32Tag)));
    } else if (cachedTypeLocation->m_lastSeenType == TypeString) {
        Jump isNotCell = branch32(NotEqual, regT3, TrustedImm32(JSValue::CellTag));
        jumpToEnd.append(branch8(Equal, Address(regT0, JSCell::typeInfoTypeOffset()), TrustedImm32(StringType)));
        isNotCell.link(this);
    }

    // Load the type profiling log into T2.
    TypeProfilerLog* cachedTypeProfilerLog = m_vm->typeProfilerLog();
    move(TrustedImmPtr(cachedTypeProfilerLog), regT2);

    // Load the next log entry into T1.
    loadPtr(Address(regT2, TypeProfilerLog::currentLogEntryOffset()), regT1);

    // Store the JSValue onto the log entry.
    store32(regT0, Address(regT1, TypeProfilerLog::LogEntry::valueOffset() + OBJECT_OFFSETOF(JSValue, u.asBits.payload)));
    store32(regT3, Address(regT1, TypeProfilerLog::LogEntry::valueOffset() + OBJECT_OFFSETOF(JSValue, u.asBits.tag)));

    // Store the structureID of the cell if argument is a cell, otherwise, store 0 on the log entry.
    Jump notCell = branch32(NotEqual, regT3, TrustedImm32(JSValue::CellTag));
    load32(Address(regT0, JSCell::structureIDOffset()), regT0);
    store32(regT0, Address(regT1, TypeProfilerLog::LogEntry::structureIDOffset()));
    Jump skipNotCell = jump();
    notCell.link(this);
    store32(TrustedImm32(0), Address(regT1, TypeProfilerLog::LogEntry::structureIDOffset()));
    skipNotCell.link(this);

    // Store the typeLocation on the log entry.
    move(TrustedImmPtr(cachedTypeLocation), regT0);
    store32(regT0, Address(regT1, TypeProfilerLog::LogEntry::locationOffset()));

    // Increment the current log entry.
    addPtr(TrustedImm32(sizeof(TypeProfilerLog::LogEntry)), regT1);
    store32(regT1, Address(regT2, TypeProfilerLog::currentLogEntryOffset()));
    jumpToEnd.append(branchPtr(NotEqual, regT1, TrustedImmPtr(cachedTypeProfilerLog->logEndPtr())));
    // Clear the log if we're at the end of the log.
    callOperation(operationProcessTypeProfilerLog);

    jumpToEnd.link(this);
}
1345
1346 } // namespace JSC
1347
1348 #endif // USE(JSVALUE32_64)
1349 #endif // ENABLE(JIT)