/*
 * Copyright (C) 2009, 2012, 2013, 2014 Apple Inc. All rights reserved.
 * Copyright (C) 2010 Patrick Gansterer <paroga@paroga.com>
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"

#if ENABLE(JIT)
#if USE(JSVALUE32_64)
#include "JIT.h"

#include "CCallHelpers.h"
#include "Debugger.h"
#include "JITInlines.h"
#include "JSArray.h"
#include "JSCell.h"
#include "JSEnvironmentRecord.h"
#include "JSFunction.h"
#include "JSNameScope.h"
#include "JSPropertyNameEnumerator.h"
#include "LinkBuffer.h"
#include "MaxFrameExtentForSlowPathCall.h"
#include "RepatchBuffer.h"
#include "SlowPathCall.h"
#include "TypeProfilerLog.h"
#include "VirtualRegister.h"

namespace JSC {

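// A quick orientation for this file: on JSVALUE32_64 platforms a JSValue is a
// { tag, payload } pair of 32-bit words (see JSCJSValue.h). The constant tags
// (CellTag, BooleanTag, Int32Tag, NullTag, UndefinedTag, EmptyValueTag, ...)
// occupy the highest 32-bit values, and any tag word that is unsigned-below
// JSValue::LowestTag is the high half of a double. Hence the pervasive pattern
// below of testing tags with branch32 and detecting doubles with
// branch32(Below, tagReg, TrustedImm32(JSValue::LowestTag)).
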
JIT::CodeRef JIT::privateCompileCTINativeCall(VM* vm, NativeFunction func)
{
    Call nativeCall;

    emitFunctionPrologue();
    emitPutImmediateToCallFrameHeader(0, JSStack::CodeBlock);
    storePtr(callFrameRegister, &m_vm->topCallFrame);

#if CPU(X86)
    // Calling convention:      f(ecx, edx, ...);
    // Host function signature: f(ExecState*);
    move(callFrameRegister, X86Registers::ecx);

    subPtr(TrustedImm32(8), stackPointerRegister); // Align stack for call.
    storePtr(X86Registers::ecx, Address(stackPointerRegister));

    // Call the function.
    nativeCall = call();

    addPtr(TrustedImm32(8), stackPointerRegister);

#elif CPU(ARM) || CPU(SH4) || CPU(MIPS)
#if CPU(MIPS)
    // Allocate stack space for (unused) 16 bytes (8-byte aligned) for 4 arguments.
    subPtr(TrustedImm32(16), stackPointerRegister);
#endif

    // Calling convention is f(argumentGPR0, argumentGPR1, ...).
    // Host function signature is f(ExecState*).
    move(callFrameRegister, argumentGPR0);

    emitGetFromCallFrameHeaderPtr(JSStack::Callee, argumentGPR1);
    loadPtr(Address(argumentGPR1, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);

    // Call the function.
    nativeCall = call();

#if CPU(MIPS)
    // Restore stack space.
    addPtr(TrustedImm32(16), stackPointerRegister);
#endif

    restoreReturnAddressBeforeReturn(regT3);
#else
#error "JIT not supported on this platform."
    abortWithReason(JITNotSupported);
#endif // CPU(X86)

    // Check for an exception.
    Jump sawException = branch32(NotEqual, AbsoluteAddress(reinterpret_cast<char*>(vm->addressOfException()) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), TrustedImm32(JSValue::EmptyValueTag));

    emitFunctionEpilogue();
    // Return.
    ret();

    // Handle an exception.
    sawException.link(this);

    storePtr(callFrameRegister, &m_vm->topCallFrame);

#if CPU(X86)
    addPtr(TrustedImm32(-4), stackPointerRegister);
    loadPtr(Address(callFrameRegister), X86Registers::ecx);
    push(X86Registers::ecx);
#else
    loadPtr(Address(callFrameRegister), argumentGPR0);
#endif
    move(TrustedImmPtr(FunctionPtr(operationVMHandleException).value()), regT3);
    call(regT3);

#if CPU(X86)
    addPtr(TrustedImm32(8), stackPointerRegister);
#endif

    jumpToExceptionHandler();

    // All trampolines constructed! Copy the code, link up calls, and set the pointers on the Machine object.
    LinkBuffer patchBuffer(*m_vm, *this, GLOBAL_THUNK_ID);

    patchBuffer.link(nativeCall, FunctionPtr(func));
    return FINALIZE_CODE(patchBuffer, ("JIT CTI native call"));
}

void JIT::emit_op_mov(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    if (m_codeBlock->isConstantRegisterIndex(src))
        emitStore(dst, getConstantOperand(src));
    else {
        emitLoad(src, regT1, regT0);
        emitStore(dst, regT1, regT0);
    }
}

void JIT::emit_op_end(Instruction* currentInstruction)
{
    ASSERT(returnValueGPR != callFrameRegister);
    emitLoad(currentInstruction[1].u.operand, regT1, regT0);
    emitFunctionEpilogue();
    ret();
}

void JIT::emit_op_jmp(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[1].u.operand;
    addJump(jump(), target);
}

void JIT::emit_op_new_object(Instruction* currentInstruction)
{
    Structure* structure = currentInstruction[3].u.objectAllocationProfile->structure();
    size_t allocationSize = JSFinalObject::allocationSize(structure->inlineCapacity());
    MarkedAllocator* allocator = &m_vm->heap.allocatorForObjectWithoutDestructor(allocationSize);

    RegisterID resultReg = regT0;
    RegisterID allocatorReg = regT1;
    RegisterID scratchReg = regT2;

    move(TrustedImmPtr(allocator), allocatorReg);
    emitAllocateJSObject(allocatorReg, TrustedImmPtr(structure), resultReg, scratchReg);
    emitStoreCell(currentInstruction[1].u.operand, resultReg);
}

void JIT::emitSlow_op_new_object(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    int dst = currentInstruction[1].u.operand;
    Structure* structure = currentInstruction[3].u.objectAllocationProfile->structure();
    callOperation(operationNewObject, structure);
    emitStoreCell(dst, returnValueGPR);
}

void JIT::emit_op_check_has_instance(Instruction* currentInstruction)
{
    int baseVal = currentInstruction[3].u.operand;

    emitLoadPayload(baseVal, regT0);

    // Check that baseVal is a cell.
    emitJumpSlowCaseIfNotJSCell(baseVal);

    // Check that baseVal 'ImplementsHasInstance'.
    addSlowCase(branchTest8(Zero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(ImplementsDefaultHasInstance)));
}

void JIT::emit_op_instanceof(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;
    int proto = currentInstruction[3].u.operand;

    // Load the operands into registers.
    // We use regT0 for baseVal since we will be done with this first, and we can then use it for the result.
    emitLoadPayload(value, regT2);
    emitLoadPayload(proto, regT1);

    // Check that value and proto are cells. baseVal must be a cell - this is checked by op_check_has_instance.
    emitJumpSlowCaseIfNotJSCell(value);
    emitJumpSlowCaseIfNotJSCell(proto);

    // Check that proto is an object.
    addSlowCase(emitJumpIfCellNotObject(regT1));

    // Optimistically load the result true, and start looping.
    // Initially, regT1 still contains proto and regT2 still contains value.
    // As we loop regT2 will be updated with its prototype, recursively walking the prototype chain.
    move(TrustedImm32(1), regT0);
    Label loop(this);

    // Load the prototype of the cell in regT2. If this is equal to regT1 - WIN!
    // Otherwise, check if we've hit null - if we have then drop out of the loop, if not go again.
    loadPtr(Address(regT2, JSCell::structureIDOffset()), regT2);
    load32(Address(regT2, Structure::prototypeOffset() + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT2);
    Jump isInstance = branchPtr(Equal, regT2, regT1);
    branchTest32(NonZero, regT2).linkTo(loop, this);

    // We get here either by dropping out of the loop, or if value was not an Object. Result is false.
    move(TrustedImm32(0), regT0);

    // isInstance jumps right down to here, to skip setting the result to false (it has already set true).
    isInstance.link(this);
    emitStoreBool(dst, regT0);
}

void JIT::emitSlow_op_check_has_instance(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;
    int baseVal = currentInstruction[3].u.operand;

    linkSlowCaseIfNotJSCell(iter, baseVal);
    linkSlowCase(iter);

    emitLoad(value, regT1, regT0);
    emitLoad(baseVal, regT3, regT2);
    callOperation(operationCheckHasInstance, dst, regT1, regT0, regT3, regT2);

    emitJumpSlowToHot(jump(), currentInstruction[4].u.operand);
}

void JIT::emitSlow_op_instanceof(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;
    int proto = currentInstruction[3].u.operand;

    linkSlowCaseIfNotJSCell(iter, value);
    linkSlowCaseIfNotJSCell(iter, proto);
    linkSlowCase(iter);

    emitLoad(value, regT1, regT0);
    emitLoad(proto, regT3, regT2);
    callOperation(operationInstanceOf, dst, regT1, regT0, regT3, regT2);
}

void JIT::emit_op_is_undefined(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitLoad(value, regT1, regT0);
    Jump isCell = branch32(Equal, regT1, TrustedImm32(JSValue::CellTag));

    compare32(Equal, regT1, TrustedImm32(JSValue::UndefinedTag), regT0);
    Jump done = jump();

    isCell.link(this);
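    // A cell with the MasqueradesAsUndefined flag set (e.g. document.all)
    // compares equal to undefined, but only when observed from within its own
    // global object - hence the structure's globalObject is compared against
    // the code block's globalObject below.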
    Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    move(TrustedImm32(0), regT0);
    Jump notMasqueradesAsUndefined = jump();

    isMasqueradesAsUndefined.link(this);
    loadPtr(Address(regT0, JSCell::structureIDOffset()), regT1);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    loadPtr(Address(regT1, Structure::globalObjectOffset()), regT1);
    compare32(Equal, regT0, regT1, regT0);

    notMasqueradesAsUndefined.link(this);
    done.link(this);
    emitStoreBool(dst, regT0);
}

void JIT::emit_op_is_boolean(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitLoadTag(value, regT0);
    compare32(Equal, regT0, TrustedImm32(JSValue::BooleanTag), regT0);
    emitStoreBool(dst, regT0);
}

void JIT::emit_op_is_number(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitLoadTag(value, regT0);
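    // Unsigned-wrap trick: Int32Tag is the largest tag value, so adding 1 wraps
    // it to zero, while a double tag (unsigned-below LowestTag) stays below
    // LowestTag + 1. One Below comparison therefore accepts exactly the int32
    // and double cases, and rejects every other tag.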
    add32(TrustedImm32(1), regT0);
    compare32(Below, regT0, TrustedImm32(JSValue::LowestTag + 1), regT0);
    emitStoreBool(dst, regT0);
}

void JIT::emit_op_is_string(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitLoad(value, regT1, regT0);
    Jump isNotCell = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));

    compare8(Equal, Address(regT0, JSCell::typeInfoTypeOffset()), TrustedImm32(StringType), regT0);
    Jump done = jump();

    isNotCell.link(this);
    move(TrustedImm32(0), regT0);

    done.link(this);
    emitStoreBool(dst, regT0);
}

void JIT::emit_op_is_object(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitLoad(value, regT1, regT0);
    Jump isNotCell = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));

    compare8(AboveOrEqual, Address(regT0, JSCell::typeInfoTypeOffset()), TrustedImm32(ObjectType), regT0);
    Jump done = jump();

    isNotCell.link(this);
    move(TrustedImm32(0), regT0);

    done.link(this);
    emitStoreBool(dst, regT0);
}

void JIT::emit_op_to_primitive(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);

    Jump isImm = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));
    addSlowCase(emitJumpIfCellObject(regT0));
    isImm.link(this);

    if (dst != src)
        emitStore(dst, regT1, regT0);
}

void JIT::emitSlow_op_to_primitive(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_to_primitive);
    slowPathCall.call();
}

void JIT::emit_op_strcat(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_strcat);
    slowPathCall.call();
}

void JIT::emit_op_not(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);
    addSlowCase(branch32(NotEqual, regT1, TrustedImm32(JSValue::BooleanTag)));
    xor32(TrustedImm32(1), regT0);

    emitStoreBool(dst, regT0, (dst == src));
}

void JIT::emitSlow_op_not(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_not);
    slowPathCall.call();
}

void JIT::emit_op_jfalse(Instruction* currentInstruction)
{
    int cond = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitLoad(cond, regT1, regT0);

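    // A single unsigned comparison suffices here because of the tag ordering
    // asserted below: Int32Tag is the all-ones tag and BooleanTag is one less,
    // so only int32 and boolean tags are at-or-above BooleanTag; cells, doubles
    // and null/undefined all fall Below it and take the slow case.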
    ASSERT((JSValue::BooleanTag + 1 == JSValue::Int32Tag) && !(JSValue::Int32Tag + 1));
    addSlowCase(branch32(Below, regT1, TrustedImm32(JSValue::BooleanTag)));
    addJump(branchTest32(Zero, regT0), target);
}

void JIT::emitSlow_op_jfalse(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int cond = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    linkSlowCase(iter);

    if (supportsFloatingPoint()) {
        // regT1 contains the tag from the hot path.
        Jump notNumber = branch32(Above, regT1, TrustedImm32(JSValue::LowestTag));

        emitLoadDouble(cond, fpRegT0);
        emitJumpSlowToHot(branchDoubleZeroOrNaN(fpRegT0, fpRegT1), target);
        emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_jfalse));

        notNumber.link(this);
    }

    callOperation(operationConvertJSValueToBoolean, regT1, regT0);
    emitJumpSlowToHot(branchTest32(Zero, returnValueGPR), target); // Inverted.
}

void JIT::emit_op_jtrue(Instruction* currentInstruction)
{
    int cond = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitLoad(cond, regT1, regT0);

    ASSERT((JSValue::BooleanTag + 1 == JSValue::Int32Tag) && !(JSValue::Int32Tag + 1));
    addSlowCase(branch32(Below, regT1, TrustedImm32(JSValue::BooleanTag)));
    addJump(branchTest32(NonZero, regT0), target);
}

void JIT::emitSlow_op_jtrue(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int cond = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    linkSlowCase(iter);

    if (supportsFloatingPoint()) {
        // regT1 contains the tag from the hot path.
        Jump notNumber = branch32(Above, regT1, TrustedImm32(JSValue::LowestTag));

        emitLoadDouble(cond, fpRegT0);
        emitJumpSlowToHot(branchDoubleNonZero(fpRegT0, fpRegT1), target);
        emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_jtrue));

        notNumber.link(this);
    }

    callOperation(operationConvertJSValueToBoolean, regT1, regT0);
    emitJumpSlowToHot(branchTest32(NonZero, returnValueGPR), target);
}

void JIT::emit_op_jeq_null(Instruction* currentInstruction)
{
    int src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);

    Jump isImmediate = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));

    Jump isNotMasqueradesAsUndefined = branchTest8(Zero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    loadPtr(Address(regT0, JSCell::structureIDOffset()), regT2);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    addJump(branchPtr(Equal, Address(regT2, Structure::globalObjectOffset()), regT0), target);
    Jump masqueradesGlobalObjectIsForeign = jump();

    // Now handle the immediate cases - undefined & null
    isImmediate.link(this);
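    // UndefinedTag and NullTag are adjacent, and NullTag has its low bit set
    // (asserted below), so or32(1) folds undefined into null and one equality
    // test against NullTag matches both. op_jneq_null uses the same trick.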
    ASSERT((JSValue::UndefinedTag + 1 == JSValue::NullTag) && (JSValue::NullTag & 0x1));
    or32(TrustedImm32(1), regT1);
    addJump(branch32(Equal, regT1, TrustedImm32(JSValue::NullTag)), target);

    isNotMasqueradesAsUndefined.link(this);
    masqueradesGlobalObjectIsForeign.link(this);
}

void JIT::emit_op_jneq_null(Instruction* currentInstruction)
{
    int src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);

    Jump isImmediate = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));

    addJump(branchTest8(Zero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined)), target);
    loadPtr(Address(regT0, JSCell::structureIDOffset()), regT2);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    addJump(branchPtr(NotEqual, Address(regT2, Structure::globalObjectOffset()), regT0), target);
    Jump wasNotImmediate = jump();

    // Now handle the immediate cases - undefined & null
    isImmediate.link(this);

    ASSERT((JSValue::UndefinedTag + 1 == JSValue::NullTag) && (JSValue::NullTag & 0x1));
    or32(TrustedImm32(1), regT1);
    addJump(branch32(NotEqual, regT1, TrustedImm32(JSValue::NullTag)), target);

    wasNotImmediate.link(this);
}

void JIT::emit_op_jneq_ptr(Instruction* currentInstruction)
{
    int src = currentInstruction[1].u.operand;
    Special::Pointer ptr = currentInstruction[2].u.specialPointer;
    unsigned target = currentInstruction[3].u.operand;

    emitLoad(src, regT1, regT0);
    addJump(branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag)), target);
    addJump(branchPtr(NotEqual, regT0, TrustedImmPtr(actualPointerFor(m_codeBlock, ptr))), target);
}

void JIT::emit_op_eq(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src1 = currentInstruction[2].u.operand;
    int src2 = currentInstruction[3].u.operand;

    emitLoad2(src1, regT1, regT0, src2, regT3, regT2);
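    // The fast path requires matching tags that are neither CellTag (strings
    // need a content comparison, not pointer equality) nor a double tag
    // (double equality is not bitwise: 0.0 == -0.0 and NaN != NaN); any value
    // pair passing these guards can be decided by comparing payloads alone.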
    addSlowCase(branch32(NotEqual, regT1, regT3));
    addSlowCase(branch32(Equal, regT1, TrustedImm32(JSValue::CellTag)));
    addSlowCase(branch32(Below, regT1, TrustedImm32(JSValue::LowestTag)));

    compare32(Equal, regT0, regT2, regT0);

    emitStoreBool(dst, regT0);
}

void JIT::emitSlow_op_eq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;
    int op1 = currentInstruction[2].u.operand;
    int op2 = currentInstruction[3].u.operand;

    JumpList storeResult;
    JumpList genericCase;

    genericCase.append(getSlowCase(iter)); // tags not equal

    linkSlowCase(iter); // tags equal and JSCell
    genericCase.append(branchPtr(NotEqual, Address(regT0, JSCell::structureIDOffset()), TrustedImmPtr(m_vm->stringStructure.get())));
    genericCase.append(branchPtr(NotEqual, Address(regT2, JSCell::structureIDOffset()), TrustedImmPtr(m_vm->stringStructure.get())));

    // String case.
    callOperation(operationCompareStringEq, regT0, regT2);
    storeResult.append(jump());

    // Generic case.
    genericCase.append(getSlowCase(iter)); // doubles
    genericCase.link(this);
    emitLoad(op1, regT1, regT0);
    emitLoad(op2, regT3, regT2);
    callOperation(operationCompareEq, regT1, regT0, regT3, regT2);

    storeResult.link(this);
    emitStoreBool(dst, returnValueGPR);
}

void JIT::emit_op_neq(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src1 = currentInstruction[2].u.operand;
    int src2 = currentInstruction[3].u.operand;

    emitLoad2(src1, regT1, regT0, src2, regT3, regT2);
    addSlowCase(branch32(NotEqual, regT1, regT3));
    addSlowCase(branch32(Equal, regT1, TrustedImm32(JSValue::CellTag)));
    addSlowCase(branch32(Below, regT1, TrustedImm32(JSValue::LowestTag)));

    compare32(NotEqual, regT0, regT2, regT0);

    emitStoreBool(dst, regT0);
}

void JIT::emitSlow_op_neq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;

    JumpList storeResult;
    JumpList genericCase;

    genericCase.append(getSlowCase(iter)); // tags not equal

    linkSlowCase(iter); // tags equal and JSCell
    genericCase.append(branchPtr(NotEqual, Address(regT0, JSCell::structureIDOffset()), TrustedImmPtr(m_vm->stringStructure.get())));
    genericCase.append(branchPtr(NotEqual, Address(regT2, JSCell::structureIDOffset()), TrustedImmPtr(m_vm->stringStructure.get())));

    // String case.
    callOperation(operationCompareStringEq, regT0, regT2);
    storeResult.append(jump());

    // Generic case.
    genericCase.append(getSlowCase(iter)); // doubles
    genericCase.link(this);
    callOperation(operationCompareEq, regT1, regT0, regT3, regT2);

    storeResult.link(this);
    xor32(TrustedImm32(0x1), returnValueGPR);
    emitStoreBool(dst, returnValueGPR);
}

void JIT::compileOpStrictEq(Instruction* currentInstruction, CompileOpStrictEqType type)
{
    int dst = currentInstruction[1].u.operand;
    int src1 = currentInstruction[2].u.operand;
    int src2 = currentInstruction[3].u.operand;

    emitLoad2(src1, regT1, regT0, src2, regT3, regT2);

    // Bail if the tags differ, or are double.
    addSlowCase(branch32(NotEqual, regT1, regT3));
    addSlowCase(branch32(Below, regT1, TrustedImm32(JSValue::LowestTag)));

    // Jump to a slow case if both are strings or symbols (non object).
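    // (Two distinct string cells can hold equal contents, so payload/pointer
    // equality is only sound once both operands are known to be objects.)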
    Jump notCell = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));
    Jump firstIsObject = emitJumpIfCellObject(regT0);
    addSlowCase(emitJumpIfCellNotObject(regT2));
    notCell.link(this);
    firstIsObject.link(this);

    // Simply compare the payloads.
    if (type == OpStrictEq)
        compare32(Equal, regT0, regT2, regT0);
    else
        compare32(NotEqual, regT0, regT2, regT0);

    emitStoreBool(dst, regT0);
}

void JIT::emit_op_stricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpStrictEq);
}

void JIT::emitSlow_op_stricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_stricteq);
    slowPathCall.call();
}

void JIT::emit_op_nstricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpNStrictEq);
}

void JIT::emitSlow_op_nstricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_nstricteq);
    slowPathCall.call();
}

void JIT::emit_op_eq_null(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);
    Jump isImmediate = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));

    Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    move(TrustedImm32(0), regT1);
    Jump wasNotMasqueradesAsUndefined = jump();

    isMasqueradesAsUndefined.link(this);
    loadPtr(Address(regT0, JSCell::structureIDOffset()), regT2);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    loadPtr(Address(regT2, Structure::globalObjectOffset()), regT2);
    compare32(Equal, regT0, regT2, regT1);
    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    compare32(Equal, regT1, TrustedImm32(JSValue::NullTag), regT2);
    compare32(Equal, regT1, TrustedImm32(JSValue::UndefinedTag), regT1);
    or32(regT2, regT1);

    wasNotImmediate.link(this);
    wasNotMasqueradesAsUndefined.link(this);

    emitStoreBool(dst, regT1);
}

void JIT::emit_op_neq_null(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);
    Jump isImmediate = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));

    Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    move(TrustedImm32(1), regT1);
    Jump wasNotMasqueradesAsUndefined = jump();

    isMasqueradesAsUndefined.link(this);
    loadPtr(Address(regT0, JSCell::structureIDOffset()), regT2);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    loadPtr(Address(regT2, Structure::globalObjectOffset()), regT2);
    compare32(NotEqual, regT0, regT2, regT1);
    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    compare32(NotEqual, regT1, TrustedImm32(JSValue::NullTag), regT2);
    compare32(NotEqual, regT1, TrustedImm32(JSValue::UndefinedTag), regT1);
    and32(regT2, regT1);

    wasNotImmediate.link(this);
    wasNotMasqueradesAsUndefined.link(this);

    emitStoreBool(dst, regT1);
}

void JIT::emit_op_throw(Instruction* currentInstruction)
{
    ASSERT(regT0 == returnValueGPR);
    emitLoad(currentInstruction[1].u.operand, regT1, regT0);
    callOperationNoExceptionCheck(operationThrow, regT1, regT0);
    jumpToExceptionHandler();
}

void JIT::emit_op_push_with_scope(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    emitLoad(currentInstruction[2].u.operand, regT1, regT0);
    callOperation(operationPushWithScope, dst, regT1, regT0);
}

void JIT::emit_op_pop_scope(Instruction* currentInstruction)
{
    int scope = currentInstruction[1].u.operand;
    callOperation(operationPopScope, scope);
}

void JIT::emit_op_to_number(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);

    Jump isInt32 = branch32(Equal, regT1, TrustedImm32(JSValue::Int32Tag));
    addSlowCase(branch32(AboveOrEqual, regT1, TrustedImm32(JSValue::LowestTag)));
    isInt32.link(this);

    if (src != dst)
        emitStore(dst, regT1, regT0);
}

void JIT::emitSlow_op_to_number(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_to_number);
    slowPathCall.call();
}

void JIT::emit_op_to_string(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);

    addSlowCase(branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag)));
    addSlowCase(branch8(NotEqual, Address(regT0, JSCell::typeInfoTypeOffset()), TrustedImm32(StringType)));

    if (src != dst)
        emitStore(dst, regT1, regT0);
}

void JIT::emitSlow_op_to_string(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter); // Not JSCell.
    linkSlowCase(iter); // Not JSString.

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_to_string);
    slowPathCall.call();
}

void JIT::emit_op_push_name_scope(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    emitLoad(currentInstruction[2].u.operand, regT1, regT0);
    if (currentInstruction[4].u.operand == JSNameScope::CatchScope) {
        callOperation(operationPushCatchScope, dst, jsCast<SymbolTable*>(getConstantOperand(currentInstruction[3].u.operand)), regT1, regT0);
        return;
    }

    RELEASE_ASSERT(currentInstruction[4].u.operand == JSNameScope::FunctionNameScope);
    callOperation(operationPushFunctionNameScope, dst, jsCast<SymbolTable*>(getConstantOperand(currentInstruction[3].u.operand)), regT1, regT0);
}

void JIT::emit_op_catch(Instruction* currentInstruction)
{
    move(TrustedImmPtr(m_vm), regT3);
    // operationThrow returns the callFrame for the handler.
    load32(Address(regT3, VM::callFrameForThrowOffset()), callFrameRegister);
    load32(Address(regT3, VM::vmEntryFrameForThrowOffset()), regT0);
    store32(regT0, Address(regT3, VM::topVMEntryFrameOffset()));

    addPtr(TrustedImm32(stackPointerOffsetFor(codeBlock()) * sizeof(Register)), callFrameRegister, stackPointerRegister);

    // Now store the exception returned by operationThrow.
    load32(Address(regT3, VM::exceptionOffset() + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT0);
    load32(Address(regT3, VM::exceptionOffset() + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT1);
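    // Clear vm->exception now that the thrown value has been captured in
    // regT1:regT0, by writing the empty JSValue over both of its words.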
    store32(TrustedImm32(JSValue().payload()), Address(regT3, VM::exceptionOffset() + OBJECT_OFFSETOF(JSValue, u.asBits.payload)));
    store32(TrustedImm32(JSValue().tag()), Address(regT3, VM::exceptionOffset() + OBJECT_OFFSETOF(JSValue, u.asBits.tag)));

    unsigned exception = currentInstruction[1].u.operand;
    emitStore(exception, regT1, regT0);
}

void JIT::emit_op_switch_imm(Instruction* currentInstruction)
{
    size_t tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->switchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Immediate));
    jumpTable->ensureCTITable();

    emitLoad(scrutinee, regT1, regT0);
    callOperation(operationSwitchImmWithUnknownKeyType, regT1, regT0, tableIndex);
    jump(returnValueGPR);
}

void JIT::emit_op_switch_char(Instruction* currentInstruction)
{
    size_t tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->switchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Character));
    jumpTable->ensureCTITable();

    emitLoad(scrutinee, regT1, regT0);
    callOperation(operationSwitchCharWithUnknownKeyType, regT1, regT0, tableIndex);
    jump(returnValueGPR);
}

void JIT::emit_op_switch_string(Instruction* currentInstruction)
{
    size_t tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    StringJumpTable* jumpTable = &m_codeBlock->stringSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset));

    emitLoad(scrutinee, regT1, regT0);
    callOperation(operationSwitchStringWithUnknownKeyType, regT1, regT0, tableIndex);
    jump(returnValueGPR);
}

void JIT::emit_op_throw_static_error(Instruction* currentInstruction)
{
    emitLoad(m_codeBlock->getConstant(currentInstruction[1].u.operand), regT1, regT0);
    callOperation(operationThrowStaticError, regT1, regT0, currentInstruction[2].u.operand);
}

void JIT::emit_op_debug(Instruction* currentInstruction)
{
    load32(codeBlock()->debuggerRequestsAddress(), regT0);
    Jump noDebuggerRequests = branchTest32(Zero, regT0);
    callOperation(operationDebug, currentInstruction[1].u.operand);
    noDebuggerRequests.link(this);
}

void JIT::emit_op_enter(Instruction* currentInstruction)
{
    emitEnterOptimizationCheck();

    // Even though JIT code doesn't use them, we initialize our constant
    // registers to zap stale pointers, to avoid unnecessarily prolonging
    // object lifetime and increasing GC pressure.
    for (int i = 0; i < m_codeBlock->m_numVars; ++i)
        emitStore(virtualRegisterForLocal(i).offset(), jsUndefined());

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_enter);
    slowPathCall.call();
}

void JIT::emit_op_create_lexical_environment(Instruction* currentInstruction)
{
    int lexicalEnvironment = currentInstruction[1].u.operand;
    int scope = currentInstruction[2].u.operand;

    emitLoadPayload(currentInstruction[2].u.operand, regT0);
    callOperation(operationCreateActivation, regT0);
    emitStoreCell(lexicalEnvironment, returnValueGPR);
    emitStoreCell(scope, returnValueGPR);
}

void JIT::emit_op_get_scope(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    emitGetFromCallFrameHeaderPtr(JSStack::Callee, regT0);
    loadPtr(Address(regT0, JSFunction::offsetOfScopeChain()), regT0);
    emitStoreCell(dst, regT0);
}

void JIT::emit_op_create_this(Instruction* currentInstruction)
{
    int callee = currentInstruction[2].u.operand;
    WriteBarrierBase<JSCell>* cachedFunction = &currentInstruction[4].u.jsCell;
    RegisterID calleeReg = regT0;
    RegisterID rareDataReg = regT4;
    RegisterID resultReg = regT0;
    RegisterID allocatorReg = regT1;
    RegisterID structureReg = regT2;
    RegisterID cachedFunctionReg = regT4;
    RegisterID scratchReg = regT3;

    emitLoadPayload(callee, calleeReg);
    loadPtr(Address(calleeReg, JSFunction::offsetOfRareData()), rareDataReg);
    addSlowCase(branchTestPtr(Zero, rareDataReg));
    loadPtr(Address(rareDataReg, FunctionRareData::offsetOfAllocationProfile() + ObjectAllocationProfile::offsetOfAllocator()), allocatorReg);
    loadPtr(Address(rareDataReg, FunctionRareData::offsetOfAllocationProfile() + ObjectAllocationProfile::offsetOfStructure()), structureReg);
    addSlowCase(branchTestPtr(Zero, allocatorReg));

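    // This site caches the last callee it saw. The sentinel
    // JSCell::seenMultipleCalleeObjects() marks the site as polymorphic, in
    // which case the callee comparison is skipped instead of treated as a
    // slow case.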
    loadPtr(cachedFunction, cachedFunctionReg);
    Jump hasSeenMultipleCallees = branchPtr(Equal, cachedFunctionReg, TrustedImmPtr(JSCell::seenMultipleCalleeObjects()));
    addSlowCase(branchPtr(NotEqual, calleeReg, cachedFunctionReg));
    hasSeenMultipleCallees.link(this);

    emitAllocateJSObject(allocatorReg, structureReg, resultReg, scratchReg);
    emitStoreCell(currentInstruction[1].u.operand, resultReg);
}

void JIT::emitSlow_op_create_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter); // doesn't have rare data
    linkSlowCase(iter); // doesn't have an allocation profile
    linkSlowCase(iter); // allocation failed
    linkSlowCase(iter); // cached function didn't match

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_create_this);
    slowPathCall.call();
}

void JIT::emit_op_to_this(Instruction* currentInstruction)
{
    WriteBarrierBase<Structure>* cachedStructure = &currentInstruction[2].u.structure;
    int thisRegister = currentInstruction[1].u.operand;

    emitLoad(thisRegister, regT3, regT2);

    addSlowCase(branch32(NotEqual, regT3, TrustedImm32(JSValue::CellTag)));
    addSlowCase(branch8(NotEqual, Address(regT2, JSCell::typeInfoTypeOffset()), TrustedImm32(FinalObjectType)));
    loadPtr(Address(regT2, JSCell::structureIDOffset()), regT0);
    loadPtr(cachedStructure, regT2);
    addSlowCase(branchPtr(NotEqual, regT0, regT2));
}

void JIT::emitSlow_op_to_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_to_this);
    slowPathCall.call();
}

void JIT::emit_op_check_tdz(Instruction* currentInstruction)
{
    emitLoadTag(currentInstruction[1].u.operand, regT0);
    addSlowCase(branch32(Equal, regT0, TrustedImm32(JSValue::EmptyValueTag)));
}

void JIT::emitSlow_op_check_tdz(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_throw_tdz_error);
    slowPathCall.call();
}

void JIT::emit_op_profile_will_call(Instruction* currentInstruction)
{
    load32(m_vm->enabledProfilerAddress(), regT0);
    Jump profilerDone = branchTestPtr(Zero, regT0);
    emitLoad(currentInstruction[1].u.operand, regT1, regT0);
    callOperation(operationProfileWillCall, regT1, regT0);
    profilerDone.link(this);
}

void JIT::emit_op_profile_did_call(Instruction* currentInstruction)
{
    load32(m_vm->enabledProfilerAddress(), regT0);
    Jump profilerDone = branchTestPtr(Zero, regT0);
    emitLoad(currentInstruction[1].u.operand, regT1, regT0);
    callOperation(operationProfileDidCall, regT1, regT0);
    profilerDone.link(this);
}

void JIT::emit_op_has_structure_property(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int enumerator = currentInstruction[4].u.operand;

    emitLoadPayload(base, regT0);
    emitJumpSlowCaseIfNotJSCell(base);

    emitLoadPayload(enumerator, regT1);

    load32(Address(regT0, JSCell::structureIDOffset()), regT0);
    addSlowCase(branch32(NotEqual, regT0, Address(regT1, JSPropertyNameEnumerator::cachedStructureIDOffset())));

    move(TrustedImm32(1), regT0);
    emitStoreBool(dst, regT0);
}

void JIT::privateCompileHasIndexedProperty(ByValInfo* byValInfo, ReturnAddressPtr returnAddress, JITArrayMode arrayMode)
{
    Instruction* currentInstruction = m_codeBlock->instructions().begin() + byValInfo->bytecodeIndex;

    PatchableJump badType;

    // FIXME: Add support for other types like TypedArrays and Arguments.
    // See https://bugs.webkit.org/show_bug.cgi?id=135033 and https://bugs.webkit.org/show_bug.cgi?id=135034.
    JumpList slowCases = emitLoadForArrayMode(currentInstruction, arrayMode, badType);
    move(TrustedImm32(1), regT0);
    Jump done = jump();

    LinkBuffer patchBuffer(*m_vm, *this, m_codeBlock);

    patchBuffer.link(badType, CodeLocationLabel(MacroAssemblerCodePtr::createFromExecutableAddress(returnAddress.value())).labelAtOffset(byValInfo->returnAddressToSlowPath));
    patchBuffer.link(slowCases, CodeLocationLabel(MacroAssemblerCodePtr::createFromExecutableAddress(returnAddress.value())).labelAtOffset(byValInfo->returnAddressToSlowPath));

    patchBuffer.link(done, byValInfo->badTypeJump.labelAtOffset(byValInfo->badTypeJumpToDone));

    byValInfo->stubRoutine = FINALIZE_CODE_FOR_STUB(
        m_codeBlock, patchBuffer,
        ("Baseline has_indexed_property stub for %s, return point %p", toCString(*m_codeBlock).data(), returnAddress.value()));

    RepatchBuffer repatchBuffer(m_codeBlock);
    repatchBuffer.relink(byValInfo->badTypeJump, CodeLocationLabel(byValInfo->stubRoutine->code().code()));
    repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(operationHasIndexedPropertyGeneric));
}

void JIT::emit_op_has_indexed_property(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int property = currentInstruction[3].u.operand;
    ArrayProfile* profile = currentInstruction[4].u.arrayProfile;

    emitLoadPayload(base, regT0);
    emitJumpSlowCaseIfNotJSCell(base);

    emitLoadPayload(property, regT1);

    // This is technically incorrect - we're zero-extending an int32. On the hot path this doesn't matter.
    // We check the value as if it were a uint32 against m_vectorLength - which will always fail if the
    // number was negative, since m_vectorLength is always less than INT_MAX (the total allocation size
    // is always less than 4GB). As such, zero-extending will have been correct (and extending the value
    // is necessary, since it's used in the address calculation). We zero-extend rather than sign-extend
    // since it makes it easier to re-tag the value in the slow case.
    zeroExtend32ToPtr(regT1, regT1);

    emitArrayProfilingSiteWithCell(regT0, regT2, profile);
    and32(TrustedImm32(IndexingShapeMask), regT2);

    JITArrayMode mode = chooseArrayMode(profile);
    PatchableJump badType;

    // FIXME: Add support for other types like TypedArrays and Arguments.
    // See https://bugs.webkit.org/show_bug.cgi?id=135033 and https://bugs.webkit.org/show_bug.cgi?id=135034.
    JumpList slowCases = emitLoadForArrayMode(currentInstruction, mode, badType);
    move(TrustedImm32(1), regT0);

    addSlowCase(badType);
    addSlowCase(slowCases);

    Label done = label();

    emitStoreBool(dst, regT0);

    m_byValCompilationInfo.append(ByValCompilationInfo(m_bytecodeOffset, badType, mode, done));
}

void JIT::emitSlow_op_has_indexed_property(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int property = currentInstruction[3].u.operand;
    ArrayProfile* profile = currentInstruction[4].u.arrayProfile;

    linkSlowCaseIfNotJSCell(iter, base); // base cell check
    linkSlowCase(iter); // base array check
    linkSlowCase(iter); // vector length check
    linkSlowCase(iter); // empty value

    Label slowPath = label();

    emitLoad(base, regT1, regT0);
    emitLoad(property, regT3, regT2);
    Call call = callOperation(operationHasIndexedPropertyDefault, dst, regT1, regT0, regT3, regT2, profile);

    m_byValCompilationInfo[m_byValInstructionIndex].slowPathTarget = slowPath;
    m_byValCompilationInfo[m_byValInstructionIndex].returnAddress = call;
    m_byValInstructionIndex++;
}

void JIT::emit_op_get_direct_pname(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int index = currentInstruction[4].u.operand;
    int enumerator = currentInstruction[5].u.operand;

    // Check that base is a cell
    emitLoadPayload(base, regT0);
    emitJumpSlowCaseIfNotJSCell(base);

    // Check the structure
    emitLoadPayload(enumerator, regT1);
    load32(Address(regT0, JSCell::structureIDOffset()), regT2);
    addSlowCase(branch32(NotEqual, regT2, Address(regT1, JSPropertyNameEnumerator::cachedStructureIDOffset())));

    // Compute the offset
    emitLoadPayload(index, regT2);
    // If index is less than the enumerator's cached inline storage, then it's an inline access
    Jump outOfLineAccess = branch32(AboveOrEqual, regT2, Address(regT1, JSPropertyNameEnumerator::cachedInlineCapacityOffset()));
    addPtr(TrustedImm32(JSObject::offsetOfInlineStorage()), regT0);
    load32(BaseIndex(regT0, regT2, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT1);
    load32(BaseIndex(regT0, regT2, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT0);

    Jump done = jump();

    // Otherwise it's out of line
    outOfLineAccess.link(this);
    loadPtr(Address(regT0, JSObject::butterflyOffset()), regT0);
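    // Out-of-line properties live in the butterfly at negative indices,
    // growing backwards from the butterfly pointer. Rebasing the index by the
    // inline capacity and negating it (below) produces that backwards index;
    // offsetOfFirstProperty then positions the access at the first
    // out-of-line slot.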
    sub32(Address(regT1, JSPropertyNameEnumerator::cachedInlineCapacityOffset()), regT2);
    neg32(regT2);
    int32_t offsetOfFirstProperty = static_cast<int32_t>(offsetInButterfly(firstOutOfLineOffset)) * sizeof(EncodedJSValue);
    load32(BaseIndex(regT0, regT2, TimesEight, offsetOfFirstProperty + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT1);
    load32(BaseIndex(regT0, regT2, TimesEight, offsetOfFirstProperty + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT0);

    done.link(this);
    emitValueProfilingSite();
    emitStore(dst, regT1, regT0);
}

void JIT::emitSlow_op_get_direct_pname(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int base = currentInstruction[2].u.operand;
    linkSlowCaseIfNotJSCell(iter, base);
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_get_direct_pname);
    slowPathCall.call();
}

void JIT::emit_op_enumerator_structure_pname(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int enumerator = currentInstruction[2].u.operand;
    int index = currentInstruction[3].u.operand;

    emitLoadPayload(index, regT0);
    emitLoadPayload(enumerator, regT1);
    Jump inBounds = branch32(Below, regT0, Address(regT1, JSPropertyNameEnumerator::endStructurePropertyIndexOffset()));

    move(TrustedImm32(JSValue::NullTag), regT2);
    move(TrustedImm32(0), regT0);

    Jump done = jump();
    inBounds.link(this);

    loadPtr(Address(regT1, JSPropertyNameEnumerator::cachedPropertyNamesVectorOffset()), regT1);
    loadPtr(BaseIndex(regT1, regT0, timesPtr()), regT0);
    move(TrustedImm32(JSValue::CellTag), regT2);

    done.link(this);
    emitStore(dst, regT2, regT0);
}

void JIT::emit_op_enumerator_generic_pname(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int enumerator = currentInstruction[2].u.operand;
    int index = currentInstruction[3].u.operand;

    emitLoadPayload(index, regT0);
    emitLoadPayload(enumerator, regT1);
    Jump inBounds = branch32(Below, regT0, Address(regT1, JSPropertyNameEnumerator::endGenericPropertyIndexOffset()));

    move(TrustedImm32(JSValue::NullTag), regT2);
    move(TrustedImm32(0), regT0);

    Jump done = jump();
    inBounds.link(this);

    loadPtr(Address(regT1, JSPropertyNameEnumerator::cachedPropertyNamesVectorOffset()), regT1);
    loadPtr(BaseIndex(regT1, regT0, timesPtr()), regT0);
    move(TrustedImm32(JSValue::CellTag), regT2);

    done.link(this);
    emitStore(dst, regT2, regT0);
}

void JIT::emit_op_profile_type(Instruction* currentInstruction)
{
    TypeLocation* cachedTypeLocation = currentInstruction[2].u.location;
    int valueToProfile = currentInstruction[1].u.operand;

    // Load payload in T0. Load tag in T3.
    emitLoadPayload(valueToProfile, regT0);
    emitLoadTag(valueToProfile, regT3);

    JumpList jumpToEnd;

    // Compile in a predictive type check, if possible, to see if we can skip writing to the log.
    // These typechecks are inlined to match those of the 32-bit JSValue type checks.
    if (cachedTypeLocation->m_lastSeenType == TypeUndefined)
        jumpToEnd.append(branch32(Equal, regT3, TrustedImm32(JSValue::UndefinedTag)));
    else if (cachedTypeLocation->m_lastSeenType == TypeNull)
        jumpToEnd.append(branch32(Equal, regT3, TrustedImm32(JSValue::NullTag)));
    else if (cachedTypeLocation->m_lastSeenType == TypeBoolean)
        jumpToEnd.append(branch32(Equal, regT3, TrustedImm32(JSValue::BooleanTag)));
    else if (cachedTypeLocation->m_lastSeenType == TypeMachineInt)
        jumpToEnd.append(branch32(Equal, regT3, TrustedImm32(JSValue::Int32Tag)));
    else if (cachedTypeLocation->m_lastSeenType == TypeNumber) {
        jumpToEnd.append(branch32(Below, regT3, TrustedImm32(JSValue::LowestTag)));
        jumpToEnd.append(branch32(Equal, regT3, TrustedImm32(JSValue::Int32Tag)));
    } else if (cachedTypeLocation->m_lastSeenType == TypeString) {
        Jump isNotCell = branch32(NotEqual, regT3, TrustedImm32(JSValue::CellTag));
        jumpToEnd.append(branch8(Equal, Address(regT0, JSCell::typeInfoTypeOffset()), TrustedImm32(StringType)));
        isNotCell.link(this);
    }

    // Load the type profiling log into T2.
    TypeProfilerLog* cachedTypeProfilerLog = m_vm->typeProfilerLog();
    move(TrustedImmPtr(cachedTypeProfilerLog), regT2);

    // Load the next log entry into T1.
    loadPtr(Address(regT2, TypeProfilerLog::currentLogEntryOffset()), regT1);

    // Store the JSValue onto the log entry.
    store32(regT0, Address(regT1, TypeProfilerLog::LogEntry::valueOffset() + OBJECT_OFFSETOF(JSValue, u.asBits.payload)));
    store32(regT3, Address(regT1, TypeProfilerLog::LogEntry::valueOffset() + OBJECT_OFFSETOF(JSValue, u.asBits.tag)));

    // Store the structureID of the cell if argument is a cell, otherwise, store 0 on the log entry.
    Jump notCell = branch32(NotEqual, regT3, TrustedImm32(JSValue::CellTag));
    load32(Address(regT0, JSCell::structureIDOffset()), regT0);
    store32(regT0, Address(regT1, TypeProfilerLog::LogEntry::structureIDOffset()));
    Jump skipNotCell = jump();
    notCell.link(this);
    store32(TrustedImm32(0), Address(regT1, TypeProfilerLog::LogEntry::structureIDOffset()));
    skipNotCell.link(this);

    // Store the typeLocation on the log entry.
    move(TrustedImmPtr(cachedTypeLocation), regT0);
    store32(regT0, Address(regT1, TypeProfilerLog::LogEntry::locationOffset()));

    // Increment the current log entry.
    addPtr(TrustedImm32(sizeof(TypeProfilerLog::LogEntry)), regT1);
    store32(regT1, Address(regT2, TypeProfilerLog::currentLogEntryOffset()));
    jumpToEnd.append(branchPtr(NotEqual, regT1, TrustedImmPtr(cachedTypeProfilerLog->logEndPtr())));
    // Clear the log if we're at the end of the log.
    callOperation(operationProcessTypeProfilerLog);

    jumpToEnd.link(this);
}

} // namespace JSC

#endif // USE(JSVALUE32_64)
#endif // ENABLE(JIT)