[JSC] Remove per-host-function CTI stub in 32bit environment
Source/JavaScriptCore/jit/JITOpcodes32_64.cpp
/*
 * Copyright (C) 2009-2017 Apple Inc. All rights reserved.
 * Copyright (C) 2010 Patrick Gansterer <paroga@paroga.com>
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"

#if ENABLE(JIT)
#if USE(JSVALUE32_64)
#include "JIT.h"

#include "BytecodeStructs.h"
#include "CCallHelpers.h"
#include "Exception.h"
#include "JITInlines.h"
#include "JSArray.h"
#include "JSCell.h"
#include "JSFunction.h"
#include "JSPropertyNameEnumerator.h"
#include "LinkBuffer.h"
#include "MaxFrameExtentForSlowPathCall.h"
#include "Opcode.h"
#include "SlowPathCall.h"
#include "TypeProfilerLog.h"
#include "VirtualRegister.h"

namespace JSC {

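// In the 32-bit value representation (USE(JSVALUE32_64)) a JSValue occupies two
// machine words: a 32-bit tag and a 32-bit payload. Cells, booleans, int32s,
// null and undefined each get a dedicated tag (CellTag, BooleanTag, Int32Tag,
// NullTag, UndefinedTag), and any tag numerically below LowestTag means the two
// words together hold the bits of a double. Throughout this file emitLoad()
// loads a value's tag and payload into a register pair (tag register first),
// emitLoadTag()/emitLoadPayload() load the two halves individually, and
// emitStore()/emitStoreCell()/emitStoreBool() write a value back to the stack
// slot of a virtual register.
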
void JIT::emit_op_mov(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    if (m_codeBlock->isConstantRegisterIndex(src))
        emitStore(dst, getConstantOperand(src));
    else {
        emitLoad(src, regT1, regT0);
        emitStore(dst, regT1, regT0);
    }
}

void JIT::emit_op_end(Instruction* currentInstruction)
{
    ASSERT(returnValueGPR != callFrameRegister);
    emitLoad(currentInstruction[1].u.operand, regT1, returnValueGPR);
    emitRestoreCalleeSaves();
    emitFunctionEpilogue();
    ret();
}

void JIT::emit_op_jmp(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[1].u.operand;
    addJump(jump(), target);
}

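// Note the slow-case bookkeeping below: when the allocator is known statically,
// an unset placeholder Jump() is registered so that this opcode always produces
// the same number of slow-case entries as the dynamic-allocator path
// (linkSlowCase() skips entries whose jump was never set). This keeps the two
// linkSlowCase() calls in emitSlow_op_new_object() valid for either code shape.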
void JIT::emit_op_new_object(Instruction* currentInstruction)
{
    Structure* structure = currentInstruction[3].u.objectAllocationProfile->structure();
    size_t allocationSize = JSFinalObject::allocationSize(structure->inlineCapacity());
    MarkedAllocator* allocator = subspaceFor<JSFinalObject>(*m_vm)->allocatorFor(allocationSize);

    RegisterID resultReg = returnValueGPR;
    RegisterID allocatorReg = regT1;
    RegisterID scratchReg = regT3;

    move(TrustedImmPtr(allocator), allocatorReg);
    if (allocator)
        addSlowCase(Jump());
    JumpList slowCases;
    emitAllocateJSObject(resultReg, allocator, allocatorReg, TrustedImmPtr(structure), TrustedImmPtr(0), scratchReg, slowCases);
    emitInitializeInlineStorage(resultReg, structure->inlineCapacity());
    addSlowCase(slowCases);
    emitStoreCell(currentInstruction[1].u.operand, resultReg);
}

void JIT::emitSlow_op_new_object(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    int dst = currentInstruction[1].u.operand;
    Structure* structure = currentInstruction[3].u.objectAllocationProfile->structure();
    callOperation(operationNewObject, structure);
    emitStoreCell(dst, returnValueGPR);
}

void JIT::emit_op_overrides_has_instance(Instruction* currentInstruction)
{
    auto& bytecode = *reinterpret_cast<OpOverridesHasInstance*>(currentInstruction);
    int dst = bytecode.dst();
    int constructor = bytecode.constructor();
    int hasInstanceValue = bytecode.hasInstanceValue();

    emitLoadPayload(hasInstanceValue, regT0);
    // We don't jump if we know what Symbol.hasInstance would do.
    Jump hasInstanceValueNotCell = emitJumpIfNotJSCell(hasInstanceValue);
    Jump customhasInstanceValue = branchPtr(NotEqual, regT0, TrustedImmPtr(m_codeBlock->globalObject()->functionProtoHasInstanceSymbolFunction()));

    // We know that constructor is an object from the way bytecode is emitted for instanceof expressions.
    emitLoadPayload(constructor, regT0);

    // Check that the constructor 'ImplementsDefaultHasInstance', i.e. that the object is not a C-API user nor a bound function.
    test8(Zero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(ImplementsDefaultHasInstance), regT0);
    Jump done = jump();

    hasInstanceValueNotCell.link(this);
    customhasInstanceValue.link(this);
    move(TrustedImm32(1), regT0);

    done.link(this);
    emitStoreBool(dst, regT0);
}

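// op_instanceof walks the prototype chain of value looking for proto. Each
// iteration loads the current cell's Structure and reads the prototype stored
// there; if that slot holds the empty value, the Structure has poly proto and
// the prototype is instead loaded from the object itself at
// knownPolyProtoOffset. Proxy objects bail to the slow path because their
// prototype is provided dynamically.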
void JIT::emit_op_instanceof(Instruction* currentInstruction)
{
    auto& bytecode = *reinterpret_cast<OpInstanceof*>(currentInstruction);
    int dst = bytecode.dst();
    int value = bytecode.value();
    int proto = bytecode.prototype();

    // Load the operands into registers.
    // regT0 is left free here so that it can hold the result.
    emitLoadPayload(value, regT2);
    emitLoadPayload(proto, regT1);

    // Check that value and proto are cells. baseVal must be a cell - this is checked by the get_by_id for Symbol.hasInstance.
    emitJumpSlowCaseIfNotJSCell(value);
    emitJumpSlowCaseIfNotJSCell(proto);

    // Check that prototype is an object
    addSlowCase(emitJumpIfCellNotObject(regT1));

    // Optimistically load the result true, and start looping.
    // Initially, regT1 still contains proto and regT2 still contains value.
    // As we loop regT2 will be updated with its prototype, recursively walking the prototype chain.
    move(TrustedImm32(1), regT0);
    Label loop(this);

    addSlowCase(branch8(Equal, Address(regT2, JSCell::typeInfoTypeOffset()), TrustedImm32(ProxyObjectType)));

    // Load the prototype of the cell in regT2.  If this is equal to regT1 - WIN!
    // Otherwise, check if we've hit null - if we have then drop out of the loop, if not go again.
    loadPtr(Address(regT2, JSCell::structureIDOffset()), regT4);
    load32(Address(regT4, Structure::prototypeOffset() + TagOffset), regT3);
    load32(Address(regT4, Structure::prototypeOffset() + PayloadOffset), regT4);
    auto hasMonoProto = branch32(NotEqual, regT3, TrustedImm32(JSValue::EmptyValueTag));
    load32(Address(regT2, offsetRelativeToBase(knownPolyProtoOffset) + PayloadOffset), regT4);
    hasMonoProto.link(this);
    move(regT4, regT2);
    Jump isInstance = branchPtr(Equal, regT2, regT1);
    branchTest32(NonZero, regT2).linkTo(loop, this);

    // We get here either by dropping out of the loop, or if value was not an Object.  Result is false.
    move(TrustedImm32(0), regT0);

    // isInstance jumps right down to here, to skip setting the result to false (it has already set true).
    isInstance.link(this);
    emitStoreBool(dst, regT0);
}

void JIT::emit_op_instanceof_custom(Instruction*)
{
    // This always goes to slow path since we expect it to be rare.
    addSlowCase(jump());
}

void JIT::emitSlow_op_instanceof(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    auto& bytecode = *reinterpret_cast<OpInstanceof*>(currentInstruction);
    int dst = bytecode.dst();
    int value = bytecode.value();
    int proto = bytecode.prototype();

    linkSlowCaseIfNotJSCell(iter, value);
    linkSlowCaseIfNotJSCell(iter, proto);
    linkSlowCase(iter);
    linkSlowCase(iter);

    emitLoad(value, regT1, regT0);
    emitLoad(proto, regT3, regT2);
    callOperation(operationInstanceOf, dst, regT1, regT0, regT3, regT2);
}

void JIT::emitSlow_op_instanceof_custom(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    auto& bytecode = *reinterpret_cast<OpInstanceofCustom*>(currentInstruction);
    int dst = bytecode.dst();
    int value = bytecode.value();
    int constructor = bytecode.constructor();
    int hasInstanceValue = bytecode.hasInstanceValue();

    linkSlowCase(iter);

    emitLoad(value, regT1, regT0);
    emitLoadPayload(constructor, regT2);
    emitLoad(hasInstanceValue, regT4, regT3);
    callOperation(operationInstanceOfCustom, regT1, regT0, regT2, regT4, regT3);
    emitStoreBool(dst, returnValueGPR);
}

void JIT::emit_op_is_empty(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitLoad(value, regT1, regT0);
    compare32(Equal, regT1, TrustedImm32(JSValue::EmptyValueTag), regT0);

    emitStoreBool(dst, regT0);
}

void JIT::emit_op_is_undefined(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitLoad(value, regT1, regT0);
    Jump isCell = branch32(Equal, regT1, TrustedImm32(JSValue::CellTag));

    compare32(Equal, regT1, TrustedImm32(JSValue::UndefinedTag), regT0);
    Jump done = jump();

    isCell.link(this);
    Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    move(TrustedImm32(0), regT0);
    Jump notMasqueradesAsUndefined = jump();

    isMasqueradesAsUndefined.link(this);
    loadPtr(Address(regT0, JSCell::structureIDOffset()), regT1);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    loadPtr(Address(regT1, Structure::globalObjectOffset()), regT1);
    compare32(Equal, regT0, regT1, regT0);

    notMasqueradesAsUndefined.link(this);
    done.link(this);
    emitStoreBool(dst, regT0);
}

void JIT::emit_op_is_boolean(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitLoadTag(value, regT0);
    compare32(Equal, regT0, TrustedImm32(JSValue::BooleanTag), regT0);
    emitStoreBool(dst, regT0);
}

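// op_is_number relies on the tag layout: Int32Tag is 0xffffffff, and every tag
// numerically below LowestTag denotes a double. Adding 1 wraps Int32Tag around
// to zero and keeps double tags below LowestTag + 1, so a single unsigned
// "Below LowestTag + 1" comparison answers "int32 or double?".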
void JIT::emit_op_is_number(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitLoadTag(value, regT0);
    add32(TrustedImm32(1), regT0);
    compare32(Below, regT0, TrustedImm32(JSValue::LowestTag + 1), regT0);
    emitStoreBool(dst, regT0);
}

void JIT::emit_op_is_cell_with_type(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;
    int type = currentInstruction[3].u.operand;

    emitLoad(value, regT1, regT0);
    Jump isNotCell = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));

    compare8(Equal, Address(regT0, JSCell::typeInfoTypeOffset()), TrustedImm32(type), regT0);
    Jump done = jump();

    isNotCell.link(this);
    move(TrustedImm32(0), regT0);

    done.link(this);
    emitStoreBool(dst, regT0);
}

void JIT::emit_op_is_object(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitLoad(value, regT1, regT0);
    Jump isNotCell = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));

    compare8(AboveOrEqual, Address(regT0, JSCell::typeInfoTypeOffset()), TrustedImm32(ObjectType), regT0);
    Jump done = jump();

    isNotCell.link(this);
    move(TrustedImm32(0), regT0);

    done.link(this);
    emitStoreBool(dst, regT0);
}

void JIT::emit_op_to_primitive(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);

    Jump isImm = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));
    addSlowCase(emitJumpIfCellObject(regT0));
    isImm.link(this);

    if (dst != src)
        emitStore(dst, regT1, regT0);
}

void JIT::emitSlow_op_to_primitive(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_to_primitive);
    slowPathCall.call();
}

void JIT::emit_op_set_function_name(Instruction* currentInstruction)
{
    int func = currentInstruction[1].u.operand;
    int name = currentInstruction[2].u.operand;
    emitLoadPayload(func, regT1);
    emitLoad(name, regT3, regT2);
    callOperation(operationSetFunctionName, regT1, regT3, regT2);
}

void JIT::emit_op_strcat(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_strcat);
    slowPathCall.call();
}

void JIT::emit_op_not(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitLoadTag(src, regT0);

    emitLoad(src, regT1, regT0);
    addSlowCase(branch32(NotEqual, regT1, TrustedImm32(JSValue::BooleanTag)));
    xor32(TrustedImm32(1), regT0);

    emitStoreBool(dst, regT0, (dst == src));
}

void JIT::emitSlow_op_not(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_not);
    slowPathCall.call();
}

void JIT::emit_op_jfalse(Instruction* currentInstruction)
{
    int cond = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitLoad(cond, regT1, regT0);

    JSValueRegs value(regT1, regT0);
    GPRReg scratch = regT2;
    GPRReg result = regT3;
    bool shouldCheckMasqueradesAsUndefined = true;
    emitConvertValueToBoolean(*vm(), value, result, scratch, fpRegT0, fpRegT1, shouldCheckMasqueradesAsUndefined, m_codeBlock->globalObject());

    addJump(branchTest32(Zero, result), target);
}

void JIT::emit_op_jtrue(Instruction* currentInstruction)
{
    int cond = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitLoad(cond, regT1, regT0);
    bool shouldCheckMasqueradesAsUndefined = true;
    JSValueRegs value(regT1, regT0);
    GPRReg scratch = regT2;
    GPRReg result = regT3;
    emitConvertValueToBoolean(*vm(), value, result, scratch, fpRegT0, fpRegT1, shouldCheckMasqueradesAsUndefined, m_codeBlock->globalObject());

    addJump(branchTest32(NonZero, result), target);
}

void JIT::emit_op_jeq_null(Instruction* currentInstruction)
{
    int src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);

    Jump isImmediate = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));

    Jump isNotMasqueradesAsUndefined = branchTest8(Zero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    loadPtr(Address(regT0, JSCell::structureIDOffset()), regT2);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    addJump(branchPtr(Equal, Address(regT2, Structure::globalObjectOffset()), regT0), target);
    Jump masqueradesGlobalObjectIsForeign = jump();

    // Now handle the immediate cases - undefined & null
    isImmediate.link(this);
    ASSERT((JSValue::UndefinedTag + 1 == JSValue::NullTag) && (JSValue::NullTag & 0x1));
    or32(TrustedImm32(1), regT1);
    addJump(branch32(Equal, regT1, TrustedImm32(JSValue::NullTag)), target);

    isNotMasqueradesAsUndefined.link(this);
    masqueradesGlobalObjectIsForeign.link(this);
}

void JIT::emit_op_jneq_null(Instruction* currentInstruction)
{
    int src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);

    Jump isImmediate = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));

    addJump(branchTest8(Zero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined)), target);
    loadPtr(Address(regT0, JSCell::structureIDOffset()), regT2);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    addJump(branchPtr(NotEqual, Address(regT2, Structure::globalObjectOffset()), regT0), target);
    Jump wasNotImmediate = jump();

    // Now handle the immediate cases - undefined & null
    isImmediate.link(this);

    ASSERT((JSValue::UndefinedTag + 1 == JSValue::NullTag) && (JSValue::NullTag & 0x1));
    or32(TrustedImm32(1), regT1);
    addJump(branch32(NotEqual, regT1, TrustedImm32(JSValue::NullTag)), target);

    wasNotImmediate.link(this);
}

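// op_jneq_ptr guards a branch on the payload matching a per-CodeBlock special
// pointer. If the pointer does not match (or the value is not a cell), the
// bytecode's fourth operand is set to 1 to record that the jump was taken,
// which tells the optimizing tiers not to assume the guarded pointer.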
void JIT::emit_op_jneq_ptr(Instruction* currentInstruction)
{
    int src = currentInstruction[1].u.operand;
    Special::Pointer ptr = currentInstruction[2].u.specialPointer;
    unsigned target = currentInstruction[3].u.operand;

    emitLoad(src, regT1, regT0);
    CCallHelpers::Jump notCell = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));
    CCallHelpers::Jump equal = branchPtr(Equal, regT0, TrustedImmPtr(actualPointerFor(m_codeBlock, ptr)));
    notCell.link(this);
    store32(TrustedImm32(1), &currentInstruction[4].u.operand);
    addJump(jump(), target);
    equal.link(this);
}

void JIT::emit_op_eq(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src1 = currentInstruction[2].u.operand;
    int src2 = currentInstruction[3].u.operand;

    emitLoad2(src1, regT1, regT0, src2, regT3, regT2);
    addSlowCase(branch32(NotEqual, regT1, regT3));
    addSlowCase(branch32(Equal, regT1, TrustedImm32(JSValue::CellTag)));
    addSlowCase(branch32(Below, regT1, TrustedImm32(JSValue::LowestTag)));

    compare32(Equal, regT0, regT2, regT0);

    emitStoreBool(dst, regT0);
}

void JIT::emitSlow_op_eq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;
    int op1 = currentInstruction[2].u.operand;
    int op2 = currentInstruction[3].u.operand;

    JumpList storeResult;
    JumpList genericCase;

    genericCase.append(getSlowCase(iter)); // tags not equal

    linkSlowCase(iter); // tags equal and JSCell
    genericCase.append(branchPtr(NotEqual, Address(regT0, JSCell::structureIDOffset()), TrustedImmPtr(m_vm->stringStructure.get())));
    genericCase.append(branchPtr(NotEqual, Address(regT2, JSCell::structureIDOffset()), TrustedImmPtr(m_vm->stringStructure.get())));

    // String case.
    callOperation(operationCompareStringEq, regT0, regT2);
    storeResult.append(jump());

    // Generic case.
    genericCase.append(getSlowCase(iter)); // doubles
    genericCase.link(this);
    emitLoad(op1, regT1, regT0);
    emitLoad(op2, regT3, regT2);
    callOperation(operationCompareEq, regT1, regT0, regT3, regT2);

    storeResult.link(this);
    emitStoreBool(dst, returnValueGPR);
}

void JIT::emit_op_neq(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src1 = currentInstruction[2].u.operand;
    int src2 = currentInstruction[3].u.operand;

    emitLoad2(src1, regT1, regT0, src2, regT3, regT2);
    addSlowCase(branch32(NotEqual, regT1, regT3));
    addSlowCase(branch32(Equal, regT1, TrustedImm32(JSValue::CellTag)));
    addSlowCase(branch32(Below, regT1, TrustedImm32(JSValue::LowestTag)));

    compare32(NotEqual, regT0, regT2, regT0);

    emitStoreBool(dst, regT0);
}

void JIT::emitSlow_op_neq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;

    JumpList storeResult;
    JumpList genericCase;

    genericCase.append(getSlowCase(iter)); // tags not equal

    linkSlowCase(iter); // tags equal and JSCell
    genericCase.append(branchPtr(NotEqual, Address(regT0, JSCell::structureIDOffset()), TrustedImmPtr(m_vm->stringStructure.get())));
    genericCase.append(branchPtr(NotEqual, Address(regT2, JSCell::structureIDOffset()), TrustedImmPtr(m_vm->stringStructure.get())));

    // String case.
    callOperation(operationCompareStringEq, regT0, regT2);
    storeResult.append(jump());

    // Generic case.
    genericCase.append(getSlowCase(iter)); // doubles
    genericCase.link(this);
    callOperation(operationCompareEq, regT1, regT0, regT3, regT2);

    storeResult.link(this);
    xor32(TrustedImm32(0x1), returnValueGPR);
    emitStoreBool(dst, returnValueGPR);
}

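// Strict equality fast path: once both operands carry the same non-double tag,
// === reduces to a payload comparison. The exception is two non-object cells
// (strings or symbols), where equality is not simple pointer identity (two
// distinct string cells can still be equal), so that case joins differing tags
// and doubles on the slow path.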
void JIT::compileOpStrictEq(Instruction* currentInstruction, CompileOpStrictEqType type)
{
    int dst = currentInstruction[1].u.operand;
    int src1 = currentInstruction[2].u.operand;
    int src2 = currentInstruction[3].u.operand;

    emitLoad2(src1, regT1, regT0, src2, regT3, regT2);

    // Bail if the tags differ, or are double.
    addSlowCase(branch32(NotEqual, regT1, regT3));
    addSlowCase(branch32(Below, regT1, TrustedImm32(JSValue::LowestTag)));

    // Jump to a slow case if both are strings or symbols (non object).
    Jump notCell = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));
    Jump firstIsObject = emitJumpIfCellObject(regT0);
    addSlowCase(emitJumpIfCellNotObject(regT2));
    notCell.link(this);
    firstIsObject.link(this);

    // Simply compare the payloads.
    if (type == OpStrictEq)
        compare32(Equal, regT0, regT2, regT0);
    else
        compare32(NotEqual, regT0, regT2, regT0);

    emitStoreBool(dst, regT0);
}

void JIT::emit_op_stricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpStrictEq);
}

void JIT::emitSlow_op_stricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_stricteq);
    slowPathCall.call();
}

void JIT::emit_op_nstricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpNStrictEq);
}

void JIT::emitSlow_op_nstricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_nstricteq);
    slowPathCall.call();
}

void JIT::emit_op_eq_null(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);
    Jump isImmediate = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));

    Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    move(TrustedImm32(0), regT1);
    Jump wasNotMasqueradesAsUndefined = jump();

    isMasqueradesAsUndefined.link(this);
    loadPtr(Address(regT0, JSCell::structureIDOffset()), regT2);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    loadPtr(Address(regT2, Structure::globalObjectOffset()), regT2);
    compare32(Equal, regT0, regT2, regT1);
    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    compare32(Equal, regT1, TrustedImm32(JSValue::NullTag), regT2);
    compare32(Equal, regT1, TrustedImm32(JSValue::UndefinedTag), regT1);
    or32(regT2, regT1);

    wasNotImmediate.link(this);
    wasNotMasqueradesAsUndefined.link(this);

    emitStoreBool(dst, regT1);
}

void JIT::emit_op_neq_null(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);
    Jump isImmediate = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));

    Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    move(TrustedImm32(1), regT1);
    Jump wasNotMasqueradesAsUndefined = jump();

    isMasqueradesAsUndefined.link(this);
    loadPtr(Address(regT0, JSCell::structureIDOffset()), regT2);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    loadPtr(Address(regT2, Structure::globalObjectOffset()), regT2);
    compare32(NotEqual, regT0, regT2, regT1);
    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    compare32(NotEqual, regT1, TrustedImm32(JSValue::NullTag), regT2);
    compare32(NotEqual, regT1, TrustedImm32(JSValue::UndefinedTag), regT1);
    and32(regT2, regT1);

    wasNotImmediate.link(this);
    wasNotMasqueradesAsUndefined.link(this);

    emitStoreBool(dst, regT1);
}

void JIT::emit_op_throw(Instruction* currentInstruction)
{
    ASSERT(regT0 == returnValueGPR);
    copyCalleeSavesToEntryFrameCalleeSavesBuffer(vm()->topEntryFrame);
    emitLoad(currentInstruction[1].u.operand, regT1, regT0);
    callOperationNoExceptionCheck(operationThrow, regT1, regT0);
    jumpToExceptionHandler(*vm());
}

void JIT::emit_op_push_with_scope(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_push_with_scope);
    slowPathCall.call();
}

void JIT::emit_op_to_number(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);

    Jump isInt32 = branch32(Equal, regT1, TrustedImm32(JSValue::Int32Tag));
    addSlowCase(branch32(AboveOrEqual, regT1, TrustedImm32(JSValue::LowestTag)));
    isInt32.link(this);

    emitValueProfilingSite();
    if (src != dst)
        emitStore(dst, regT1, regT0);
}

void JIT::emitSlow_op_to_number(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_to_number);
    slowPathCall.call();
}

void JIT::emit_op_to_string(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitLoad(src, regT1, regT0);

    addSlowCase(branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag)));
    addSlowCase(branch8(NotEqual, Address(regT0, JSCell::typeInfoTypeOffset()), TrustedImm32(StringType)));

    if (src != dst)
        emitStore(dst, regT1, regT0);
}

void JIT::emitSlow_op_to_string(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter); // Not JSCell.
    linkSlowCase(iter); // Not JSString.

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_to_string);
    slowPathCall.call();
}

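// op_catch is the entry point of an exception handler. The unwinder has
// already located the handler frame: we restore callee saves, reload the call
// frame pointer from VM::callFrameForCatch and recompute the stack pointer
// from it, check that the exception is actually catchable, and then store both
// the Exception object and the thrown JSValue into their virtual registers.
// With the DFG enabled we also offer the optimizing tiers a chance to
// OSR-enter right at the catch handler, value-profiling the live operands when
// a profiling buffer exists.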
void JIT::emit_op_catch(Instruction* currentInstruction)
{
    restoreCalleeSavesFromEntryFrameCalleeSavesBuffer(vm()->topEntryFrame);

    move(TrustedImmPtr(m_vm), regT3);
    // operationThrow returns the callFrame for the handler.
    load32(Address(regT3, VM::callFrameForCatchOffset()), callFrameRegister);
    storePtr(TrustedImmPtr(nullptr), Address(regT3, VM::callFrameForCatchOffset()));

    addPtr(TrustedImm32(stackPointerOffsetFor(codeBlock()) * sizeof(Register)), callFrameRegister, stackPointerRegister);

    callOperationNoExceptionCheck(operationCheckIfExceptionIsUncatchableAndNotifyProfiler);
    Jump isCatchableException = branchTest32(Zero, returnValueGPR);
    jumpToExceptionHandler(*vm());
    isCatchableException.link(this);

    move(TrustedImmPtr(m_vm), regT3);

    // Now store the exception returned by operationThrow.
    load32(Address(regT3, VM::exceptionOffset()), regT2);
    move(TrustedImm32(JSValue::CellTag), regT1);

    store32(TrustedImm32(0), Address(regT3, VM::exceptionOffset()));

    unsigned exception = currentInstruction[1].u.operand;
    emitStore(exception, regT1, regT2);

    load32(Address(regT2, Exception::valueOffset() + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT0);
    load32(Address(regT2, Exception::valueOffset() + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT1);

    unsigned thrownValue = currentInstruction[2].u.operand;
    emitStore(thrownValue, regT1, regT0);

#if ENABLE(DFG_JIT)
    // FIXME: consider inline caching the process of doing OSR entry, including
    // argument type proofs, storing locals to the buffer, etc
    // https://bugs.webkit.org/show_bug.cgi?id=175598

    ValueProfileAndOperandBuffer* buffer = static_cast<ValueProfileAndOperandBuffer*>(currentInstruction[3].u.pointer);
    if (buffer || !shouldEmitProfiling())
        callOperation(operationTryOSREnterAtCatch, m_bytecodeOffset);
    else
        callOperation(operationTryOSREnterAtCatchAndValueProfile, m_bytecodeOffset);
    auto skipOSREntry = branchTestPtr(Zero, returnValueGPR);
    emitRestoreCalleeSaves();
    jump(returnValueGPR);
    skipOSREntry.link(this);
    if (buffer && shouldEmitProfiling()) {
        buffer->forEach([&] (ValueProfileAndOperand& profile) {
            JSValueRegs regs(regT1, regT0);
            emitGetVirtualRegister(profile.m_operand, regs);
            emitValueProfilingSite(profile.m_profile);
        });
    }
#endif // ENABLE(DFG_JIT)
}

void JIT::emit_op_assert(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_assert);
    slowPathCall.call();
}

void JIT::emit_op_identity_with_profile(Instruction*)
{
    // We don't need to do anything here...
}

void JIT::emit_op_create_lexical_environment(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_create_lexical_environment);
    slowPathCall.call();
}

void JIT::emit_op_get_parent_scope(Instruction* currentInstruction)
{
    int currentScope = currentInstruction[2].u.operand;
    emitLoadPayload(currentScope, regT0);
    loadPtr(Address(regT0, JSScope::offsetOfNext()), regT0);
    emitStoreCell(currentInstruction[1].u.operand, regT0);
}

void JIT::emit_op_switch_imm(Instruction* currentInstruction)
{
    size_t tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->switchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Immediate));
    jumpTable->ensureCTITable();

    emitLoad(scrutinee, regT1, regT0);
    callOperation(operationSwitchImmWithUnknownKeyType, regT1, regT0, tableIndex);
    jump(returnValueGPR);
}

void JIT::emit_op_switch_char(Instruction* currentInstruction)
{
    size_t tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->switchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Character));
    jumpTable->ensureCTITable();

    emitLoad(scrutinee, regT1, regT0);
    callOperation(operationSwitchCharWithUnknownKeyType, regT1, regT0, tableIndex);
    jump(returnValueGPR);
}

void JIT::emit_op_switch_string(Instruction* currentInstruction)
{
    size_t tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    StringJumpTable* jumpTable = &m_codeBlock->stringSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset));

    emitLoad(scrutinee, regT1, regT0);
    callOperation(operationSwitchStringWithUnknownKeyType, regT1, regT0, tableIndex);
    jump(returnValueGPR);
}

void JIT::emit_op_debug(Instruction* currentInstruction)
{
    load32(codeBlock()->debuggerRequestsAddress(), regT0);
    Jump noDebuggerRequests = branchTest32(Zero, regT0);
    callOperation(operationDebug, currentInstruction[1].u.operand);
    noDebuggerRequests.link(this);
}

void JIT::emit_op_enter(Instruction* currentInstruction)
{
    emitEnterOptimizationCheck();

    // Even though JIT code doesn't use them, we initialize our constant
    // registers to zap stale pointers, to avoid unnecessarily prolonging
    // object lifetime and increasing GC pressure.
    for (int i = 0; i < m_codeBlock->m_numVars; ++i)
        emitStore(virtualRegisterForLocal(i).offset(), jsUndefined());

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_enter);
    slowPathCall.call();
}

void JIT::emit_op_get_scope(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    emitGetFromCallFrameHeaderPtr(CallFrameSlot::callee, regT0);
    loadPtr(Address(regT0, JSFunction::offsetOfScopeChain()), regT0);
    emitStoreCell(dst, regT0);
}

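// op_create_this allocates the |this| object for a [[Construct]] invocation.
// The fast path requires the callee to be a JSFunction whose rare data carries
// a populated object allocation profile, and it guards on the callee cell
// cached in the instruction stream; seenMultipleCalleeObjects() marks a site
// already known to be polymorphic, for which the callee check is skipped.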
void JIT::emit_op_create_this(Instruction* currentInstruction)
{
    int callee = currentInstruction[2].u.operand;
    WriteBarrierBase<JSCell>* cachedFunction = &currentInstruction[4].u.jsCell;
    RegisterID calleeReg = regT0;
    RegisterID rareDataReg = regT4;
    RegisterID resultReg = regT0;
    RegisterID allocatorReg = regT1;
    RegisterID structureReg = regT2;
    RegisterID cachedFunctionReg = regT4;
    RegisterID scratchReg = regT3;

    emitLoadPayload(callee, calleeReg);
    addSlowCase(branch8(NotEqual, Address(calleeReg, JSCell::typeInfoTypeOffset()), TrustedImm32(JSFunctionType)));
    loadPtr(Address(calleeReg, JSFunction::offsetOfRareData()), rareDataReg);
    addSlowCase(branchTestPtr(Zero, rareDataReg));
    loadPtr(Address(rareDataReg, FunctionRareData::offsetOfObjectAllocationProfile() + ObjectAllocationProfile::offsetOfAllocator()), allocatorReg);
    loadPtr(Address(rareDataReg, FunctionRareData::offsetOfObjectAllocationProfile() + ObjectAllocationProfile::offsetOfStructure()), structureReg);
    addSlowCase(branchTestPtr(Zero, allocatorReg));

    loadPtr(cachedFunction, cachedFunctionReg);
    Jump hasSeenMultipleCallees = branchPtr(Equal, cachedFunctionReg, TrustedImmPtr(JSCell::seenMultipleCalleeObjects()));
    addSlowCase(branchPtr(NotEqual, calleeReg, cachedFunctionReg));
    hasSeenMultipleCallees.link(this);

    JumpList slowCases;
    emitAllocateJSObject(resultReg, nullptr, allocatorReg, structureReg, TrustedImmPtr(0), scratchReg, slowCases);
    addSlowCase(slowCases);
    emitStoreCell(currentInstruction[1].u.operand, resultReg);
}

void JIT::emitSlow_op_create_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter); // Callee::m_type != JSFunctionType.
    linkSlowCase(iter); // doesn't have rare data
    linkSlowCase(iter); // doesn't have an allocation profile
    linkSlowCase(iter); // allocation failed (no allocator)
    linkSlowCase(iter); // allocation failed (allocator empty)
    linkSlowCase(iter); // cached function didn't match

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_create_this);
    slowPathCall.call();
}

void JIT::emit_op_to_this(Instruction* currentInstruction)
{
    WriteBarrierBase<Structure>* cachedStructure = &currentInstruction[2].u.structure;
    int thisRegister = currentInstruction[1].u.operand;

    emitLoad(thisRegister, regT3, regT2);

    addSlowCase(branch32(NotEqual, regT3, TrustedImm32(JSValue::CellTag)));
    addSlowCase(branch8(NotEqual, Address(regT2, JSCell::typeInfoTypeOffset()), TrustedImm32(FinalObjectType)));
    loadPtr(Address(regT2, JSCell::structureIDOffset()), regT0);
    loadPtr(cachedStructure, regT2);
    addSlowCase(branchPtr(NotEqual, regT0, regT2));
}

void JIT::emitSlow_op_to_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_to_this);
    slowPathCall.call();
}

void JIT::emit_op_check_tdz(Instruction* currentInstruction)
{
    emitLoadTag(currentInstruction[1].u.operand, regT0);
    addSlowCase(branch32(Equal, regT0, TrustedImm32(JSValue::EmptyValueTag)));
}

void JIT::emitSlow_op_check_tdz(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_throw_tdz_error);
    slowPathCall.call();
}

void JIT::emit_op_has_structure_property(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int enumerator = currentInstruction[4].u.operand;

    emitLoadPayload(base, regT0);
    emitJumpSlowCaseIfNotJSCell(base);

    emitLoadPayload(enumerator, regT1);

    load32(Address(regT0, JSCell::structureIDOffset()), regT0);
    addSlowCase(branch32(NotEqual, regT0, Address(regT1, JSPropertyNameEnumerator::cachedStructureIDOffset())));

    move(TrustedImm32(1), regT0);
    emitStoreBool(dst, regT0);
}

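// privateCompileHasIndexedProperty generates a per-site stub for
// op_has_indexed_property when the generic slow path observes an indexing
// shape other than the one compiled inline. The stub is patched in over the
// original badType jump and falls back to operationHasIndexedPropertyGeneric
// when it cannot answer.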
void JIT::privateCompileHasIndexedProperty(ByValInfo* byValInfo, ReturnAddressPtr returnAddress, JITArrayMode arrayMode)
{
    Instruction* currentInstruction = m_codeBlock->instructions().begin() + byValInfo->bytecodeIndex;

    PatchableJump badType;

    // FIXME: Add support for other types like TypedArrays and Arguments.
    // See https://bugs.webkit.org/show_bug.cgi?id=135033 and https://bugs.webkit.org/show_bug.cgi?id=135034.
    JumpList slowCases = emitLoadForArrayMode(currentInstruction, arrayMode, badType);
    move(TrustedImm32(1), regT0);
    Jump done = jump();

    LinkBuffer patchBuffer(*this, m_codeBlock);

    patchBuffer.link(badType, CodeLocationLabel(MacroAssemblerCodePtr::createFromExecutableAddress(returnAddress.value())).labelAtOffset(byValInfo->returnAddressToSlowPath));
    patchBuffer.link(slowCases, CodeLocationLabel(MacroAssemblerCodePtr::createFromExecutableAddress(returnAddress.value())).labelAtOffset(byValInfo->returnAddressToSlowPath));

    patchBuffer.link(done, byValInfo->badTypeJump.labelAtOffset(byValInfo->badTypeJumpToDone));

    byValInfo->stubRoutine = FINALIZE_CODE_FOR_STUB(
        m_codeBlock, patchBuffer,
        ("Baseline has_indexed_property stub for %s, return point %p", toCString(*m_codeBlock).data(), returnAddress.value()));

    MacroAssembler::repatchJump(byValInfo->badTypeJump, CodeLocationLabel(byValInfo->stubRoutine->code().code()));
    MacroAssembler::repatchCall(CodeLocationCall(MacroAssemblerCodePtr(returnAddress)), FunctionPtr(operationHasIndexedPropertyGeneric));
}

void JIT::emit_op_has_indexed_property(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int property = currentInstruction[3].u.operand;
    ArrayProfile* profile = currentInstruction[4].u.arrayProfile;
    ByValInfo* byValInfo = m_codeBlock->addByValInfo();

    emitLoadPayload(base, regT0);
    emitJumpSlowCaseIfNotJSCell(base);

    emitLoadPayload(property, regT1);

    // This is technically incorrect - we're zero-extending an int32. On the hot path this doesn't matter.
    // We check the value as if it was a uint32 against the m_vectorLength - which will always fail if
    // the number was negative since m_vectorLength is always less than intmax (since the total allocation
    // size is always less than 4Gb). As such zero extending will have been correct (and extending the value
    // to 64-bits is necessary since it's used in the address calculation). We zero extend rather than sign
    // extend since it makes it easier to re-tag the value in the slow case.
    zeroExtend32ToPtr(regT1, regT1);

    emitArrayProfilingSiteWithCell(regT0, regT2, profile);
    and32(TrustedImm32(IndexingShapeMask), regT2);

    JITArrayMode mode = chooseArrayMode(profile);
    PatchableJump badType;

    // FIXME: Add support for other types like TypedArrays and Arguments.
    // See https://bugs.webkit.org/show_bug.cgi?id=135033 and https://bugs.webkit.org/show_bug.cgi?id=135034.
    JumpList slowCases = emitLoadForArrayMode(currentInstruction, mode, badType);
    move(TrustedImm32(1), regT0);

    addSlowCase(badType);
    addSlowCase(slowCases);

    Label done = label();

    emitStoreBool(dst, regT0);

    Label nextHotPath = label();

    m_byValCompilationInfo.append(ByValCompilationInfo(byValInfo, m_bytecodeOffset, PatchableJump(), badType, mode, profile, done, nextHotPath));
}

void JIT::emitSlow_op_has_indexed_property(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int property = currentInstruction[3].u.operand;
    ByValInfo* byValInfo = m_byValCompilationInfo[m_byValInstructionIndex].byValInfo;

    linkSlowCaseIfNotJSCell(iter, base); // base cell check
    linkSlowCase(iter); // base array check
    linkSlowCase(iter); // vector length check
    linkSlowCase(iter); // empty value

    Label slowPath = label();

    emitLoad(base, regT1, regT0);
    emitLoad(property, regT3, regT2);
    Call call = callOperation(operationHasIndexedPropertyDefault, dst, regT1, regT0, regT3, regT2, byValInfo);

    m_byValCompilationInfo[m_byValInstructionIndex].slowPathTarget = slowPath;
    m_byValCompilationInfo[m_byValInstructionIndex].returnAddress = call;
    m_byValInstructionIndex++;
}

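// op_get_direct_pname loads a property by its index within a structure
// enumeration. Indices below the enumerator's cached inline capacity resolve
// into the object's inline storage; larger indices resolve out-of-line into
// the butterfly, indexed backwards from firstOutOfLineOffset.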
void JIT::emit_op_get_direct_pname(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int index = currentInstruction[4].u.operand;
    int enumerator = currentInstruction[5].u.operand;

    // Check that base is a cell
    emitLoadPayload(base, regT0);
    emitJumpSlowCaseIfNotJSCell(base);

    // Check the structure
    emitLoadPayload(enumerator, regT1);
    load32(Address(regT0, JSCell::structureIDOffset()), regT2);
    addSlowCase(branch32(NotEqual, regT2, Address(regT1, JSPropertyNameEnumerator::cachedStructureIDOffset())));

    // Compute the offset
    emitLoadPayload(index, regT2);
    // If index is less than the enumerator's cached inline storage, then it's an inline access
    Jump outOfLineAccess = branch32(AboveOrEqual, regT2, Address(regT1, JSPropertyNameEnumerator::cachedInlineCapacityOffset()));
    addPtr(TrustedImm32(JSObject::offsetOfInlineStorage()), regT0);
    load32(BaseIndex(regT0, regT2, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT1);
    load32(BaseIndex(regT0, regT2, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT0);

    Jump done = jump();

    // Otherwise it's out of line
    outOfLineAccess.link(this);
    loadPtr(Address(regT0, JSObject::butterflyOffset()), regT0);
    sub32(Address(regT1, JSPropertyNameEnumerator::cachedInlineCapacityOffset()), regT2);
    neg32(regT2);
    int32_t offsetOfFirstProperty = static_cast<int32_t>(offsetInButterfly(firstOutOfLineOffset)) * sizeof(EncodedJSValue);
    load32(BaseIndex(regT0, regT2, TimesEight, offsetOfFirstProperty + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT1);
    load32(BaseIndex(regT0, regT2, TimesEight, offsetOfFirstProperty + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT0);

    done.link(this);
    emitValueProfilingSite();
    emitStore(dst, regT1, regT0);
}

void JIT::emitSlow_op_get_direct_pname(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int base = currentInstruction[2].u.operand;
    linkSlowCaseIfNotJSCell(iter, base);
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_get_direct_pname);
    slowPathCall.call();
}

void JIT::emit_op_enumerator_structure_pname(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int enumerator = currentInstruction[2].u.operand;
    int index = currentInstruction[3].u.operand;

    emitLoadPayload(index, regT0);
    emitLoadPayload(enumerator, regT1);
    Jump inBounds = branch32(Below, regT0, Address(regT1, JSPropertyNameEnumerator::endStructurePropertyIndexOffset()));

    move(TrustedImm32(JSValue::NullTag), regT2);
    move(TrustedImm32(0), regT0);

    Jump done = jump();
    inBounds.link(this);

    loadPtr(Address(regT1, JSPropertyNameEnumerator::cachedPropertyNamesVectorOffset()), regT1);
    loadPtr(BaseIndex(regT1, regT0, timesPtr()), regT0);
    move(TrustedImm32(JSValue::CellTag), regT2);

    done.link(this);
    emitStore(dst, regT2, regT0);
}

void JIT::emit_op_enumerator_generic_pname(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int enumerator = currentInstruction[2].u.operand;
    int index = currentInstruction[3].u.operand;

    emitLoadPayload(index, regT0);
    emitLoadPayload(enumerator, regT1);
    Jump inBounds = branch32(Below, regT0, Address(regT1, JSPropertyNameEnumerator::endGenericPropertyIndexOffset()));

    move(TrustedImm32(JSValue::NullTag), regT2);
    move(TrustedImm32(0), regT0);

    Jump done = jump();
    inBounds.link(this);

    loadPtr(Address(regT1, JSPropertyNameEnumerator::cachedPropertyNamesVectorOffset()), regT1);
    loadPtr(BaseIndex(regT1, regT0, timesPtr()), regT0);
    move(TrustedImm32(JSValue::CellTag), regT2);

    done.link(this);
    emitStore(dst, regT2, regT0);
}

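// op_profile_type appends the observed value - tag, payload, and structureID
// when the value is a cell - to the VM's TypeProfilerLog, calling
// operationProcessTypeProfilerLog to flush the log when the cursor reaches its
// end. A predictive check against the location's last seen type lets
// monomorphic sites skip the log write entirely.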
void JIT::emit_op_profile_type(Instruction* currentInstruction)
{
    TypeLocation* cachedTypeLocation = currentInstruction[2].u.location;
    int valueToProfile = currentInstruction[1].u.operand;

    // Load payload in T0. Load tag in T3.
    emitLoadPayload(valueToProfile, regT0);
    emitLoadTag(valueToProfile, regT3);

    JumpList jumpToEnd;

    jumpToEnd.append(branch32(Equal, regT3, TrustedImm32(JSValue::EmptyValueTag)));

    // Compile in a predictive type check, if possible, to see if we can skip writing to the log.
    // These typechecks are inlined to match those of the 32-bit JSValue type checks.
    if (cachedTypeLocation->m_lastSeenType == TypeUndefined)
        jumpToEnd.append(branch32(Equal, regT3, TrustedImm32(JSValue::UndefinedTag)));
    else if (cachedTypeLocation->m_lastSeenType == TypeNull)
        jumpToEnd.append(branch32(Equal, regT3, TrustedImm32(JSValue::NullTag)));
    else if (cachedTypeLocation->m_lastSeenType == TypeBoolean)
        jumpToEnd.append(branch32(Equal, regT3, TrustedImm32(JSValue::BooleanTag)));
    else if (cachedTypeLocation->m_lastSeenType == TypeAnyInt)
        jumpToEnd.append(branch32(Equal, regT3, TrustedImm32(JSValue::Int32Tag)));
    else if (cachedTypeLocation->m_lastSeenType == TypeNumber) {
        jumpToEnd.append(branch32(Below, regT3, TrustedImm32(JSValue::LowestTag)));
        jumpToEnd.append(branch32(Equal, regT3, TrustedImm32(JSValue::Int32Tag)));
    } else if (cachedTypeLocation->m_lastSeenType == TypeString) {
        Jump isNotCell = branch32(NotEqual, regT3, TrustedImm32(JSValue::CellTag));
        jumpToEnd.append(branch8(Equal, Address(regT0, JSCell::typeInfoTypeOffset()), TrustedImm32(StringType)));
        isNotCell.link(this);
    }

    // Load the type profiling log into T2.
    TypeProfilerLog* cachedTypeProfilerLog = m_vm->typeProfilerLog();
    move(TrustedImmPtr(cachedTypeProfilerLog), regT2);

    // Load the next log entry into T1.
    loadPtr(Address(regT2, TypeProfilerLog::currentLogEntryOffset()), regT1);

    // Store the JSValue onto the log entry.
    store32(regT0, Address(regT1, TypeProfilerLog::LogEntry::valueOffset() + OBJECT_OFFSETOF(JSValue, u.asBits.payload)));
    store32(regT3, Address(regT1, TypeProfilerLog::LogEntry::valueOffset() + OBJECT_OFFSETOF(JSValue, u.asBits.tag)));

    // Store the structureID of the cell if argument is a cell, otherwise, store 0 on the log entry.
    Jump notCell = branch32(NotEqual, regT3, TrustedImm32(JSValue::CellTag));
    load32(Address(regT0, JSCell::structureIDOffset()), regT0);
    store32(regT0, Address(regT1, TypeProfilerLog::LogEntry::structureIDOffset()));
    Jump skipNotCell = jump();
    notCell.link(this);
    store32(TrustedImm32(0), Address(regT1, TypeProfilerLog::LogEntry::structureIDOffset()));
    skipNotCell.link(this);

    // Store the typeLocation on the log entry.
    move(TrustedImmPtr(cachedTypeLocation), regT0);
    store32(regT0, Address(regT1, TypeProfilerLog::LogEntry::locationOffset()));

    // Increment the current log entry.
    addPtr(TrustedImm32(sizeof(TypeProfilerLog::LogEntry)), regT1);
    store32(regT1, Address(regT2, TypeProfilerLog::currentLogEntryOffset()));
    jumpToEnd.append(branchPtr(NotEqual, regT1, TrustedImmPtr(cachedTypeProfilerLog->logEndPtr())));
    // Clear the log if we're at the end of the log.
    callOperation(operationProcessTypeProfilerLog);

    jumpToEnd.link(this);
}

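// The shadow chicken is JSC's shadow stack for debugging through tail calls:
// prologue and tail packets let the debugger reconstruct frames that tail
// calls have overwritten. Both emitters update the top call frame first, since
// ensureShadowChickenPacket() may flush the log and needs a coherent view of
// the stack.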
void JIT::emit_op_log_shadow_chicken_prologue(Instruction* currentInstruction)
{
    updateTopCallFrame();
    static_assert(nonArgGPR0 != regT0 && nonArgGPR0 != regT2, "we will have problems if these registers alias.");
    GPRReg shadowPacketReg = regT0;
    GPRReg scratch1Reg = nonArgGPR0; // This must be a non-argument register.
    GPRReg scratch2Reg = regT2;
    ensureShadowChickenPacket(*vm(), shadowPacketReg, scratch1Reg, scratch2Reg);

    scratch1Reg = regT4;
    emitLoadPayload(currentInstruction[1].u.operand, regT3);
    logShadowChickenProloguePacket(shadowPacketReg, scratch1Reg, regT3);
}

void JIT::emit_op_log_shadow_chicken_tail(Instruction* currentInstruction)
{
    updateTopCallFrame();
    static_assert(nonArgGPR0 != regT0 && nonArgGPR0 != regT2, "we will have problems if these registers alias.");
    GPRReg shadowPacketReg = regT0;
    GPRReg scratch1Reg = nonArgGPR0; // This must be a non-argument register.
    GPRReg scratch2Reg = regT2;
    ensureShadowChickenPacket(*vm(), shadowPacketReg, scratch1Reg, scratch2Reg);

    emitLoadPayload(currentInstruction[1].u.operand, regT2);
    emitLoadTag(currentInstruction[1].u.operand, regT1);
    JSValueRegs thisRegs(regT1, regT2);
    emitLoadPayload(currentInstruction[2].u.operand, regT3);
    logShadowChickenTailPacket(shadowPacketReg, thisRegs, regT3, m_codeBlock, CallSiteIndex(currentInstruction));
}

} // namespace JSC

#endif // USE(JSVALUE32_64)
#endif // ENABLE(JIT)