Source/JavaScriptCore/jit/JITOpcodes.cpp
/*
 * Copyright (C) 2009-2018 Apple Inc. All rights reserved.
 * Copyright (C) 2010 Patrick Gansterer <paroga@paroga.com>
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#if ENABLE(JIT)
#include "JIT.h"

#include "BasicBlockLocation.h"
#include "BytecodeStructs.h"
#include "Exception.h"
#include "Heap.h"
#include "InterpreterInlines.h"
#include "JITInlines.h"
#include "JSArray.h"
#include "JSCast.h"
#include "JSFunction.h"
#include "JSPropertyNameEnumerator.h"
#include "LinkBuffer.h"
#include "MaxFrameExtentForSlowPathCall.h"
#include "SlowPathCall.h"
#include "SuperSampler.h"
#include "ThunkGenerators.h"
#include "TypeLocation.h"
#include "TypeProfilerLog.h"
#include "VirtualRegister.h"
#include "Watchdog.h"

namespace JSC {

#if USE(JSVALUE64)

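// A brief refresher (not from the original file) on the JSVALUE64 encoding
// these opcodes rely on; the constants below are the ones defined in
// JSCJSValue.h at the time of writing, and should be treated as an
// illustrative summary rather than a normative source:
//
//     TagTypeNumber   = 0xFFFF000000000000  (high 16 bits; all set for int32s,
//                                            at least one set for doubles)
//     TagBitTypeOther = 0x2, TagBitBool = 0x4, TagBitUndefined = 0x8
//     ValueFalse      = 0x6    ValueTrue      = 0x7
//     ValueNull       = 0x2    ValueUndefined = 0xA
//     JSValue()       = 0x0    (the empty value)
//
// Cells (heap pointers) have none of the tag bits set, which is what the
// emitJumpIf(Not)JSCell helpers test for.
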
void JIT::emit_op_mov(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_end(Instruction* currentInstruction)
{
    RELEASE_ASSERT(returnValueGPR != callFrameRegister);
    emitGetVirtualRegister(currentInstruction[1].u.operand, returnValueGPR);
    emitRestoreCalleeSaves();
    emitFunctionEpilogue();
    ret();
}

void JIT::emit_op_jmp(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[1].u.operand;
    addJump(jump(), target);
}

void JIT::emit_op_new_object(Instruction* currentInstruction)
{
    Structure* structure = currentInstruction[3].u.objectAllocationProfile->structure();
    size_t allocationSize = JSFinalObject::allocationSize(structure->inlineCapacity());
    Allocator allocator = subspaceFor<JSFinalObject>(*m_vm)->allocatorForNonVirtual(allocationSize, AllocatorForMode::AllocatorIfExists);

    RegisterID resultReg = regT0;
    RegisterID allocatorReg = regT1;
    RegisterID scratchReg = regT2;

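    // Fast-path sketch: when an allocator for this size class exists,
    // emitAllocateJSObject below carves the object out of that allocator's
    // free list inline; allocation failure (or the absence of an allocator)
    // falls back to operationNewObject in the slow case.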
    if (!allocator)
        addSlowCase(jump());
    else {
        JumpList slowCases;
        auto butterfly = TrustedImmPtr(nullptr);
        auto mask = TrustedImm32(0);
        emitAllocateJSObject(resultReg, JITAllocator::constant(allocator), allocatorReg, TrustedImmPtr(structure), butterfly, mask, scratchReg, slowCases);
        emitInitializeInlineStorage(resultReg, structure->inlineCapacity());
        addSlowCase(slowCases);
        emitPutVirtualRegister(currentInstruction[1].u.operand);
    }
}

void JIT::emitSlow_op_new_object(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkAllSlowCases(iter);

    int dst = currentInstruction[1].u.operand;
    Structure* structure = currentInstruction[3].u.objectAllocationProfile->structure();
    callOperation(operationNewObject, structure);
    emitStoreCell(dst, returnValueGPR);
}

void JIT::emit_op_overrides_has_instance(Instruction* currentInstruction)
{
    auto& bytecode = *reinterpret_cast<OpOverridesHasInstance*>(currentInstruction);
    int dst = bytecode.dst();
    int constructor = bytecode.constructor();
    int hasInstanceValue = bytecode.hasInstanceValue();

    emitGetVirtualRegister(hasInstanceValue, regT0);

    // We don't jump if we know what Symbol.hasInstance would do.
    Jump customHasInstanceValue = branchPtr(NotEqual, regT0, TrustedImmPtr(m_codeBlock->globalObject()->functionProtoHasInstanceSymbolFunction()));

    emitGetVirtualRegister(constructor, regT0);

    // Check that the constructor 'ImplementsDefaultHasInstance', i.e. that the object is neither a C API user nor a bound function.
    test8(Zero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(ImplementsDefaultHasInstance), regT0);
    emitTagBool(regT0);
    Jump done = jump();

    customHasInstanceValue.link(this);
    move(TrustedImm32(ValueTrue), regT0);

    done.link(this);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_instanceof(Instruction* currentInstruction)
{
    auto& bytecode = *reinterpret_cast<OpInstanceof*>(currentInstruction);
    int dst = bytecode.dst();
    int value = bytecode.value();
    int proto = bytecode.prototype();

    // Load the operands value and proto into registers, keeping regT0 free for the result.
    emitGetVirtualRegister(value, regT2);
    emitGetVirtualRegister(proto, regT1);

    // Check that value and proto are cells. baseVal must be a cell - this is checked by the get_by_id for Symbol.hasInstance.
    emitJumpSlowCaseIfNotJSCell(regT2, value);
    emitJumpSlowCaseIfNotJSCell(regT1, proto);

    // Check that prototype is an object.
    addSlowCase(emitJumpIfCellNotObject(regT1));

    // Optimistically load the result true, and start looping.
    // Initially, regT1 still contains proto and regT2 still contains value.
    // As we loop regT2 will be updated with its prototype, recursively walking the prototype chain.
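    // For example, for `value instanceof C` (where proto == C.prototype), the
    // loop below inspects value, value.__proto__, value.__proto__.__proto__,
    // ... and succeeds if any of them is proto; reaching null (a non-cell)
    // ends the walk with a false result.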
    move(TrustedImm64(JSValue::encode(jsBoolean(true))), regT0);
    Label loop(this);

    addSlowCase(branch8(Equal, Address(regT2, JSCell::typeInfoTypeOffset()), TrustedImm32(ProxyObjectType)));

    // Load the prototype of the object in regT2.  If this is equal to regT1 - WIN!
    // Otherwise, check if we've hit null - if we have then drop out of the loop, if not go again.
    emitLoadStructure(*vm(), regT2, regT4, regT3);
    load64(Address(regT4, Structure::prototypeOffset()), regT4);
    auto hasMonoProto = branchTest64(NonZero, regT4);
    load64(Address(regT2, offsetRelativeToBase(knownPolyProtoOffset)), regT4);
    hasMonoProto.link(this);
    move(regT4, regT2);
    Jump isInstance = branchPtr(Equal, regT2, regT1);
    emitJumpIfJSCell(regT2).linkTo(loop, this);

    // We get here either by dropping out of the loop, or if value was not an Object.  Result is false.
    move(TrustedImm64(JSValue::encode(jsBoolean(false))), regT0);

    // isInstance jumps right down to here, to skip setting the result to false (it has already set true).
    isInstance.link(this);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_instanceof_custom(Instruction*)
{
    // This always goes to slow path since we expect it to be rare.
    addSlowCase(jump());
}

void JIT::emit_op_is_empty(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
    compare64(Equal, regT0, TrustedImm32(JSValue::encode(JSValue())), regT0);

    emitTagBool(regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_is_undefined(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
    Jump isCell = emitJumpIfJSCell(regT0);

    compare64(Equal, regT0, TrustedImm32(ValueUndefined), regT0);
    Jump done = jump();

    isCell.link(this);
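    // Only a MasqueradesAsUndefined object (e.g. document.all) can make a cell
    // read as undefined, and then only when observed from within its own
    // global object - hence the comparison against the structure's
    // globalObject below.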
    Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    move(TrustedImm32(0), regT0);
    Jump notMasqueradesAsUndefined = jump();

    isMasqueradesAsUndefined.link(this);
    emitLoadStructure(*vm(), regT0, regT1, regT2);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    loadPtr(Address(regT1, Structure::globalObjectOffset()), regT1);
    comparePtr(Equal, regT0, regT1, regT0);

    notMasqueradesAsUndefined.link(this);
    done.link(this);
    emitTagBool(regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_is_boolean(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
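    // Booleans encode as ValueFalse (0x6) and ValueTrue (0x7), so XORing with
    // ValueFalse yields 0 or 1 for booleans and leaves higher bits set for
    // anything else; masking with ~1 then answers "is this a boolean?".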
    xor64(TrustedImm32(static_cast<int32_t>(ValueFalse)), regT0);
    test64(Zero, regT0, TrustedImm32(static_cast<int32_t>(~1)), regT0);
    emitTagBool(regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_is_number(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
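    // tagTypeNumberRegister caches TagTypeNumber (0xFFFF000000000000): int32s
    // have all of those bits set and doubles at least one, while cells and the
    // other immediates have none, so a single test64 classifies the value.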
    test64(NonZero, regT0, tagTypeNumberRegister, regT0);
    emitTagBool(regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_is_cell_with_type(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;
    int type = currentInstruction[3].u.operand;

    emitGetVirtualRegister(value, regT0);
    Jump isNotCell = emitJumpIfNotJSCell(regT0);

    compare8(Equal, Address(regT0, JSCell::typeInfoTypeOffset()), TrustedImm32(type), regT0);
    emitTagBool(regT0);
    Jump done = jump();

    isNotCell.link(this);
    move(TrustedImm32(ValueFalse), regT0);

    done.link(this);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_is_object(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
    Jump isNotCell = emitJumpIfNotJSCell(regT0);

    compare8(AboveOrEqual, Address(regT0, JSCell::typeInfoTypeOffset()), TrustedImm32(ObjectType), regT0);
    emitTagBool(regT0);
    Jump done = jump();

    isNotCell.link(this);
    move(TrustedImm32(ValueFalse), regT0);

    done.link(this);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_ret(Instruction* currentInstruction)
{
    ASSERT(callFrameRegister != regT1);
    ASSERT(regT1 != returnValueGPR);
    ASSERT(returnValueGPR != callFrameRegister);

    // Return the result in returnValueGPR.
    emitGetVirtualRegister(currentInstruction[1].u.operand, returnValueGPR);

    checkStackPointerAlignment();
    emitRestoreCalleeSaves();
    emitFunctionEpilogue();
    ret();
}

void JIT::emit_op_to_primitive(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);

    Jump isImm = emitJumpIfNotJSCell(regT0);
    addSlowCase(emitJumpIfCellObject(regT0));
    isImm.link(this);

    if (dst != src)
        emitPutVirtualRegister(dst);
}

void JIT::emit_op_set_function_name(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT1);
    callOperation(operationSetFunctionName, regT0, regT1);
}

void JIT::emit_op_not(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);

    // Invert against JSValue(false); if the value was tagged as a boolean, then all bits will be
    // clear other than the low bit (which will be 0 or 1 for false or true inputs respectively).
    // Then invert against JSValue(true), which will add the tag back in, and flip the low bit.
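    // Worked example: false (0x6) ^ ValueFalse == 0 and true (0x7) ^ ValueFalse
    // == 1; any other input leaves bits outside the low bit set and is sent to
    // the slow case. XORing with ValueTrue then yields 0x7 (true) or 0x6 (false).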
    xor64(TrustedImm32(static_cast<int32_t>(ValueFalse)), regT0);
    addSlowCase(branchTestPtr(NonZero, regT0, TrustedImm32(static_cast<int32_t>(~1))));
    xor64(TrustedImm32(static_cast<int32_t>(ValueTrue)), regT0);

    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_jfalse(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[2].u.operand;

    GPRReg value = regT0;
    GPRReg result = regT1;
    GPRReg scratch = regT2;
    bool shouldCheckMasqueradesAsUndefined = true;

    emitGetVirtualRegister(currentInstruction[1].u.operand, value);
    emitConvertValueToBoolean(*vm(), JSValueRegs(value), result, scratch, fpRegT0, fpRegT1, shouldCheckMasqueradesAsUndefined, m_codeBlock->globalObject());

    addJump(branchTest32(Zero, result), target);
}

void JIT::emit_op_jeq_null(Instruction* currentInstruction)
{
    int src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
    Jump isNotMasqueradesAsUndefined = branchTest8(Zero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    emitLoadStructure(*vm(), regT0, regT2, regT1);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    addJump(branchPtr(Equal, Address(regT2, Structure::globalObjectOffset()), regT0), target);
    Jump masqueradesGlobalObjectIsForeign = jump();

    // Now handle the immediate cases - undefined & null.
    isImmediate.link(this);
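    // Undefined (0xA) and null (0x2) differ only in TagBitUndefined (0x8), so
    // clearing that bit folds both cases into a single compare against null.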
    and64(TrustedImm32(~TagBitUndefined), regT0);
    addJump(branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsNull()))), target);

    isNotMasqueradesAsUndefined.link(this);
    masqueradesGlobalObjectIsForeign.link(this);
}

void JIT::emit_op_jneq_null(Instruction* currentInstruction)
{
    int src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
    addJump(branchTest8(Zero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined)), target);
    emitLoadStructure(*vm(), regT0, regT2, regT1);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    addJump(branchPtr(NotEqual, Address(regT2, Structure::globalObjectOffset()), regT0), target);
    Jump wasNotImmediate = jump();

    // Now handle the immediate cases - undefined & null.
    isImmediate.link(this);
    and64(TrustedImm32(~TagBitUndefined), regT0);
    addJump(branch64(NotEqual, regT0, TrustedImm64(JSValue::encode(jsNull()))), target);

    wasNotImmediate.link(this);
}

void JIT::emit_op_jneq_ptr(Instruction* currentInstruction)
{
    int src = currentInstruction[1].u.operand;
    Special::Pointer ptr = currentInstruction[2].u.specialPointer;
    unsigned target = currentInstruction[3].u.operand;

    emitGetVirtualRegister(src, regT0);
    CCallHelpers::Jump equal = branchPtr(Equal, regT0, TrustedImmPtr(actualPointerFor(m_codeBlock, ptr)));
    store32(TrustedImm32(1), &currentInstruction[4].u.operand);
    addJump(jump(), target);
    equal.link(this);
}

void JIT::emit_op_eq(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotInt(regT0, regT1, regT2);
    compare32(Equal, regT1, regT0, regT0);
    emitTagBool(regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_jtrue(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[2].u.operand;

    GPRReg value = regT0;
    GPRReg result = regT1;
    GPRReg scratch = regT2;
    bool shouldCheckMasqueradesAsUndefined = true;
    emitGetVirtualRegister(currentInstruction[1].u.operand, value);
    emitConvertValueToBoolean(*vm(), JSValueRegs(value), result, scratch, fpRegT0, fpRegT1, shouldCheckMasqueradesAsUndefined, m_codeBlock->globalObject());
    addJump(branchTest32(NonZero, result), target);
}

void JIT::emit_op_neq(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotInt(regT0, regT1, regT2);
    compare32(NotEqual, regT1, regT0, regT0);
    emitTagBool(regT0);

    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_throw(Instruction* currentInstruction)
{
    ASSERT(regT0 == returnValueGPR);
    copyCalleeSavesToEntryFrameCalleeSavesBuffer(vm()->topEntryFrame);
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
    callOperationNoExceptionCheck(operationThrow, regT0);
    jumpToExceptionHandler(*vm());
}

void JIT::compileOpStrictEq(Instruction* currentInstruction, CompileOpStrictEqType type)
{
    int dst = currentInstruction[1].u.operand;
    int src1 = currentInstruction[2].u.operand;
    int src2 = currentInstruction[3].u.operand;

    emitGetVirtualRegisters(src1, regT0, src2, regT1);

    // Jump slow if both are cells (to cover strings).
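    // (a | b) has no tag bits set exactly when neither operand does, so a
    // single cell test on the OR of both operands detects the both-cells case
    // (e.g. string === string).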
    move(regT0, regT2);
    or64(regT1, regT2);
    addSlowCase(emitJumpIfJSCell(regT2));

    // Jump slow if either is a double. First test if it's an integer, which is fine, and then test
    // if it's a double.
    Jump leftOK = emitJumpIfInt(regT0);
    addSlowCase(emitJumpIfNumber(regT0));
    leftOK.link(this);
    Jump rightOK = emitJumpIfInt(regT1);
    addSlowCase(emitJumpIfNumber(regT1));
    rightOK.link(this);

    if (type == OpStrictEq)
        compare64(Equal, regT1, regT0, regT0);
    else
        compare64(NotEqual, regT1, regT0, regT0);
    emitTagBool(regT0);

    emitPutVirtualRegister(dst);
}

void JIT::emit_op_stricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpStrictEq);
}

void JIT::emit_op_nstricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpNStrictEq);
}

void JIT::emit_op_to_number(Instruction* currentInstruction)
{
    int dstVReg = currentInstruction[1].u.operand;
    int srcVReg = currentInstruction[2].u.operand;
    emitGetVirtualRegister(srcVReg, regT0);

    addSlowCase(emitJumpIfNotNumber(regT0));

    emitValueProfilingSite();
    if (srcVReg != dstVReg)
        emitPutVirtualRegister(dstVReg);
}

void JIT::emit_op_to_string(Instruction* currentInstruction)
{
    int srcVReg = currentInstruction[2].u.operand;
    emitGetVirtualRegister(srcVReg, regT0);

    addSlowCase(emitJumpIfNotJSCell(regT0));
    addSlowCase(branch8(NotEqual, Address(regT0, JSCell::typeInfoTypeOffset()), TrustedImm32(StringType)));

    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_to_object(Instruction* currentInstruction)
{
    int dstVReg = currentInstruction[1].u.operand;
    int srcVReg = currentInstruction[2].u.operand;
    emitGetVirtualRegister(srcVReg, regT0);

    addSlowCase(emitJumpIfNotJSCell(regT0));
    addSlowCase(branch8(Below, Address(regT0, JSCell::typeInfoTypeOffset()), TrustedImm32(ObjectType)));

    emitValueProfilingSite();
    if (srcVReg != dstVReg)
        emitPutVirtualRegister(dstVReg);
}

void JIT::emit_op_catch(Instruction* currentInstruction)
{
    restoreCalleeSavesFromEntryFrameCalleeSavesBuffer(vm()->topEntryFrame);

    move(TrustedImmPtr(m_vm), regT3);
    load64(Address(regT3, VM::callFrameForCatchOffset()), callFrameRegister);
    storePtr(TrustedImmPtr(nullptr), Address(regT3, VM::callFrameForCatchOffset()));

    addPtr(TrustedImm32(stackPointerOffsetFor(codeBlock()) * sizeof(Register)), callFrameRegister, stackPointerRegister);

    callOperationNoExceptionCheck(operationCheckIfExceptionIsUncatchableAndNotifyProfiler);
    Jump isCatchableException = branchTest32(Zero, returnValueGPR);
    jumpToExceptionHandler(*vm());
    isCatchableException.link(this);

    move(TrustedImmPtr(m_vm), regT3);
    load64(Address(regT3, VM::exceptionOffset()), regT0);
    store64(TrustedImm64(JSValue::encode(JSValue())), Address(regT3, VM::exceptionOffset()));
    emitPutVirtualRegister(currentInstruction[1].u.operand);

    load64(Address(regT0, Exception::valueOffset()), regT0);
    emitPutVirtualRegister(currentInstruction[2].u.operand);

#if ENABLE(DFG_JIT)
    // FIXME: consider inline caching the process of doing OSR entry, including
    // argument type proofs, storing locals to the buffer, etc.
    // https://bugs.webkit.org/show_bug.cgi?id=175598

    ValueProfileAndOperandBuffer* buffer = static_cast<ValueProfileAndOperandBuffer*>(currentInstruction[3].u.pointer);
    if (buffer || !shouldEmitProfiling())
        callOperation(operationTryOSREnterAtCatch, m_bytecodeOffset);
    else
        callOperation(operationTryOSREnterAtCatchAndValueProfile, m_bytecodeOffset);
    auto skipOSREntry = branchTestPtr(Zero, returnValueGPR);
    emitRestoreCalleeSaves();
    jump(returnValueGPR);
    skipOSREntry.link(this);
    if (buffer && shouldEmitProfiling()) {
        buffer->forEach([&] (ValueProfileAndOperand& profile) {
            JSValueRegs regs(regT0);
            emitGetVirtualRegister(profile.m_operand, regs);
            emitValueProfilingSite(profile.m_profile);
        });
    }
#endif // ENABLE(DFG_JIT)
}

void JIT::emit_op_identity_with_profile(Instruction*)
{
    // We don't need to do anything here...
}

void JIT::emit_op_get_parent_scope(Instruction* currentInstruction)
{
    int currentScope = currentInstruction[2].u.operand;
    emitGetVirtualRegister(currentScope, regT0);
    loadPtr(Address(regT0, JSScope::offsetOfNext()), regT0);
    emitStoreCell(currentInstruction[1].u.operand, regT0);
}

void JIT::emit_op_switch_imm(Instruction* currentInstruction)
{
    size_t tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // Create the jump table for switch destinations, and track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->switchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Immediate));
    jumpTable->ensureCTITable();

    emitGetVirtualRegister(scrutinee, regT0);
    callOperation(operationSwitchImmWithUnknownKeyType, regT0, tableIndex);
    jump(returnValueGPR);
}

void JIT::emit_op_switch_char(Instruction* currentInstruction)
{
    size_t tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // Create the jump table for switch destinations, and track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->switchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Character));
    jumpTable->ensureCTITable();

    emitGetVirtualRegister(scrutinee, regT0);
    callOperation(operationSwitchCharWithUnknownKeyType, regT0, tableIndex);
    jump(returnValueGPR);
}

void JIT::emit_op_switch_string(Instruction* currentInstruction)
{
    size_t tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // Create the jump table for switch destinations, and track this switch statement.
    StringJumpTable* jumpTable = &m_codeBlock->stringSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset));

    emitGetVirtualRegister(scrutinee, regT0);
    callOperation(operationSwitchStringWithUnknownKeyType, regT0, tableIndex);
    jump(returnValueGPR);
}

void JIT::emit_op_debug(Instruction* currentInstruction)
{
    load32(codeBlock()->debuggerRequestsAddress(), regT0);
    Jump noDebuggerRequests = branchTest32(Zero, regT0);
    callOperation(operationDebug, currentInstruction[1].u.operand);
    noDebuggerRequests.link(this);
}

void JIT::emit_op_eq_null(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src1 = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src1, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    move(TrustedImm32(0), regT0);
    Jump wasNotMasqueradesAsUndefined = jump();

    isMasqueradesAsUndefined.link(this);
    emitLoadStructure(*vm(), regT0, regT2, regT1);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    loadPtr(Address(regT2, Structure::globalObjectOffset()), regT2);
    comparePtr(Equal, regT0, regT2, regT0);
    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    and64(TrustedImm32(~TagBitUndefined), regT0);
    compare64(Equal, regT0, TrustedImm32(ValueNull), regT0);

    wasNotImmediate.link(this);
    wasNotMasqueradesAsUndefined.link(this);

    emitTagBool(regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_neq_null(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src1 = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src1, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    move(TrustedImm32(1), regT0);
    Jump wasNotMasqueradesAsUndefined = jump();

    isMasqueradesAsUndefined.link(this);
    emitLoadStructure(*vm(), regT0, regT2, regT1);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    loadPtr(Address(regT2, Structure::globalObjectOffset()), regT2);
    comparePtr(NotEqual, regT0, regT2, regT0);
    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    and64(TrustedImm32(~TagBitUndefined), regT0);
    compare64(NotEqual, regT0, TrustedImm32(ValueNull), regT0);

    wasNotImmediate.link(this);
    wasNotMasqueradesAsUndefined.link(this);

    emitTagBool(regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_enter(Instruction*)
{
    // Even though CTI doesn't use them, we initialize our constant
    // registers to zap stale pointers, to avoid unnecessarily prolonging
    // object lifetime and increasing GC pressure.
    size_t count = m_codeBlock->m_numVars;
    for (size_t j = CodeBlock::llintBaselineCalleeSaveSpaceAsVirtualRegisters(); j < count; ++j)
        emitInitRegister(virtualRegisterForLocal(j).offset());

    emitWriteBarrier(m_codeBlock);

    emitEnterOptimizationCheck();
}

void JIT::emit_op_get_scope(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    emitGetFromCallFrameHeaderPtr(CallFrameSlot::callee, regT0);
    loadPtr(Address(regT0, JSFunction::offsetOfScopeChain()), regT0);
    emitStoreCell(dst, regT0);
}

void JIT::emit_op_to_this(Instruction* currentInstruction)
{
    WriteBarrierBase<Structure>* cachedStructure = &currentInstruction[2].u.structure;
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT1);

    emitJumpSlowCaseIfNotJSCell(regT1);

    addSlowCase(branch8(NotEqual, Address(regT1, JSCell::typeInfoTypeOffset()), TrustedImm32(FinalObjectType)));
    loadPtr(cachedStructure, regT2);
    addSlowCase(branchTestPtr(Zero, regT2));
    load32(Address(regT2, Structure::structureIDOffset()), regT2);
    addSlowCase(branch32(NotEqual, Address(regT1, JSCell::structureIDOffset()), regT2));
}

void JIT::emit_op_create_this(Instruction* currentInstruction)
{
    int callee = currentInstruction[2].u.operand;
    WriteBarrierBase<JSCell>* cachedFunction = &currentInstruction[4].u.jsCell;
    RegisterID calleeReg = regT0;
    RegisterID rareDataReg = regT4;
    RegisterID resultReg = regT0;
    RegisterID allocatorReg = regT1;
    RegisterID structureReg = regT2;
    RegisterID cachedFunctionReg = regT4;
    RegisterID scratchReg = regT3;

    emitGetVirtualRegister(callee, calleeReg);
    addSlowCase(branch8(NotEqual, Address(calleeReg, JSCell::typeInfoTypeOffset()), TrustedImm32(JSFunctionType)));
    loadPtr(Address(calleeReg, JSFunction::offsetOfRareData()), rareDataReg);
    addSlowCase(branchTestPtr(Zero, rareDataReg));
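    // The rare-data pointer is stored poisoned (XORed with a per-type key) as
    // a Spectre-era hardening measure; XORing with JSFunctionPoison::key()
    // again recovers the real pointer.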
    xorPtr(TrustedImmPtr(JSFunctionPoison::key()), rareDataReg);
    load32(Address(rareDataReg, FunctionRareData::offsetOfObjectAllocationProfile() + ObjectAllocationProfile::offsetOfAllocator()), allocatorReg);
    loadPtr(Address(rareDataReg, FunctionRareData::offsetOfObjectAllocationProfile() + ObjectAllocationProfile::offsetOfStructure()), structureReg);
    addSlowCase(branch32(Equal, allocatorReg, TrustedImm32(Allocator().offset())));

    loadPtr(cachedFunction, cachedFunctionReg);
    Jump hasSeenMultipleCallees = branchPtr(Equal, cachedFunctionReg, TrustedImmPtr(JSCell::seenMultipleCalleeObjects()));
    addSlowCase(branchPtr(NotEqual, calleeReg, cachedFunctionReg));
    hasSeenMultipleCallees.link(this);

    JumpList slowCases;
    auto butterfly = TrustedImmPtr(nullptr);
    auto mask = TrustedImm32(0);
    emitAllocateJSObject(resultReg, JITAllocator::variable(), allocatorReg, structureReg, butterfly, mask, scratchReg, slowCases);
    emitGetVirtualRegister(callee, scratchReg);
    loadPtr(Address(scratchReg, JSFunction::offsetOfRareData()), scratchReg);
    xorPtr(TrustedImmPtr(JSFunctionPoison::key()), scratchReg);
    load32(Address(scratchReg, FunctionRareData::offsetOfObjectAllocationProfile() + ObjectAllocationProfile::offsetOfInlineCapacity()), scratchReg);
    emitInitializeInlineStorage(resultReg, scratchReg);
    addSlowCase(slowCases);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_check_tdz(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
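    // The empty JSValue (encoded as all-zero bits) marks a binding still in
    // its temporal dead zone, so a plain zero test is sufficient.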
    addSlowCase(branchTest64(Zero, regT0));
}


// Slow cases

void JIT::emitSlow_op_eq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkAllSlowCases(iter);

    callOperation(operationCompareEq, regT0, regT1);
    emitTagBool(returnValueGPR);
    emitPutVirtualRegister(currentInstruction[1].u.operand, returnValueGPR);
}

void JIT::emitSlow_op_neq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkAllSlowCases(iter);

    callOperation(operationCompareEq, regT0, regT1);
    xor32(TrustedImm32(0x1), regT0);
    emitTagBool(returnValueGPR);
    emitPutVirtualRegister(currentInstruction[1].u.operand, returnValueGPR);
}

void JIT::emitSlow_op_instanceof(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkAllSlowCases(iter);

    auto& bytecode = *reinterpret_cast<OpInstanceof*>(currentInstruction);
    int dst = bytecode.dst();
    int value = bytecode.value();
    int proto = bytecode.prototype();

    emitGetVirtualRegister(value, regT0);
    emitGetVirtualRegister(proto, regT1);
    callOperation(operationInstanceOf, dst, regT0, regT1);
}

void JIT::emitSlow_op_instanceof_custom(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkAllSlowCases(iter);

    auto& bytecode = *reinterpret_cast<OpInstanceofCustom*>(currentInstruction);
    int dst = bytecode.dst();
    int value = bytecode.value();
    int constructor = bytecode.constructor();
    int hasInstanceValue = bytecode.hasInstanceValue();

    emitGetVirtualRegister(value, regT0);
    emitGetVirtualRegister(constructor, regT1);
    emitGetVirtualRegister(hasInstanceValue, regT2);
    callOperation(operationInstanceOfCustom, regT0, regT1, regT2);
    emitTagBool(returnValueGPR);
    emitPutVirtualRegister(dst, returnValueGPR);
}

#endif // USE(JSVALUE64)

void JIT::emit_op_loop_hint(Instruction*)
{
    // Emit the JIT optimization check:
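    // The execution counter starts out negative and is incremented here on
    // every loop iteration; branchAdd32(PositiveOrZero, ...) fires once it
    // crosses zero, which is the cue to attempt tier-up in the slow path.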
    if (canBeOptimized()) {
        addSlowCase(branchAdd32(PositiveOrZero, TrustedImm32(Options::executionCounterIncrementForLoop()),
            AbsoluteAddress(m_codeBlock->addressOfJITExecuteCounter())));
    }
}

void JIT::emitSlow_op_loop_hint(Instruction*, Vector<SlowCaseEntry>::iterator& iter)
{
#if ENABLE(DFG_JIT)
    // Emit the slow path for the JIT optimization check:
    if (canBeOptimized()) {
        linkAllSlowCases(iter);

        copyCalleeSavesFromFrameOrRegisterToEntryFrameCalleeSavesBuffer(vm()->topEntryFrame);

        callOperation(operationOptimize, m_bytecodeOffset);
        Jump noOptimizedEntry = branchTestPtr(Zero, returnValueGPR);
        if (!ASSERT_DISABLED) {
            Jump ok = branchPtr(MacroAssembler::Above, returnValueGPR, TrustedImmPtr(bitwise_cast<void*>(static_cast<intptr_t>(1000))));
            abortWithReason(JITUnreasonableLoopHintJumpTarget);
            ok.link(this);
        }
        jump(returnValueGPR);
        noOptimizedEntry.link(this);

        emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_loop_hint));
    }
#else
    UNUSED_PARAM(iter);
#endif
}

void JIT::emit_op_check_traps(Instruction*)
{
    addSlowCase(branchTest8(NonZero, AbsoluteAddress(m_vm->needTrapHandlingAddress())));
}

void JIT::emit_op_nop(Instruction*)
{
}

void JIT::emit_op_super_sampler_begin(Instruction*)
{
    add32(TrustedImm32(1), AbsoluteAddress(bitwise_cast<void*>(&g_superSamplerCount)));
}

void JIT::emit_op_super_sampler_end(Instruction*)
{
    sub32(TrustedImm32(1), AbsoluteAddress(bitwise_cast<void*>(&g_superSamplerCount)));
}

void JIT::emitSlow_op_check_traps(Instruction*, Vector<SlowCaseEntry>::iterator& iter)
{
    linkAllSlowCases(iter);

    callOperation(operationHandleTraps);
}

void JIT::emit_op_new_regexp(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    callOperation(operationNewRegexp, m_codeBlock->regexp(currentInstruction[2].u.operand));
    emitStoreCell(dst, returnValueGPR);
}

void JIT::emitNewFuncCommon(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;

#if USE(JSVALUE64)
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);
#else
    emitLoadPayload(currentInstruction[2].u.operand, regT0);
#endif
    FunctionExecutable* funcExec = m_codeBlock->functionDecl(currentInstruction[3].u.operand);

    OpcodeID opcodeID = Interpreter::getOpcodeID(currentInstruction->u.opcode);
    if (opcodeID == op_new_func)
        callOperation(operationNewFunction, dst, regT0, funcExec);
    else if (opcodeID == op_new_generator_func)
        callOperation(operationNewGeneratorFunction, dst, regT0, funcExec);
    else if (opcodeID == op_new_async_func)
        callOperation(operationNewAsyncFunction, dst, regT0, funcExec);
    else {
        ASSERT(opcodeID == op_new_async_generator_func);
        callOperation(operationNewAsyncGeneratorFunction, dst, regT0, funcExec);
    }
}

void JIT::emit_op_new_func(Instruction* currentInstruction)
{
    emitNewFuncCommon(currentInstruction);
}

void JIT::emit_op_new_generator_func(Instruction* currentInstruction)
{
    emitNewFuncCommon(currentInstruction);
}

void JIT::emit_op_new_async_generator_func(Instruction* currentInstruction)
{
    emitNewFuncCommon(currentInstruction);
}

void JIT::emit_op_new_async_func(Instruction* currentInstruction)
{
    emitNewFuncCommon(currentInstruction);
}

void JIT::emitNewFuncExprCommon(Instruction* currentInstruction)
{
    Jump notUndefinedScope;
    int dst = currentInstruction[1].u.operand;
#if USE(JSVALUE64)
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);
    notUndefinedScope = branch64(NotEqual, regT0, TrustedImm64(JSValue::encode(jsUndefined())));
    store64(TrustedImm64(JSValue::encode(jsUndefined())), Address(callFrameRegister, sizeof(Register) * dst));
#else
    emitLoadPayload(currentInstruction[2].u.operand, regT0);
    notUndefinedScope = branch32(NotEqual, tagFor(currentInstruction[2].u.operand), TrustedImm32(JSValue::UndefinedTag));
    emitStore(dst, jsUndefined());
#endif
    Jump done = jump();
    notUndefinedScope.link(this);

    FunctionExecutable* function = m_codeBlock->functionExpr(currentInstruction[3].u.operand);
    OpcodeID opcodeID = Interpreter::getOpcodeID(currentInstruction->u.opcode);

    if (opcodeID == op_new_func_exp)
        callOperation(operationNewFunction, dst, regT0, function);
    else if (opcodeID == op_new_generator_func_exp)
        callOperation(operationNewGeneratorFunction, dst, regT0, function);
    else if (opcodeID == op_new_async_func_exp)
        callOperation(operationNewAsyncFunction, dst, regT0, function);
    else {
        ASSERT(opcodeID == op_new_async_generator_func_exp);
        callOperation(operationNewAsyncGeneratorFunction, dst, regT0, function);
    }

    done.link(this);
}

void JIT::emit_op_new_func_exp(Instruction* currentInstruction)
{
    emitNewFuncExprCommon(currentInstruction);
}

void JIT::emit_op_new_generator_func_exp(Instruction* currentInstruction)
{
    emitNewFuncExprCommon(currentInstruction);
}

void JIT::emit_op_new_async_func_exp(Instruction* currentInstruction)
{
    emitNewFuncExprCommon(currentInstruction);
}

void JIT::emit_op_new_async_generator_func_exp(Instruction* currentInstruction)
{
    emitNewFuncExprCommon(currentInstruction);
}

void JIT::emit_op_new_array(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int valuesIndex = currentInstruction[2].u.operand;
    int size = currentInstruction[3].u.operand;
    addPtr(TrustedImm32(valuesIndex * sizeof(Register)), callFrameRegister, regT0);
    callOperation(operationNewArrayWithProfile, dst,
        currentInstruction[4].u.arrayAllocationProfile, regT0, size);
}

void JIT::emit_op_new_array_with_size(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int sizeIndex = currentInstruction[2].u.operand;
#if USE(JSVALUE64)
    emitGetVirtualRegister(sizeIndex, regT0);
    callOperation(operationNewArrayWithSizeAndProfile, dst,
        currentInstruction[3].u.arrayAllocationProfile, regT0);
#else
    emitLoad(sizeIndex, regT1, regT0);
    callOperation(operationNewArrayWithSizeAndProfile, dst,
        currentInstruction[3].u.arrayAllocationProfile, JSValueRegs(regT1, regT0));
#endif
}

#if USE(JSVALUE64)
void JIT::emit_op_has_structure_property(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int enumerator = currentInstruction[4].u.operand;

    emitGetVirtualRegister(base, regT0);
    emitGetVirtualRegister(enumerator, regT1);
    emitJumpSlowCaseIfNotJSCell(regT0, base);

    load32(Address(regT0, JSCell::structureIDOffset()), regT0);
    addSlowCase(branch32(NotEqual, regT0, Address(regT1, JSPropertyNameEnumerator::cachedStructureIDOffset())));

    move(TrustedImm64(JSValue::encode(jsBoolean(true))), regT0);
    emitPutVirtualRegister(dst);
}

void JIT::privateCompileHasIndexedProperty(ByValInfo* byValInfo, ReturnAddressPtr returnAddress, JITArrayMode arrayMode)
{
    Instruction* currentInstruction = m_codeBlock->instructions().begin() + byValInfo->bytecodeIndex;

    PatchableJump badType;

    // FIXME: Add support for other types like TypedArrays and Arguments.
    // See https://bugs.webkit.org/show_bug.cgi?id=135033 and https://bugs.webkit.org/show_bug.cgi?id=135034.
    JumpList slowCases = emitLoadForArrayMode(currentInstruction, arrayMode, badType);
    move(TrustedImm64(JSValue::encode(jsBoolean(true))), regT0);
    Jump done = jump();

    LinkBuffer patchBuffer(*this, m_codeBlock);

    patchBuffer.link(badType, CodeLocationLabel(MacroAssemblerCodePtr::createFromExecutableAddress(returnAddress.value())).labelAtOffset(byValInfo->returnAddressToSlowPath));
    patchBuffer.link(slowCases, CodeLocationLabel(MacroAssemblerCodePtr::createFromExecutableAddress(returnAddress.value())).labelAtOffset(byValInfo->returnAddressToSlowPath));

    patchBuffer.link(done, byValInfo->badTypeJump.labelAtOffset(byValInfo->badTypeJumpToDone));

    byValInfo->stubRoutine = FINALIZE_CODE_FOR_STUB(
        m_codeBlock, patchBuffer,
        "Baseline has_indexed_property stub for %s, return point %p", toCString(*m_codeBlock).data(), returnAddress.value());

    MacroAssembler::repatchJump(byValInfo->badTypeJump, CodeLocationLabel(byValInfo->stubRoutine->code().code()));
    MacroAssembler::repatchCall(CodeLocationCall(MacroAssemblerCodePtr(returnAddress)), FunctionPtr(operationHasIndexedPropertyGeneric));
}

void JIT::emit_op_has_indexed_property(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int property = currentInstruction[3].u.operand;
    ArrayProfile* profile = currentInstruction[4].u.arrayProfile;
    ByValInfo* byValInfo = m_codeBlock->addByValInfo();

    emitGetVirtualRegisters(base, regT0, property, regT1);

    // This is technically incorrect - we're zero-extending an int32. On the hot path this doesn't matter.
    // We check the value as if it were a uint32 against m_vectorLength - which will always fail if the
    // number was signed, since m_vectorLength is always less than intmax (the total allocation size is
    // always less than 4GB). As such, zero-extending is correct (and extending the value to 64 bits is
    // necessary, since it's used in the address calculation). We zero-extend rather than sign-extend
    // because it makes it easier to re-tag the value in the slow case.
    zeroExtend32ToPtr(regT1, regT1);

    emitJumpSlowCaseIfNotJSCell(regT0, base);
    emitArrayProfilingSiteWithCell(regT0, regT2, profile);
    and32(TrustedImm32(IndexingShapeMask), regT2);

    JITArrayMode mode = chooseArrayMode(profile);
    PatchableJump badType;

    // FIXME: Add support for other types like TypedArrays and Arguments.
    // See https://bugs.webkit.org/show_bug.cgi?id=135033 and https://bugs.webkit.org/show_bug.cgi?id=135034.
    JumpList slowCases = emitLoadForArrayMode(currentInstruction, mode, badType);

    move(TrustedImm64(JSValue::encode(jsBoolean(true))), regT0);

    addSlowCase(badType);
    addSlowCase(slowCases);

    Label done = label();

    emitPutVirtualRegister(dst);

    Label nextHotPath = label();

    m_byValCompilationInfo.append(ByValCompilationInfo(byValInfo, m_bytecodeOffset, PatchableJump(), badType, mode, profile, done, nextHotPath));
}

void JIT::emitSlow_op_has_indexed_property(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkAllSlowCases(iter);

    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int property = currentInstruction[3].u.operand;
    ByValInfo* byValInfo = m_byValCompilationInfo[m_byValInstructionIndex].byValInfo;

    Label slowPath = label();

    emitGetVirtualRegister(base, regT0);
    emitGetVirtualRegister(property, regT1);
    Call call = callOperation(operationHasIndexedPropertyDefault, dst, regT0, regT1, byValInfo);

    m_byValCompilationInfo[m_byValInstructionIndex].slowPathTarget = slowPath;
    m_byValCompilationInfo[m_byValInstructionIndex].returnAddress = call;
    m_byValInstructionIndex++;
}

void JIT::emit_op_get_direct_pname(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int index = currentInstruction[4].u.operand;
    int enumerator = currentInstruction[5].u.operand;

    // Check that base is a cell.
    emitGetVirtualRegister(base, regT0);
    emitJumpSlowCaseIfNotJSCell(regT0, base);

    // Check the structure.
    emitGetVirtualRegister(enumerator, regT2);
    load32(Address(regT0, JSCell::structureIDOffset()), regT1);
    addSlowCase(branch32(NotEqual, regT1, Address(regT2, JSPropertyNameEnumerator::cachedStructureIDOffset())));

    // Compute the offset.
    emitGetVirtualRegister(index, regT1);
    // If index is less than the enumerator's cached inline storage, then it's an inline access.
    Jump outOfLineAccess = branch32(AboveOrEqual, regT1, Address(regT2, JSPropertyNameEnumerator::cachedInlineCapacityOffset()));
    addPtr(TrustedImm32(JSObject::offsetOfInlineStorage()), regT0);
    signExtend32ToPtr(regT1, regT1);
    load64(BaseIndex(regT0, regT1, TimesEight), regT0);

    Jump done = jump();

    // Otherwise it's out of line.
    outOfLineAccess.link(this);
    loadPtr(Address(regT0, JSObject::butterflyOffset()), regT0);
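    // Out-of-line properties live at negative indices off the butterfly, so
    // the cached inline capacity is subtracted and the index negated before
    // the scaled load below.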
    sub32(Address(regT2, JSPropertyNameEnumerator::cachedInlineCapacityOffset()), regT1);
    neg32(regT1);
    signExtend32ToPtr(regT1, regT1);
    int32_t offsetOfFirstProperty = static_cast<int32_t>(offsetInButterfly(firstOutOfLineOffset)) * sizeof(EncodedJSValue);
    load64(BaseIndex(regT0, regT1, TimesEight, offsetOfFirstProperty), regT0);

    done.link(this);
    emitValueProfilingSite();
    emitPutVirtualRegister(dst, regT0);
}

void JIT::emit_op_enumerator_structure_pname(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int enumerator = currentInstruction[2].u.operand;
    int index = currentInstruction[3].u.operand;

    emitGetVirtualRegister(index, regT0);
    emitGetVirtualRegister(enumerator, regT1);
    Jump inBounds = branch32(Below, regT0, Address(regT1, JSPropertyNameEnumerator::endStructurePropertyIndexOffset()));

    move(TrustedImm64(JSValue::encode(jsNull())), regT0);

    Jump done = jump();
    inBounds.link(this);

    loadPtr(Address(regT1, JSPropertyNameEnumerator::cachedPropertyNamesVectorOffset()), regT1);
    signExtend32ToPtr(regT0, regT0);
    load64(BaseIndex(regT1, regT0, TimesEight), regT0);

    done.link(this);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_enumerator_generic_pname(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int enumerator = currentInstruction[2].u.operand;
    int index = currentInstruction[3].u.operand;

    emitGetVirtualRegister(index, regT0);
    emitGetVirtualRegister(enumerator, regT1);
    Jump inBounds = branch32(Below, regT0, Address(regT1, JSPropertyNameEnumerator::endGenericPropertyIndexOffset()));

    move(TrustedImm64(JSValue::encode(jsNull())), regT0);

    Jump done = jump();
    inBounds.link(this);

    loadPtr(Address(regT1, JSPropertyNameEnumerator::cachedPropertyNamesVectorOffset()), regT1);
    signExtend32ToPtr(regT0, regT0);
    load64(BaseIndex(regT1, regT0, TimesEight), regT0);

    done.link(this);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_profile_type(Instruction* currentInstruction)
{
    TypeLocation* cachedTypeLocation = currentInstruction[2].u.location;
    int valueToProfile = currentInstruction[1].u.operand;

    emitGetVirtualRegister(valueToProfile, regT0);

    JumpList jumpToEnd;

    jumpToEnd.append(branchTest64(Zero, regT0));

    // Compile in a predictive type check, if possible, to see if we can skip writing to the log.
    // These typechecks are inlined to match those of the 64-bit JSValue type checks.
    if (cachedTypeLocation->m_lastSeenType == TypeUndefined)
        jumpToEnd.append(branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsUndefined()))));
    else if (cachedTypeLocation->m_lastSeenType == TypeNull)
        jumpToEnd.append(branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsNull()))));
    else if (cachedTypeLocation->m_lastSeenType == TypeBoolean) {
        move(regT0, regT1);
        and64(TrustedImm32(~1), regT1);
        jumpToEnd.append(branch64(Equal, regT1, TrustedImm64(ValueFalse)));
    } else if (cachedTypeLocation->m_lastSeenType == TypeAnyInt)
        jumpToEnd.append(emitJumpIfInt(regT0));
    else if (cachedTypeLocation->m_lastSeenType == TypeNumber)
        jumpToEnd.append(emitJumpIfNumber(regT0));
    else if (cachedTypeLocation->m_lastSeenType == TypeString) {
        Jump isNotCell = emitJumpIfNotJSCell(regT0);
        jumpToEnd.append(branch8(Equal, Address(regT0, JSCell::typeInfoTypeOffset()), TrustedImm32(StringType)));
        isNotCell.link(this);
    }

    // Load the type profiling log into T2.
    TypeProfilerLog* cachedTypeProfilerLog = m_vm->typeProfilerLog();
    move(TrustedImmPtr(cachedTypeProfilerLog), regT2);
    // Load the next log entry into T1.
    loadPtr(Address(regT2, TypeProfilerLog::currentLogEntryOffset()), regT1);

    // Store the JSValue onto the log entry.
    store64(regT0, Address(regT1, TypeProfilerLog::LogEntry::valueOffset()));

    // Store the structureID of the cell if T0 is a cell, otherwise, store 0 on the log entry.
    Jump notCell = emitJumpIfNotJSCell(regT0);
    load32(Address(regT0, JSCell::structureIDOffset()), regT0);
    store32(regT0, Address(regT1, TypeProfilerLog::LogEntry::structureIDOffset()));
    Jump skipIsCell = jump();
    notCell.link(this);
    store32(TrustedImm32(0), Address(regT1, TypeProfilerLog::LogEntry::structureIDOffset()));
    skipIsCell.link(this);

    // Store the typeLocation on the log entry.
    move(TrustedImmPtr(cachedTypeLocation), regT0);
    store64(regT0, Address(regT1, TypeProfilerLog::LogEntry::locationOffset()));

    // Increment the current log entry.
    addPtr(TrustedImm32(sizeof(TypeProfilerLog::LogEntry)), regT1);
    store64(regT1, Address(regT2, TypeProfilerLog::currentLogEntryOffset()));
    Jump skipClearLog = branchPtr(NotEqual, regT1, TrustedImmPtr(cachedTypeProfilerLog->logEndPtr()));
    // Clear the log if we're at the end of the log.
    callOperation(operationProcessTypeProfilerLog);
    skipClearLog.link(this);

    jumpToEnd.link(this);
}

void JIT::emit_op_log_shadow_chicken_prologue(Instruction* currentInstruction)
{
    updateTopCallFrame();
    static_assert(nonArgGPR0 != regT0 && nonArgGPR0 != regT2, "shadow chicken scratch registers must not alias.");
    GPRReg shadowPacketReg = regT0;
    GPRReg scratch1Reg = nonArgGPR0; // This must be a non-argument register.
    GPRReg scratch2Reg = regT2;
    ensureShadowChickenPacket(*vm(), shadowPacketReg, scratch1Reg, scratch2Reg);
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT3);
    logShadowChickenProloguePacket(shadowPacketReg, scratch1Reg, regT3);
}

void JIT::emit_op_log_shadow_chicken_tail(Instruction* currentInstruction)
{
    updateTopCallFrame();
    static_assert(nonArgGPR0 != regT0 && nonArgGPR0 != regT2, "shadow chicken scratch registers must not alias.");
    GPRReg shadowPacketReg = regT0;
    GPRReg scratch1Reg = nonArgGPR0; // This must be a non-argument register.
    GPRReg scratch2Reg = regT2;
    ensureShadowChickenPacket(*vm(), shadowPacketReg, scratch1Reg, scratch2Reg);
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT2);
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT3);
    logShadowChickenTailPacket(shadowPacketReg, JSValueRegs(regT2), regT3, m_codeBlock, CallSiteIndex(m_bytecodeOffset));
}

#endif // USE(JSVALUE64)

void JIT::emit_op_profile_control_flow(Instruction* currentInstruction)
{
    BasicBlockLocation* basicBlockLocation = currentInstruction[1].u.basicBlockLocation;
#if USE(JSVALUE64)
    basicBlockLocation->emitExecuteCode(*this);
#else
    basicBlockLocation->emitExecuteCode(*this, regT0);
#endif
}

void JIT::emit_op_argument_count(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    load32(payloadFor(CallFrameSlot::argumentCount), regT0);
    sub32(TrustedImm32(1), regT0);
    JSValueRegs result = JSValueRegs::withTwoAvailableRegs(regT0, regT1);
    boxInt32(regT0, result);
    emitPutVirtualRegister(dst, result);
}

void JIT::emit_op_get_rest_length(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    unsigned numParamsToSkip = currentInstruction[2].u.unsignedValue;
    load32(payloadFor(CallFrameSlot::argumentCount), regT0);
    sub32(TrustedImm32(1), regT0);
    Jump zeroLength = branch32(LessThanOrEqual, regT0, Imm32(numParamsToSkip));
    sub32(Imm32(numParamsToSkip), regT0);
#if USE(JSVALUE64)
    boxInt32(regT0, JSValueRegs(regT0));
#endif
    Jump done = jump();

    zeroLength.link(this);
#if USE(JSVALUE64)
    move(TrustedImm64(JSValue::encode(jsNumber(0))), regT0);
#else
    move(TrustedImm32(0), regT0);
#endif

    done.link(this);
#if USE(JSVALUE64)
    emitPutVirtualRegister(dst, regT0);
#else
    move(TrustedImm32(JSValue::Int32Tag), regT1);
    emitPutVirtualRegister(dst, JSValueRegs(regT1, regT0));
#endif
}

void JIT::emit_op_get_argument(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int index = currentInstruction[2].u.operand;
#if USE(JSVALUE64)
    JSValueRegs resultRegs(regT0);
#else
    JSValueRegs resultRegs(regT1, regT0);
#endif

    load32(payloadFor(CallFrameSlot::argumentCount), regT2);
    Jump argumentOutOfBounds = branch32(LessThanOrEqual, regT2, TrustedImm32(index));
    loadValue(addressFor(CallFrameSlot::thisArgument + index), resultRegs);
    Jump done = jump();

    argumentOutOfBounds.link(this);
    moveValue(jsUndefined(), resultRegs);

    done.link(this);
    emitValueProfilingSite();
    emitPutVirtualRegister(dst, resultRegs);
}

} // namespace JSC

#endif // ENABLE(JIT)