/*
 * Copyright (C) 2009-2018 Apple Inc. All rights reserved.
 * Copyright (C) 2010 Patrick Gansterer <paroga@paroga.com>
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#if ENABLE(JIT)
#include "JIT.h"

#include "BasicBlockLocation.h"
#include "BytecodeStructs.h"
#include "Exception.h"
#include "Heap.h"
#include "InterpreterInlines.h"
#include "JITInlines.h"
#include "JSArray.h"
#include "JSCast.h"
#include "JSFunction.h"
#include "JSPropertyNameEnumerator.h"
#include "LinkBuffer.h"
#include "MaxFrameExtentForSlowPathCall.h"
#include "SlowPathCall.h"
#include "SuperSampler.h"
#include "ThunkGenerators.h"
#include "TypeLocation.h"
#include "TypeProfilerLog.h"
#include "VirtualRegister.h"
#include "Watchdog.h"

namespace JSC {

#if USE(JSVALUE64)

void JIT::emit_op_mov(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_end(Instruction* currentInstruction)
{
    RELEASE_ASSERT(returnValueGPR != callFrameRegister);
    emitGetVirtualRegister(currentInstruction[1].u.operand, returnValueGPR);
    emitRestoreCalleeSaves();
    emitFunctionEpilogue();
    ret();
}

void JIT::emit_op_jmp(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[1].u.operand;
    addJump(jump(), target);
}

void JIT::emit_op_new_object(Instruction* currentInstruction)
{
    Structure* structure = currentInstruction[3].u.objectAllocationProfile->structure();
    size_t allocationSize = JSFinalObject::allocationSize(structure->inlineCapacity());
    Allocator allocator = subspaceFor<JSFinalObject>(*m_vm)->allocatorForNonVirtual(allocationSize, AllocatorForMode::AllocatorIfExists);

    RegisterID resultReg = regT0;
    RegisterID allocatorReg = regT1;
    RegisterID scratchReg = regT2;

    if (!allocator)
        addSlowCase(jump());
    else {
        JumpList slowCases;
        auto butterfly = TrustedImmPtr(nullptr);
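        // Inline the common case: pop a cell off the free list, install the
        // structure, and null out the butterfly. Free-list exhaustion falls
        // through to the slow cases.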
        emitAllocateJSObject(resultReg, JITAllocator::constant(allocator), allocatorReg, TrustedImmPtr(structure), butterfly, scratchReg, slowCases);
        emitInitializeInlineStorage(resultReg, structure->inlineCapacity());
        addSlowCase(slowCases);
        emitPutVirtualRegister(currentInstruction[1].u.operand);
    }
}

void JIT::emitSlow_op_new_object(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkAllSlowCases(iter);

    int dst = currentInstruction[1].u.operand;
    Structure* structure = currentInstruction[3].u.objectAllocationProfile->structure();
    callOperation(operationNewObject, structure);
    emitStoreCell(dst, returnValueGPR);
}

void JIT::emit_op_overrides_has_instance(Instruction* currentInstruction)
{
    auto& bytecode = *reinterpret_cast<OpOverridesHasInstance*>(currentInstruction);
    int dst = bytecode.dst();
    int constructor = bytecode.constructor();
    int hasInstanceValue = bytecode.hasInstanceValue();

    emitGetVirtualRegister(hasInstanceValue, regT0);

    // We don't jump if we know what Symbol.hasInstance would do.
    Jump customHasInstanceValue = branchPtr(NotEqual, regT0, TrustedImmPtr(m_codeBlock->globalObject()->functionProtoHasInstanceSymbolFunction()));

    emitGetVirtualRegister(constructor, regT0);

    // Test the ImplementsDefaultHasInstance flag; an object that lacks it
    // (a C API object or a bound function) overrides Symbol.hasInstance.
    test8(Zero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(ImplementsDefaultHasInstance), regT0);
    emitTagBool(regT0);
    Jump done = jump();

    customHasInstanceValue.link(this);
    move(TrustedImm32(ValueTrue), regT0);

    done.link(this);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_instanceof(Instruction* currentInstruction)
{
    auto& bytecode = *reinterpret_cast<OpInstanceof*>(currentInstruction);
    int dst = bytecode.dst();
    int value = bytecode.value();
    int proto = bytecode.prototype();

    // Load the operands value and proto into regT2 and regT1. regT0 is kept
    // free so it can hold the result.
    emitGetVirtualRegister(value, regT2);
    emitGetVirtualRegister(proto, regT1);

    // Check that value and proto are cells. baseVal must be a cell; this is checked by the get_by_id for Symbol.hasInstance.
    emitJumpSlowCaseIfNotJSCell(regT2, value);
    emitJumpSlowCaseIfNotJSCell(regT1, proto);

    // Check that prototype is an object.
    addSlowCase(emitJumpIfCellNotObject(regT1));

    // Optimistically load the result true, and start looping.
    // Initially, regT1 still contains proto and regT2 still contains value.
    // As we loop regT2 will be updated with its prototype, recursively walking the prototype chain.
    move(TrustedImm64(JSValue::encode(jsBoolean(true))), regT0);
    Label loop(this);

    addSlowCase(branch8(Equal, Address(regT2, JSCell::typeInfoTypeOffset()), TrustedImm32(ProxyObjectType)));

    // Load the prototype of the object in regT2.  If this is equal to regT1 - WIN!
    // Otherwise, check if we've hit null - if we have then drop out of the loop, if not go again.
    emitLoadStructure(*vm(), regT2, regT4, regT3);
    load64(Address(regT4, Structure::prototypeOffset()), regT4);
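    // An empty (all-zero) prototype slot in the Structure means the object
    // uses poly proto, so fetch the prototype from the object's inline storage.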
    auto hasMonoProto = branchTest64(NonZero, regT4);
    load64(Address(regT2, offsetRelativeToBase(knownPolyProtoOffset)), regT4);
    hasMonoProto.link(this);
    move(regT4, regT2);
    Jump isInstance = branchPtr(Equal, regT2, regT1);
    emitJumpIfJSCell(regT2).linkTo(loop, this);

    // We get here either by dropping out of the loop, or if value was not an Object.  Result is false.
    move(TrustedImm64(JSValue::encode(jsBoolean(false))), regT0);

    // isInstance jumps right down to here, to skip setting the result to false (it has already set true).
    isInstance.link(this);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_instanceof_custom(Instruction*)
{
    // This always goes to slow path since we expect it to be rare.
    addSlowCase(jump());
}

void JIT::emit_op_is_empty(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
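    // The empty value (JSValue()) encodes as all-zero bits, so one 64-bit
    // compare against zero detects it.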
    compare64(Equal, regT0, TrustedImm32(JSValue::encode(JSValue())), regT0);

    emitTagBool(regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_is_undefined(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
    Jump isCell = emitJumpIfJSCell(regT0);

    compare64(Equal, regT0, TrustedImm32(ValueUndefined), regT0);
    Jump done = jump();

    isCell.link(this);
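    // A cell answers "undefined" only if it masquerades as undefined and
    // belongs to our global object (e.g. document.all).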
    Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    move(TrustedImm32(0), regT0);
    Jump notMasqueradesAsUndefined = jump();

    isMasqueradesAsUndefined.link(this);
    emitLoadStructure(*vm(), regT0, regT1, regT2);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    loadPtr(Address(regT1, Structure::globalObjectOffset()), regT1);
    comparePtr(Equal, regT0, regT1, regT0);

    notMasqueradesAsUndefined.link(this);
    done.link(this);
    emitTagBool(regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_is_boolean(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
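    // Booleans encode as ValueFalse (0x06) and ValueTrue (0x07). XORing with
    // ValueFalse maps them to 0 or 1; any other value keeps bits set outside
    // the low bit, which the masked test below catches.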
    xor64(TrustedImm32(static_cast<int32_t>(ValueFalse)), regT0);
    test64(Zero, regT0, TrustedImm32(static_cast<int32_t>(~1)), regT0);
    emitTagBool(regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_is_number(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
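    // Under NaN boxing, numbers are the only values with any of the high
    // tagTypeNumber bits set (all of them for int32, some for doubles).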
    test64(NonZero, regT0, tagTypeNumberRegister, regT0);
    emitTagBool(regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_is_cell_with_type(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;
    int type = currentInstruction[3].u.operand;

    emitGetVirtualRegister(value, regT0);
    Jump isNotCell = emitJumpIfNotJSCell(regT0);

    compare8(Equal, Address(regT0, JSCell::typeInfoTypeOffset()), TrustedImm32(type), regT0);
    emitTagBool(regT0);
    Jump done = jump();

    isNotCell.link(this);
    move(TrustedImm32(ValueFalse), regT0);

    done.link(this);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_is_object(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
    Jump isNotCell = emitJumpIfNotJSCell(regT0);

    compare8(AboveOrEqual, Address(regT0, JSCell::typeInfoTypeOffset()), TrustedImm32(ObjectType), regT0);
    emitTagBool(regT0);
    Jump done = jump();

    isNotCell.link(this);
    move(TrustedImm32(ValueFalse), regT0);

    done.link(this);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_ret(Instruction* currentInstruction)
{
    ASSERT(callFrameRegister != regT1);
    ASSERT(regT1 != returnValueGPR);
    ASSERT(returnValueGPR != callFrameRegister);

    // Return the result in returnValueGPR.
    emitGetVirtualRegister(currentInstruction[1].u.operand, returnValueGPR);

    checkStackPointerAlignment();
    emitRestoreCalleeSaves();
    emitFunctionEpilogue();
    ret();
}

void JIT::emit_op_to_primitive(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);

    Jump isImm = emitJumpIfNotJSCell(regT0);
    addSlowCase(emitJumpIfCellObject(regT0));
    isImm.link(this);

    if (dst != src)
        emitPutVirtualRegister(dst);
}

void JIT::emit_op_set_function_name(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT1);
    callOperation(operationSetFunctionName, regT0, regT1);
}

void JIT::emit_op_not(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);

    // Invert against JSValue(false); if the value was tagged as a boolean, then all bits will be
    // clear other than the low bit (which will be 0 or 1 for false or true inputs respectively).
    // Then invert against JSValue(true), which will add the tag back in, and flip the low bit.
    xor64(TrustedImm32(static_cast<int32_t>(ValueFalse)), regT0);
    addSlowCase(branchTestPtr(NonZero, regT0, TrustedImm32(static_cast<int32_t>(~1))));
    xor64(TrustedImm32(static_cast<int32_t>(ValueTrue)), regT0);

    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_jfalse(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[2].u.operand;

    GPRReg value = regT0;
    GPRReg result = regT1;
    GPRReg scratch = regT2;
    bool shouldCheckMasqueradesAsUndefined = true;

    emitGetVirtualRegister(currentInstruction[1].u.operand, value);
    emitConvertValueToBoolean(*vm(), JSValueRegs(value), result, scratch, fpRegT0, fpRegT1, shouldCheckMasqueradesAsUndefined, m_codeBlock->globalObject());

    addJump(branchTest32(Zero, result), target);
}

void JIT::emit_op_jeq_null(Instruction* currentInstruction)
{
    int src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
    Jump isNotMasqueradesAsUndefined = branchTest8(Zero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    emitLoadStructure(*vm(), regT0, regT2, regT1);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    addJump(branchPtr(Equal, Address(regT2, Structure::globalObjectOffset()), regT0), target);
    Jump masqueradesGlobalObjectIsForeign = jump();

    // Now handle the immediate cases - undefined & null
    isImmediate.link(this);
    and64(TrustedImm32(~TagBitUndefined), regT0);
    addJump(branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsNull()))), target);

    isNotMasqueradesAsUndefined.link(this);
    masqueradesGlobalObjectIsForeign.link(this);
}

void JIT::emit_op_jneq_null(Instruction* currentInstruction)
{
    int src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
    addJump(branchTest8(Zero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined)), target);
    emitLoadStructure(*vm(), regT0, regT2, regT1);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    addJump(branchPtr(NotEqual, Address(regT2, Structure::globalObjectOffset()), regT0), target);
    Jump wasNotImmediate = jump();

    // Now handle the immediate cases - undefined & null
    isImmediate.link(this);
    and64(TrustedImm32(~TagBitUndefined), regT0);
    addJump(branch64(NotEqual, regT0, TrustedImm64(JSValue::encode(jsNull()))), target);

    wasNotImmediate.link(this);
}

void JIT::emit_op_jneq_ptr(Instruction* currentInstruction)
{
    int src = currentInstruction[1].u.operand;
    Special::Pointer ptr = currentInstruction[2].u.specialPointer;
    unsigned target = currentInstruction[3].u.operand;

    emitGetVirtualRegister(src, regT0);
    CCallHelpers::Jump equal = branchPtr(Equal, regT0, TrustedImmPtr(actualPointerFor(m_codeBlock, ptr)));
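    // Record that the comparison failed, so the DFG knows not to speculate
    // on this pointer check when it compiles this code block.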
    store32(TrustedImm32(1), &currentInstruction[4].u.operand);
    addJump(jump(), target);
    equal.link(this);
}

void JIT::emit_op_eq(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotInt(regT0, regT1, regT2);
    compare32(Equal, regT1, regT0, regT0);
    emitTagBool(regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_jeq(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[3].u.operand;
    emitGetVirtualRegisters(currentInstruction[1].u.operand, regT0, currentInstruction[2].u.operand, regT1);
    emitJumpSlowCaseIfNotInt(regT0, regT1, regT2);
    addJump(branch32(Equal, regT0, regT1), target);
}

void JIT::emit_op_jtrue(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[2].u.operand;

    GPRReg value = regT0;
    GPRReg result = regT1;
    GPRReg scratch = regT2;
    bool shouldCheckMasqueradesAsUndefined = true;
    emitGetVirtualRegister(currentInstruction[1].u.operand, value);
    emitConvertValueToBoolean(*vm(), JSValueRegs(value), result, scratch, fpRegT0, fpRegT1, shouldCheckMasqueradesAsUndefined, m_codeBlock->globalObject());
    addJump(branchTest32(NonZero, result), target);
}

void JIT::emit_op_neq(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotInt(regT0, regT1, regT2);
    compare32(NotEqual, regT1, regT0, regT0);
    emitTagBool(regT0);

    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_jneq(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[3].u.operand;
    emitGetVirtualRegisters(currentInstruction[1].u.operand, regT0, currentInstruction[2].u.operand, regT1);
    emitJumpSlowCaseIfNotInt(regT0, regT1, regT2);
    addJump(branch32(NotEqual, regT0, regT1), target);
}

void JIT::emit_op_throw(Instruction* currentInstruction)
{
    ASSERT(regT0 == returnValueGPR);
    copyCalleeSavesToEntryFrameCalleeSavesBuffer(vm()->topEntryFrame);
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
    callOperationNoExceptionCheck(operationThrow, regT0);
    jumpToExceptionHandler(*vm());
}

void JIT::compileOpStrictEq(Instruction* currentInstruction, CompileOpStrictEqType type)
{
    int dst = currentInstruction[1].u.operand;
    int src1 = currentInstruction[2].u.operand;
    int src2 = currentInstruction[3].u.operand;

    emitGetVirtualRegisters(src1, regT0, src2, regT1);

    // Jump slow if both are cells (to cover strings).
    move(regT0, regT2);
    or64(regT1, regT2);
    addSlowCase(emitJumpIfJSCell(regT2));

    // Jump slow if either is a double. First test if it's an integer, which is fine, and then test
    // if it's a double.
    Jump leftOK = emitJumpIfInt(regT0);
    addSlowCase(emitJumpIfNumber(regT0));
    leftOK.link(this);
    Jump rightOK = emitJumpIfInt(regT1);
    addSlowCase(emitJumpIfNumber(regT1));
    rightOK.link(this);

    if (type == CompileOpStrictEqType::StrictEq)
        compare64(Equal, regT1, regT0, regT0);
    else
        compare64(NotEqual, regT1, regT0, regT0);
    emitTagBool(regT0);

    emitPutVirtualRegister(dst);
}

void JIT::emit_op_stricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, CompileOpStrictEqType::StrictEq);
}

void JIT::emit_op_nstricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, CompileOpStrictEqType::NStrictEq);
}

void JIT::compileOpStrictEqJump(Instruction* currentInstruction, CompileOpStrictEqType type)
{
    int target = currentInstruction[3].u.operand;
    int src1 = currentInstruction[1].u.operand;
    int src2 = currentInstruction[2].u.operand;

    emitGetVirtualRegisters(src1, regT0, src2, regT1);

    // Jump slow if both are cells (to cover strings).
    move(regT0, regT2);
    or64(regT1, regT2);
    addSlowCase(emitJumpIfJSCell(regT2));

    // Jump slow if either is a double. First test if it's an integer, which is fine, and then test
    // if it's a double.
    Jump leftOK = emitJumpIfInt(regT0);
    addSlowCase(emitJumpIfNumber(regT0));
    leftOK.link(this);
    Jump rightOK = emitJumpIfInt(regT1);
    addSlowCase(emitJumpIfNumber(regT1));
    rightOK.link(this);

    if (type == CompileOpStrictEqType::StrictEq)
        addJump(branch64(Equal, regT1, regT0), target);
    else
        addJump(branch64(NotEqual, regT1, regT0), target);
}

void JIT::emit_op_jstricteq(Instruction* currentInstruction)
{
    compileOpStrictEqJump(currentInstruction, CompileOpStrictEqType::StrictEq);
}

void JIT::emit_op_jnstricteq(Instruction* currentInstruction)
{
    compileOpStrictEqJump(currentInstruction, CompileOpStrictEqType::NStrictEq);
}

void JIT::emitSlow_op_jstricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkAllSlowCases(iter);

    unsigned target = currentInstruction[3].u.operand;
    callOperation(operationCompareStrictEq, regT0, regT1);
    emitJumpSlowToHot(branchTest32(NonZero, returnValueGPR), target);
}

void JIT::emitSlow_op_jnstricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkAllSlowCases(iter);

    unsigned target = currentInstruction[3].u.operand;
    callOperation(operationCompareStrictEq, regT0, regT1);
    emitJumpSlowToHot(branchTest32(Zero, returnValueGPR), target);
}

void JIT::emit_op_to_number(Instruction* currentInstruction)
{
    int dstVReg = currentInstruction[1].u.operand;
    int srcVReg = currentInstruction[2].u.operand;
    emitGetVirtualRegister(srcVReg, regT0);

    addSlowCase(emitJumpIfNotNumber(regT0));

    emitValueProfilingSite();
    if (srcVReg != dstVReg)
        emitPutVirtualRegister(dstVReg);
}

void JIT::emit_op_to_string(Instruction* currentInstruction)
{
    int srcVReg = currentInstruction[2].u.operand;
    emitGetVirtualRegister(srcVReg, regT0);

    addSlowCase(emitJumpIfNotJSCell(regT0));
    addSlowCase(branch8(NotEqual, Address(regT0, JSCell::typeInfoTypeOffset()), TrustedImm32(StringType)));

    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_to_object(Instruction* currentInstruction)
{
    int dstVReg = currentInstruction[1].u.operand;
    int srcVReg = currentInstruction[2].u.operand;
    emitGetVirtualRegister(srcVReg, regT0);

    addSlowCase(emitJumpIfNotJSCell(regT0));
    addSlowCase(branch8(Below, Address(regT0, JSCell::typeInfoTypeOffset()), TrustedImm32(ObjectType)));

    emitValueProfilingSite();
    if (srcVReg != dstVReg)
        emitPutVirtualRegister(dstVReg);
}

void JIT::emit_op_catch(Instruction* currentInstruction)
{
    restoreCalleeSavesFromEntryFrameCalleeSavesBuffer(vm()->topEntryFrame);

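    // The unwinder stashed the call frame for this handler in the VM; reload
    // it and clear the stash.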
    move(TrustedImmPtr(m_vm), regT3);
    load64(Address(regT3, VM::callFrameForCatchOffset()), callFrameRegister);
    storePtr(TrustedImmPtr(nullptr), Address(regT3, VM::callFrameForCatchOffset()));

    addPtr(TrustedImm32(stackPointerOffsetFor(codeBlock()) * sizeof(Register)), callFrameRegister, stackPointerRegister);

    callOperationNoExceptionCheck(operationCheckIfExceptionIsUncatchableAndNotifyProfiler);
    Jump isCatchableException = branchTest32(Zero, returnValueGPR);
    jumpToExceptionHandler(*vm());
    isCatchableException.link(this);

    move(TrustedImmPtr(m_vm), regT3);
    load64(Address(regT3, VM::exceptionOffset()), regT0);
    store64(TrustedImm64(JSValue::encode(JSValue())), Address(regT3, VM::exceptionOffset()));
    emitPutVirtualRegister(currentInstruction[1].u.operand);

    load64(Address(regT0, Exception::valueOffset()), regT0);
    emitPutVirtualRegister(currentInstruction[2].u.operand);

#if ENABLE(DFG_JIT)
    // FIXME: consider inline caching the process of doing OSR entry, including
    // argument type proofs, storing locals to the buffer, etc
    // https://bugs.webkit.org/show_bug.cgi?id=175598

    ValueProfileAndOperandBuffer* buffer = static_cast<ValueProfileAndOperandBuffer*>(currentInstruction[3].u.pointer);
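    // Value profiling: once the buffer exists, the inline profiling loop
    // below covers it, so the plain OSR-entry operation suffices; otherwise
    // the AndValueProfile variant profiles on our behalf.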
    if (buffer || !shouldEmitProfiling())
        callOperation(operationTryOSREnterAtCatch, m_bytecodeOffset);
    else
        callOperation(operationTryOSREnterAtCatchAndValueProfile, m_bytecodeOffset);
    auto skipOSREntry = branchTestPtr(Zero, returnValueGPR);
    emitRestoreCalleeSaves();
    jump(returnValueGPR, ExceptionHandlerPtrTag);
    skipOSREntry.link(this);
    if (buffer && shouldEmitProfiling()) {
        buffer->forEach([&] (ValueProfileAndOperand& profile) {
            JSValueRegs regs(regT0);
            emitGetVirtualRegister(profile.m_operand, regs);
            emitValueProfilingSite(profile.m_profile);
        });
    }
#endif // ENABLE(DFG_JIT)
}

void JIT::emit_op_identity_with_profile(Instruction*)
{
    // We don't need to do anything here...
}

void JIT::emit_op_get_parent_scope(Instruction* currentInstruction)
{
    int currentScope = currentInstruction[2].u.operand;
    emitGetVirtualRegister(currentScope, regT0);
    loadPtr(Address(regT0, JSScope::offsetOfNext()), regT0);
    emitStoreCell(currentInstruction[1].u.operand, regT0);
}

void JIT::emit_op_switch_imm(Instruction* currentInstruction)
{
    size_t tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->switchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Immediate));
    jumpTable->ensureCTITable();

    emitGetVirtualRegister(scrutinee, regT0);
    callOperation(operationSwitchImmWithUnknownKeyType, regT0, tableIndex);
    jump(returnValueGPR, JSSwitchPtrTag);
}

void JIT::emit_op_switch_char(Instruction* currentInstruction)
{
    size_t tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->switchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Character));
    jumpTable->ensureCTITable();

    emitGetVirtualRegister(scrutinee, regT0);
    callOperation(operationSwitchCharWithUnknownKeyType, regT0, tableIndex);
    jump(returnValueGPR, JSSwitchPtrTag);
}

void JIT::emit_op_switch_string(Instruction* currentInstruction)
{
    size_t tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    StringJumpTable* jumpTable = &m_codeBlock->stringSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset));

    emitGetVirtualRegister(scrutinee, regT0);
    callOperation(operationSwitchStringWithUnknownKeyType, regT0, tableIndex);
    jump(returnValueGPR, JSSwitchPtrTag);
}

void JIT::emit_op_debug(Instruction* currentInstruction)
{
    load32(codeBlock()->debuggerRequestsAddress(), regT0);
    Jump noDebuggerRequests = branchTest32(Zero, regT0);
    callOperation(operationDebug, currentInstruction[1].u.operand);
    noDebuggerRequests.link(this);
}

void JIT::emit_op_eq_null(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src1 = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src1, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    move(TrustedImm32(0), regT0);
    Jump wasNotMasqueradesAsUndefined = jump();

    isMasqueradesAsUndefined.link(this);
    emitLoadStructure(*vm(), regT0, regT2, regT1);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    loadPtr(Address(regT2, Structure::globalObjectOffset()), regT2);
    comparePtr(Equal, regT0, regT2, regT0);
    Jump wasNotImmediate = jump();

    isImmediate.link(this);

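    // Clearing TagBitUndefined maps undefined (0x0a) to null (0x02), so a
    // single compare against ValueNull covers both immediates.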
    and64(TrustedImm32(~TagBitUndefined), regT0);
    compare64(Equal, regT0, TrustedImm32(ValueNull), regT0);

    wasNotImmediate.link(this);
    wasNotMasqueradesAsUndefined.link(this);

    emitTagBool(regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_neq_null(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src1 = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src1, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    move(TrustedImm32(1), regT0);
    Jump wasNotMasqueradesAsUndefined = jump();

    isMasqueradesAsUndefined.link(this);
    emitLoadStructure(*vm(), regT0, regT2, regT1);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    loadPtr(Address(regT2, Structure::globalObjectOffset()), regT2);
    comparePtr(NotEqual, regT0, regT2, regT0);
    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    and64(TrustedImm32(~TagBitUndefined), regT0);
    compare64(NotEqual, regT0, TrustedImm32(ValueNull), regT0);

    wasNotImmediate.link(this);
    wasNotMasqueradesAsUndefined.link(this);

    emitTagBool(regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_enter(Instruction*)
{
    // Even though CTI doesn't use them, we initialize our constant
    // registers to zap stale pointers, to avoid unnecessarily prolonging
    // object lifetime and increasing GC pressure.
    size_t count = m_codeBlock->m_numVars;
    for (size_t j = CodeBlock::llintBaselineCalleeSaveSpaceAsVirtualRegisters(); j < count; ++j)
        emitInitRegister(virtualRegisterForLocal(j).offset());

    emitWriteBarrier(m_codeBlock);

    emitEnterOptimizationCheck();
}

void JIT::emit_op_get_scope(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    emitGetFromCallFrameHeaderPtr(CallFrameSlot::callee, regT0);
    loadPtr(Address(regT0, JSFunction::offsetOfScopeChain()), regT0);
    emitStoreCell(dst, regT0);
}

void JIT::emit_op_to_this(Instruction* currentInstruction)
{
    WriteBarrierBase<Structure>* cachedStructure = &currentInstruction[2].u.structure;
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT1);

    emitJumpSlowCaseIfNotJSCell(regT1);

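    // Fast path only when |this| is a final object whose structure matches
    // the cached structure; anything else defers to the slow path.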
    addSlowCase(branch8(NotEqual, Address(regT1, JSCell::typeInfoTypeOffset()), TrustedImm32(FinalObjectType)));
    loadPtr(cachedStructure, regT2);
    addSlowCase(branchTestPtr(Zero, regT2));
    load32(Address(regT2, Structure::structureIDOffset()), regT2);
    addSlowCase(branch32(NotEqual, Address(regT1, JSCell::structureIDOffset()), regT2));
}

void JIT::emit_op_create_this(Instruction* currentInstruction)
{
    int callee = currentInstruction[2].u.operand;
    WriteBarrierBase<JSCell>* cachedFunction = &currentInstruction[4].u.jsCell;
    RegisterID calleeReg = regT0;
    RegisterID rareDataReg = regT4;
    RegisterID resultReg = regT0;
    RegisterID allocatorReg = regT1;
    RegisterID structureReg = regT2;
    RegisterID cachedFunctionReg = regT4;
    RegisterID scratchReg = regT3;

    emitGetVirtualRegister(callee, calleeReg);
    addSlowCase(branch8(NotEqual, Address(calleeReg, JSCell::typeInfoTypeOffset()), TrustedImm32(JSFunctionType)));
    loadPtr(Address(calleeReg, JSFunction::offsetOfRareData()), rareDataReg);
    addSlowCase(branchTestPtr(Zero, rareDataReg));
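    // The rare-data pointer is stored poisoned; XOR with the poison key to
    // decode it before dereferencing.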
    xorPtr(TrustedImmPtr(JSFunctionPoison::key()), rareDataReg);
    load32(Address(rareDataReg, FunctionRareData::offsetOfObjectAllocationProfile() + ObjectAllocationProfile::offsetOfAllocator()), allocatorReg);
    loadPtr(Address(rareDataReg, FunctionRareData::offsetOfObjectAllocationProfile() + ObjectAllocationProfile::offsetOfStructure()), structureReg);
    addSlowCase(branch32(Equal, allocatorReg, TrustedImm32(Allocator().offset())));

    loadPtr(cachedFunction, cachedFunctionReg);
    Jump hasSeenMultipleCallees = branchPtr(Equal, cachedFunctionReg, TrustedImmPtr(JSCell::seenMultipleCalleeObjects()));
    addSlowCase(branchPtr(NotEqual, calleeReg, cachedFunctionReg));
    hasSeenMultipleCallees.link(this);

    JumpList slowCases;
    auto butterfly = TrustedImmPtr(nullptr);
    emitAllocateJSObject(resultReg, JITAllocator::variable(), allocatorReg, structureReg, butterfly, scratchReg, slowCases);
    emitGetVirtualRegister(callee, scratchReg);
    loadPtr(Address(scratchReg, JSFunction::offsetOfRareData()), scratchReg);
    xorPtr(TrustedImmPtr(JSFunctionPoison::key()), scratchReg);
    load32(Address(scratchReg, FunctionRareData::offsetOfObjectAllocationProfile() + ObjectAllocationProfile::offsetOfInlineCapacity()), scratchReg);
    emitInitializeInlineStorage(resultReg, scratchReg);
    addSlowCase(slowCases);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_check_tdz(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
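    // Variables still in the temporal dead zone hold the empty value (all
    // zero bits), so a plain zero test suffices.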
    addSlowCase(branchTest64(Zero, regT0));
}

// Slow cases

void JIT::emitSlow_op_eq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkAllSlowCases(iter);

    callOperation(operationCompareEq, regT0, regT1);
    emitTagBool(returnValueGPR);
    emitPutVirtualRegister(currentInstruction[1].u.operand, returnValueGPR);
}

void JIT::emitSlow_op_neq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkAllSlowCases(iter);

    callOperation(operationCompareEq, regT0, regT1);
    xor32(TrustedImm32(0x1), regT0);
    emitTagBool(returnValueGPR);
    emitPutVirtualRegister(currentInstruction[1].u.operand, returnValueGPR);
}

void JIT::emitSlow_op_jeq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkAllSlowCases(iter);

    unsigned target = currentInstruction[3].u.operand;
    callOperation(operationCompareEq, regT0, regT1);
    emitJumpSlowToHot(branchTest32(NonZero, returnValueGPR), target);
}

void JIT::emitSlow_op_jneq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkAllSlowCases(iter);

    unsigned target = currentInstruction[3].u.operand;
    callOperation(operationCompareEq, regT0, regT1);
    emitJumpSlowToHot(branchTest32(Zero, returnValueGPR), target);
}

void JIT::emitSlow_op_instanceof(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkAllSlowCases(iter);

    auto& bytecode = *reinterpret_cast<OpInstanceof*>(currentInstruction);
    int dst = bytecode.dst();
    int value = bytecode.value();
    int proto = bytecode.prototype();

    emitGetVirtualRegister(value, regT0);
    emitGetVirtualRegister(proto, regT1);
    callOperation(operationInstanceOf, dst, regT0, regT1);
}

void JIT::emitSlow_op_instanceof_custom(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkAllSlowCases(iter);

    auto& bytecode = *reinterpret_cast<OpInstanceofCustom*>(currentInstruction);
    int dst = bytecode.dst();
    int value = bytecode.value();
    int constructor = bytecode.constructor();
    int hasInstanceValue = bytecode.hasInstanceValue();

    emitGetVirtualRegister(value, regT0);
    emitGetVirtualRegister(constructor, regT1);
    emitGetVirtualRegister(hasInstanceValue, regT2);
    callOperation(operationInstanceOfCustom, regT0, regT1, regT2);
    emitTagBool(returnValueGPR);
    emitPutVirtualRegister(dst, returnValueGPR);
}

#endif // USE(JSVALUE64)

void JIT::emit_op_loop_hint(Instruction*)
{
    // Emit the JIT optimization check:
    if (canBeOptimized()) {
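        // The execute counter counts up from a negative threshold; when the
        // add crosses zero we take the slow path, which attempts OSR into
        // optimized code.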
        addSlowCase(branchAdd32(PositiveOrZero, TrustedImm32(Options::executionCounterIncrementForLoop()),
            AbsoluteAddress(m_codeBlock->addressOfJITExecuteCounter())));
    }
}

void JIT::emitSlow_op_loop_hint(Instruction*, Vector<SlowCaseEntry>::iterator& iter)
{
#if ENABLE(DFG_JIT)
    // Emit the slow path for the JIT optimization check:
    if (canBeOptimized()) {
        linkAllSlowCases(iter);

        copyCalleeSavesFromFrameOrRegisterToEntryFrameCalleeSavesBuffer(vm()->topEntryFrame);

        callOperation(operationOptimize, m_bytecodeOffset);
        Jump noOptimizedEntry = branchTestPtr(Zero, returnValueGPR);
        if (!ASSERT_DISABLED) {
            Jump ok = branchPtr(MacroAssembler::Above, returnValueGPR, TrustedImmPtr(bitwise_cast<void*>(static_cast<intptr_t>(1000))));
            abortWithReason(JITUnreasonableLoopHintJumpTarget);
            ok.link(this);
        }
        jump(returnValueGPR, GPRInfo::callFrameRegister);
        noOptimizedEntry.link(this);

        emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_loop_hint));
    }
#else
    UNUSED_PARAM(iter);
#endif
}

void JIT::emit_op_check_traps(Instruction*)
{
    addSlowCase(branchTest8(NonZero, AbsoluteAddress(m_vm->needTrapHandlingAddress())));
}

void JIT::emit_op_nop(Instruction*)
{
}

void JIT::emit_op_super_sampler_begin(Instruction*)
{
    add32(TrustedImm32(1), AbsoluteAddress(bitwise_cast<void*>(&g_superSamplerCount)));
}

void JIT::emit_op_super_sampler_end(Instruction*)
{
    sub32(TrustedImm32(1), AbsoluteAddress(bitwise_cast<void*>(&g_superSamplerCount)));
}

void JIT::emitSlow_op_check_traps(Instruction*, Vector<SlowCaseEntry>::iterator& iter)
{
    linkAllSlowCases(iter);

    callOperation(operationHandleTraps);
}

void JIT::emit_op_new_regexp(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    callOperation(operationNewRegexp, m_codeBlock->regexp(currentInstruction[2].u.operand));
    emitStoreCell(dst, returnValueGPR);
}

void JIT::emitNewFuncCommon(Instruction* currentInstruction)
{
    Jump lazyJump;
    int dst = currentInstruction[1].u.operand;

#if USE(JSVALUE64)
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);
#else
    emitLoadPayload(currentInstruction[2].u.operand, regT0);
#endif
    FunctionExecutable* funcExec = m_codeBlock->functionDecl(currentInstruction[3].u.operand);

    OpcodeID opcodeID = Interpreter::getOpcodeID(currentInstruction->u.opcode);
    if (opcodeID == op_new_func)
        callOperation(operationNewFunction, dst, regT0, funcExec);
    else if (opcodeID == op_new_generator_func)
        callOperation(operationNewGeneratorFunction, dst, regT0, funcExec);
    else if (opcodeID == op_new_async_func)
        callOperation(operationNewAsyncFunction, dst, regT0, funcExec);
    else {
        ASSERT(opcodeID == op_new_async_generator_func);
        callOperation(operationNewAsyncGeneratorFunction, dst, regT0, funcExec);
    }
}

void JIT::emit_op_new_func(Instruction* currentInstruction)
{
    emitNewFuncCommon(currentInstruction);
}

void JIT::emit_op_new_generator_func(Instruction* currentInstruction)
{
    emitNewFuncCommon(currentInstruction);
}

void JIT::emit_op_new_async_generator_func(Instruction* currentInstruction)
{
    emitNewFuncCommon(currentInstruction);
}

void JIT::emit_op_new_async_func(Instruction* currentInstruction)
{
    emitNewFuncCommon(currentInstruction);
}

void JIT::emitNewFuncExprCommon(Instruction* currentInstruction)
{
    Jump notUndefinedScope;
    int dst = currentInstruction[1].u.operand;
#if USE(JSVALUE64)
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);
    notUndefinedScope = branch64(NotEqual, regT0, TrustedImm64(JSValue::encode(jsUndefined())));
    store64(TrustedImm64(JSValue::encode(jsUndefined())), Address(callFrameRegister, sizeof(Register) * dst));
#else
    emitLoadPayload(currentInstruction[2].u.operand, regT0);
    notUndefinedScope = branch32(NotEqual, tagFor(currentInstruction[2].u.operand), TrustedImm32(JSValue::UndefinedTag));
    emitStore(dst, jsUndefined());
#endif
    Jump done = jump();
    notUndefinedScope.link(this);

    FunctionExecutable* function = m_codeBlock->functionExpr(currentInstruction[3].u.operand);
    OpcodeID opcodeID = Interpreter::getOpcodeID(currentInstruction->u.opcode);

    if (opcodeID == op_new_func_exp)
        callOperation(operationNewFunction, dst, regT0, function);
    else if (opcodeID == op_new_generator_func_exp)
        callOperation(operationNewGeneratorFunction, dst, regT0, function);
    else if (opcodeID == op_new_async_func_exp)
        callOperation(operationNewAsyncFunction, dst, regT0, function);
    else {
        ASSERT(opcodeID == op_new_async_generator_func_exp);
        callOperation(operationNewAsyncGeneratorFunction, dst, regT0, function);
    }

    done.link(this);
}

void JIT::emit_op_new_func_exp(Instruction* currentInstruction)
{
    emitNewFuncExprCommon(currentInstruction);
}

void JIT::emit_op_new_generator_func_exp(Instruction* currentInstruction)
{
    emitNewFuncExprCommon(currentInstruction);
}

void JIT::emit_op_new_async_func_exp(Instruction* currentInstruction)
{
    emitNewFuncExprCommon(currentInstruction);
}

void JIT::emit_op_new_async_generator_func_exp(Instruction* currentInstruction)
{
    emitNewFuncExprCommon(currentInstruction);
}

void JIT::emit_op_new_array(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int valuesIndex = currentInstruction[2].u.operand;
    int size = currentInstruction[3].u.operand;
    addPtr(TrustedImm32(valuesIndex * sizeof(Register)), callFrameRegister, regT0);
    callOperation(operationNewArrayWithProfile, dst,
        currentInstruction[4].u.arrayAllocationProfile, regT0, size);
}

void JIT::emit_op_new_array_with_size(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int sizeIndex = currentInstruction[2].u.operand;
#if USE(JSVALUE64)
    emitGetVirtualRegister(sizeIndex, regT0);
    callOperation(operationNewArrayWithSizeAndProfile, dst,
        currentInstruction[3].u.arrayAllocationProfile, regT0);
#else
    emitLoad(sizeIndex, regT1, regT0);
    callOperation(operationNewArrayWithSizeAndProfile, dst,
        currentInstruction[3].u.arrayAllocationProfile, JSValueRegs(regT1, regT0));
#endif
}

#if USE(JSVALUE64)
void JIT::emit_op_has_structure_property(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int enumerator = currentInstruction[4].u.operand;

    emitGetVirtualRegister(base, regT0);
    emitGetVirtualRegister(enumerator, regT1);
    emitJumpSlowCaseIfNotJSCell(regT0, base);

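    // If the base still has the structure the enumerator was built from,
    // every structure property the enumerator reports is guaranteed present.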
    load32(Address(regT0, JSCell::structureIDOffset()), regT0);
    addSlowCase(branch32(NotEqual, regT0, Address(regT1, JSPropertyNameEnumerator::cachedStructureIDOffset())));

    move(TrustedImm64(JSValue::encode(jsBoolean(true))), regT0);
    emitPutVirtualRegister(dst);
}

void JIT::privateCompileHasIndexedProperty(ByValInfo* byValInfo, ReturnAddressPtr returnAddress, JITArrayMode arrayMode)
{
    Instruction* currentInstruction = &m_codeBlock->instructions()[byValInfo->bytecodeIndex];

    PatchableJump badType;

    // FIXME: Add support for other types like TypedArrays and Arguments.
    // See https://bugs.webkit.org/show_bug.cgi?id=135033 and https://bugs.webkit.org/show_bug.cgi?id=135034.
    JumpList slowCases = emitLoadForArrayMode(currentInstruction, arrayMode, badType);
    move(TrustedImm64(JSValue::encode(jsBoolean(true))), regT0);
    Jump done = jump();

    LinkBuffer patchBuffer(*this, m_codeBlock);

    patchBuffer.link(badType, CodeLocationLabel<NoPtrTag>(MacroAssemblerCodePtr<NoPtrTag>::createFromExecutableAddress(returnAddress.value())).labelAtOffset(byValInfo->returnAddressToSlowPath));
    patchBuffer.link(slowCases, CodeLocationLabel<NoPtrTag>(MacroAssemblerCodePtr<NoPtrTag>::createFromExecutableAddress(returnAddress.value())).labelAtOffset(byValInfo->returnAddressToSlowPath));

    patchBuffer.link(done, byValInfo->badTypeJump.labelAtOffset(byValInfo->badTypeJumpToDone));

    byValInfo->stubRoutine = FINALIZE_CODE_FOR_STUB(
        m_codeBlock, patchBuffer, JITStubRoutinePtrTag,
        "Baseline has_indexed_property stub for %s, return point %p", toCString(*m_codeBlock).data(), returnAddress.value());

    MacroAssembler::repatchJump(byValInfo->badTypeJump, CodeLocationLabel<JITStubRoutinePtrTag>(byValInfo->stubRoutine->code().code()));
    MacroAssembler::repatchCall(CodeLocationCall<NoPtrTag>(MacroAssemblerCodePtr<NoPtrTag>(returnAddress)), FunctionPtr<OperationPtrTag>(operationHasIndexedPropertyGeneric));
}

void JIT::emit_op_has_indexed_property(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int property = currentInstruction[3].u.operand;
    ArrayProfile* profile = currentInstruction[4].u.arrayProfile;
    ByValInfo* byValInfo = m_codeBlock->addByValInfo();

    emitGetVirtualRegisters(base, regT0, property, regT1);

    // This is technically incorrect - we're zero-extending an int32. On the hot path this doesn't matter.
    // We check the value as if it was a uint32 against the m_vectorLength - which will always fail if
    // the number was negative, since m_vectorLength is always less than INT_MAX (the total allocation
    // size is always less than 4GB). As such, zero-extending will have been correct (and extending the
    // value to 64 bits is necessary since it's used in the address calculation). We zero-extend rather
    // than sign-extend since it makes it easier to re-tag the value in the slow case.
    zeroExtend32ToPtr(regT1, regT1);

    emitJumpSlowCaseIfNotJSCell(regT0, base);
    emitArrayProfilingSiteWithCell(regT0, regT2, profile);
    and32(TrustedImm32(IndexingShapeMask), regT2);

    JITArrayMode mode = chooseArrayMode(profile);
    PatchableJump badType;

    // FIXME: Add support for other types like TypedArrays and Arguments.
    // See https://bugs.webkit.org/show_bug.cgi?id=135033 and https://bugs.webkit.org/show_bug.cgi?id=135034.
    JumpList slowCases = emitLoadForArrayMode(currentInstruction, mode, badType);

    move(TrustedImm64(JSValue::encode(jsBoolean(true))), regT0);

    addSlowCase(badType);
    addSlowCase(slowCases);

    Label done = label();

    emitPutVirtualRegister(dst);

    Label nextHotPath = label();

    m_byValCompilationInfo.append(ByValCompilationInfo(byValInfo, m_bytecodeOffset, PatchableJump(), badType, mode, profile, done, nextHotPath));
}

void JIT::emitSlow_op_has_indexed_property(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkAllSlowCases(iter);

    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int property = currentInstruction[3].u.operand;
    ByValInfo* byValInfo = m_byValCompilationInfo[m_byValInstructionIndex].byValInfo;

    Label slowPath = label();

    emitGetVirtualRegister(base, regT0);
    emitGetVirtualRegister(property, regT1);
    Call call = callOperation(operationHasIndexedPropertyDefault, dst, regT0, regT1, byValInfo);

    m_byValCompilationInfo[m_byValInstructionIndex].slowPathTarget = slowPath;
    m_byValCompilationInfo[m_byValInstructionIndex].returnAddress = call;
    m_byValInstructionIndex++;
}

void JIT::emit_op_get_direct_pname(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int index = currentInstruction[4].u.operand;
    int enumerator = currentInstruction[5].u.operand;

    // Check that base is a cell
    emitGetVirtualRegister(base, regT0);
    emitJumpSlowCaseIfNotJSCell(regT0, base);

    // Check the structure
    emitGetVirtualRegister(enumerator, regT2);
    load32(Address(regT0, JSCell::structureIDOffset()), regT1);
    addSlowCase(branch32(NotEqual, regT1, Address(regT2, JSPropertyNameEnumerator::cachedStructureIDOffset())));

    // Compute the offset
    emitGetVirtualRegister(index, regT1);
    // If index is less than the enumerator's cached inline storage, then it's an inline access
    Jump outOfLineAccess = branch32(AboveOrEqual, regT1, Address(regT2, JSPropertyNameEnumerator::cachedInlineCapacityOffset()));
    addPtr(TrustedImm32(JSObject::offsetOfInlineStorage()), regT0);
    signExtend32ToPtr(regT1, regT1);
    load64(BaseIndex(regT0, regT1, TimesEight), regT0);

    Jump done = jump();

    // Otherwise it's out of line
    outOfLineAccess.link(this);
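    // Out-of-line properties live at negative offsets from the butterfly:
    // rebase the index past the inline capacity, then negate it.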
    loadPtr(Address(regT0, JSObject::butterflyOffset()), regT0);
    sub32(Address(regT2, JSPropertyNameEnumerator::cachedInlineCapacityOffset()), regT1);
    neg32(regT1);
    signExtend32ToPtr(regT1, regT1);
    int32_t offsetOfFirstProperty = static_cast<int32_t>(offsetInButterfly(firstOutOfLineOffset)) * sizeof(EncodedJSValue);
    load64(BaseIndex(regT0, regT1, TimesEight, offsetOfFirstProperty), regT0);

    done.link(this);
    emitValueProfilingSite();
    emitPutVirtualRegister(dst, regT0);
}

void JIT::emit_op_enumerator_structure_pname(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int enumerator = currentInstruction[2].u.operand;
    int index = currentInstruction[3].u.operand;

    emitGetVirtualRegister(index, regT0);
    emitGetVirtualRegister(enumerator, regT1);
    Jump inBounds = branch32(Below, regT0, Address(regT1, JSPropertyNameEnumerator::endStructurePropertyIndexOffset()));

    move(TrustedImm64(JSValue::encode(jsNull())), regT0);

    Jump done = jump();
    inBounds.link(this);

    loadPtr(Address(regT1, JSPropertyNameEnumerator::cachedPropertyNamesVectorOffset()), regT1);
    signExtend32ToPtr(regT0, regT0);
    load64(BaseIndex(regT1, regT0, TimesEight), regT0);

    done.link(this);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_enumerator_generic_pname(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int enumerator = currentInstruction[2].u.operand;
    int index = currentInstruction[3].u.operand;

    emitGetVirtualRegister(index, regT0);
    emitGetVirtualRegister(enumerator, regT1);
    Jump inBounds = branch32(Below, regT0, Address(regT1, JSPropertyNameEnumerator::endGenericPropertyIndexOffset()));

    move(TrustedImm64(JSValue::encode(jsNull())), regT0);

    Jump done = jump();
    inBounds.link(this);

    loadPtr(Address(regT1, JSPropertyNameEnumerator::cachedPropertyNamesVectorOffset()), regT1);
    signExtend32ToPtr(regT0, regT0);
    load64(BaseIndex(regT1, regT0, TimesEight), regT0);

    done.link(this);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_profile_type(Instruction* currentInstruction)
{
    TypeLocation* cachedTypeLocation = currentInstruction[2].u.location;
    int valueToProfile = currentInstruction[1].u.operand;

    emitGetVirtualRegister(valueToProfile, regT0);

    JumpList jumpToEnd;

    jumpToEnd.append(branchTest64(Zero, regT0));

    // Compile in a predictive type check, if possible, to see if we can skip writing to the log.
    // These typechecks are inlined to match those of the 64-bit JSValue type checks.
    if (cachedTypeLocation->m_lastSeenType == TypeUndefined)
        jumpToEnd.append(branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsUndefined()))));
    else if (cachedTypeLocation->m_lastSeenType == TypeNull)
        jumpToEnd.append(branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsNull()))));
    else if (cachedTypeLocation->m_lastSeenType == TypeBoolean) {
        move(regT0, regT1);
        and64(TrustedImm32(~1), regT1);
        jumpToEnd.append(branch64(Equal, regT1, TrustedImm64(ValueFalse)));
    } else if (cachedTypeLocation->m_lastSeenType == TypeAnyInt)
        jumpToEnd.append(emitJumpIfInt(regT0));
    else if (cachedTypeLocation->m_lastSeenType == TypeNumber)
        jumpToEnd.append(emitJumpIfNumber(regT0));
    else if (cachedTypeLocation->m_lastSeenType == TypeString) {
        Jump isNotCell = emitJumpIfNotJSCell(regT0);
        jumpToEnd.append(branch8(Equal, Address(regT0, JSCell::typeInfoTypeOffset()), TrustedImm32(StringType)));
        isNotCell.link(this);
    }

    // Load the type profiling log into T2.
    TypeProfilerLog* cachedTypeProfilerLog = m_vm->typeProfilerLog();
    move(TrustedImmPtr(cachedTypeProfilerLog), regT2);
    // Load the next log entry into T1.
    loadPtr(Address(regT2, TypeProfilerLog::currentLogEntryOffset()), regT1);

    // Store the JSValue onto the log entry.
    store64(regT0, Address(regT1, TypeProfilerLog::LogEntry::valueOffset()));

    // Store the structureID of the cell if T0 is a cell, otherwise, store 0 on the log entry.
    Jump notCell = emitJumpIfNotJSCell(regT0);
    load32(Address(regT0, JSCell::structureIDOffset()), regT0);
    store32(regT0, Address(regT1, TypeProfilerLog::LogEntry::structureIDOffset()));
    Jump skipIsCell = jump();
    notCell.link(this);
    store32(TrustedImm32(0), Address(regT1, TypeProfilerLog::LogEntry::structureIDOffset()));
    skipIsCell.link(this);

    // Store the typeLocation on the log entry.
    move(TrustedImmPtr(cachedTypeLocation), regT0);
    store64(regT0, Address(regT1, TypeProfilerLog::LogEntry::locationOffset()));

    // Increment the current log entry.
    addPtr(TrustedImm32(sizeof(TypeProfilerLog::LogEntry)), regT1);
    store64(regT1, Address(regT2, TypeProfilerLog::currentLogEntryOffset()));
    Jump skipClearLog = branchPtr(NotEqual, regT1, TrustedImmPtr(cachedTypeProfilerLog->logEndPtr()));
    // Clear the log if we're at the end of the log.
    callOperation(operationProcessTypeProfilerLog);
    skipClearLog.link(this);

    jumpToEnd.link(this);
}

void JIT::emit_op_log_shadow_chicken_prologue(Instruction* currentInstruction)
{
    updateTopCallFrame();
    static_assert(nonArgGPR0 != regT0 && nonArgGPR0 != regT2, "we will have problems if these registers alias.");
    GPRReg shadowPacketReg = regT0;
    GPRReg scratch1Reg = nonArgGPR0; // This must be a non-argument register.
    GPRReg scratch2Reg = regT2;
    ensureShadowChickenPacket(*vm(), shadowPacketReg, scratch1Reg, scratch2Reg);
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT3);
    logShadowChickenProloguePacket(shadowPacketReg, scratch1Reg, regT3);
}

void JIT::emit_op_log_shadow_chicken_tail(Instruction* currentInstruction)
{
    updateTopCallFrame();
    static_assert(nonArgGPR0 != regT0 && nonArgGPR0 != regT2, "we will have problems if these registers alias.");
    GPRReg shadowPacketReg = regT0;
    GPRReg scratch1Reg = nonArgGPR0; // This must be a non-argument register.
    GPRReg scratch2Reg = regT2;
    ensureShadowChickenPacket(*vm(), shadowPacketReg, scratch1Reg, scratch2Reg);
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT2);
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT3);
    logShadowChickenTailPacket(shadowPacketReg, JSValueRegs(regT2), regT3, m_codeBlock, CallSiteIndex(m_bytecodeOffset));
}

#endif // USE(JSVALUE64)

void JIT::emit_op_profile_control_flow(Instruction* currentInstruction)
{
    BasicBlockLocation* basicBlockLocation = currentInstruction[1].u.basicBlockLocation;
#if USE(JSVALUE64)
    basicBlockLocation->emitExecuteCode(*this);
#else
    basicBlockLocation->emitExecuteCode(*this, regT0);
#endif
}

void JIT::emit_op_argument_count(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    load32(payloadFor(CallFrameSlot::argumentCount), regT0);
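    // The frame's argument count includes |this|; subtract it out.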
    sub32(TrustedImm32(1), regT0);
    JSValueRegs result = JSValueRegs::withTwoAvailableRegs(regT0, regT1);
    boxInt32(regT0, result);
    emitPutVirtualRegister(dst, result);
}

void JIT::emit_op_get_rest_length(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    unsigned numParamsToSkip = currentInstruction[2].u.unsignedValue;
    load32(payloadFor(CallFrameSlot::argumentCount), regT0);
    sub32(TrustedImm32(1), regT0);
    Jump zeroLength = branch32(LessThanOrEqual, regT0, Imm32(numParamsToSkip));
    sub32(Imm32(numParamsToSkip), regT0);
#if USE(JSVALUE64)
    boxInt32(regT0, JSValueRegs(regT0));
#endif
    Jump done = jump();

    zeroLength.link(this);
#if USE(JSVALUE64)
    move(TrustedImm64(JSValue::encode(jsNumber(0))), regT0);
#else
    move(TrustedImm32(0), regT0);
#endif

    done.link(this);
#if USE(JSVALUE64)
    emitPutVirtualRegister(dst, regT0);
#else
    move(TrustedImm32(JSValue::Int32Tag), regT1);
    emitPutVirtualRegister(dst, JSValueRegs(regT1, regT0));
#endif
}

void JIT::emit_op_get_argument(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int index = currentInstruction[2].u.operand;
#if USE(JSVALUE64)
    JSValueRegs resultRegs(regT0);
#else
    JSValueRegs resultRegs(regT1, regT0);
#endif

    load32(payloadFor(CallFrameSlot::argumentCount), regT2);
    Jump argumentOutOfBounds = branch32(LessThanOrEqual, regT2, TrustedImm32(index));
    loadValue(addressFor(CallFrameSlot::thisArgument + index), resultRegs);
    Jump done = jump();

    argumentOutOfBounds.link(this);
    moveValue(jsUndefined(), resultRegs);

    done.link(this);
    emitValueProfilingSite();
    emitPutVirtualRegister(dst, resultRegs);
}

} // namespace JSC

#endif // ENABLE(JIT)