[Baseline] Store constant directly in emit_op_mov
Source/JavaScriptCore/jit/JITOpcodes.cpp
/*
 * Copyright (C) 2009-2018 Apple Inc. All rights reserved.
 * Copyright (C) 2010 Patrick Gansterer <paroga@paroga.com>
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"

#if ENABLE(JIT)
#include "JIT.h"

#include "BasicBlockLocation.h"
#include "BytecodeStructs.h"
#include "Exception.h"
#include "Heap.h"
#include "InterpreterInlines.h"
#include "JITInlines.h"
#include "JSArray.h"
#include "JSCast.h"
#include "JSFunction.h"
#include "JSPropertyNameEnumerator.h"
#include "LinkBuffer.h"
#include "MaxFrameExtentForSlowPathCall.h"
#include "SlowPathCall.h"
#include "SuperSampler.h"
#include "ThunkGenerators.h"
#include "TypeLocation.h"
#include "TypeProfilerLog.h"
#include "VirtualRegister.h"
#include "Watchdog.h"

namespace JSC {

#if USE(JSVALUE64)

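// Note: for a constant src this stores the value directly instead of loading it
// from the CodeBlock's constant pool. Number constants are emitted via Imm64,
// which is subject to JIT constant blinding; other constants (cells, booleans,
// undefined, null) cannot carry attacker-chosen bit patterns, so they can be
// emitted unblinded as TrustedImm64.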
void JIT::emit_op_mov(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    if (m_codeBlock->isConstantRegisterIndex(src)) {
        JSValue value = m_codeBlock->getConstant(src);
        if (!value.isNumber())
            store64(TrustedImm64(JSValue::encode(value)), addressFor(dst));
        else
            store64(Imm64(JSValue::encode(value)), addressFor(dst));
        return;
    }

    load64(addressFor(src), regT0);
    store64(regT0, addressFor(dst));
}

void JIT::emit_op_end(Instruction* currentInstruction)
{
    RELEASE_ASSERT(returnValueGPR != callFrameRegister);
    emitGetVirtualRegister(currentInstruction[1].u.operand, returnValueGPR);
    emitRestoreCalleeSaves();
    emitFunctionEpilogue();
    ret();
}

void JIT::emit_op_jmp(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[1].u.operand;
    addJump(jump(), target);
}

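// Note: this inline allocation fast path relies on the Structure and Allocator
// being known statically from the allocation profile; if no allocator has been
// created yet, everything is punted to operationNewObject in the slow path.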
void JIT::emit_op_new_object(Instruction* currentInstruction)
{
    Structure* structure = currentInstruction[3].u.objectAllocationProfile->structure();
    size_t allocationSize = JSFinalObject::allocationSize(structure->inlineCapacity());
    Allocator allocator = subspaceFor<JSFinalObject>(*m_vm)->allocatorForNonVirtual(allocationSize, AllocatorForMode::AllocatorIfExists);

    RegisterID resultReg = regT0;
    RegisterID allocatorReg = regT1;
    RegisterID scratchReg = regT2;

    if (!allocator)
        addSlowCase(jump());
    else {
        JumpList slowCases;
        auto butterfly = TrustedImmPtr(nullptr);
        emitAllocateJSObject(resultReg, JITAllocator::constant(allocator), allocatorReg, TrustedImmPtr(structure), butterfly, scratchReg, slowCases);
        emitInitializeInlineStorage(resultReg, structure->inlineCapacity());
        addSlowCase(slowCases);
        emitPutVirtualRegister(currentInstruction[1].u.operand);
    }
}

void JIT::emitSlow_op_new_object(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkAllSlowCases(iter);

    int dst = currentInstruction[1].u.operand;
    Structure* structure = currentInstruction[3].u.objectAllocationProfile->structure();
    callOperation(operationNewObject, structure);
    emitStoreCell(dst, returnValueGPR);
}

void JIT::emit_op_overrides_has_instance(Instruction* currentInstruction)
{
    auto& bytecode = *reinterpret_cast<OpOverridesHasInstance*>(currentInstruction);
    int dst = bytecode.dst();
    int constructor = bytecode.constructor();
    int hasInstanceValue = bytecode.hasInstanceValue();

    emitGetVirtualRegister(hasInstanceValue, regT0);

    // We don't jump if we know what Symbol.hasInstance would do.
    Jump customHasInstanceValue = branchPtr(NotEqual, regT0, TrustedImmPtr(m_codeBlock->globalObject()->functionProtoHasInstanceSymbolFunction()));

    emitGetVirtualRegister(constructor, regT0);

    // Check that the constructor 'ImplementsDefaultHasInstance', i.e. that the object is neither a C-API user nor a bound function.
    test8(Zero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(ImplementsDefaultHasInstance), regT0);
    boxBoolean(regT0, JSValueRegs { regT0 });
    Jump done = jump();

    customHasInstanceValue.link(this);
    move(TrustedImm32(ValueTrue), regT0);

    done.link(this);
    emitPutVirtualRegister(dst);
}

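// Note: instanceof uses an inline-cache generator: the fast path emitted here is
// patchable, and the slow path (below) calls operationInstanceOfOptimize, which
// can repatch the stub once the prototype chain shape has been observed.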
void JIT::emit_op_instanceof(Instruction* currentInstruction)
{
    auto& bytecode = *reinterpret_cast<OpInstanceof*>(currentInstruction);
    int dst = bytecode.dst();
    int value = bytecode.value();
    int proto = bytecode.prototype();

    // Load the operands (value and proto) into registers; regT0 is kept free so it can hold the result.
    emitGetVirtualRegister(value, regT2);
    emitGetVirtualRegister(proto, regT1);

    // Check that value and proto are cells. baseVal must be a cell - this is checked by the get_by_id for Symbol.hasInstance.
    emitJumpSlowCaseIfNotJSCell(regT2, value);
    emitJumpSlowCaseIfNotJSCell(regT1, proto);

    JITInstanceOfGenerator gen(
        m_codeBlock, CodeOrigin(m_bytecodeOffset), CallSiteIndex(m_bytecodeOffset),
        RegisterSet::stubUnavailableRegisters(),
        regT0, // result
        regT2, // value
        regT1, // proto
        regT3, regT4); // scratch
    gen.generateFastPath(*this);
    m_instanceOfs.append(gen);

    emitPutVirtualRegister(dst);
}

void JIT::emitSlow_op_instanceof(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkAllSlowCases(iter);

    int resultVReg = currentInstruction[1].u.operand;

    JITInstanceOfGenerator& gen = m_instanceOfs[m_instanceOfIndex++];

    Label coldPathBegin = label();
    Call call = callOperation(operationInstanceOfOptimize, resultVReg, gen.stubInfo(), regT2, regT1);
    gen.reportSlowPathCall(coldPathBegin, call);
}

void JIT::emit_op_instanceof_custom(Instruction*)
{
    // This always goes to the slow path since we expect it to be rare.
    addSlowCase(jump());
}

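// Note: in the 64-bit value representation the empty value, JSValue(), encodes as
// all-zero bits, so is_empty is just a 64-bit compare against zero.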
void JIT::emit_op_is_empty(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
    compare64(Equal, regT0, TrustedImm32(JSValue::encode(JSValue())), regT0);

    boxBoolean(regT0, JSValueRegs { regT0 });
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_is_undefined(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
    Jump isCell = branchIfCell(regT0);

    compare64(Equal, regT0, TrustedImm32(ValueUndefined), regT0);
    Jump done = jump();

    isCell.link(this);
    Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    move(TrustedImm32(0), regT0);
    Jump notMasqueradesAsUndefined = jump();

    isMasqueradesAsUndefined.link(this);
    emitLoadStructure(*vm(), regT0, regT1, regT2);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    loadPtr(Address(regT1, Structure::globalObjectOffset()), regT1);
    comparePtr(Equal, regT0, regT1, regT0);

    notMasqueradesAsUndefined.link(this);
    done.link(this);
    boxBoolean(regT0, JSValueRegs { regT0 });
    emitPutVirtualRegister(dst);
}

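// Note: booleans encode as ValueFalse and ValueTrue (0x06 and 0x07 as of this
// revision), differing only in the low bit. XORing with ValueFalse maps them to
// 0/1, so the input was a boolean iff no bit other than the low bit survives.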
void JIT::emit_op_is_boolean(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
    xor64(TrustedImm32(static_cast<int32_t>(ValueFalse)), regT0);
    test64(Zero, regT0, TrustedImm32(static_cast<int32_t>(~1)), regT0);
    boxBoolean(regT0, JSValueRegs { regT0 });
    emitPutVirtualRegister(dst);
}

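// Note: tagTypeNumberRegister holds TagTypeNumber (0xffff000000000000). Every
// boxed number - int32 or double - has at least one of those top bits set and
// no non-number value does, so a single test64 identifies numbers.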
void JIT::emit_op_is_number(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
    test64(NonZero, regT0, tagTypeNumberRegister, regT0);
    boxBoolean(regT0, JSValueRegs { regT0 });
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_is_cell_with_type(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;
    int type = currentInstruction[3].u.operand;

    emitGetVirtualRegister(value, regT0);
    Jump isNotCell = branchIfNotCell(regT0);

    compare8(Equal, Address(regT0, JSCell::typeInfoTypeOffset()), TrustedImm32(type), regT0);
    boxBoolean(regT0, JSValueRegs { regT0 });
    Jump done = jump();

    isNotCell.link(this);
    move(TrustedImm32(ValueFalse), regT0);

    done.link(this);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_is_object(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
    Jump isNotCell = branchIfNotCell(regT0);

    compare8(AboveOrEqual, Address(regT0, JSCell::typeInfoTypeOffset()), TrustedImm32(ObjectType), regT0);
    boxBoolean(regT0, JSValueRegs { regT0 });
    Jump done = jump();

    isNotCell.link(this);
    move(TrustedImm32(ValueFalse), regT0);

    done.link(this);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_ret(Instruction* currentInstruction)
{
    ASSERT(callFrameRegister != regT1);
    ASSERT(regT1 != returnValueGPR);
    ASSERT(returnValueGPR != callFrameRegister);

    // Return the result in returnValueGPR.
    emitGetVirtualRegister(currentInstruction[1].u.operand, returnValueGPR);

    checkStackPointerAlignment();
    emitRestoreCalleeSaves();
    emitFunctionEpilogue();
    ret();
}

void JIT::emit_op_to_primitive(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);

    Jump isImm = branchIfNotCell(regT0);
    addSlowCase(branchIfObject(regT0));
    isImm.link(this);

    if (dst != src)
        emitPutVirtualRegister(dst);
}

void JIT::emit_op_set_function_name(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT1);
    callOperation(operationSetFunctionName, regT0, regT1);
}

void JIT::emit_op_not(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);

    // Invert against JSValue(false); if the value was tagged as a boolean, then all bits will be
    // clear other than the low bit (which will be 0 or 1 for false or true inputs respectively).
    // Then invert against JSValue(true), which will add the tag back in, and flip the low bit.
    xor64(TrustedImm32(static_cast<int32_t>(ValueFalse)), regT0);
    addSlowCase(branchTestPtr(NonZero, regT0, TrustedImm32(static_cast<int32_t>(~1))));
    xor64(TrustedImm32(static_cast<int32_t>(ValueTrue)), regT0);

    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_jfalse(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[2].u.operand;

    GPRReg value = regT0;
    GPRReg result = regT1;
    GPRReg scratch = regT2;
    bool shouldCheckMasqueradesAsUndefined = true;

    emitGetVirtualRegister(currentInstruction[1].u.operand, value);
    emitConvertValueToBoolean(*vm(), JSValueRegs(value), result, scratch, fpRegT0, fpRegT1, shouldCheckMasqueradesAsUndefined, m_codeBlock->globalObject());

    addJump(branchTest32(Zero, result), target);
}

void JIT::emit_op_jeq_null(Instruction* currentInstruction)
{
    int src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);
    Jump isImmediate = branchIfNotCell(regT0);

    // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
    Jump isNotMasqueradesAsUndefined = branchTest8(Zero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    emitLoadStructure(*vm(), regT0, regT2, regT1);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    addJump(branchPtr(Equal, Address(regT2, Structure::globalObjectOffset()), regT0), target);
    Jump masqueradesGlobalObjectIsForeign = jump();

    // Now handle the immediate cases - undefined & null
    isImmediate.link(this);
    and64(TrustedImm32(~TagBitUndefined), regT0);
    addJump(branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsNull()))), target);

    isNotMasqueradesAsUndefined.link(this);
    masqueradesGlobalObjectIsForeign.link(this);
}

void JIT::emit_op_jneq_null(Instruction* currentInstruction)
{
    int src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);
    Jump isImmediate = branchIfNotCell(regT0);

    // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
    addJump(branchTest8(Zero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined)), target);
    emitLoadStructure(*vm(), regT0, regT2, regT1);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    addJump(branchPtr(NotEqual, Address(regT2, Structure::globalObjectOffset()), regT0), target);
    Jump wasNotImmediate = jump();

    // Now handle the immediate cases - undefined & null
    isImmediate.link(this);
    and64(TrustedImm32(~TagBitUndefined), regT0);
    addJump(branch64(NotEqual, regT0, TrustedImm64(JSValue::encode(jsNull()))), target);

    wasNotImmediate.link(this);
}

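// Note: jneq_ptr compares against a special pointer known at compile time (e.g. a
// builtin function). Operand 4 acts as a "has jumped" flag in the instruction
// stream: recording that the comparison failed at least once lets the upper
// tiers avoid speculating that the branch is never taken.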
void JIT::emit_op_jneq_ptr(Instruction* currentInstruction)
{
    int src = currentInstruction[1].u.operand;
    Special::Pointer ptr = currentInstruction[2].u.specialPointer;
    unsigned target = currentInstruction[3].u.operand;

    emitGetVirtualRegister(src, regT0);
    CCallHelpers::Jump equal = branchPtr(Equal, regT0, TrustedImmPtr(actualPointerFor(m_codeBlock, ptr)));
    store32(TrustedImm32(1), &currentInstruction[4].u.operand);
    addJump(jump(), target);
    equal.link(this);
}

void JIT::emit_op_eq(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotInt(regT0, regT1, regT2);
    compare32(Equal, regT1, regT0, regT0);
    boxBoolean(regT0, JSValueRegs { regT0 });
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_jeq(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[3].u.operand;
    emitGetVirtualRegisters(currentInstruction[1].u.operand, regT0, currentInstruction[2].u.operand, regT1);
    emitJumpSlowCaseIfNotInt(regT0, regT1, regT2);
    addJump(branch32(Equal, regT0, regT1), target);
}

void JIT::emit_op_jtrue(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[2].u.operand;

    GPRReg value = regT0;
    GPRReg result = regT1;
    GPRReg scratch = regT2;
    bool shouldCheckMasqueradesAsUndefined = true;
    emitGetVirtualRegister(currentInstruction[1].u.operand, value);
    emitConvertValueToBoolean(*vm(), JSValueRegs(value), result, scratch, fpRegT0, fpRegT1, shouldCheckMasqueradesAsUndefined, m_codeBlock->globalObject());
    addJump(branchTest32(NonZero, result), target);
}

void JIT::emit_op_neq(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotInt(regT0, regT1, regT2);
    compare32(NotEqual, regT1, regT0, regT0);
    boxBoolean(regT0, JSValueRegs { regT0 });

    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_jneq(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[3].u.operand;
    emitGetVirtualRegisters(currentInstruction[1].u.operand, regT0, currentInstruction[2].u.operand, regT1);
    emitJumpSlowCaseIfNotInt(regT0, regT1, regT2);
    addJump(branch32(NotEqual, regT0, regT1), target);
}

void JIT::emit_op_throw(Instruction* currentInstruction)
{
    ASSERT(regT0 == returnValueGPR);
    copyCalleeSavesToEntryFrameCalleeSavesBuffer(vm()->topEntryFrame);
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
    callOperationNoExceptionCheck(operationThrow, regT0);
    jumpToExceptionHandler(*vm());
}

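// Note: the strict-equality fast path below bails to the slow path when both
// operands are cells (strings compare by contents, not identity) and when either
// operand is a boxed double, since the same number can be encoded as int32 or as
// a double, and NaN must not equal itself; for everything else the encoded bits
// can be compared directly.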
void JIT::compileOpStrictEq(Instruction* currentInstruction, CompileOpStrictEqType type)
{
    int dst = currentInstruction[1].u.operand;
    int src1 = currentInstruction[2].u.operand;
    int src2 = currentInstruction[3].u.operand;

    emitGetVirtualRegisters(src1, regT0, src2, regT1);

    // Jump slow if both are cells (to cover strings).
    move(regT0, regT2);
    or64(regT1, regT2);
    addSlowCase(branchIfCell(regT2));

    // Jump slow if either is a double. First test if it's an integer, which is fine, and then test
    // if it's a double.
    Jump leftOK = branchIfInt32(regT0);
    addSlowCase(branchIfNumber(regT0));
    leftOK.link(this);
    Jump rightOK = branchIfInt32(regT1);
    addSlowCase(branchIfNumber(regT1));
    rightOK.link(this);

    if (type == CompileOpStrictEqType::StrictEq)
        compare64(Equal, regT1, regT0, regT0);
    else
        compare64(NotEqual, regT1, regT0, regT0);
    boxBoolean(regT0, JSValueRegs { regT0 });

    emitPutVirtualRegister(dst);
}

void JIT::emit_op_stricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, CompileOpStrictEqType::StrictEq);
}

void JIT::emit_op_nstricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, CompileOpStrictEqType::NStrictEq);
}

void JIT::compileOpStrictEqJump(Instruction* currentInstruction, CompileOpStrictEqType type)
{
    int target = currentInstruction[3].u.operand;
    int src1 = currentInstruction[1].u.operand;
    int src2 = currentInstruction[2].u.operand;

    emitGetVirtualRegisters(src1, regT0, src2, regT1);

    // Jump slow if both are cells (to cover strings).
    move(regT0, regT2);
    or64(regT1, regT2);
    addSlowCase(branchIfCell(regT2));

    // Jump slow if either is a double. First test if it's an integer, which is fine, and then test
    // if it's a double.
    Jump leftOK = branchIfInt32(regT0);
    addSlowCase(branchIfNumber(regT0));
    leftOK.link(this);
    Jump rightOK = branchIfInt32(regT1);
    addSlowCase(branchIfNumber(regT1));
    rightOK.link(this);

    if (type == CompileOpStrictEqType::StrictEq)
        addJump(branch64(Equal, regT1, regT0), target);
    else
        addJump(branch64(NotEqual, regT1, regT0), target);
}

void JIT::emit_op_jstricteq(Instruction* currentInstruction)
{
    compileOpStrictEqJump(currentInstruction, CompileOpStrictEqType::StrictEq);
}

void JIT::emit_op_jnstricteq(Instruction* currentInstruction)
{
    compileOpStrictEqJump(currentInstruction, CompileOpStrictEqType::NStrictEq);
}

void JIT::emitSlow_op_jstricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkAllSlowCases(iter);

    unsigned target = currentInstruction[3].u.operand;
    callOperation(operationCompareStrictEq, regT0, regT1);
    emitJumpSlowToHot(branchTest32(NonZero, returnValueGPR), target);
}

void JIT::emitSlow_op_jnstricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkAllSlowCases(iter);

    unsigned target = currentInstruction[3].u.operand;
    callOperation(operationCompareStrictEq, regT0, regT1);
    emitJumpSlowToHot(branchTest32(Zero, returnValueGPR), target);
}

void JIT::emit_op_to_number(Instruction* currentInstruction)
{
    int dstVReg = currentInstruction[1].u.operand;
    int srcVReg = currentInstruction[2].u.operand;
    emitGetVirtualRegister(srcVReg, regT0);

    addSlowCase(branchIfNotNumber(regT0));

    emitValueProfilingSite();
    if (srcVReg != dstVReg)
        emitPutVirtualRegister(dstVReg);
}

void JIT::emit_op_to_string(Instruction* currentInstruction)
{
    int srcVReg = currentInstruction[2].u.operand;
    emitGetVirtualRegister(srcVReg, regT0);

    addSlowCase(branchIfNotCell(regT0));
    addSlowCase(branchIfNotString(regT0));

    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_to_object(Instruction* currentInstruction)
{
    int dstVReg = currentInstruction[1].u.operand;
    int srcVReg = currentInstruction[2].u.operand;
    emitGetVirtualRegister(srcVReg, regT0);

    addSlowCase(branchIfNotCell(regT0));
    addSlowCase(branchIfNotObject(regT0));

    emitValueProfilingSite();
    if (srcVReg != dstVReg)
        emitPutVirtualRegister(dstVReg);
}

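// Note: on entry to a catch handler the unwinder has parked the frame in
// VM::callFrameForCatch. We rebuild callFrameRegister and the stack pointer from
// it, bail out to the exception handler if the exception is uncatchable (e.g. a
// termination), then move the Exception object and its wrapped value into the
// handler's virtual registers, clearing VM::exception so it is not rethrown.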
void JIT::emit_op_catch(Instruction* currentInstruction)
{
    restoreCalleeSavesFromEntryFrameCalleeSavesBuffer(vm()->topEntryFrame);

    move(TrustedImmPtr(m_vm), regT3);
    load64(Address(regT3, VM::callFrameForCatchOffset()), callFrameRegister);
    storePtr(TrustedImmPtr(nullptr), Address(regT3, VM::callFrameForCatchOffset()));

    addPtr(TrustedImm32(stackPointerOffsetFor(codeBlock()) * sizeof(Register)), callFrameRegister, stackPointerRegister);

    callOperationNoExceptionCheck(operationCheckIfExceptionIsUncatchableAndNotifyProfiler);
    Jump isCatchableException = branchTest32(Zero, returnValueGPR);
    jumpToExceptionHandler(*vm());
    isCatchableException.link(this);

    move(TrustedImmPtr(m_vm), regT3);
    load64(Address(regT3, VM::exceptionOffset()), regT0);
    store64(TrustedImm64(JSValue::encode(JSValue())), Address(regT3, VM::exceptionOffset()));
    emitPutVirtualRegister(currentInstruction[1].u.operand);

    load64(Address(regT0, Exception::valueOffset()), regT0);
    emitPutVirtualRegister(currentInstruction[2].u.operand);

#if ENABLE(DFG_JIT)
    // FIXME: consider inline caching the process of doing OSR entry, including
    // argument type proofs, storing locals to the buffer, etc
    // https://bugs.webkit.org/show_bug.cgi?id=175598

    ValueProfileAndOperandBuffer* buffer = static_cast<ValueProfileAndOperandBuffer*>(currentInstruction[3].u.pointer);
    if (buffer || !shouldEmitProfiling())
        callOperation(operationTryOSREnterAtCatch, m_bytecodeOffset);
    else
        callOperation(operationTryOSREnterAtCatchAndValueProfile, m_bytecodeOffset);
    auto skipOSREntry = branchTestPtr(Zero, returnValueGPR);
    emitRestoreCalleeSaves();
    jump(returnValueGPR, ExceptionHandlerPtrTag);
    skipOSREntry.link(this);
    if (buffer && shouldEmitProfiling()) {
        buffer->forEach([&] (ValueProfileAndOperand& profile) {
            JSValueRegs regs(regT0);
            emitGetVirtualRegister(profile.m_operand, regs);
            emitValueProfilingSite(profile.m_profile);
        });
    }
#endif // ENABLE(DFG_JIT)
}

void JIT::emit_op_identity_with_profile(Instruction*)
{
    // We don't need to do anything here...
}

void JIT::emit_op_get_parent_scope(Instruction* currentInstruction)
{
    int currentScope = currentInstruction[2].u.operand;
    emitGetVirtualRegister(currentScope, regT0);
    loadPtr(Address(regT0, JSScope::offsetOfNext()), regT0);
    emitStoreCell(currentInstruction[1].u.operand, regT0);
}

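// Note: the switch opcodes always call into the runtime with the scrutinee; the
// operation looks the key up in the CodeBlock's jump table and returns the
// machine-code address of the matching case (or of the default target), which is
// then jumped to indirectly under JSSwitchPtrTag.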
void JIT::emit_op_switch_imm(Instruction* currentInstruction)
{
    size_t tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->switchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Immediate));
    jumpTable->ensureCTITable();

    emitGetVirtualRegister(scrutinee, regT0);
    callOperation(operationSwitchImmWithUnknownKeyType, regT0, tableIndex);
    jump(returnValueGPR, JSSwitchPtrTag);
}

void JIT::emit_op_switch_char(Instruction* currentInstruction)
{
    size_t tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->switchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Character));
    jumpTable->ensureCTITable();

    emitGetVirtualRegister(scrutinee, regT0);
    callOperation(operationSwitchCharWithUnknownKeyType, regT0, tableIndex);
    jump(returnValueGPR, JSSwitchPtrTag);
}

void JIT::emit_op_switch_string(Instruction* currentInstruction)
{
    size_t tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    StringJumpTable* jumpTable = &m_codeBlock->stringSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset));

    emitGetVirtualRegister(scrutinee, regT0);
    callOperation(operationSwitchStringWithUnknownKeyType, regT0, tableIndex);
    jump(returnValueGPR, JSSwitchPtrTag);
}

void JIT::emit_op_debug(Instruction* currentInstruction)
{
    load32(codeBlock()->debuggerRequestsAddress(), regT0);
    Jump noDebuggerRequests = branchTest32(Zero, regT0);
    callOperation(operationDebug, currentInstruction[1].u.operand);
    noDebuggerRequests.link(this);
}

void JIT::emit_op_eq_null(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src1 = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src1, regT0);
    Jump isImmediate = branchIfNotCell(regT0);

    Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    move(TrustedImm32(0), regT0);
    Jump wasNotMasqueradesAsUndefined = jump();

    isMasqueradesAsUndefined.link(this);
    emitLoadStructure(*vm(), regT0, regT2, regT1);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    loadPtr(Address(regT2, Structure::globalObjectOffset()), regT2);
    comparePtr(Equal, regT0, regT2, regT0);
    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    and64(TrustedImm32(~TagBitUndefined), regT0);
    compare64(Equal, regT0, TrustedImm32(ValueNull), regT0);

    wasNotImmediate.link(this);
    wasNotMasqueradesAsUndefined.link(this);

    boxBoolean(regT0, JSValueRegs { regT0 });
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_neq_null(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src1 = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src1, regT0);
    Jump isImmediate = branchIfNotCell(regT0);

    Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    move(TrustedImm32(1), regT0);
    Jump wasNotMasqueradesAsUndefined = jump();

    isMasqueradesAsUndefined.link(this);
    emitLoadStructure(*vm(), regT0, regT2, regT1);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    loadPtr(Address(regT2, Structure::globalObjectOffset()), regT2);
    comparePtr(NotEqual, regT0, regT2, regT0);
    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    and64(TrustedImm32(~TagBitUndefined), regT0);
    compare64(NotEqual, regT0, TrustedImm32(ValueNull), regT0);

    wasNotImmediate.link(this);
    wasNotMasqueradesAsUndefined.link(this);

    boxBoolean(regT0, JSValueRegs { regT0 });
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_enter(Instruction*)
{
    // Even though CTI doesn't use them, we initialize our constant
    // registers to zap stale pointers, to avoid unnecessarily prolonging
    // object lifetime and increasing GC pressure.
    size_t count = m_codeBlock->m_numVars;
    for (size_t j = CodeBlock::llintBaselineCalleeSaveSpaceAsVirtualRegisters(); j < count; ++j)
        emitInitRegister(virtualRegisterForLocal(j).offset());

    emitWriteBarrier(m_codeBlock);

    emitEnterOptimizationCheck();
}

void JIT::emit_op_get_scope(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    emitGetFromCallFrameHeaderPtr(CallFrameSlot::callee, regT0);
    loadPtr(Address(regT0, JSFunction::offsetOfScopeChain()), regT0);
    emitStoreCell(dst, regT0);
}

void JIT::emit_op_to_this(Instruction* currentInstruction)
{
    WriteBarrierBase<Structure>* cachedStructure = &currentInstruction[2].u.structure;
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT1);

    emitJumpSlowCaseIfNotJSCell(regT1);

    addSlowCase(branchIfNotType(regT1, FinalObjectType));
    loadPtr(cachedStructure, regT2);
    addSlowCase(branchTestPtr(Zero, regT2));
    load32(Address(regT2, Structure::structureIDOffset()), regT2);
    addSlowCase(branch32(NotEqual, Address(regT1, JSCell::structureIDOffset()), regT2));
}

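// Note: create_this inline-allocates the |this| object for a [[Construct]] call.
// The callee's FunctionRareData caches an object allocation profile (allocator,
// Structure, inline capacity); at this revision the rare-data pointer is stored
// poisoned, hence the XOR with JSFunctionPoison::key() before each use. A callee
// that is not a JSFunction, has no rare data, or does not match the cached
// callee (modulo the seenMultipleCalleeObjects sentinel) goes to the slow path.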
void JIT::emit_op_create_this(Instruction* currentInstruction)
{
    int callee = currentInstruction[2].u.operand;
    WriteBarrierBase<JSCell>* cachedFunction = &currentInstruction[4].u.jsCell;
    RegisterID calleeReg = regT0;
    RegisterID rareDataReg = regT4;
    RegisterID resultReg = regT0;
    RegisterID allocatorReg = regT1;
    RegisterID structureReg = regT2;
    RegisterID cachedFunctionReg = regT4;
    RegisterID scratchReg = regT3;

    emitGetVirtualRegister(callee, calleeReg);
    addSlowCase(branchIfNotFunction(calleeReg));
    loadPtr(Address(calleeReg, JSFunction::offsetOfRareData()), rareDataReg);
    addSlowCase(branchTestPtr(Zero, rareDataReg));
    xorPtr(TrustedImmPtr(JSFunctionPoison::key()), rareDataReg);
    loadPtr(Address(rareDataReg, FunctionRareData::offsetOfObjectAllocationProfile() + ObjectAllocationProfile::offsetOfAllocator()), allocatorReg);
    loadPtr(Address(rareDataReg, FunctionRareData::offsetOfObjectAllocationProfile() + ObjectAllocationProfile::offsetOfStructure()), structureReg);

    loadPtr(cachedFunction, cachedFunctionReg);
    Jump hasSeenMultipleCallees = branchPtr(Equal, cachedFunctionReg, TrustedImmPtr(JSCell::seenMultipleCalleeObjects()));
    addSlowCase(branchPtr(NotEqual, calleeReg, cachedFunctionReg));
    hasSeenMultipleCallees.link(this);

    JumpList slowCases;
    auto butterfly = TrustedImmPtr(nullptr);
    emitAllocateJSObject(resultReg, JITAllocator::variable(), allocatorReg, structureReg, butterfly, scratchReg, slowCases);
    emitGetVirtualRegister(callee, scratchReg);
    loadPtr(Address(scratchReg, JSFunction::offsetOfRareData()), scratchReg);
    xorPtr(TrustedImmPtr(JSFunctionPoison::key()), scratchReg);
    load32(Address(scratchReg, FunctionRareData::offsetOfObjectAllocationProfile() + ObjectAllocationProfile::offsetOfInlineCapacity()), scratchReg);
    emitInitializeInlineStorage(resultReg, scratchReg);
    addSlowCase(slowCases);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_check_tdz(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
    addSlowCase(branchIfEmpty(regT0));
}

// Slow cases

void JIT::emitSlow_op_eq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkAllSlowCases(iter);

    callOperation(operationCompareEq, regT0, regT1);
    boxBoolean(returnValueGPR, JSValueRegs { returnValueGPR });
    emitPutVirtualRegister(currentInstruction[1].u.operand, returnValueGPR);
}

void JIT::emitSlow_op_neq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkAllSlowCases(iter);

    callOperation(operationCompareEq, regT0, regT1);
    xor32(TrustedImm32(0x1), returnValueGPR);
    boxBoolean(returnValueGPR, JSValueRegs { returnValueGPR });
    emitPutVirtualRegister(currentInstruction[1].u.operand, returnValueGPR);
}

void JIT::emitSlow_op_jeq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkAllSlowCases(iter);

    unsigned target = currentInstruction[3].u.operand;
    callOperation(operationCompareEq, regT0, regT1);
    emitJumpSlowToHot(branchTest32(NonZero, returnValueGPR), target);
}

void JIT::emitSlow_op_jneq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkAllSlowCases(iter);

    unsigned target = currentInstruction[3].u.operand;
    callOperation(operationCompareEq, regT0, regT1);
    emitJumpSlowToHot(branchTest32(Zero, returnValueGPR), target);
}

void JIT::emitSlow_op_instanceof_custom(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkAllSlowCases(iter);

    auto& bytecode = *reinterpret_cast<OpInstanceofCustom*>(currentInstruction);
    int dst = bytecode.dst();
    int value = bytecode.value();
    int constructor = bytecode.constructor();
    int hasInstanceValue = bytecode.hasInstanceValue();

    emitGetVirtualRegister(value, regT0);
    emitGetVirtualRegister(constructor, regT1);
    emitGetVirtualRegister(hasInstanceValue, regT2);
    callOperation(operationInstanceOfCustom, regT0, regT1, regT2);
    boxBoolean(returnValueGPR, JSValueRegs { returnValueGPR });
    emitPutVirtualRegister(dst, returnValueGPR);
}

#endif // USE(JSVALUE64)

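// Note: loop_hint drives tier-up. Each back-edge adds
// executionCounterIncrementForLoop() to the CodeBlock's execution counter, and
// once the counter crosses zero the slow path calls operationOptimize, which may
// hand back the address of optimized code to OSR-enter into.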
void JIT::emit_op_loop_hint(Instruction*)
{
    // Emit the JIT optimization check:
    if (canBeOptimized()) {
        addSlowCase(branchAdd32(PositiveOrZero, TrustedImm32(Options::executionCounterIncrementForLoop()),
            AbsoluteAddress(m_codeBlock->addressOfJITExecuteCounter())));
    }
}

void JIT::emitSlow_op_loop_hint(Instruction*, Vector<SlowCaseEntry>::iterator& iter)
{
#if ENABLE(DFG_JIT)
    // Emit the slow path for the JIT optimization check:
    if (canBeOptimized()) {
        linkAllSlowCases(iter);

        copyCalleeSavesFromFrameOrRegisterToEntryFrameCalleeSavesBuffer(vm()->topEntryFrame);

        callOperation(operationOptimize, m_bytecodeOffset);
        Jump noOptimizedEntry = branchTestPtr(Zero, returnValueGPR);
        if (!ASSERT_DISABLED) {
            Jump ok = branchPtr(MacroAssembler::Above, returnValueGPR, TrustedImmPtr(bitwise_cast<void*>(static_cast<intptr_t>(1000))));
            abortWithReason(JITUnreasonableLoopHintJumpTarget);
            ok.link(this);
        }
        jump(returnValueGPR, GPRInfo::callFrameRegister);
        noOptimizedEntry.link(this);

        emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_loop_hint));
    }
#else
    UNUSED_PARAM(iter);
#endif
}

void JIT::emit_op_check_traps(Instruction*)
{
    addSlowCase(branchTest8(NonZero, AbsoluteAddress(m_vm->needTrapHandlingAddress())));
}

void JIT::emit_op_nop(Instruction*)
{
}

void JIT::emit_op_super_sampler_begin(Instruction*)
{
    add32(TrustedImm32(1), AbsoluteAddress(bitwise_cast<void*>(&g_superSamplerCount)));
}

void JIT::emit_op_super_sampler_end(Instruction*)
{
    sub32(TrustedImm32(1), AbsoluteAddress(bitwise_cast<void*>(&g_superSamplerCount)));
}

void JIT::emitSlow_op_check_traps(Instruction*, Vector<SlowCaseEntry>::iterator& iter)
{
    linkAllSlowCases(iter);

    callOperation(operationHandleTraps);
}

void JIT::emit_op_new_regexp(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    callOperation(operationNewRegexp, m_codeBlock->regexp(currentInstruction[2].u.operand));
    emitStoreCell(dst, returnValueGPR);
}

void JIT::emitNewFuncCommon(Instruction* currentInstruction)
{
    Jump lazyJump;
    int dst = currentInstruction[1].u.operand;

#if USE(JSVALUE64)
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);
#else
    emitLoadPayload(currentInstruction[2].u.operand, regT0);
#endif
    FunctionExecutable* funcExec = m_codeBlock->functionDecl(currentInstruction[3].u.operand);

    OpcodeID opcodeID = Interpreter::getOpcodeID(currentInstruction->u.opcode);
    if (opcodeID == op_new_func)
        callOperation(operationNewFunction, dst, regT0, funcExec);
    else if (opcodeID == op_new_generator_func)
        callOperation(operationNewGeneratorFunction, dst, regT0, funcExec);
    else if (opcodeID == op_new_async_func)
        callOperation(operationNewAsyncFunction, dst, regT0, funcExec);
    else {
        ASSERT(opcodeID == op_new_async_generator_func);
        callOperation(operationNewAsyncGeneratorFunction, dst, regT0, funcExec);
    }
}

void JIT::emit_op_new_func(Instruction* currentInstruction)
{
    emitNewFuncCommon(currentInstruction);
}

void JIT::emit_op_new_generator_func(Instruction* currentInstruction)
{
    emitNewFuncCommon(currentInstruction);
}

void JIT::emit_op_new_async_generator_func(Instruction* currentInstruction)
{
    emitNewFuncCommon(currentInstruction);
}

void JIT::emit_op_new_async_func(Instruction* currentInstruction)
{
    emitNewFuncCommon(currentInstruction);
}

void JIT::emitNewFuncExprCommon(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
#if USE(JSVALUE64)
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);
#else
    emitLoadPayload(currentInstruction[2].u.operand, regT0);
#endif

    FunctionExecutable* function = m_codeBlock->functionExpr(currentInstruction[3].u.operand);
    OpcodeID opcodeID = Interpreter::getOpcodeID(currentInstruction->u.opcode);

    if (opcodeID == op_new_func_exp)
        callOperation(operationNewFunction, dst, regT0, function);
    else if (opcodeID == op_new_generator_func_exp)
        callOperation(operationNewGeneratorFunction, dst, regT0, function);
    else if (opcodeID == op_new_async_func_exp)
        callOperation(operationNewAsyncFunction, dst, regT0, function);
    else {
        ASSERT(opcodeID == op_new_async_generator_func_exp);
        callOperation(operationNewAsyncGeneratorFunction, dst, regT0, function);
    }
}

void JIT::emit_op_new_func_exp(Instruction* currentInstruction)
{
    emitNewFuncExprCommon(currentInstruction);
}

void JIT::emit_op_new_generator_func_exp(Instruction* currentInstruction)
{
    emitNewFuncExprCommon(currentInstruction);
}

void JIT::emit_op_new_async_func_exp(Instruction* currentInstruction)
{
    emitNewFuncExprCommon(currentInstruction);
}

void JIT::emit_op_new_async_generator_func_exp(Instruction* currentInstruction)
{
    emitNewFuncExprCommon(currentInstruction);
}

void JIT::emit_op_new_array(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int valuesIndex = currentInstruction[2].u.operand;
    int size = currentInstruction[3].u.operand;
    addPtr(TrustedImm32(valuesIndex * sizeof(Register)), callFrameRegister, regT0);
    callOperation(operationNewArrayWithProfile, dst,
        currentInstruction[4].u.arrayAllocationProfile, regT0, size);
}

void JIT::emit_op_new_array_with_size(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int sizeIndex = currentInstruction[2].u.operand;
#if USE(JSVALUE64)
    emitGetVirtualRegister(sizeIndex, regT0);
    callOperation(operationNewArrayWithSizeAndProfile, dst,
        currentInstruction[3].u.arrayAllocationProfile, regT0);
#else
    emitLoad(sizeIndex, regT1, regT0);
    callOperation(operationNewArrayWithSizeAndProfile, dst,
        currentInstruction[3].u.arrayAllocationProfile, JSValueRegs(regT1, regT0));
#endif
}

#if USE(JSVALUE64)
void JIT::emit_op_has_structure_property(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int enumerator = currentInstruction[4].u.operand;

    emitGetVirtualRegister(base, regT0);
    emitGetVirtualRegister(enumerator, regT1);
    emitJumpSlowCaseIfNotJSCell(regT0, base);

    load32(Address(regT0, JSCell::structureIDOffset()), regT0);
    addSlowCase(branch32(NotEqual, regT0, Address(regT1, JSPropertyNameEnumerator::cachedStructureIDOffset())));

    move(TrustedImm64(JSValue::encode(jsBoolean(true))), regT0);
    emitPutVirtualRegister(dst);
}

void JIT::privateCompileHasIndexedProperty(ByValInfo* byValInfo, ReturnAddressPtr returnAddress, JITArrayMode arrayMode)
{
    Instruction* currentInstruction = &m_codeBlock->instructions()[byValInfo->bytecodeIndex];

    PatchableJump badType;

    // FIXME: Add support for other types like TypedArrays and Arguments.
    // See https://bugs.webkit.org/show_bug.cgi?id=135033 and https://bugs.webkit.org/show_bug.cgi?id=135034.
    JumpList slowCases = emitLoadForArrayMode(currentInstruction, arrayMode, badType);
    move(TrustedImm64(JSValue::encode(jsBoolean(true))), regT0);
    Jump done = jump();

    LinkBuffer patchBuffer(*this, m_codeBlock);

    patchBuffer.link(badType, CodeLocationLabel<NoPtrTag>(MacroAssemblerCodePtr<NoPtrTag>::createFromExecutableAddress(returnAddress.value())).labelAtOffset(byValInfo->returnAddressToSlowPath));
    patchBuffer.link(slowCases, CodeLocationLabel<NoPtrTag>(MacroAssemblerCodePtr<NoPtrTag>::createFromExecutableAddress(returnAddress.value())).labelAtOffset(byValInfo->returnAddressToSlowPath));

    patchBuffer.link(done, byValInfo->badTypeJump.labelAtOffset(byValInfo->badTypeJumpToDone));

    byValInfo->stubRoutine = FINALIZE_CODE_FOR_STUB(
        m_codeBlock, patchBuffer, JITStubRoutinePtrTag,
        "Baseline has_indexed_property stub for %s, return point %p", toCString(*m_codeBlock).data(), returnAddress.value());

    MacroAssembler::repatchJump(byValInfo->badTypeJump, CodeLocationLabel<JITStubRoutinePtrTag>(byValInfo->stubRoutine->code().code()));
    MacroAssembler::repatchCall(CodeLocationCall<NoPtrTag>(MacroAssemblerCodePtr<NoPtrTag>(returnAddress)), FunctionPtr<OperationPtrTag>(operationHasIndexedPropertyGeneric));
}

void JIT::emit_op_has_indexed_property(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int property = currentInstruction[3].u.operand;
    ArrayProfile* profile = currentInstruction[4].u.arrayProfile;
    ByValInfo* byValInfo = m_codeBlock->addByValInfo();

    emitGetVirtualRegisters(base, regT0, property, regT1);

    // This is technically incorrect - we're zero-extending an int32. On the hot path this doesn't matter.
    // We check the value as if it was a uint32 against the m_vectorLength - which will always fail if
    // the number was signed since m_vectorLength is always less than intmax (since the total allocation
    // size is always less than 4Gb). As such zero extending will have been correct (and extending the value
    // to 64-bits is necessary since it's used in the address calculation). We zero extend rather than sign
    // extending since it makes it easier to re-tag the value in the slow case.
    zeroExtend32ToPtr(regT1, regT1);

    emitJumpSlowCaseIfNotJSCell(regT0, base);
    emitArrayProfilingSiteWithCell(regT0, regT2, profile);
    and32(TrustedImm32(IndexingShapeMask), regT2);

    JITArrayMode mode = chooseArrayMode(profile);
    PatchableJump badType;

    // FIXME: Add support for other types like TypedArrays and Arguments.
    // See https://bugs.webkit.org/show_bug.cgi?id=135033 and https://bugs.webkit.org/show_bug.cgi?id=135034.
    JumpList slowCases = emitLoadForArrayMode(currentInstruction, mode, badType);

    move(TrustedImm64(JSValue::encode(jsBoolean(true))), regT0);

    addSlowCase(badType);
    addSlowCase(slowCases);

    Label done = label();

    emitPutVirtualRegister(dst);

    Label nextHotPath = label();

    m_byValCompilationInfo.append(ByValCompilationInfo(byValInfo, m_bytecodeOffset, PatchableJump(), badType, mode, profile, done, nextHotPath));
}

void JIT::emitSlow_op_has_indexed_property(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkAllSlowCases(iter);

    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int property = currentInstruction[3].u.operand;
    ByValInfo* byValInfo = m_byValCompilationInfo[m_byValInstructionIndex].byValInfo;

    Label slowPath = label();

    emitGetVirtualRegister(base, regT0);
    emitGetVirtualRegister(property, regT1);
    Call call = callOperation(operationHasIndexedPropertyDefault, dst, regT0, regT1, byValInfo);

    m_byValCompilationInfo[m_byValInstructionIndex].slowPathTarget = slowPath;
    m_byValCompilationInfo[m_byValInstructionIndex].returnAddress = call;
    m_byValInstructionIndex++;
}

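// Note: get_direct_pname loads a property by its index within the enumerator's
// cached Structure. Indices below the cached inline capacity resolve to the
// object's inline storage; the rest live out-of-line in the butterfly, which
// grows downward from the butterfly pointer - hence the negated index and the
// firstOutOfLineOffset bias below.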
void JIT::emit_op_get_direct_pname(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int index = currentInstruction[4].u.operand;
    int enumerator = currentInstruction[5].u.operand;

    // Check that base is a cell
    emitGetVirtualRegister(base, regT0);
    emitJumpSlowCaseIfNotJSCell(regT0, base);

    // Check the structure
    emitGetVirtualRegister(enumerator, regT2);
    load32(Address(regT0, JSCell::structureIDOffset()), regT1);
    addSlowCase(branch32(NotEqual, regT1, Address(regT2, JSPropertyNameEnumerator::cachedStructureIDOffset())));

    // Compute the offset
    emitGetVirtualRegister(index, regT1);
    // If index is less than the enumerator's cached inline storage, then it's an inline access
    Jump outOfLineAccess = branch32(AboveOrEqual, regT1, Address(regT2, JSPropertyNameEnumerator::cachedInlineCapacityOffset()));
    addPtr(TrustedImm32(JSObject::offsetOfInlineStorage()), regT0);
    signExtend32ToPtr(regT1, regT1);
    load64(BaseIndex(regT0, regT1, TimesEight), regT0);

    Jump done = jump();

    // Otherwise it's out of line
    outOfLineAccess.link(this);
    loadPtr(Address(regT0, JSObject::butterflyOffset()), regT0);
    sub32(Address(regT2, JSPropertyNameEnumerator::cachedInlineCapacityOffset()), regT1);
    neg32(regT1);
    signExtend32ToPtr(regT1, regT1);
    int32_t offsetOfFirstProperty = static_cast<int32_t>(offsetInButterfly(firstOutOfLineOffset)) * sizeof(EncodedJSValue);
    load64(BaseIndex(regT0, regT1, TimesEight, offsetOfFirstProperty), regT0);

    done.link(this);
    emitValueProfilingSite();
    emitPutVirtualRegister(dst, regT0);
}

void JIT::emit_op_enumerator_structure_pname(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int enumerator = currentInstruction[2].u.operand;
    int index = currentInstruction[3].u.operand;

    emitGetVirtualRegister(index, regT0);
    emitGetVirtualRegister(enumerator, regT1);
    Jump inBounds = branch32(Below, regT0, Address(regT1, JSPropertyNameEnumerator::endStructurePropertyIndexOffset()));

    move(TrustedImm64(JSValue::encode(jsNull())), regT0);

    Jump done = jump();
    inBounds.link(this);

    loadPtr(Address(regT1, JSPropertyNameEnumerator::cachedPropertyNamesVectorOffset()), regT1);
    signExtend32ToPtr(regT0, regT0);
    load64(BaseIndex(regT1, regT0, TimesEight), regT0);

    done.link(this);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_enumerator_generic_pname(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int enumerator = currentInstruction[2].u.operand;
    int index = currentInstruction[3].u.operand;

    emitGetVirtualRegister(index, regT0);
    emitGetVirtualRegister(enumerator, regT1);
    Jump inBounds = branch32(Below, regT0, Address(regT1, JSPropertyNameEnumerator::endGenericPropertyIndexOffset()));

    move(TrustedImm64(JSValue::encode(jsNull())), regT0);

    Jump done = jump();
    inBounds.link(this);

    loadPtr(Address(regT1, JSPropertyNameEnumerator::cachedPropertyNamesVectorOffset()), regT1);
    signExtend32ToPtr(regT0, regT0);
    load64(BaseIndex(regT1, regT0, TimesEight), regT0);

    done.link(this);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_profile_type(Instruction* currentInstruction)
{
    TypeLocation* cachedTypeLocation = currentInstruction[2].u.location;
    int valueToProfile = currentInstruction[1].u.operand;

    emitGetVirtualRegister(valueToProfile, regT0);

    JumpList jumpToEnd;

    jumpToEnd.append(branchIfEmpty(regT0));

    // Compile in a predictive type check, if possible, to see if we can skip writing to the log.
    // These typechecks are inlined to match those of the 64-bit JSValue type checks.
    if (cachedTypeLocation->m_lastSeenType == TypeUndefined)
        jumpToEnd.append(branchIfUndefined(regT0));
    else if (cachedTypeLocation->m_lastSeenType == TypeNull)
        jumpToEnd.append(branchIfNull(regT0));
    else if (cachedTypeLocation->m_lastSeenType == TypeBoolean)
        jumpToEnd.append(branchIfBoolean(regT0, regT1));
    else if (cachedTypeLocation->m_lastSeenType == TypeAnyInt)
        jumpToEnd.append(branchIfInt32(regT0));
    else if (cachedTypeLocation->m_lastSeenType == TypeNumber)
        jumpToEnd.append(branchIfNumber(regT0));
    else if (cachedTypeLocation->m_lastSeenType == TypeString) {
        Jump isNotCell = branchIfNotCell(regT0);
        jumpToEnd.append(branchIfString(regT0));
        isNotCell.link(this);
    }

    // Load the type profiling log into T2.
    TypeProfilerLog* cachedTypeProfilerLog = m_vm->typeProfilerLog();
    move(TrustedImmPtr(cachedTypeProfilerLog), regT2);
    // Load the next log entry into T1.
    loadPtr(Address(regT2, TypeProfilerLog::currentLogEntryOffset()), regT1);

    // Store the JSValue onto the log entry.
    store64(regT0, Address(regT1, TypeProfilerLog::LogEntry::valueOffset()));

    // Store the structureID of the cell if T0 is a cell, otherwise, store 0 on the log entry.
    Jump notCell = branchIfNotCell(regT0);
    load32(Address(regT0, JSCell::structureIDOffset()), regT0);
    store32(regT0, Address(regT1, TypeProfilerLog::LogEntry::structureIDOffset()));
    Jump skipIsCell = jump();
    notCell.link(this);
    store32(TrustedImm32(0), Address(regT1, TypeProfilerLog::LogEntry::structureIDOffset()));
    skipIsCell.link(this);

    // Store the typeLocation on the log entry.
    move(TrustedImmPtr(cachedTypeLocation), regT0);
    store64(regT0, Address(regT1, TypeProfilerLog::LogEntry::locationOffset()));

    // Increment the current log entry.
    addPtr(TrustedImm32(sizeof(TypeProfilerLog::LogEntry)), regT1);
    store64(regT1, Address(regT2, TypeProfilerLog::currentLogEntryOffset()));
    Jump skipClearLog = branchPtr(NotEqual, regT1, TrustedImmPtr(cachedTypeProfilerLog->logEndPtr()));
    // Clear the log if we're at the end of the log.
    callOperation(operationProcessTypeProfilerLog);
    skipClearLog.link(this);

    jumpToEnd.link(this);
}

void JIT::emit_op_log_shadow_chicken_prologue(Instruction* currentInstruction)
{
    updateTopCallFrame();
    static_assert(nonArgGPR0 != regT0 && nonArgGPR0 != regT2, "we will have problems if this is true.");
    GPRReg shadowPacketReg = regT0;
    GPRReg scratch1Reg = nonArgGPR0; // This must be a non-argument register.
    GPRReg scratch2Reg = regT2;
    ensureShadowChickenPacket(*vm(), shadowPacketReg, scratch1Reg, scratch2Reg);
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT3);
    logShadowChickenProloguePacket(shadowPacketReg, scratch1Reg, regT3);
}

void JIT::emit_op_log_shadow_chicken_tail(Instruction* currentInstruction)
{
    updateTopCallFrame();
    static_assert(nonArgGPR0 != regT0 && nonArgGPR0 != regT2, "we will have problems if this is true.");
    GPRReg shadowPacketReg = regT0;
    GPRReg scratch1Reg = nonArgGPR0; // This must be a non-argument register.
    GPRReg scratch2Reg = regT2;
    ensureShadowChickenPacket(*vm(), shadowPacketReg, scratch1Reg, scratch2Reg);
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT2);
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT3);
    logShadowChickenTailPacket(shadowPacketReg, JSValueRegs(regT2), regT3, m_codeBlock, CallSiteIndex(m_bytecodeOffset));
}

#endif // USE(JSVALUE64)

void JIT::emit_op_profile_control_flow(Instruction* currentInstruction)
{
    BasicBlockLocation* basicBlockLocation = currentInstruction[1].u.basicBlockLocation;
#if USE(JSVALUE64)
    basicBlockLocation->emitExecuteCode(*this);
#else
    basicBlockLocation->emitExecuteCode(*this, regT0);
#endif
}

void JIT::emit_op_argument_count(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    load32(payloadFor(CallFrameSlot::argumentCount), regT0);
    sub32(TrustedImm32(1), regT0);
    JSValueRegs result = JSValueRegs::withTwoAvailableRegs(regT0, regT1);
    boxInt32(regT0, result);
    emitPutVirtualRegister(dst, result);
}

void JIT::emit_op_get_rest_length(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    unsigned numParamsToSkip = currentInstruction[2].u.unsignedValue;
    load32(payloadFor(CallFrameSlot::argumentCount), regT0);
    sub32(TrustedImm32(1), regT0);
    Jump zeroLength = branch32(LessThanOrEqual, regT0, Imm32(numParamsToSkip));
    sub32(Imm32(numParamsToSkip), regT0);
#if USE(JSVALUE64)
    boxInt32(regT0, JSValueRegs(regT0));
#endif
    Jump done = jump();

    zeroLength.link(this);
#if USE(JSVALUE64)
    move(TrustedImm64(JSValue::encode(jsNumber(0))), regT0);
#else
    move(TrustedImm32(0), regT0);
#endif

    done.link(this);
#if USE(JSVALUE64)
    emitPutVirtualRegister(dst, regT0);
#else
    move(TrustedImm32(JSValue::Int32Tag), regT1);
    emitPutVirtualRegister(dst, JSValueRegs(regT1, regT0));
#endif
}

void JIT::emit_op_get_argument(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int index = currentInstruction[2].u.operand;
#if USE(JSVALUE64)
    JSValueRegs resultRegs(regT0);
#else
    JSValueRegs resultRegs(regT1, regT0);
#endif

    load32(payloadFor(CallFrameSlot::argumentCount), regT2);
    Jump argumentOutOfBounds = branch32(LessThanOrEqual, regT2, TrustedImm32(index));
    loadValue(addressFor(CallFrameSlot::thisArgument + index), resultRegs);
    Jump done = jump();

    argumentOutOfBounds.link(this);
    moveValue(jsUndefined(), resultRegs);

    done.link(this);
    emitValueProfilingSite();
    emitPutVirtualRegister(dst, resultRegs);
}

} // namespace JSC

#endif // ENABLE(JIT)