[ESNext][BigInt] Implement support for "<=" and ">=" relational operations
Source/JavaScriptCore/jit/JITOpcodes.cpp
/*
 * Copyright (C) 2009-2018 Apple Inc. All rights reserved.
 * Copyright (C) 2010 Patrick Gansterer <paroga@paroga.com>
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"

#if ENABLE(JIT)
#include "JIT.h"

#include "BasicBlockLocation.h"
#include "BytecodeStructs.h"
#include "Exception.h"
#include "Heap.h"
#include "InterpreterInlines.h"
#include "JITInlines.h"
#include "JSArray.h"
#include "JSCast.h"
#include "JSFunction.h"
#include "JSPropertyNameEnumerator.h"
#include "LinkBuffer.h"
#include "MaxFrameExtentForSlowPathCall.h"
#include "SlowPathCall.h"
#include "SuperSampler.h"
#include "ThunkGenerators.h"
#include "TypeLocation.h"
#include "TypeProfilerLog.h"
#include "VirtualRegister.h"
#include "Watchdog.h"

namespace JSC {

#if USE(JSVALUE64)

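// A quick reminder of the 64-bit JSValue encoding that the bit tricks below rely on:
// cell (object/string/symbol) values are raw pointers with no tag bits set; int32s have
// all of the top 16 bits set (TagTypeNumber); doubles are stored with a 2^48 offset so
// that at least one of the top 16 bits is set; and the immediates are small integers
// built from TagBitTypeOther (0x2), TagBitBool (0x4), and TagBitUndefined (0x8), giving
// ValueNull = 0x02, ValueFalse = 0x06, ValueTrue = 0x07, ValueUndefined = 0x0a, and 0x0
// for the empty value.
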
void JIT::emit_op_mov(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_end(Instruction* currentInstruction)
{
    RELEASE_ASSERT(returnValueGPR != callFrameRegister);
    emitGetVirtualRegister(currentInstruction[1].u.operand, returnValueGPR);
    emitRestoreCalleeSaves();
    emitFunctionEpilogue();
    ret();
}

void JIT::emit_op_jmp(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[1].u.operand;
    addJump(jump(), target);
}

void JIT::emit_op_new_object(Instruction* currentInstruction)
{
    Structure* structure = currentInstruction[3].u.objectAllocationProfile->structure();
    size_t allocationSize = JSFinalObject::allocationSize(structure->inlineCapacity());
    Allocator allocator = subspaceFor<JSFinalObject>(*m_vm)->allocatorForNonVirtual(allocationSize, AllocatorForMode::AllocatorIfExists);

    RegisterID resultReg = regT0;
    RegisterID allocatorReg = regT1;
    RegisterID scratchReg = regT2;

    if (!allocator)
        addSlowCase(jump());
    else {
        JumpList slowCases;
        auto butterfly = TrustedImmPtr(nullptr);
        emitAllocateJSObject(resultReg, JITAllocator::constant(allocator), allocatorReg, TrustedImmPtr(structure), butterfly, scratchReg, slowCases);
        emitInitializeInlineStorage(resultReg, structure->inlineCapacity());
        addSlowCase(slowCases);
        emitPutVirtualRegister(currentInstruction[1].u.operand);
    }
}

void JIT::emitSlow_op_new_object(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkAllSlowCases(iter);

    int dst = currentInstruction[1].u.operand;
    Structure* structure = currentInstruction[3].u.objectAllocationProfile->structure();
    callOperation(operationNewObject, structure);
    emitStoreCell(dst, returnValueGPR);
}

void JIT::emit_op_overrides_has_instance(Instruction* currentInstruction)
{
    auto& bytecode = *reinterpret_cast<OpOverridesHasInstance*>(currentInstruction);
    int dst = bytecode.dst();
    int constructor = bytecode.constructor();
    int hasInstanceValue = bytecode.hasInstanceValue();

    emitGetVirtualRegister(hasInstanceValue, regT0);

    // We don't jump if we know what Symbol.hasInstance would do.
    Jump customhasInstanceValue = branchPtr(NotEqual, regT0, TrustedImmPtr(m_codeBlock->globalObject()->functionProtoHasInstanceSymbolFunction()));

    emitGetVirtualRegister(constructor, regT0);

    // Check that the constructor has 'ImplementsDefaultHasInstance', i.e. that the object is not a C-API user nor a bound function.
    test8(Zero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(ImplementsDefaultHasInstance), regT0);
    boxBoolean(regT0, JSValueRegs { regT0 });
    Jump done = jump();

    customhasInstanceValue.link(this);
    move(TrustedImm32(ValueTrue), regT0);

    done.link(this);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_instanceof(Instruction* currentInstruction)
{
    auto& bytecode = *reinterpret_cast<OpInstanceof*>(currentInstruction);
    int dst = bytecode.dst();
    int value = bytecode.value();
    int proto = bytecode.prototype();

    // Load the operands value and proto into registers; regT0 is kept free since it will hold the result.
    emitGetVirtualRegister(value, regT2);
    emitGetVirtualRegister(proto, regT1);

    // Check that value and proto are cells. baseVal must be a cell - this is checked by the get_by_id for Symbol.hasInstance.
    emitJumpSlowCaseIfNotJSCell(regT2, value);
    emitJumpSlowCaseIfNotJSCell(regT1, proto);

    JITInstanceOfGenerator gen(
        m_codeBlock, CodeOrigin(m_bytecodeOffset), CallSiteIndex(m_bytecodeOffset),
        RegisterSet::stubUnavailableRegisters(),
        regT0, // result
        regT2, // value
        regT1, // proto
        regT3, regT4); // scratch
    gen.generateFastPath(*this);
    m_instanceOfs.append(gen);

    emitPutVirtualRegister(dst);
}

void JIT::emitSlow_op_instanceof(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkAllSlowCases(iter);

    int resultVReg = currentInstruction[1].u.operand;

    JITInstanceOfGenerator& gen = m_instanceOfs[m_instanceOfIndex++];

    Label coldPathBegin = label();
    Call call = callOperation(operationInstanceOfOptimize, resultVReg, gen.stubInfo(), regT2, regT1);
    gen.reportSlowPathCall(coldPathBegin, call);
}

void JIT::emit_op_instanceof_custom(Instruction*)
{
    // This always goes to slow path since we expect it to be rare.
    addSlowCase(jump());
}

void JIT::emit_op_is_empty(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
    compare64(Equal, regT0, TrustedImm32(JSValue::encode(JSValue())), regT0);

    boxBoolean(regT0, JSValueRegs { regT0 });
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_is_undefined(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
    Jump isCell = branchIfCell(regT0);

    compare64(Equal, regT0, TrustedImm32(ValueUndefined), regT0);
    Jump done = jump();

    isCell.link(this);
    Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    move(TrustedImm32(0), regT0);
    Jump notMasqueradesAsUndefined = jump();

    isMasqueradesAsUndefined.link(this);
    emitLoadStructure(*vm(), regT0, regT1, regT2);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    loadPtr(Address(regT1, Structure::globalObjectOffset()), regT1);
    comparePtr(Equal, regT0, regT1, regT0);

    notMasqueradesAsUndefined.link(this);
    done.link(this);
    boxBoolean(regT0, JSValueRegs { regT0 });
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_is_boolean(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
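    // Booleans are encoded as ValueFalse (0x06) and ValueTrue (0x07), which differ only in
    // the low bit. XORing with ValueFalse maps them to 0 and 1; any non-boolean keeps some
    // bit outside the low bit, so testing against ~1 yields the "is boolean" answer.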
    xor64(TrustedImm32(static_cast<int32_t>(ValueFalse)), regT0);
    test64(Zero, regT0, TrustedImm32(static_cast<int32_t>(~1)), regT0);
    boxBoolean(regT0, JSValueRegs { regT0 });
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_is_number(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
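    // Any number - int32 or double - has at least one of the top 16 bits set under this
    // encoding, and nothing else does, so a single mask against tagTypeNumberRegister
    // (which holds TagTypeNumber) answers the question.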
    test64(NonZero, regT0, tagTypeNumberRegister, regT0);
    boxBoolean(regT0, JSValueRegs { regT0 });
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_is_cell_with_type(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;
    int type = currentInstruction[3].u.operand;

    emitGetVirtualRegister(value, regT0);
    Jump isNotCell = branchIfNotCell(regT0);

    compare8(Equal, Address(regT0, JSCell::typeInfoTypeOffset()), TrustedImm32(type), regT0);
    boxBoolean(regT0, JSValueRegs { regT0 });
    Jump done = jump();

    isNotCell.link(this);
    move(TrustedImm32(ValueFalse), regT0);

    done.link(this);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_is_object(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
    Jump isNotCell = branchIfNotCell(regT0);

    compare8(AboveOrEqual, Address(regT0, JSCell::typeInfoTypeOffset()), TrustedImm32(ObjectType), regT0);
    boxBoolean(regT0, JSValueRegs { regT0 });
    Jump done = jump();

    isNotCell.link(this);
    move(TrustedImm32(ValueFalse), regT0);

    done.link(this);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_ret(Instruction* currentInstruction)
{
    ASSERT(callFrameRegister != regT1);
    ASSERT(regT1 != returnValueGPR);
    ASSERT(returnValueGPR != callFrameRegister);

    // Return the result in returnValueGPR.
    emitGetVirtualRegister(currentInstruction[1].u.operand, returnValueGPR);

    checkStackPointerAlignment();
    emitRestoreCalleeSaves();
    emitFunctionEpilogue();
    ret();
}

void JIT::emit_op_to_primitive(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);

    Jump isImm = branchIfNotCell(regT0);
    addSlowCase(branchIfObject(regT0));
    isImm.link(this);

    if (dst != src)
        emitPutVirtualRegister(dst);
}

void JIT::emit_op_set_function_name(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT1);
    callOperation(operationSetFunctionName, regT0, regT1);
}

void JIT::emit_op_not(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);

    // Invert against JSValue(false); if the value was tagged as a boolean, then all bits will be
    // clear other than the low bit (which will be 0 or 1 for false or true inputs respectively).
    // Then invert against JSValue(true), which will add the tag back in, and flip the low bit.
    xor64(TrustedImm32(static_cast<int32_t>(ValueFalse)), regT0);
    addSlowCase(branchTestPtr(NonZero, regT0, TrustedImm32(static_cast<int32_t>(~1))));
    xor64(TrustedImm32(static_cast<int32_t>(ValueTrue)), regT0);

    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_jfalse(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[2].u.operand;

    GPRReg value = regT0;
    GPRReg result = regT1;
    GPRReg scratch = regT2;
    bool shouldCheckMasqueradesAsUndefined = true;

    emitGetVirtualRegister(currentInstruction[1].u.operand, value);
    emitConvertValueToBoolean(*vm(), JSValueRegs(value), result, scratch, fpRegT0, fpRegT1, shouldCheckMasqueradesAsUndefined, m_codeBlock->globalObject());

    addJump(branchTest32(Zero, result), target);
}

void JIT::emit_op_jeq_null(Instruction* currentInstruction)
{
    int src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);
    Jump isImmediate = branchIfNotCell(regT0);

    // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
    Jump isNotMasqueradesAsUndefined = branchTest8(Zero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    emitLoadStructure(*vm(), regT0, regT2, regT1);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    addJump(branchPtr(Equal, Address(regT2, Structure::globalObjectOffset()), regT0), target);
    Jump masqueradesGlobalObjectIsForeign = jump();

    // Now handle the immediate cases - undefined & null
    isImmediate.link(this);
    and64(TrustedImm32(~TagBitUndefined), regT0);
    addJump(branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsNull()))), target);

    isNotMasqueradesAsUndefined.link(this);
    masqueradesGlobalObjectIsForeign.link(this);
}

void JIT::emit_op_jneq_null(Instruction* currentInstruction)
{
    int src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);
    Jump isImmediate = branchIfNotCell(regT0);

    // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
    addJump(branchTest8(Zero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined)), target);
    emitLoadStructure(*vm(), regT0, regT2, regT1);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    addJump(branchPtr(NotEqual, Address(regT2, Structure::globalObjectOffset()), regT0), target);
    Jump wasNotImmediate = jump();

    // Now handle the immediate cases - undefined & null
    isImmediate.link(this);
    and64(TrustedImm32(~TagBitUndefined), regT0);
    addJump(branch64(NotEqual, regT0, TrustedImm64(JSValue::encode(jsNull()))), target);

    wasNotImmediate.link(this);
}

void JIT::emit_op_jneq_ptr(Instruction* currentInstruction)
{
    int src = currentInstruction[1].u.operand;
    Special::Pointer ptr = currentInstruction[2].u.specialPointer;
    unsigned target = currentInstruction[3].u.operand;

    emitGetVirtualRegister(src, regT0);
    CCallHelpers::Jump equal = branchPtr(Equal, regT0, TrustedImmPtr(actualPointerFor(m_codeBlock, ptr)));
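    // The fourth operand records whether this comparison has ever failed, so that the DFG
    // can avoid constant-folding the pointer comparison away.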
    store32(TrustedImm32(1), &currentInstruction[4].u.operand);
    addJump(jump(), target);
    equal.link(this);
}

void JIT::emit_op_eq(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotInt(regT0, regT1, regT2);
    compare32(Equal, regT1, regT0, regT0);
    boxBoolean(regT0, JSValueRegs { regT0 });
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_jeq(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[3].u.operand;
    emitGetVirtualRegisters(currentInstruction[1].u.operand, regT0, currentInstruction[2].u.operand, regT1);
    emitJumpSlowCaseIfNotInt(regT0, regT1, regT2);
    addJump(branch32(Equal, regT0, regT1), target);
}

void JIT::emit_op_jtrue(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[2].u.operand;

    GPRReg value = regT0;
    GPRReg result = regT1;
    GPRReg scratch = regT2;
    bool shouldCheckMasqueradesAsUndefined = true;
    emitGetVirtualRegister(currentInstruction[1].u.operand, value);
    emitConvertValueToBoolean(*vm(), JSValueRegs(value), result, scratch, fpRegT0, fpRegT1, shouldCheckMasqueradesAsUndefined, m_codeBlock->globalObject());
    addJump(branchTest32(NonZero, result), target);
}

void JIT::emit_op_neq(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotInt(regT0, regT1, regT2);
    compare32(NotEqual, regT1, regT0, regT0);
    boxBoolean(regT0, JSValueRegs { regT0 });

    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_jneq(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[3].u.operand;
    emitGetVirtualRegisters(currentInstruction[1].u.operand, regT0, currentInstruction[2].u.operand, regT1);
    emitJumpSlowCaseIfNotInt(regT0, regT1, regT2);
    addJump(branch32(NotEqual, regT0, regT1), target);
}

void JIT::emit_op_throw(Instruction* currentInstruction)
{
    ASSERT(regT0 == returnValueGPR);
    copyCalleeSavesToEntryFrameCalleeSavesBuffer(vm()->topEntryFrame);
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
    callOperationNoExceptionCheck(operationThrow, regT0);
    jumpToExceptionHandler(*vm());
}

void JIT::compileOpStrictEq(Instruction* currentInstruction, CompileOpStrictEqType type)
{
    int dst = currentInstruction[1].u.operand;
    int src1 = currentInstruction[2].u.operand;
    int src2 = currentInstruction[3].u.operand;

    emitGetVirtualRegisters(src1, regT0, src2, regT1);

    // Jump slow if both are cells (to cover strings).
    move(regT0, regT2);
    or64(regT1, regT2);
    addSlowCase(branchIfCell(regT2));

    // Jump slow if either is a double. First test if it's an integer, which is fine, and then test
    // if it's a double.
    Jump leftOK = branchIfInt32(regT0);
    addSlowCase(branchIfNumber(regT0));
    leftOK.link(this);
    Jump rightOK = branchIfInt32(regT1);
    addSlowCase(branchIfNumber(regT1));
    rightOK.link(this);

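    // At this point neither operand is a cell or a double, so each is an int32, boolean,
    // null, or undefined. All of those have a unique 64-bit encoding, which makes a raw
    // 64-bit comparison equivalent to strict equality.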
    if (type == CompileOpStrictEqType::StrictEq)
        compare64(Equal, regT1, regT0, regT0);
    else
        compare64(NotEqual, regT1, regT0, regT0);
    boxBoolean(regT0, JSValueRegs { regT0 });

    emitPutVirtualRegister(dst);
}

void JIT::emit_op_stricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, CompileOpStrictEqType::StrictEq);
}

void JIT::emit_op_nstricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, CompileOpStrictEqType::NStrictEq);
}

void JIT::compileOpStrictEqJump(Instruction* currentInstruction, CompileOpStrictEqType type)
{
    int target = currentInstruction[3].u.operand;
    int src1 = currentInstruction[1].u.operand;
    int src2 = currentInstruction[2].u.operand;

    emitGetVirtualRegisters(src1, regT0, src2, regT1);

    // Jump slow if both are cells (to cover strings).
    move(regT0, regT2);
    or64(regT1, regT2);
    addSlowCase(branchIfCell(regT2));

    // Jump slow if either is a double. First test if it's an integer, which is fine, and then test
    // if it's a double.
    Jump leftOK = branchIfInt32(regT0);
    addSlowCase(branchIfNumber(regT0));
    leftOK.link(this);
    Jump rightOK = branchIfInt32(regT1);
    addSlowCase(branchIfNumber(regT1));
    rightOK.link(this);

    if (type == CompileOpStrictEqType::StrictEq)
        addJump(branch64(Equal, regT1, regT0), target);
    else
        addJump(branch64(NotEqual, regT1, regT0), target);
}

void JIT::emit_op_jstricteq(Instruction* currentInstruction)
{
    compileOpStrictEqJump(currentInstruction, CompileOpStrictEqType::StrictEq);
}

void JIT::emit_op_jnstricteq(Instruction* currentInstruction)
{
    compileOpStrictEqJump(currentInstruction, CompileOpStrictEqType::NStrictEq);
}

void JIT::emitSlow_op_jstricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkAllSlowCases(iter);

    unsigned target = currentInstruction[3].u.operand;
    callOperation(operationCompareStrictEq, regT0, regT1);
    emitJumpSlowToHot(branchTest32(NonZero, returnValueGPR), target);
}

void JIT::emitSlow_op_jnstricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkAllSlowCases(iter);

    unsigned target = currentInstruction[3].u.operand;
    callOperation(operationCompareStrictEq, regT0, regT1);
    emitJumpSlowToHot(branchTest32(Zero, returnValueGPR), target);
}

void JIT::emit_op_to_number(Instruction* currentInstruction)
{
    int dstVReg = currentInstruction[1].u.operand;
    int srcVReg = currentInstruction[2].u.operand;
    emitGetVirtualRegister(srcVReg, regT0);

    addSlowCase(branchIfNotNumber(regT0));

    emitValueProfilingSite();
    if (srcVReg != dstVReg)
        emitPutVirtualRegister(dstVReg);
}

void JIT::emit_op_to_string(Instruction* currentInstruction)
{
    int srcVReg = currentInstruction[2].u.operand;
    emitGetVirtualRegister(srcVReg, regT0);

    addSlowCase(branchIfNotCell(regT0));
    addSlowCase(branchIfNotString(regT0));

    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_to_object(Instruction* currentInstruction)
{
    int dstVReg = currentInstruction[1].u.operand;
    int srcVReg = currentInstruction[2].u.operand;
    emitGetVirtualRegister(srcVReg, regT0);

    addSlowCase(branchIfNotCell(regT0));
    addSlowCase(branchIfNotObject(regT0));

    emitValueProfilingSite();
    if (srcVReg != dstVReg)
        emitPutVirtualRegister(dstVReg);
}

void JIT::emit_op_catch(Instruction* currentInstruction)
{
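    // The unwinder leaves the frame to resume in VM::callFrameForCatch. Restore the callee
    // saves and the frame/stack pointers from it, check that the exception is actually
    // catchable, then move the Exception object and the thrown value into the two dst
    // operands and clear VM::exception so the exception isn't rethrown.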
    restoreCalleeSavesFromEntryFrameCalleeSavesBuffer(vm()->topEntryFrame);

    move(TrustedImmPtr(m_vm), regT3);
    load64(Address(regT3, VM::callFrameForCatchOffset()), callFrameRegister);
    storePtr(TrustedImmPtr(nullptr), Address(regT3, VM::callFrameForCatchOffset()));

    addPtr(TrustedImm32(stackPointerOffsetFor(codeBlock()) * sizeof(Register)), callFrameRegister, stackPointerRegister);

    callOperationNoExceptionCheck(operationCheckIfExceptionIsUncatchableAndNotifyProfiler);
    Jump isCatchableException = branchTest32(Zero, returnValueGPR);
    jumpToExceptionHandler(*vm());
    isCatchableException.link(this);

    move(TrustedImmPtr(m_vm), regT3);
    load64(Address(regT3, VM::exceptionOffset()), regT0);
    store64(TrustedImm64(JSValue::encode(JSValue())), Address(regT3, VM::exceptionOffset()));
    emitPutVirtualRegister(currentInstruction[1].u.operand);

    load64(Address(regT0, Exception::valueOffset()), regT0);
    emitPutVirtualRegister(currentInstruction[2].u.operand);

#if ENABLE(DFG_JIT)
    // FIXME: consider inline caching the process of doing OSR entry, including
    // argument type proofs, storing locals to the buffer, etc
    // https://bugs.webkit.org/show_bug.cgi?id=175598

    ValueProfileAndOperandBuffer* buffer = static_cast<ValueProfileAndOperandBuffer*>(currentInstruction[3].u.pointer);
    if (buffer || !shouldEmitProfiling())
        callOperation(operationTryOSREnterAtCatch, m_bytecodeOffset);
    else
        callOperation(operationTryOSREnterAtCatchAndValueProfile, m_bytecodeOffset);
    auto skipOSREntry = branchTestPtr(Zero, returnValueGPR);
    emitRestoreCalleeSaves();
    jump(returnValueGPR, ExceptionHandlerPtrTag);
    skipOSREntry.link(this);
    if (buffer && shouldEmitProfiling()) {
        buffer->forEach([&] (ValueProfileAndOperand& profile) {
            JSValueRegs regs(regT0);
            emitGetVirtualRegister(profile.m_operand, regs);
            emitValueProfilingSite(profile.m_profile);
        });
    }
#endif // ENABLE(DFG_JIT)
}

void JIT::emit_op_identity_with_profile(Instruction*)
{
    // We don't need to do anything here...
}

void JIT::emit_op_get_parent_scope(Instruction* currentInstruction)
{
    int currentScope = currentInstruction[2].u.operand;
    emitGetVirtualRegister(currentScope, regT0);
    loadPtr(Address(regT0, JSScope::offsetOfNext()), regT0);
    emitStoreCell(currentInstruction[1].u.operand, regT0);
}

void JIT::emit_op_switch_imm(Instruction* currentInstruction)
{
    size_t tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->switchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Immediate));
    jumpTable->ensureCTITable();

    emitGetVirtualRegister(scrutinee, regT0);
    callOperation(operationSwitchImmWithUnknownKeyType, regT0, tableIndex);
    jump(returnValueGPR, JSSwitchPtrTag);
}

void JIT::emit_op_switch_char(Instruction* currentInstruction)
{
    size_t tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->switchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Character));
    jumpTable->ensureCTITable();

    emitGetVirtualRegister(scrutinee, regT0);
    callOperation(operationSwitchCharWithUnknownKeyType, regT0, tableIndex);
    jump(returnValueGPR, JSSwitchPtrTag);
}

void JIT::emit_op_switch_string(Instruction* currentInstruction)
{
    size_t tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    StringJumpTable* jumpTable = &m_codeBlock->stringSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset));

    emitGetVirtualRegister(scrutinee, regT0);
    callOperation(operationSwitchStringWithUnknownKeyType, regT0, tableIndex);
    jump(returnValueGPR, JSSwitchPtrTag);
}

void JIT::emit_op_debug(Instruction* currentInstruction)
{
    load32(codeBlock()->debuggerRequestsAddress(), regT0);
    Jump noDebuggerRequests = branchTest32(Zero, regT0);
    callOperation(operationDebug, currentInstruction[1].u.operand);
    noDebuggerRequests.link(this);
}

void JIT::emit_op_eq_null(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src1 = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src1, regT0);
    Jump isImmediate = branchIfNotCell(regT0);

    Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    move(TrustedImm32(0), regT0);
    Jump wasNotMasqueradesAsUndefined = jump();

    isMasqueradesAsUndefined.link(this);
    emitLoadStructure(*vm(), regT0, regT2, regT1);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    loadPtr(Address(regT2, Structure::globalObjectOffset()), regT2);
    comparePtr(Equal, regT0, regT2, regT0);
    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    and64(TrustedImm32(~TagBitUndefined), regT0);
    compare64(Equal, regT0, TrustedImm32(ValueNull), regT0);

    wasNotImmediate.link(this);
    wasNotMasqueradesAsUndefined.link(this);

    boxBoolean(regT0, JSValueRegs { regT0 });
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_neq_null(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src1 = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src1, regT0);
    Jump isImmediate = branchIfNotCell(regT0);

    Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    move(TrustedImm32(1), regT0);
    Jump wasNotMasqueradesAsUndefined = jump();

    isMasqueradesAsUndefined.link(this);
    emitLoadStructure(*vm(), regT0, regT2, regT1);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    loadPtr(Address(regT2, Structure::globalObjectOffset()), regT2);
    comparePtr(NotEqual, regT0, regT2, regT0);
    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    and64(TrustedImm32(~TagBitUndefined), regT0);
    compare64(NotEqual, regT0, TrustedImm32(ValueNull), regT0);

    wasNotImmediate.link(this);
    wasNotMasqueradesAsUndefined.link(this);

    boxBoolean(regT0, JSValueRegs { regT0 });
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_enter(Instruction*)
{
    // Even though CTI doesn't use them, we initialize our constant
    // registers to zap stale pointers, to avoid unnecessarily prolonging
    // object lifetime and increasing GC pressure.
    size_t count = m_codeBlock->m_numVars;
    for (size_t j = CodeBlock::llintBaselineCalleeSaveSpaceAsVirtualRegisters(); j < count; ++j)
        emitInitRegister(virtualRegisterForLocal(j).offset());

    emitWriteBarrier(m_codeBlock);

    emitEnterOptimizationCheck();
}

void JIT::emit_op_get_scope(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    emitGetFromCallFrameHeaderPtr(CallFrameSlot::callee, regT0);
    loadPtr(Address(regT0, JSFunction::offsetOfScopeChain()), regT0);
    emitStoreCell(dst, regT0);
}

void JIT::emit_op_to_this(Instruction* currentInstruction)
{
    WriteBarrierBase<Structure>* cachedStructure = &currentInstruction[2].u.structure;
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT1);

    emitJumpSlowCaseIfNotJSCell(regT1);

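    // Fast path: |this| is already a final object whose Structure matches the one cached
    // in the bytecode. Primitives, non-final objects, an empty cache, and structure
    // mismatches all fall through to the slow path, which performs the real ToThis
    // conversion and updates the cache.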
    addSlowCase(branchIfNotType(regT1, FinalObjectType));
    loadPtr(cachedStructure, regT2);
    addSlowCase(branchTestPtr(Zero, regT2));
    load32(Address(regT2, Structure::structureIDOffset()), regT2);
    addSlowCase(branch32(NotEqual, Address(regT1, JSCell::structureIDOffset()), regT2));
}

void JIT::emit_op_create_this(Instruction* currentInstruction)
{
    int callee = currentInstruction[2].u.operand;
    WriteBarrierBase<JSCell>* cachedFunction = &currentInstruction[4].u.jsCell;
    RegisterID calleeReg = regT0;
    RegisterID rareDataReg = regT4;
    RegisterID resultReg = regT0;
    RegisterID allocatorReg = regT1;
    RegisterID structureReg = regT2;
    RegisterID cachedFunctionReg = regT4;
    RegisterID scratchReg = regT3;

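    // Inline the common case of 'new': load the cached allocator and Structure from the
    // callee's ObjectAllocationProfile (reached through its rare data, a poisoned pointer,
    // hence the xor with JSFunctionPoison::key()). The cached-callee check below bails to
    // the slow path when a different single callee was cached; the sentinel
    // seenMultipleCalleeObjects() means the check is deliberately skipped.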
    emitGetVirtualRegister(callee, calleeReg);
    addSlowCase(branchIfNotFunction(calleeReg));
    loadPtr(Address(calleeReg, JSFunction::offsetOfRareData()), rareDataReg);
    addSlowCase(branchTestPtr(Zero, rareDataReg));
    xorPtr(TrustedImmPtr(JSFunctionPoison::key()), rareDataReg);
    loadPtr(Address(rareDataReg, FunctionRareData::offsetOfObjectAllocationProfile() + ObjectAllocationProfile::offsetOfAllocator()), allocatorReg);
    loadPtr(Address(rareDataReg, FunctionRareData::offsetOfObjectAllocationProfile() + ObjectAllocationProfile::offsetOfStructure()), structureReg);

    loadPtr(cachedFunction, cachedFunctionReg);
    Jump hasSeenMultipleCallees = branchPtr(Equal, cachedFunctionReg, TrustedImmPtr(JSCell::seenMultipleCalleeObjects()));
    addSlowCase(branchPtr(NotEqual, calleeReg, cachedFunctionReg));
    hasSeenMultipleCallees.link(this);

    JumpList slowCases;
    auto butterfly = TrustedImmPtr(nullptr);
    emitAllocateJSObject(resultReg, JITAllocator::variable(), allocatorReg, structureReg, butterfly, scratchReg, slowCases);
    emitGetVirtualRegister(callee, scratchReg);
    loadPtr(Address(scratchReg, JSFunction::offsetOfRareData()), scratchReg);
    xorPtr(TrustedImmPtr(JSFunctionPoison::key()), scratchReg);
    load32(Address(scratchReg, FunctionRareData::offsetOfObjectAllocationProfile() + ObjectAllocationProfile::offsetOfInlineCapacity()), scratchReg);
    emitInitializeInlineStorage(resultReg, scratchReg);
    addSlowCase(slowCases);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_check_tdz(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
    addSlowCase(branchIfEmpty(regT0));
}


// Slow cases

void JIT::emitSlow_op_eq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkAllSlowCases(iter);

    callOperation(operationCompareEq, regT0, regT1);
    boxBoolean(returnValueGPR, JSValueRegs { returnValueGPR });
    emitPutVirtualRegister(currentInstruction[1].u.operand, returnValueGPR);
}

void JIT::emitSlow_op_neq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkAllSlowCases(iter);

    callOperation(operationCompareEq, regT0, regT1);
    xor32(TrustedImm32(0x1), regT0);
    boxBoolean(returnValueGPR, JSValueRegs { returnValueGPR });
    emitPutVirtualRegister(currentInstruction[1].u.operand, returnValueGPR);
}

void JIT::emitSlow_op_jeq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkAllSlowCases(iter);

    unsigned target = currentInstruction[3].u.operand;
    callOperation(operationCompareEq, regT0, regT1);
    emitJumpSlowToHot(branchTest32(NonZero, returnValueGPR), target);
}

void JIT::emitSlow_op_jneq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkAllSlowCases(iter);

    unsigned target = currentInstruction[3].u.operand;
    callOperation(operationCompareEq, regT0, regT1);
    emitJumpSlowToHot(branchTest32(Zero, returnValueGPR), target);
}

void JIT::emitSlow_op_instanceof_custom(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkAllSlowCases(iter);

    auto& bytecode = *reinterpret_cast<OpInstanceofCustom*>(currentInstruction);
    int dst = bytecode.dst();
    int value = bytecode.value();
    int constructor = bytecode.constructor();
    int hasInstanceValue = bytecode.hasInstanceValue();

    emitGetVirtualRegister(value, regT0);
    emitGetVirtualRegister(constructor, regT1);
    emitGetVirtualRegister(hasInstanceValue, regT2);
    callOperation(operationInstanceOfCustom, regT0, regT1, regT2);
    boxBoolean(returnValueGPR, JSValueRegs { returnValueGPR });
    emitPutVirtualRegister(dst, returnValueGPR);
}

#endif // USE(JSVALUE64)

void JIT::emit_op_loop_hint(Instruction*)
{
    // Emit the JIT optimization check:
    if (canBeOptimized()) {
        addSlowCase(branchAdd32(PositiveOrZero, TrustedImm32(Options::executionCounterIncrementForLoop()),
            AbsoluteAddress(m_codeBlock->addressOfJITExecuteCounter())));
    }
}

void JIT::emitSlow_op_loop_hint(Instruction*, Vector<SlowCaseEntry>::iterator& iter)
{
#if ENABLE(DFG_JIT)
    // Emit the slow path for the JIT optimization check:
    if (canBeOptimized()) {
        linkAllSlowCases(iter);

        copyCalleeSavesFromFrameOrRegisterToEntryFrameCalleeSavesBuffer(vm()->topEntryFrame);

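        // operationOptimize compiles (or finds) an optimized code block for this CodeBlock
        // and returns a machine-code address at which to OSR-enter at this loop, or null
        // when optimized entry isn't possible yet.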
        callOperation(operationOptimize, m_bytecodeOffset);
        Jump noOptimizedEntry = branchTestPtr(Zero, returnValueGPR);
        if (!ASSERT_DISABLED) {
            Jump ok = branchPtr(MacroAssembler::Above, returnValueGPR, TrustedImmPtr(bitwise_cast<void*>(static_cast<intptr_t>(1000))));
            abortWithReason(JITUnreasonableLoopHintJumpTarget);
            ok.link(this);
        }
        jump(returnValueGPR, GPRInfo::callFrameRegister);
        noOptimizedEntry.link(this);

        emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_loop_hint));
    }
#else
    UNUSED_PARAM(iter);
#endif
}

void JIT::emit_op_check_traps(Instruction*)
{
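    // VMTraps sets this flag when the VM needs attention (termination requests, the
    // debugger, etc.); the slow path calls out to service the trap.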
    addSlowCase(branchTest8(NonZero, AbsoluteAddress(m_vm->needTrapHandlingAddress())));
}

void JIT::emit_op_nop(Instruction*)
{
}

void JIT::emit_op_super_sampler_begin(Instruction*)
{
    add32(TrustedImm32(1), AbsoluteAddress(bitwise_cast<void*>(&g_superSamplerCount)));
}

void JIT::emit_op_super_sampler_end(Instruction*)
{
    sub32(TrustedImm32(1), AbsoluteAddress(bitwise_cast<void*>(&g_superSamplerCount)));
}

void JIT::emitSlow_op_check_traps(Instruction*, Vector<SlowCaseEntry>::iterator& iter)
{
    linkAllSlowCases(iter);

    callOperation(operationHandleTraps);
}

void JIT::emit_op_new_regexp(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    callOperation(operationNewRegexp, m_codeBlock->regexp(currentInstruction[2].u.operand));
    emitStoreCell(dst, returnValueGPR);
}

void JIT::emitNewFuncCommon(Instruction* currentInstruction)
{
    Jump lazyJump;
    int dst = currentInstruction[1].u.operand;

#if USE(JSVALUE64)
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);
#else
    emitLoadPayload(currentInstruction[2].u.operand, regT0);
#endif
    FunctionExecutable* funcExec = m_codeBlock->functionDecl(currentInstruction[3].u.operand);

    OpcodeID opcodeID = Interpreter::getOpcodeID(currentInstruction->u.opcode);
    if (opcodeID == op_new_func)
        callOperation(operationNewFunction, dst, regT0, funcExec);
    else if (opcodeID == op_new_generator_func)
        callOperation(operationNewGeneratorFunction, dst, regT0, funcExec);
    else if (opcodeID == op_new_async_func)
        callOperation(operationNewAsyncFunction, dst, regT0, funcExec);
    else {
        ASSERT(opcodeID == op_new_async_generator_func);
        callOperation(operationNewAsyncGeneratorFunction, dst, regT0, funcExec);
    }
}

void JIT::emit_op_new_func(Instruction* currentInstruction)
{
    emitNewFuncCommon(currentInstruction);
}

void JIT::emit_op_new_generator_func(Instruction* currentInstruction)
{
    emitNewFuncCommon(currentInstruction);
}

void JIT::emit_op_new_async_generator_func(Instruction* currentInstruction)
{
    emitNewFuncCommon(currentInstruction);
}

void JIT::emit_op_new_async_func(Instruction* currentInstruction)
{
    emitNewFuncCommon(currentInstruction);
}

void JIT::emitNewFuncExprCommon(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
#if USE(JSVALUE64)
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);
#else
    emitLoadPayload(currentInstruction[2].u.operand, regT0);
#endif

    FunctionExecutable* function = m_codeBlock->functionExpr(currentInstruction[3].u.operand);
    OpcodeID opcodeID = Interpreter::getOpcodeID(currentInstruction->u.opcode);

    if (opcodeID == op_new_func_exp)
        callOperation(operationNewFunction, dst, regT0, function);
    else if (opcodeID == op_new_generator_func_exp)
        callOperation(operationNewGeneratorFunction, dst, regT0, function);
    else if (opcodeID == op_new_async_func_exp)
        callOperation(operationNewAsyncFunction, dst, regT0, function);
    else {
        ASSERT(opcodeID == op_new_async_generator_func_exp);
        callOperation(operationNewAsyncGeneratorFunction, dst, regT0, function);
    }
}

void JIT::emit_op_new_func_exp(Instruction* currentInstruction)
{
    emitNewFuncExprCommon(currentInstruction);
}

void JIT::emit_op_new_generator_func_exp(Instruction* currentInstruction)
{
    emitNewFuncExprCommon(currentInstruction);
}

void JIT::emit_op_new_async_func_exp(Instruction* currentInstruction)
{
    emitNewFuncExprCommon(currentInstruction);
}

void JIT::emit_op_new_async_generator_func_exp(Instruction* currentInstruction)
{
    emitNewFuncExprCommon(currentInstruction);
}

void JIT::emit_op_new_array(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int valuesIndex = currentInstruction[2].u.operand;
    int size = currentInstruction[3].u.operand;
    addPtr(TrustedImm32(valuesIndex * sizeof(Register)), callFrameRegister, regT0);
    callOperation(operationNewArrayWithProfile, dst,
        currentInstruction[4].u.arrayAllocationProfile, regT0, size);
}

void JIT::emit_op_new_array_with_size(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int sizeIndex = currentInstruction[2].u.operand;
#if USE(JSVALUE64)
    emitGetVirtualRegister(sizeIndex, regT0);
    callOperation(operationNewArrayWithSizeAndProfile, dst,
        currentInstruction[3].u.arrayAllocationProfile, regT0);
#else
    emitLoad(sizeIndex, regT1, regT0);
    callOperation(operationNewArrayWithSizeAndProfile, dst,
        currentInstruction[3].u.arrayAllocationProfile, JSValueRegs(regT1, regT0));
#endif
}

#if USE(JSVALUE64)
void JIT::emit_op_has_structure_property(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int enumerator = currentInstruction[4].u.operand;

    emitGetVirtualRegister(base, regT0);
    emitGetVirtualRegister(enumerator, regT1);
    emitJumpSlowCaseIfNotJSCell(regT0, base);

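    // If the base still has the exact Structure the enumerator cached, the property is
    // guaranteed to be a structure property, so the answer is simply true.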
    load32(Address(regT0, JSCell::structureIDOffset()), regT0);
    addSlowCase(branch32(NotEqual, regT0, Address(regT1, JSPropertyNameEnumerator::cachedStructureIDOffset())));

    move(TrustedImm64(JSValue::encode(jsBoolean(true))), regT0);
    emitPutVirtualRegister(dst);
}

void JIT::privateCompileHasIndexedProperty(ByValInfo* byValInfo, ReturnAddressPtr returnAddress, JITArrayMode arrayMode)
{
    Instruction* currentInstruction = &m_codeBlock->instructions()[byValInfo->bytecodeIndex];

    PatchableJump badType;

    // FIXME: Add support for other types like TypedArrays and Arguments.
    // See https://bugs.webkit.org/show_bug.cgi?id=135033 and https://bugs.webkit.org/show_bug.cgi?id=135034.
    JumpList slowCases = emitLoadForArrayMode(currentInstruction, arrayMode, badType);
    move(TrustedImm64(JSValue::encode(jsBoolean(true))), regT0);
    Jump done = jump();

    LinkBuffer patchBuffer(*this, m_codeBlock);

    patchBuffer.link(badType, CodeLocationLabel<NoPtrTag>(MacroAssemblerCodePtr<NoPtrTag>::createFromExecutableAddress(returnAddress.value())).labelAtOffset(byValInfo->returnAddressToSlowPath));
    patchBuffer.link(slowCases, CodeLocationLabel<NoPtrTag>(MacroAssemblerCodePtr<NoPtrTag>::createFromExecutableAddress(returnAddress.value())).labelAtOffset(byValInfo->returnAddressToSlowPath));

    patchBuffer.link(done, byValInfo->badTypeJump.labelAtOffset(byValInfo->badTypeJumpToDone));

    byValInfo->stubRoutine = FINALIZE_CODE_FOR_STUB(
        m_codeBlock, patchBuffer, JITStubRoutinePtrTag,
        "Baseline has_indexed_property stub for %s, return point %p", toCString(*m_codeBlock).data(), returnAddress.value());

    MacroAssembler::repatchJump(byValInfo->badTypeJump, CodeLocationLabel<JITStubRoutinePtrTag>(byValInfo->stubRoutine->code().code()));
    MacroAssembler::repatchCall(CodeLocationCall<NoPtrTag>(MacroAssemblerCodePtr<NoPtrTag>(returnAddress)), FunctionPtr<OperationPtrTag>(operationHasIndexedPropertyGeneric));
}

void JIT::emit_op_has_indexed_property(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int property = currentInstruction[3].u.operand;
    ArrayProfile* profile = currentInstruction[4].u.arrayProfile;
    ByValInfo* byValInfo = m_codeBlock->addByValInfo();

    emitGetVirtualRegisters(base, regT0, property, regT1);

    // This is technically incorrect - we're zero-extending an int32. On the hot path this doesn't matter.
    // We check the value as if it was a uint32 against the m_vectorLength - which will always fail if
    // number was signed since m_vectorLength is always less than intmax (since the total allocation
    // size is always less than 4Gb). As such zero extending will have been correct (and extending the value
    // to 64-bits is necessary since it's used in the address calculation). We zero extend rather than sign
    // extending since it makes it easier to re-tag the value in the slow case.
    zeroExtend32ToPtr(regT1, regT1);

    emitJumpSlowCaseIfNotJSCell(regT0, base);
    emitArrayProfilingSiteWithCell(regT0, regT2, profile);
    and32(TrustedImm32(IndexingShapeMask), regT2);

    JITArrayMode mode = chooseArrayMode(profile);
    PatchableJump badType;

    // FIXME: Add support for other types like TypedArrays and Arguments.
    // See https://bugs.webkit.org/show_bug.cgi?id=135033 and https://bugs.webkit.org/show_bug.cgi?id=135034.
    JumpList slowCases = emitLoadForArrayMode(currentInstruction, mode, badType);

    move(TrustedImm64(JSValue::encode(jsBoolean(true))), regT0);

    addSlowCase(badType);
    addSlowCase(slowCases);

    Label done = label();

    emitPutVirtualRegister(dst);

    Label nextHotPath = label();

    m_byValCompilationInfo.append(ByValCompilationInfo(byValInfo, m_bytecodeOffset, PatchableJump(), badType, mode, profile, done, nextHotPath));
}

void JIT::emitSlow_op_has_indexed_property(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkAllSlowCases(iter);

    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int property = currentInstruction[3].u.operand;
    ByValInfo* byValInfo = m_byValCompilationInfo[m_byValInstructionIndex].byValInfo;

    Label slowPath = label();

    emitGetVirtualRegister(base, regT0);
    emitGetVirtualRegister(property, regT1);
    Call call = callOperation(operationHasIndexedPropertyDefault, dst, regT0, regT1, byValInfo);

    m_byValCompilationInfo[m_byValInstructionIndex].slowPathTarget = slowPath;
    m_byValCompilationInfo[m_byValInstructionIndex].returnAddress = call;
    m_byValInstructionIndex++;
}

void JIT::emit_op_get_direct_pname(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int index = currentInstruction[4].u.operand;
    int enumerator = currentInstruction[5].u.operand;

    // Check that base is a cell
    emitGetVirtualRegister(base, regT0);
    emitJumpSlowCaseIfNotJSCell(regT0, base);

    // Check the structure
    emitGetVirtualRegister(enumerator, regT2);
    load32(Address(regT0, JSCell::structureIDOffset()), regT1);
    addSlowCase(branch32(NotEqual, regT1, Address(regT2, JSPropertyNameEnumerator::cachedStructureIDOffset())));

    // Compute the offset
    emitGetVirtualRegister(index, regT1);
    // If index is less than the enumerator's cached inline storage, then it's an inline access
    Jump outOfLineAccess = branch32(AboveOrEqual, regT1, Address(regT2, JSPropertyNameEnumerator::cachedInlineCapacityOffset()));
    addPtr(TrustedImm32(JSObject::offsetOfInlineStorage()), regT0);
    signExtend32ToPtr(regT1, regT1);
    load64(BaseIndex(regT0, regT1, TimesEight), regT0);

    Jump done = jump();

    // Otherwise it's out of line
    outOfLineAccess.link(this);
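    // Out-of-line properties live in the butterfly at negative indices. Convert the
    // enumerator index into a negative offset past the inline capacity, then index off
    // the butterfly using the offset of the first out-of-line property.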
    loadPtr(Address(regT0, JSObject::butterflyOffset()), regT0);
    sub32(Address(regT2, JSPropertyNameEnumerator::cachedInlineCapacityOffset()), regT1);
    neg32(regT1);
    signExtend32ToPtr(regT1, regT1);
    int32_t offsetOfFirstProperty = static_cast<int32_t>(offsetInButterfly(firstOutOfLineOffset)) * sizeof(EncodedJSValue);
    load64(BaseIndex(regT0, regT1, TimesEight, offsetOfFirstProperty), regT0);

    done.link(this);
    emitValueProfilingSite();
    emitPutVirtualRegister(dst, regT0);
}

void JIT::emit_op_enumerator_structure_pname(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int enumerator = currentInstruction[2].u.operand;
    int index = currentInstruction[3].u.operand;

    emitGetVirtualRegister(index, regT0);
    emitGetVirtualRegister(enumerator, regT1);
    Jump inBounds = branch32(Below, regT0, Address(regT1, JSPropertyNameEnumerator::endStructurePropertyIndexOffset()));

    move(TrustedImm64(JSValue::encode(jsNull())), regT0);

    Jump done = jump();
    inBounds.link(this);

    loadPtr(Address(regT1, JSPropertyNameEnumerator::cachedPropertyNamesVectorOffset()), regT1);
    signExtend32ToPtr(regT0, regT0);
    load64(BaseIndex(regT1, regT0, TimesEight), regT0);

    done.link(this);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_enumerator_generic_pname(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int enumerator = currentInstruction[2].u.operand;
    int index = currentInstruction[3].u.operand;

    emitGetVirtualRegister(index, regT0);
    emitGetVirtualRegister(enumerator, regT1);
    Jump inBounds = branch32(Below, regT0, Address(regT1, JSPropertyNameEnumerator::endGenericPropertyIndexOffset()));

    move(TrustedImm64(JSValue::encode(jsNull())), regT0);

    Jump done = jump();
    inBounds.link(this);

    loadPtr(Address(regT1, JSPropertyNameEnumerator::cachedPropertyNamesVectorOffset()), regT1);
    signExtend32ToPtr(regT0, regT0);
    load64(BaseIndex(regT1, regT0, TimesEight), regT0);

    done.link(this);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_profile_type(Instruction* currentInstruction)
{
    TypeLocation* cachedTypeLocation = currentInstruction[2].u.location;
    int valueToProfile = currentInstruction[1].u.operand;

    emitGetVirtualRegister(valueToProfile, regT0);

    JumpList jumpToEnd;

    jumpToEnd.append(branchIfEmpty(regT0));

    // Compile in a predictive type check, if possible, to see if we can skip writing to the log.
    // These typechecks are inlined to match those of the 64-bit JSValue type checks.
    if (cachedTypeLocation->m_lastSeenType == TypeUndefined)
        jumpToEnd.append(branchIfUndefined(regT0));
    else if (cachedTypeLocation->m_lastSeenType == TypeNull)
        jumpToEnd.append(branchIfNull(regT0));
    else if (cachedTypeLocation->m_lastSeenType == TypeBoolean)
        jumpToEnd.append(branchIfBoolean(regT0, regT1));
    else if (cachedTypeLocation->m_lastSeenType == TypeAnyInt)
        jumpToEnd.append(branchIfInt32(regT0));
    else if (cachedTypeLocation->m_lastSeenType == TypeNumber)
        jumpToEnd.append(branchIfNumber(regT0));
    else if (cachedTypeLocation->m_lastSeenType == TypeString) {
        Jump isNotCell = branchIfNotCell(regT0);
        jumpToEnd.append(branchIfString(regT0));
        isNotCell.link(this);
    }

    // Load the type profiling log into T2.
    TypeProfilerLog* cachedTypeProfilerLog = m_vm->typeProfilerLog();
    move(TrustedImmPtr(cachedTypeProfilerLog), regT2);
    // Load the next log entry into T1.
    loadPtr(Address(regT2, TypeProfilerLog::currentLogEntryOffset()), regT1);

    // Store the JSValue onto the log entry.
    store64(regT0, Address(regT1, TypeProfilerLog::LogEntry::valueOffset()));

    // Store the structureID of the cell if T0 is a cell, otherwise, store 0 on the log entry.
    Jump notCell = branchIfNotCell(regT0);
    load32(Address(regT0, JSCell::structureIDOffset()), regT0);
    store32(regT0, Address(regT1, TypeProfilerLog::LogEntry::structureIDOffset()));
    Jump skipIsCell = jump();
    notCell.link(this);
    store32(TrustedImm32(0), Address(regT1, TypeProfilerLog::LogEntry::structureIDOffset()));
    skipIsCell.link(this);

    // Store the typeLocation on the log entry.
    move(TrustedImmPtr(cachedTypeLocation), regT0);
    store64(regT0, Address(regT1, TypeProfilerLog::LogEntry::locationOffset()));

    // Increment the current log entry.
    addPtr(TrustedImm32(sizeof(TypeProfilerLog::LogEntry)), regT1);
    store64(regT1, Address(regT2, TypeProfilerLog::currentLogEntryOffset()));
    Jump skipClearLog = branchPtr(NotEqual, regT1, TrustedImmPtr(cachedTypeProfilerLog->logEndPtr()));
    // Clear the log if we're at the end of the log.
    callOperation(operationProcessTypeProfilerLog);
    skipClearLog.link(this);

    jumpToEnd.link(this);
}

void JIT::emit_op_log_shadow_chicken_prologue(Instruction* currentInstruction)
{
    updateTopCallFrame();
    static_assert(nonArgGPR0 != regT0 && nonArgGPR0 != regT2, "we will have problems if this is false.");
    GPRReg shadowPacketReg = regT0;
    GPRReg scratch1Reg = nonArgGPR0; // This must be a non-argument register.
    GPRReg scratch2Reg = regT2;
    ensureShadowChickenPacket(*vm(), shadowPacketReg, scratch1Reg, scratch2Reg);
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT3);
    logShadowChickenProloguePacket(shadowPacketReg, scratch1Reg, regT3);
}

void JIT::emit_op_log_shadow_chicken_tail(Instruction* currentInstruction)
{
    updateTopCallFrame();
    static_assert(nonArgGPR0 != regT0 && nonArgGPR0 != regT2, "we will have problems if this is false.");
    GPRReg shadowPacketReg = regT0;
    GPRReg scratch1Reg = nonArgGPR0; // This must be a non-argument register.
    GPRReg scratch2Reg = regT2;
    ensureShadowChickenPacket(*vm(), shadowPacketReg, scratch1Reg, scratch2Reg);
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT2);
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT3);
    logShadowChickenTailPacket(shadowPacketReg, JSValueRegs(regT2), regT3, m_codeBlock, CallSiteIndex(m_bytecodeOffset));
}

#endif // USE(JSVALUE64)

void JIT::emit_op_profile_control_flow(Instruction* currentInstruction)
{
    BasicBlockLocation* basicBlockLocation = currentInstruction[1].u.basicBlockLocation;
#if USE(JSVALUE64)
    basicBlockLocation->emitExecuteCode(*this);
#else
    basicBlockLocation->emitExecuteCode(*this, regT0);
#endif
}

void JIT::emit_op_argument_count(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    load32(payloadFor(CallFrameSlot::argumentCount), regT0);
    sub32(TrustedImm32(1), regT0);
    JSValueRegs result = JSValueRegs::withTwoAvailableRegs(regT0, regT1);
    boxInt32(regT0, result);
    emitPutVirtualRegister(dst, result);
}

void JIT::emit_op_get_rest_length(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    unsigned numParamsToSkip = currentInstruction[2].u.unsignedValue;
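    // The rest parameter length is max(argumentCount - 1 - numParamsToSkip, 0); the -1
    // accounts for |this| being included in the argument count.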
    load32(payloadFor(CallFrameSlot::argumentCount), regT0);
    sub32(TrustedImm32(1), regT0);
    Jump zeroLength = branch32(LessThanOrEqual, regT0, Imm32(numParamsToSkip));
    sub32(Imm32(numParamsToSkip), regT0);
#if USE(JSVALUE64)
    boxInt32(regT0, JSValueRegs(regT0));
#endif
    Jump done = jump();

    zeroLength.link(this);
#if USE(JSVALUE64)
    move(TrustedImm64(JSValue::encode(jsNumber(0))), regT0);
#else
    move(TrustedImm32(0), regT0);
#endif

    done.link(this);
#if USE(JSVALUE64)
    emitPutVirtualRegister(dst, regT0);
#else
    move(TrustedImm32(JSValue::Int32Tag), regT1);
    emitPutVirtualRegister(dst, JSValueRegs(regT1, regT0));
#endif
}

void JIT::emit_op_get_argument(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int index = currentInstruction[2].u.operand;
#if USE(JSVALUE64)
    JSValueRegs resultRegs(regT0);
#else
    JSValueRegs resultRegs(regT1, regT0);
#endif

    load32(payloadFor(CallFrameSlot::argumentCount), regT2);
    Jump argumentOutOfBounds = branch32(LessThanOrEqual, regT2, TrustedImm32(index));
    loadValue(addressFor(CallFrameSlot::thisArgument + index), resultRegs);
    Jump done = jump();

    argumentOutOfBounds.link(this);
    moveValue(jsUndefined(), resultRegs);

    done.link(this);
    emitValueProfilingSite();
    emitPutVirtualRegister(dst, resultRegs);
}

} // namespace JSC

#endif // ENABLE(JIT)