AssemblyHelpers should not have a VM field
Source/JavaScriptCore/jit/JITOpcodes.cpp
/*
 * Copyright (C) 2009-2017 Apple Inc. All rights reserved.
 * Copyright (C) 2010 Patrick Gansterer <paroga@paroga.com>
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"

#if ENABLE(JIT)
#include "JIT.h"

#include "BasicBlockLocation.h"
#include "Exception.h"
#include "Heap.h"
#include "Interpreter.h"
#include "JITInlines.h"
#include "JSArray.h"
#include "JSCell.h"
#include "JSFunction.h"
#include "JSPropertyNameEnumerator.h"
#include "LinkBuffer.h"
#include "MaxFrameExtentForSlowPathCall.h"
#include "SlowPathCall.h"
#include "TypeLocation.h"
#include "TypeProfilerLog.h"
#include "VirtualRegister.h"
#include "Watchdog.h"

namespace JSC {

#if USE(JSVALUE64)
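
// A quick reminder of the 64-bit JSValue encoding these opcodes rely on (see JSCJSValue.h):
// pointers to cells have all sixteen high bits clear; int32s are TagTypeNumber | value
// (TagTypeNumber = 0xffff000000000000); doubles are their raw bits plus 2^48; and the
// immediates are ValueFalse = 0x06, ValueTrue = 0x07, ValueUndefined = 0x0a,
// ValueNull = 0x02, with the empty value encoding as all-zero bits.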

JIT::CodeRef JIT::privateCompileCTINativeCall(VM* vm, NativeFunction)
{
    return vm->getCTIStub(nativeCallGenerator);
}

void JIT::emit_op_mov(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_end(Instruction* currentInstruction)
{
    RELEASE_ASSERT(returnValueGPR != callFrameRegister);
    emitGetVirtualRegister(currentInstruction[1].u.operand, returnValueGPR);
    emitRestoreCalleeSaves();
    emitFunctionEpilogue();
    ret();
}

void JIT::emit_op_jmp(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[1].u.operand;
    addJump(jump(), target);
}

void JIT::emit_op_new_object(Instruction* currentInstruction)
{
    Structure* structure = currentInstruction[3].u.objectAllocationProfile->structure();
    size_t allocationSize = JSFinalObject::allocationSize(structure->inlineCapacity());
    MarkedAllocator* allocator = subspaceFor<JSFinalObject>(*m_vm)->allocatorFor(allocationSize);

    RegisterID resultReg = regT0;
    RegisterID allocatorReg = regT1;
    RegisterID scratchReg = regT2;

    move(TrustedImmPtr(allocator), allocatorReg);
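    // The dummy (unset) slow-case entry registered here presumably keeps the slow-case count
    // fixed, so that emitSlow_op_new_object can unconditionally link two entries.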
    if (allocator)
        addSlowCase(Jump());
    JumpList slowCases;
    emitAllocateJSObject(resultReg, allocator, allocatorReg, TrustedImmPtr(structure), TrustedImmPtr(0), scratchReg, slowCases);
    emitInitializeInlineStorage(resultReg, structure->inlineCapacity());
    addSlowCase(slowCases);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_new_object(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    int dst = currentInstruction[1].u.operand;
    Structure* structure = currentInstruction[3].u.objectAllocationProfile->structure();
    callOperation(operationNewObject, structure);
    emitStoreCell(dst, returnValueGPR);
}

void JIT::emit_op_overrides_has_instance(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int constructor = currentInstruction[2].u.operand;
    int hasInstanceValue = currentInstruction[3].u.operand;

    emitGetVirtualRegister(hasInstanceValue, regT0);

    // We stay on the fast path only when hasInstanceValue is still the default
    // Function.prototype[Symbol.hasInstance], since then we know exactly what it would do.
    Jump customHasInstanceValue = branchPtr(NotEqual, regT0, TrustedImmPtr(m_codeBlock->globalObject()->functionProtoHasInstanceSymbolFunction()));

    emitGetVirtualRegister(constructor, regT0);

    // Check that the constructor 'ImplementsDefaultHasInstance', i.e. that the object is neither a C-API user nor a bound function.
    test8(Zero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(ImplementsDefaultHasInstance), regT0);
    emitTagBool(regT0);
    Jump done = jump();

    customHasInstanceValue.link(this);
    move(TrustedImm32(ValueTrue), regT0);

    done.link(this);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_instanceof(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;
    int proto = currentInstruction[3].u.operand;

    // Load the operands into registers: value in regT2 and proto in regT1, leaving regT0 free for the result.
    emitGetVirtualRegister(value, regT2);
    emitGetVirtualRegister(proto, regT1);

    // Check that value and proto are cells. baseVal must be a cell - this is checked by the get_by_id for Symbol.hasInstance.
    emitJumpSlowCaseIfNotJSCell(regT2, value);
    emitJumpSlowCaseIfNotJSCell(regT1, proto);

    // Check that proto is an object.
    addSlowCase(emitJumpIfCellNotObject(regT1));

    // Optimistically load the result true, and start looping.
    // Initially, regT1 still contains proto and regT2 still contains value.
    // As we loop regT2 will be updated with its prototype, recursively walking the prototype chain.
    move(TrustedImm64(JSValue::encode(jsBoolean(true))), regT0);
    Label loop(this);

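    // Proxies can intercept getPrototypeOf, so their prototype cannot be read structurally here; bail to the slow path.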
    addSlowCase(branch8(Equal, Address(regT2, JSCell::typeInfoTypeOffset()), TrustedImm32(ProxyObjectType)));

    // Load the prototype of the object in regT2.  If this is equal to regT1 - WIN!
    // Otherwise, check if we've hit null - if we have then drop out of the loop, if not go again.
    emitLoadStructure(*vm(), regT2, regT2, regT3);
    load64(Address(regT2, Structure::prototypeOffset()), regT2);
    Jump isInstance = branchPtr(Equal, regT2, regT1);
    emitJumpIfJSCell(regT2).linkTo(loop, this);

    // We get here either by dropping out of the loop, or if value was not an Object.  Result is false.
    move(TrustedImm64(JSValue::encode(jsBoolean(false))), regT0);

    // isInstance jumps right down to here, to skip setting the result to false (it has already set true).
    isInstance.link(this);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_instanceof_custom(Instruction*)
{
    // This always goes to the slow path since we expect it to be rare.
    addSlowCase(jump());
}

void JIT::emit_op_is_empty(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
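    // The empty JSValue encodes as all-zero bits, so a single 64-bit compare against 0 suffices.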
    compare64(Equal, regT0, TrustedImm32(JSValue::encode(JSValue())), regT0);

    emitTagBool(regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_is_undefined(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
    Jump isCell = emitJumpIfJSCell(regT0);

    compare64(Equal, regT0, TrustedImm32(ValueUndefined), regT0);
    Jump done = jump();

    isCell.link(this);
    Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    move(TrustedImm32(0), regT0);
    Jump notMasqueradesAsUndefined = jump();

    isMasqueradesAsUndefined.link(this);
    emitLoadStructure(*vm(), regT0, regT1, regT2);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    loadPtr(Address(regT1, Structure::globalObjectOffset()), regT1);
    comparePtr(Equal, regT0, regT1, regT0);

    notMasqueradesAsUndefined.link(this);
    done.link(this);
    emitTagBool(regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_is_boolean(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
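    // Booleans are exactly ValueFalse (0x06) and ValueTrue (0x07): xoring with ValueFalse leaves
    // 0 or 1 for a boolean, so a test against ~1 decides. E.g. jsNull() (0x02) becomes 0x04 and fails.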
    xor64(TrustedImm32(static_cast<int32_t>(ValueFalse)), regT0);
    test64(Zero, regT0, TrustedImm32(static_cast<int32_t>(~1)), regT0);
    emitTagBool(regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_is_number(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
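    // All numbers, and only numbers, have at least one of the sixteen TagTypeNumber bits set
    // (int32s carry the full tag; doubles are offset by 2^48), so one NonZero test suffices.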
    test64(NonZero, regT0, tagTypeNumberRegister, regT0);
    emitTagBool(regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_is_cell_with_type(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;
    int type = currentInstruction[3].u.operand;

    emitGetVirtualRegister(value, regT0);
    Jump isNotCell = emitJumpIfNotJSCell(regT0);

    compare8(Equal, Address(regT0, JSCell::typeInfoTypeOffset()), TrustedImm32(type), regT0);
    emitTagBool(regT0);
    Jump done = jump();

    isNotCell.link(this);
    move(TrustedImm32(ValueFalse), regT0);

    done.link(this);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_is_object(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
    Jump isNotCell = emitJumpIfNotJSCell(regT0);

    compare8(AboveOrEqual, Address(regT0, JSCell::typeInfoTypeOffset()), TrustedImm32(ObjectType), regT0);
    emitTagBool(regT0);
    Jump done = jump();

    isNotCell.link(this);
    move(TrustedImm32(ValueFalse), regT0);

    done.link(this);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_ret(Instruction* currentInstruction)
{
    ASSERT(callFrameRegister != regT1);
    ASSERT(regT1 != returnValueGPR);
    ASSERT(returnValueGPR != callFrameRegister);

    // Return the result in returnValueGPR.
    emitGetVirtualRegister(currentInstruction[1].u.operand, returnValueGPR);

    checkStackPointerAlignment();
    emitRestoreCalleeSaves();
    emitFunctionEpilogue();
    ret();
}

void JIT::emit_op_to_primitive(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);

    Jump isImm = emitJumpIfNotJSCell(regT0);
    addSlowCase(emitJumpIfCellObject(regT0));
    isImm.link(this);

    if (dst != src)
        emitPutVirtualRegister(dst);
}

void JIT::emit_op_set_function_name(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT1);
    callOperation(operationSetFunctionName, regT0, regT1);
}

void JIT::emit_op_strcat(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_strcat);
    slowPathCall.call();
}

void JIT::emit_op_not(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);

    // Invert against JSValue(false); if the value was tagged as a boolean, then all bits will be
    // clear other than the low bit (which will be 0 or 1 for false or true inputs respectively).
    // Then invert against JSValue(true), which will add the tag back in, and flip the low bit.
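    // Worked example: jsBoolean(true) = 0x07; 0x07 ^ ValueFalse(0x06) = 0x01, which passes the
    // ~1 test, and 0x01 ^ ValueTrue(0x07) = 0x06 = jsBoolean(false). Non-booleans fail the test.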
    xor64(TrustedImm32(static_cast<int32_t>(ValueFalse)), regT0);
    addSlowCase(branchTestPtr(NonZero, regT0, TrustedImm32(static_cast<int32_t>(~1))));
    xor64(TrustedImm32(static_cast<int32_t>(ValueTrue)), regT0);

    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_jfalse(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[2].u.operand;

    GPRReg value = regT0;
    GPRReg result = regT1;
    GPRReg scratch = regT2;
    bool shouldCheckMasqueradesAsUndefined = true;

    emitGetVirtualRegister(currentInstruction[1].u.operand, value);
    emitConvertValueToBoolean(*vm(), JSValueRegs(value), result, scratch, fpRegT0, fpRegT1, shouldCheckMasqueradesAsUndefined, m_codeBlock->globalObject());

    addJump(branchTest32(Zero, result), target);
}

void JIT::emit_op_jeq_null(Instruction* currentInstruction)
{
    int src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
    Jump isNotMasqueradesAsUndefined = branchTest8(Zero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    emitLoadStructure(*vm(), regT0, regT2, regT1);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    addJump(branchPtr(Equal, Address(regT2, Structure::globalObjectOffset()), regT0), target);
    Jump masqueradesGlobalObjectIsForeign = jump();

    // Now handle the immediate cases - undefined & null
    isImmediate.link(this);
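    // undefined (0x0a) and null (0x02) differ only in TagBitUndefined (0x08), so masking
    // that bit off maps both onto ValueNull and a single compare handles them together.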
    and64(TrustedImm32(~TagBitUndefined), regT0);
    addJump(branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsNull()))), target);

    isNotMasqueradesAsUndefined.link(this);
    masqueradesGlobalObjectIsForeign.link(this);
}

void JIT::emit_op_jneq_null(Instruction* currentInstruction)
{
    int src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
    addJump(branchTest8(Zero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined)), target);
    emitLoadStructure(*vm(), regT0, regT2, regT1);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    addJump(branchPtr(NotEqual, Address(regT2, Structure::globalObjectOffset()), regT0), target);
    Jump wasNotImmediate = jump();

    // Now handle the immediate cases - undefined & null
    isImmediate.link(this);
    and64(TrustedImm32(~TagBitUndefined), regT0);
    addJump(branch64(NotEqual, regT0, TrustedImm64(JSValue::encode(jsNull()))), target);

    wasNotImmediate.link(this);
}

void JIT::emit_op_jneq_ptr(Instruction* currentInstruction)
{
    int src = currentInstruction[1].u.operand;
    Special::Pointer ptr = currentInstruction[2].u.specialPointer;
    unsigned target = currentInstruction[3].u.operand;

    emitGetVirtualRegister(src, regT0);
    CCallHelpers::Jump equal = branchPtr(Equal, regT0, TrustedImmPtr(actualPointerFor(m_codeBlock, ptr)));
    store32(TrustedImm32(1), &currentInstruction[4].u.operand);
    addJump(jump(), target);
    equal.link(this);
}

void JIT::emit_op_eq(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotInt(regT0, regT1, regT2);
    compare32(Equal, regT1, regT0, regT0);
    emitTagBool(regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_jtrue(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[2].u.operand;

    GPRReg value = regT0;
    GPRReg result = regT1;
    GPRReg scratch = regT2;
    bool shouldCheckMasqueradesAsUndefined = true;
    emitGetVirtualRegister(currentInstruction[1].u.operand, value);
    emitConvertValueToBoolean(*vm(), JSValueRegs(value), result, scratch, fpRegT0, fpRegT1, shouldCheckMasqueradesAsUndefined, m_codeBlock->globalObject());
    addJump(branchTest32(NonZero, result), target);
}

void JIT::emit_op_neq(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotInt(regT0, regT1, regT2);
    compare32(NotEqual, regT1, regT0, regT0);
    emitTagBool(regT0);

    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_throw(Instruction* currentInstruction)
{
    ASSERT(regT0 == returnValueGPR);
    copyCalleeSavesToVMEntryFrameCalleeSavesBuffer(*vm());
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
    callOperationNoExceptionCheck(operationThrow, regT0);
    jumpToExceptionHandler(*vm());
}

void JIT::emit_op_push_with_scope(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_push_with_scope);
    slowPathCall.call();
}

void JIT::compileOpStrictEq(Instruction* currentInstruction, CompileOpStrictEqType type)
{
    int dst = currentInstruction[1].u.operand;
    int src1 = currentInstruction[2].u.operand;
    int src2 = currentInstruction[3].u.operand;

    emitGetVirtualRegisters(src1, regT0, src2, regT1);

    // Jump slow if both are cells (to cover strings).
    move(regT0, regT2);
    or64(regT1, regT2);
    addSlowCase(emitJumpIfJSCell(regT2));

    // Jump slow if either is a double. First test if it's an integer, which is fine, and then test
    // if it's a double.
    Jump leftOK = emitJumpIfInt(regT0);
    addSlowCase(emitJumpIfNumber(regT0));
    leftOK.link(this);
    Jump rightOK = emitJumpIfInt(regT1);
    addSlowCase(emitJumpIfNumber(regT1));
    rightOK.link(this);

    if (type == OpStrictEq)
        compare64(Equal, regT1, regT0, regT0);
    else
        compare64(NotEqual, regT1, regT0, regT0);
    emitTagBool(regT0);

    emitPutVirtualRegister(dst);
}

void JIT::emit_op_stricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpStrictEq);
}

void JIT::emit_op_nstricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpNStrictEq);
}

void JIT::emit_op_to_number(Instruction* currentInstruction)
{
    int dstVReg = currentInstruction[1].u.operand;
    int srcVReg = currentInstruction[2].u.operand;
    emitGetVirtualRegister(srcVReg, regT0);

    addSlowCase(emitJumpIfNotNumber(regT0));

    emitValueProfilingSite();
    if (srcVReg != dstVReg)
        emitPutVirtualRegister(dstVReg);
}

void JIT::emit_op_to_string(Instruction* currentInstruction)
{
    int srcVReg = currentInstruction[2].u.operand;
    emitGetVirtualRegister(srcVReg, regT0);

    addSlowCase(emitJumpIfNotJSCell(regT0));
    addSlowCase(branch8(NotEqual, Address(regT0, JSCell::typeInfoTypeOffset()), TrustedImm32(StringType)));

    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_catch(Instruction* currentInstruction)
{
    restoreCalleeSavesFromVMEntryFrameCalleeSavesBuffer(*vm());

    move(TrustedImmPtr(m_vm), regT3);
    load64(Address(regT3, VM::callFrameForCatchOffset()), callFrameRegister);
    storePtr(TrustedImmPtr(nullptr), Address(regT3, VM::callFrameForCatchOffset()));

    addPtr(TrustedImm32(stackPointerOffsetFor(codeBlock()) * sizeof(Register)), callFrameRegister, stackPointerRegister);

    callOperationNoExceptionCheck(operationCheckIfExceptionIsUncatchableAndNotifyProfiler);
    Jump isCatchableException = branchTest32(Zero, returnValueGPR);
    jumpToExceptionHandler(*vm());
    isCatchableException.link(this);

    move(TrustedImmPtr(m_vm), regT3);
    load64(Address(regT3, VM::exceptionOffset()), regT0);
    store64(TrustedImm64(JSValue::encode(JSValue())), Address(regT3, VM::exceptionOffset()));
    emitPutVirtualRegister(currentInstruction[1].u.operand);

    load64(Address(regT0, Exception::valueOffset()), regT0);
    emitPutVirtualRegister(currentInstruction[2].u.operand);
}

void JIT::emit_op_assert(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_assert);
    slowPathCall.call();
}

void JIT::emit_op_create_lexical_environment(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_create_lexical_environment);
    slowPathCall.call();
}

void JIT::emit_op_get_parent_scope(Instruction* currentInstruction)
{
    int currentScope = currentInstruction[2].u.operand;
    emitGetVirtualRegister(currentScope, regT0);
    loadPtr(Address(regT0, JSScope::offsetOfNext()), regT0);
    emitStoreCell(currentInstruction[1].u.operand, regT0);
}

void JIT::emit_op_switch_imm(Instruction* currentInstruction)
{
    size_t tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // Create the jump table for switch destinations, and track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->switchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Immediate));
    jumpTable->ensureCTITable();

    emitGetVirtualRegister(scrutinee, regT0);
    callOperation(operationSwitchImmWithUnknownKeyType, regT0, tableIndex);
    jump(returnValueGPR);
}

void JIT::emit_op_switch_char(Instruction* currentInstruction)
{
    size_t tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // Create the jump table for switch destinations, and track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->switchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Character));
    jumpTable->ensureCTITable();

    emitGetVirtualRegister(scrutinee, regT0);
    callOperation(operationSwitchCharWithUnknownKeyType, regT0, tableIndex);
    jump(returnValueGPR);
}

void JIT::emit_op_switch_string(Instruction* currentInstruction)
{
    size_t tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // Create the jump table for switch destinations, and track this switch statement.
    StringJumpTable* jumpTable = &m_codeBlock->stringSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset));

    emitGetVirtualRegister(scrutinee, regT0);
    callOperation(operationSwitchStringWithUnknownKeyType, regT0, tableIndex);
    jump(returnValueGPR);
}

void JIT::emit_op_debug(Instruction* currentInstruction)
{
    load32(codeBlock()->debuggerRequestsAddress(), regT0);
    Jump noDebuggerRequests = branchTest32(Zero, regT0);
    callOperation(operationDebug, currentInstruction[1].u.operand);
    noDebuggerRequests.link(this);
}

void JIT::emit_op_eq_null(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src1 = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src1, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    move(TrustedImm32(0), regT0);
    Jump wasNotMasqueradesAsUndefined = jump();

    isMasqueradesAsUndefined.link(this);
    emitLoadStructure(*vm(), regT0, regT2, regT1);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    loadPtr(Address(regT2, Structure::globalObjectOffset()), regT2);
    comparePtr(Equal, regT0, regT2, regT0);
    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    and64(TrustedImm32(~TagBitUndefined), regT0);
    compare64(Equal, regT0, TrustedImm32(ValueNull), regT0);

    wasNotImmediate.link(this);
    wasNotMasqueradesAsUndefined.link(this);

    emitTagBool(regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_neq_null(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src1 = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src1, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    move(TrustedImm32(1), regT0);
    Jump wasNotMasqueradesAsUndefined = jump();

    isMasqueradesAsUndefined.link(this);
    emitLoadStructure(*vm(), regT0, regT2, regT1);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    loadPtr(Address(regT2, Structure::globalObjectOffset()), regT2);
    comparePtr(NotEqual, regT0, regT2, regT0);
    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    and64(TrustedImm32(~TagBitUndefined), regT0);
    compare64(NotEqual, regT0, TrustedImm32(ValueNull), regT0);

    wasNotImmediate.link(this);
    wasNotMasqueradesAsUndefined.link(this);

    emitTagBool(regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_enter(Instruction*)
{
    // Even though CTI doesn't use them, we initialize our constant
    // registers to zap stale pointers, to avoid unnecessarily prolonging
    // object lifetime and increasing GC pressure.
    size_t count = m_codeBlock->m_numVars;
    for (size_t j = CodeBlock::llintBaselineCalleeSaveSpaceAsVirtualRegisters(); j < count; ++j)
        emitInitRegister(virtualRegisterForLocal(j).offset());

    emitWriteBarrier(m_codeBlock);

    emitEnterOptimizationCheck();
}

void JIT::emit_op_get_scope(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    emitGetFromCallFrameHeaderPtr(CallFrameSlot::callee, regT0);
    loadPtr(Address(regT0, JSFunction::offsetOfScopeChain()), regT0);
    emitStoreCell(dst, regT0);
}

void JIT::emit_op_to_this(Instruction* currentInstruction)
{
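    // Fast path: |this| is already a final object whose Structure matches the one cached in the
    // instruction stream; anything else (including an empty cache) falls through to the slow path.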
    WriteBarrierBase<Structure>* cachedStructure = &currentInstruction[2].u.structure;
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT1);

    emitJumpSlowCaseIfNotJSCell(regT1);

    addSlowCase(branch8(NotEqual, Address(regT1, JSCell::typeInfoTypeOffset()), TrustedImm32(FinalObjectType)));
    loadPtr(cachedStructure, regT2);
    addSlowCase(branchTestPtr(Zero, regT2));
    load32(Address(regT2, Structure::structureIDOffset()), regT2);
    addSlowCase(branch32(NotEqual, Address(regT1, JSCell::structureIDOffset()), regT2));
}

void JIT::emit_op_create_this(Instruction* currentInstruction)
{
    int callee = currentInstruction[2].u.operand;
    WriteBarrierBase<JSCell>* cachedFunction = &currentInstruction[4].u.jsCell;
    RegisterID calleeReg = regT0;
    RegisterID rareDataReg = regT4;
    RegisterID resultReg = regT0;
    RegisterID allocatorReg = regT1;
    RegisterID structureReg = regT2;
    RegisterID cachedFunctionReg = regT4;
    RegisterID scratchReg = regT3;

    emitGetVirtualRegister(callee, calleeReg);
    addSlowCase(branch8(NotEqual, Address(calleeReg, JSCell::typeInfoTypeOffset()), TrustedImm32(JSFunctionType)));
    loadPtr(Address(calleeReg, JSFunction::offsetOfRareData()), rareDataReg);
    addSlowCase(branchTestPtr(Zero, rareDataReg));
    loadPtr(Address(rareDataReg, FunctionRareData::offsetOfObjectAllocationProfile() + ObjectAllocationProfile::offsetOfAllocator()), allocatorReg);
    loadPtr(Address(rareDataReg, FunctionRareData::offsetOfObjectAllocationProfile() + ObjectAllocationProfile::offsetOfStructure()), structureReg);
    addSlowCase(branchTestPtr(Zero, allocatorReg));

    loadPtr(cachedFunction, cachedFunctionReg);
    Jump hasSeenMultipleCallees = branchPtr(Equal, cachedFunctionReg, TrustedImmPtr(JSCell::seenMultipleCalleeObjects()));
    addSlowCase(branchPtr(NotEqual, calleeReg, cachedFunctionReg));
    hasSeenMultipleCallees.link(this);

    JumpList slowCases;
    emitAllocateJSObject(resultReg, nullptr, allocatorReg, structureReg, TrustedImmPtr(0), scratchReg, slowCases);
    emitGetVirtualRegister(callee, scratchReg);
    loadPtr(Address(scratchReg, JSFunction::offsetOfRareData()), scratchReg);
    load32(Address(scratchReg, FunctionRareData::offsetOfObjectAllocationProfile() + ObjectAllocationProfile::offsetOfInlineCapacity()), scratchReg);
    emitInitializeInlineStorage(resultReg, scratchReg);
    addSlowCase(slowCases);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_create_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    // These link in the order the slow cases were added above.
    linkSlowCase(iter); // Callee::m_type != JSFunctionType.
    linkSlowCase(iter); // Callee has no rare data.
    linkSlowCase(iter); // Callee has no allocation profile.
    linkSlowCase(iter); // Cached function didn't match.
    linkSlowCase(iter); // Allocation failed (no allocator).
    linkSlowCase(iter); // Allocation failed (allocator empty).

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_create_this);
    slowPathCall.call();
}

void JIT::emit_op_check_tdz(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
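    // A variable still in its temporal dead zone holds the empty JSValue, which encodes as zero.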
    addSlowCase(branchTest64(Zero, regT0));
}

void JIT::emitSlow_op_check_tdz(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_throw_tdz_error);
    slowPathCall.call();
}

// Slow cases

void JIT::emitSlow_op_to_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_to_this);
    slowPathCall.call();
}

void JIT::emitSlow_op_to_primitive(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_to_primitive);
    slowPathCall.call();
}

void JIT::emitSlow_op_not(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_not);
    slowPathCall.call();
}

void JIT::emitSlow_op_eq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    callOperation(operationCompareEq, regT0, regT1);
    emitTagBool(returnValueGPR);
    emitPutVirtualRegister(currentInstruction[1].u.operand, returnValueGPR);
}

void JIT::emitSlow_op_neq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    callOperation(operationCompareEq, regT0, regT1);
    xor32(TrustedImm32(0x1), regT0);
    emitTagBool(returnValueGPR);
    emitPutVirtualRegister(currentInstruction[1].u.operand, returnValueGPR);
}

void JIT::emitSlow_op_stricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_stricteq);
    slowPathCall.call();
}

void JIT::emitSlow_op_nstricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_nstricteq);
    slowPathCall.call();
}

void JIT::emitSlow_op_instanceof(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;
    int proto = currentInstruction[3].u.operand;

    linkSlowCaseIfNotJSCell(iter, value);
    linkSlowCaseIfNotJSCell(iter, proto);
    linkSlowCase(iter);
    linkSlowCase(iter);
    emitGetVirtualRegister(value, regT0);
    emitGetVirtualRegister(proto, regT1);
    callOperation(operationInstanceOf, dst, regT0, regT1);
}

void JIT::emitSlow_op_instanceof_custom(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;
    int constructor = currentInstruction[3].u.operand;
    int hasInstanceValue = currentInstruction[4].u.operand;

    linkSlowCase(iter);
    emitGetVirtualRegister(value, regT0);
    emitGetVirtualRegister(constructor, regT1);
    emitGetVirtualRegister(hasInstanceValue, regT2);
    callOperation(operationInstanceOfCustom, regT0, regT1, regT2);
    emitTagBool(returnValueGPR);
    emitPutVirtualRegister(dst, returnValueGPR);
}

void JIT::emitSlow_op_to_number(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_to_number);
    slowPathCall.call();
}

void JIT::emitSlow_op_to_string(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter); // Not JSCell.
    linkSlowCase(iter); // Not JSString.

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_to_string);
    slowPathCall.call();
}

#endif // USE(JSVALUE64)

void JIT::emit_op_loop_hint(Instruction*)
{
    // Emit the JIT optimization check:
    if (canBeOptimized()) {
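        // The execution counter counts up from a negative threshold; once this add makes it
        // non-negative, the code has run hot enough to take the slow path and try to tier up.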
        addSlowCase(branchAdd32(PositiveOrZero, TrustedImm32(Options::executionCounterIncrementForLoop()),
            AbsoluteAddress(m_codeBlock->addressOfJITExecuteCounter())));
    }
}

void JIT::emitSlow_op_loop_hint(Instruction*, Vector<SlowCaseEntry>::iterator& iter)
{
#if ENABLE(DFG_JIT)
    // Emit the slow path for the JIT optimization check:
    if (canBeOptimized()) {
        linkSlowCase(iter);

        copyCalleeSavesFromFrameOrRegisterToVMEntryFrameCalleeSavesBuffer(*vm());

        callOperation(operationOptimize, m_bytecodeOffset);
        Jump noOptimizedEntry = branchTestPtr(Zero, returnValueGPR);
        if (!ASSERT_DISABLED) {
            Jump ok = branchPtr(MacroAssembler::Above, returnValueGPR, TrustedImmPtr(bitwise_cast<void*>(static_cast<intptr_t>(1000))));
            abortWithReason(JITUnreasonableLoopHintJumpTarget);
            ok.link(this);
        }
        jump(returnValueGPR);
        noOptimizedEntry.link(this);

        emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_loop_hint));
    }
#else
    UNUSED_PARAM(iter);
#endif
}

void JIT::emit_op_throw_static_error(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_throw_static_error);
    slowPathCall.call();
}

void JIT::emit_op_check_traps(Instruction*)
{
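    // Other threads set the VM's need-trap-handling flag to request things like termination or
    // debugger interruption; this is a cheap poll of that flag on the fast path.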
    addSlowCase(branchTest8(NonZero, AbsoluteAddress(m_vm->needTrapHandlingAddress())));
}

void JIT::emitSlow_op_check_traps(Instruction*, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    callOperation(operationHandleTraps);
}

void JIT::emit_op_new_regexp(Instruction* currentInstruction)
{
    callOperation(operationNewRegexp, currentInstruction[1].u.operand, m_codeBlock->regexp(currentInstruction[2].u.operand));
}

void JIT::emitNewFuncCommon(Instruction* currentInstruction)
{
    Jump lazyJump;
    int dst = currentInstruction[1].u.operand;

#if USE(JSVALUE64)
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);
#else
    emitLoadPayload(currentInstruction[2].u.operand, regT0);
#endif
    FunctionExecutable* funcExec = m_codeBlock->functionDecl(currentInstruction[3].u.operand);

    OpcodeID opcodeID = m_vm->interpreter->getOpcodeID(currentInstruction->u.opcode);
    if (opcodeID == op_new_func)
        callOperation(operationNewFunction, dst, regT0, funcExec);
    else if (opcodeID == op_new_generator_func)
        callOperation(operationNewGeneratorFunction, dst, regT0, funcExec);
    else {
        ASSERT(opcodeID == op_new_async_func);
        callOperation(operationNewAsyncFunction, dst, regT0, funcExec);
    }
}

void JIT::emit_op_new_func(Instruction* currentInstruction)
{
    emitNewFuncCommon(currentInstruction);
}

void JIT::emit_op_new_generator_func(Instruction* currentInstruction)
{
    emitNewFuncCommon(currentInstruction);
}

void JIT::emit_op_new_async_func(Instruction* currentInstruction)
{
    emitNewFuncCommon(currentInstruction);
}

void JIT::emitNewFuncExprCommon(Instruction* currentInstruction)
{
    Jump notUndefinedScope;
    int dst = currentInstruction[1].u.operand;
#if USE(JSVALUE64)
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);
    notUndefinedScope = branch64(NotEqual, regT0, TrustedImm64(JSValue::encode(jsUndefined())));
    store64(TrustedImm64(JSValue::encode(jsUndefined())), Address(callFrameRegister, sizeof(Register) * dst));
#else
    emitLoadPayload(currentInstruction[2].u.operand, regT0);
    notUndefinedScope = branch32(NotEqual, tagFor(currentInstruction[2].u.operand), TrustedImm32(JSValue::UndefinedTag));
    emitStore(dst, jsUndefined());
#endif
    Jump done = jump();
    notUndefinedScope.link(this);

    FunctionExecutable* function = m_codeBlock->functionExpr(currentInstruction[3].u.operand);
    OpcodeID opcodeID = m_vm->interpreter->getOpcodeID(currentInstruction->u.opcode);

    if (opcodeID == op_new_func_exp)
        callOperation(operationNewFunction, dst, regT0, function);
    else if (opcodeID == op_new_generator_func_exp)
        callOperation(operationNewGeneratorFunction, dst, regT0, function);
    else {
        ASSERT(opcodeID == op_new_async_func_exp);
        callOperation(operationNewAsyncFunction, dst, regT0, function);
    }

    done.link(this);
}

void JIT::emit_op_new_func_exp(Instruction* currentInstruction)
{
    emitNewFuncExprCommon(currentInstruction);
}

void JIT::emit_op_new_generator_func_exp(Instruction* currentInstruction)
{
    emitNewFuncExprCommon(currentInstruction);
}

void JIT::emit_op_new_async_func_exp(Instruction* currentInstruction)
{
    emitNewFuncExprCommon(currentInstruction);
}

void JIT::emit_op_new_array(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int valuesIndex = currentInstruction[2].u.operand;
    int size = currentInstruction[3].u.operand;
    addPtr(TrustedImm32(valuesIndex * sizeof(Register)), callFrameRegister, regT0);
    callOperation(operationNewArrayWithProfile, dst,
        currentInstruction[4].u.arrayAllocationProfile, regT0, size);
}

void JIT::emit_op_new_array_with_size(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int sizeIndex = currentInstruction[2].u.operand;
#if USE(JSVALUE64)
    emitGetVirtualRegister(sizeIndex, regT0);
    callOperation(operationNewArrayWithSizeAndProfile, dst,
        currentInstruction[3].u.arrayAllocationProfile, regT0);
#else
    emitLoad(sizeIndex, regT1, regT0);
    callOperation(operationNewArrayWithSizeAndProfile, dst,
        currentInstruction[3].u.arrayAllocationProfile, regT1, regT0);
#endif
}

void JIT::emit_op_new_array_buffer(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int valuesIndex = currentInstruction[2].u.operand;
    int size = currentInstruction[3].u.operand;
    const JSValue* values = codeBlock()->constantBuffer(valuesIndex);
    callOperation(operationNewArrayBufferWithProfile, dst, currentInstruction[4].u.arrayAllocationProfile, values, size);
}

void JIT::emit_op_new_array_with_spread(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_new_array_with_spread);
    slowPathCall.call();
}

void JIT::emit_op_spread(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_spread);
    slowPathCall.call();
}

#if USE(JSVALUE64)
void JIT::emit_op_has_structure_property(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int enumerator = currentInstruction[4].u.operand;

    emitGetVirtualRegister(base, regT0);
    emitGetVirtualRegister(enumerator, regT1);
    emitJumpSlowCaseIfNotJSCell(regT0, base);

    load32(Address(regT0, JSCell::structureIDOffset()), regT0);
    addSlowCase(branch32(NotEqual, regT0, Address(regT1, JSPropertyNameEnumerator::cachedStructureIDOffset())));

    move(TrustedImm64(JSValue::encode(jsBoolean(true))), regT0);
    emitPutVirtualRegister(dst);
}

void JIT::privateCompileHasIndexedProperty(ByValInfo* byValInfo, ReturnAddressPtr returnAddress, JITArrayMode arrayMode)
{
    Instruction* currentInstruction = m_codeBlock->instructions().begin() + byValInfo->bytecodeIndex;

    PatchableJump badType;

    // FIXME: Add support for other types like TypedArrays and Arguments.
    // See https://bugs.webkit.org/show_bug.cgi?id=135033 and https://bugs.webkit.org/show_bug.cgi?id=135034.
    JumpList slowCases = emitLoadForArrayMode(currentInstruction, arrayMode, badType);
    move(TrustedImm64(JSValue::encode(jsBoolean(true))), regT0);
    Jump done = jump();

    LinkBuffer patchBuffer(*m_vm, *this, m_codeBlock);

    patchBuffer.link(badType, CodeLocationLabel(MacroAssemblerCodePtr::createFromExecutableAddress(returnAddress.value())).labelAtOffset(byValInfo->returnAddressToSlowPath));
    patchBuffer.link(slowCases, CodeLocationLabel(MacroAssemblerCodePtr::createFromExecutableAddress(returnAddress.value())).labelAtOffset(byValInfo->returnAddressToSlowPath));

    patchBuffer.link(done, byValInfo->badTypeJump.labelAtOffset(byValInfo->badTypeJumpToDone));

    byValInfo->stubRoutine = FINALIZE_CODE_FOR_STUB(
        m_codeBlock, patchBuffer,
        ("Baseline has_indexed_property stub for %s, return point %p", toCString(*m_codeBlock).data(), returnAddress.value()));

    MacroAssembler::repatchJump(byValInfo->badTypeJump, CodeLocationLabel(byValInfo->stubRoutine->code().code()));
    MacroAssembler::repatchCall(CodeLocationCall(MacroAssemblerCodePtr(returnAddress)), FunctionPtr(operationHasIndexedPropertyGeneric));
}

void JIT::emit_op_has_indexed_property(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int property = currentInstruction[3].u.operand;
    ArrayProfile* profile = currentInstruction[4].u.arrayProfile;
    ByValInfo* byValInfo = m_codeBlock->addByValInfo();

    emitGetVirtualRegisters(base, regT0, property, regT1);

    // This is technically incorrect - we're zero-extending an int32. On the hot path this doesn't matter.
    // We check the value as if it were a uint32 against m_vectorLength - which will always fail if the
    // number was negative, since m_vectorLength is always less than INT_MAX (the total allocation size
    // is always less than 4GB). As such, zero-extending will have been correct (and extending the value
    // to 64 bits is necessary, since it's used in the address calculation). We zero-extend rather than
    // sign-extend since it makes it easier to re-tag the value in the slow case.
    zeroExtend32ToPtr(regT1, regT1);

    emitJumpSlowCaseIfNotJSCell(regT0, base);
    emitArrayProfilingSiteWithCell(regT0, regT2, profile);
    and32(TrustedImm32(IndexingShapeMask), regT2);

    JITArrayMode mode = chooseArrayMode(profile);
    PatchableJump badType;

    // FIXME: Add support for other types like TypedArrays and Arguments.
    // See https://bugs.webkit.org/show_bug.cgi?id=135033 and https://bugs.webkit.org/show_bug.cgi?id=135034.
    JumpList slowCases = emitLoadForArrayMode(currentInstruction, mode, badType);

    move(TrustedImm64(JSValue::encode(jsBoolean(true))), regT0);

    addSlowCase(badType);
    addSlowCase(slowCases);

    Label done = label();

    emitPutVirtualRegister(dst);

    Label nextHotPath = label();

    m_byValCompilationInfo.append(ByValCompilationInfo(byValInfo, m_bytecodeOffset, PatchableJump(), badType, mode, profile, done, nextHotPath));
}

void JIT::emitSlow_op_has_indexed_property(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int property = currentInstruction[3].u.operand;
    ByValInfo* byValInfo = m_byValCompilationInfo[m_byValInstructionIndex].byValInfo;

    linkSlowCaseIfNotJSCell(iter, base); // Base cell check.
    linkSlowCase(iter); // Base array check.
    linkSlowCase(iter); // Vector length check.
    linkSlowCase(iter); // Empty value.

    Label slowPath = label();

    emitGetVirtualRegister(base, regT0);
    emitGetVirtualRegister(property, regT1);
    Call call = callOperation(operationHasIndexedPropertyDefault, dst, regT0, regT1, byValInfo);

    m_byValCompilationInfo[m_byValInstructionIndex].slowPathTarget = slowPath;
    m_byValCompilationInfo[m_byValInstructionIndex].returnAddress = call;
    m_byValInstructionIndex++;
}

void JIT::emit_op_get_direct_pname(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int index = currentInstruction[4].u.operand;
    int enumerator = currentInstruction[5].u.operand;

    // Check that base is a cell.
    emitGetVirtualRegister(base, regT0);
    emitJumpSlowCaseIfNotJSCell(regT0, base);

    // Check the structure.
    emitGetVirtualRegister(enumerator, regT2);
    load32(Address(regT0, JSCell::structureIDOffset()), regT1);
    addSlowCase(branch32(NotEqual, regT1, Address(regT2, JSPropertyNameEnumerator::cachedStructureIDOffset())));

    // Compute the offset.
    emitGetVirtualRegister(index, regT1);
    // If the index is less than the enumerator's cached inline capacity, then it's an inline access.
    Jump outOfLineAccess = branch32(AboveOrEqual, regT1, Address(regT2, JSPropertyNameEnumerator::cachedInlineCapacityOffset()));
    addPtr(TrustedImm32(JSObject::offsetOfInlineStorage()), regT0);
    signExtend32ToPtr(regT1, regT1);
    load64(BaseIndex(regT0, regT1, TimesEight), regT0);

    Jump done = jump();

    // Otherwise it's out of line.
    outOfLineAccess.link(this);
    loadPtr(Address(regT0, JSObject::butterflyOffset()), regT0);
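    // Out-of-line properties are stored at negative indices from the butterfly, so convert the
    // enumerator index into a negated offset relative to the first out-of-line property slot.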
    sub32(Address(regT2, JSPropertyNameEnumerator::cachedInlineCapacityOffset()), regT1);
    neg32(regT1);
    signExtend32ToPtr(regT1, regT1);
    int32_t offsetOfFirstProperty = static_cast<int32_t>(offsetInButterfly(firstOutOfLineOffset)) * sizeof(EncodedJSValue);
    load64(BaseIndex(regT0, regT1, TimesEight, offsetOfFirstProperty), regT0);

    done.link(this);
    emitValueProfilingSite();
    emitPutVirtualRegister(dst, regT0);
}

void JIT::emitSlow_op_get_direct_pname(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int base = currentInstruction[2].u.operand;
    linkSlowCaseIfNotJSCell(iter, base);
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_get_direct_pname);
    slowPathCall.call();
}

void JIT::emit_op_enumerator_structure_pname(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int enumerator = currentInstruction[2].u.operand;
    int index = currentInstruction[3].u.operand;

    emitGetVirtualRegister(index, regT0);
    emitGetVirtualRegister(enumerator, regT1);
    Jump inBounds = branch32(Below, regT0, Address(regT1, JSPropertyNameEnumerator::endStructurePropertyIndexOffset()));

    move(TrustedImm64(JSValue::encode(jsNull())), regT0);

    Jump done = jump();
    inBounds.link(this);

    loadPtr(Address(regT1, JSPropertyNameEnumerator::cachedPropertyNamesVectorOffset()), regT1);
    signExtend32ToPtr(regT0, regT0);
    load64(BaseIndex(regT1, regT0, TimesEight), regT0);

    done.link(this);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_enumerator_generic_pname(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int enumerator = currentInstruction[2].u.operand;
    int index = currentInstruction[3].u.operand;

    emitGetVirtualRegister(index, regT0);
    emitGetVirtualRegister(enumerator, regT1);
    Jump inBounds = branch32(Below, regT0, Address(regT1, JSPropertyNameEnumerator::endGenericPropertyIndexOffset()));

    move(TrustedImm64(JSValue::encode(jsNull())), regT0);

    Jump done = jump();
    inBounds.link(this);

    loadPtr(Address(regT1, JSPropertyNameEnumerator::cachedPropertyNamesVectorOffset()), regT1);
    signExtend32ToPtr(regT0, regT0);
    load64(BaseIndex(regT1, regT0, TimesEight), regT0);

    done.link(this);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_profile_type(Instruction* currentInstruction)
{
    TypeLocation* cachedTypeLocation = currentInstruction[2].u.location;
    int valueToProfile = currentInstruction[1].u.operand;

    emitGetVirtualRegister(valueToProfile, regT0);

    JumpList jumpToEnd;

    jumpToEnd.append(branchTest64(Zero, regT0));

    // Compile in a predictive type check, if possible, to see if we can skip writing to the log.
    // These typechecks are inlined to match those of the 64-bit JSValue type checks.
    if (cachedTypeLocation->m_lastSeenType == TypeUndefined)
        jumpToEnd.append(branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsUndefined()))));
    else if (cachedTypeLocation->m_lastSeenType == TypeNull)
        jumpToEnd.append(branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsNull()))));
    else if (cachedTypeLocation->m_lastSeenType == TypeBoolean) {
        move(regT0, regT1);
        and64(TrustedImm32(~1), regT1);
        jumpToEnd.append(branch64(Equal, regT1, TrustedImm64(ValueFalse)));
    } else if (cachedTypeLocation->m_lastSeenType == TypeAnyInt)
        jumpToEnd.append(emitJumpIfInt(regT0));
    else if (cachedTypeLocation->m_lastSeenType == TypeNumber)
        jumpToEnd.append(emitJumpIfNumber(regT0));
    else if (cachedTypeLocation->m_lastSeenType == TypeString) {
        Jump isNotCell = emitJumpIfNotJSCell(regT0);
        jumpToEnd.append(branch8(Equal, Address(regT0, JSCell::typeInfoTypeOffset()), TrustedImm32(StringType)));
        isNotCell.link(this);
    }

    // Load the type profiling log into T2.
    TypeProfilerLog* cachedTypeProfilerLog = m_vm->typeProfilerLog();
    move(TrustedImmPtr(cachedTypeProfilerLog), regT2);
    // Load the next log entry into T1.
    loadPtr(Address(regT2, TypeProfilerLog::currentLogEntryOffset()), regT1);

    // Store the JSValue onto the log entry.
    store64(regT0, Address(regT1, TypeProfilerLog::LogEntry::valueOffset()));

    // Store the structureID of the cell if T0 is a cell, otherwise store 0 on the log entry.
    Jump notCell = emitJumpIfNotJSCell(regT0);
    load32(Address(regT0, JSCell::structureIDOffset()), regT0);
    store32(regT0, Address(regT1, TypeProfilerLog::LogEntry::structureIDOffset()));
    Jump skipIsCell = jump();
    notCell.link(this);
    store32(TrustedImm32(0), Address(regT1, TypeProfilerLog::LogEntry::structureIDOffset()));
    skipIsCell.link(this);

    // Store the typeLocation on the log entry.
    move(TrustedImmPtr(cachedTypeLocation), regT0);
    store64(regT0, Address(regT1, TypeProfilerLog::LogEntry::locationOffset()));

    // Increment the current log entry.
    addPtr(TrustedImm32(sizeof(TypeProfilerLog::LogEntry)), regT1);
    store64(regT1, Address(regT2, TypeProfilerLog::currentLogEntryOffset()));
    Jump skipClearLog = branchPtr(NotEqual, regT1, TrustedImmPtr(cachedTypeProfilerLog->logEndPtr()));
    // Clear the log if we're at the end of the log.
    callOperation(operationProcessTypeProfilerLog);
    skipClearLog.link(this);

    jumpToEnd.link(this);
}

void JIT::emit_op_log_shadow_chicken_prologue(Instruction* currentInstruction)
{
    updateTopCallFrame();
    static_assert(nonArgGPR0 != regT0 && nonArgGPR0 != regT2, "nonArgGPR0 must not alias regT0 or regT2");
    GPRReg shadowPacketReg = regT0;
    GPRReg scratch1Reg = nonArgGPR0; // This must be a non-argument register.
    GPRReg scratch2Reg = regT2;
    ensureShadowChickenPacket(*vm(), shadowPacketReg, scratch1Reg, scratch2Reg);
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT3);
    logShadowChickenProloguePacket(shadowPacketReg, scratch1Reg, regT3);
}

void JIT::emit_op_log_shadow_chicken_tail(Instruction* currentInstruction)
{
    updateTopCallFrame();
    static_assert(nonArgGPR0 != regT0 && nonArgGPR0 != regT2, "nonArgGPR0 must not alias regT0 or regT2");
    GPRReg shadowPacketReg = regT0;
    GPRReg scratch1Reg = nonArgGPR0; // This must be a non-argument register.
    GPRReg scratch2Reg = regT2;
    ensureShadowChickenPacket(*vm(), shadowPacketReg, scratch1Reg, scratch2Reg);
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT2);
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT3);
    logShadowChickenTailPacket(shadowPacketReg, JSValueRegs(regT2), regT3, m_codeBlock, CallSiteIndex(m_bytecodeOffset));
}

#endif // USE(JSVALUE64)

void JIT::emit_op_get_enumerable_length(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_get_enumerable_length);
    slowPathCall.call();
}

void JIT::emitSlow_op_has_structure_property(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_has_structure_property);
    slowPathCall.call();
}

void JIT::emit_op_has_generic_property(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_has_generic_property);
    slowPathCall.call();
}

void JIT::emit_op_get_property_enumerator(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_get_property_enumerator);
    slowPathCall.call();
}

void JIT::emit_op_to_index_string(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_to_index_string);
    slowPathCall.call();
}

void JIT::emit_op_profile_control_flow(Instruction* currentInstruction)
{
    BasicBlockLocation* basicBlockLocation = currentInstruction[1].u.basicBlockLocation;
#if USE(JSVALUE64)
    basicBlockLocation->emitExecuteCode(*this);
#else
    basicBlockLocation->emitExecuteCode(*this, regT0);
#endif
}

void JIT::emit_op_create_direct_arguments(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_create_direct_arguments);
    slowPathCall.call();
}

void JIT::emit_op_create_scoped_arguments(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_create_scoped_arguments);
    slowPathCall.call();
}

void JIT::emit_op_create_cloned_arguments(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_create_cloned_arguments);
    slowPathCall.call();
}

void JIT::emit_op_argument_count(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    load32(payloadFor(CallFrameSlot::argumentCount), regT0);
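    // The frame's argument count includes |this|, so subtract one to get the user-visible count.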
    sub32(TrustedImm32(1), regT0);
    JSValueRegs result = JSValueRegs::withTwoAvailableRegs(regT0, regT1);
    boxInt32(regT0, result);
    emitPutVirtualRegister(dst, result);
}

void JIT::emit_op_create_rest(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_create_rest);
    slowPathCall.call();
}

void JIT::emit_op_get_rest_length(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    unsigned numParamsToSkip = currentInstruction[2].u.unsignedValue;
    load32(payloadFor(CallFrameSlot::argumentCount), regT0);
    sub32(TrustedImm32(1), regT0);
    Jump zeroLength = branch32(LessThanOrEqual, regT0, Imm32(numParamsToSkip));
    sub32(Imm32(numParamsToSkip), regT0);
#if USE(JSVALUE64)
    boxInt32(regT0, JSValueRegs(regT0));
#endif
    Jump done = jump();

    zeroLength.link(this);
#if USE(JSVALUE64)
    move(TrustedImm64(JSValue::encode(jsNumber(0))), regT0);
#else
    move(TrustedImm32(0), regT0);
#endif

    done.link(this);
#if USE(JSVALUE64)
    emitPutVirtualRegister(dst, regT0);
#else
    move(TrustedImm32(JSValue::Int32Tag), regT1);
    emitPutVirtualRegister(dst, JSValueRegs(regT1, regT0));
#endif
}

void JIT::emit_op_get_argument(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int index = currentInstruction[2].u.operand;
#if USE(JSVALUE64)
    JSValueRegs resultRegs(regT0);
#else
    JSValueRegs resultRegs(regT1, regT0);
#endif

    load32(payloadFor(CallFrameSlot::argumentCount), regT2);
    Jump argumentOutOfBounds = branch32(LessThanOrEqual, regT2, TrustedImm32(index));
    loadValue(addressFor(CallFrameSlot::thisArgument + index), resultRegs);
    Jump done = jump();

    argumentOutOfBounds.link(this);
    moveValue(jsUndefined(), resultRegs);

    done.link(this);
    emitValueProfilingSite();
    emitPutVirtualRegister(dst, resultRegs);
}

} // namespace JSC

#endif // ENABLE(JIT)