Source/JavaScriptCore/jit/JITOpcodes.cpp
1 /*
2  * Copyright (C) 2009-2017 Apple Inc. All rights reserved.
3  * Copyright (C) 2010 Patrick Gansterer <paroga@paroga.com>
4  *
5  * Redistribution and use in source and binary forms, with or without
6  * modification, are permitted provided that the following conditions
7  * are met:
8  * 1. Redistributions of source code must retain the above copyright
9  *    notice, this list of conditions and the following disclaimer.
10  * 2. Redistributions in binary form must reproduce the above copyright
11  *    notice, this list of conditions and the following disclaimer in the
12  *    documentation and/or other materials provided with the distribution.
13  *
14  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
15  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
16  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
17  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
18  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
19  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
20  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
21  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
22  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
23  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
24  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
25  */
26
27 #include "config.h"
28 #if ENABLE(JIT)
29 #include "JIT.h"
30
31 #include "BasicBlockLocation.h"
32 #include "Exception.h"
33 #include "Heap.h"
34 #include "InterpreterInlines.h"
35 #include "JITInlines.h"
36 #include "JSArray.h"
37 #include "JSCell.h"
38 #include "JSFunction.h"
39 #include "JSPropertyNameEnumerator.h"
40 #include "LinkBuffer.h"
41 #include "MaxFrameExtentForSlowPathCall.h"
42 #include "SlowPathCall.h"
43 #include "TypeLocation.h"
44 #include "TypeProfilerLog.h"
45 #include "VirtualRegister.h"
46 #include "Watchdog.h"
47
48 namespace JSC {
49
50 #if USE(JSVALUE64)
51
52 JIT::CodeRef JIT::privateCompileCTINativeCall(VM* vm, NativeFunction)
53 {
54     return vm->getCTIStub(nativeCallGenerator);
55 }
56
57 void JIT::emit_op_mov(Instruction* currentInstruction)
58 {
59     int dst = currentInstruction[1].u.operand;
60     int src = currentInstruction[2].u.operand;
61
62     emitGetVirtualRegister(src, regT0);
63     emitPutVirtualRegister(dst);
64 }
65
66
67 void JIT::emit_op_end(Instruction* currentInstruction)
68 {
69     RELEASE_ASSERT(returnValueGPR != callFrameRegister);
70     emitGetVirtualRegister(currentInstruction[1].u.operand, returnValueGPR);
71     emitRestoreCalleeSaves();
72     emitFunctionEpilogue();
73     ret();
74 }
75
76 void JIT::emit_op_jmp(Instruction* currentInstruction)
77 {
78     unsigned target = currentInstruction[1].u.operand;
79     addJump(jump(), target);
80 }
81
82 void JIT::emit_op_new_object(Instruction* currentInstruction)
83 {
84     Structure* structure = currentInstruction[3].u.objectAllocationProfile->structure();
85     size_t allocationSize = JSFinalObject::allocationSize(structure->inlineCapacity());
86     MarkedAllocator* allocator = subspaceFor<JSFinalObject>(*m_vm)->allocatorFor(allocationSize);
87
88     RegisterID resultReg = regT0;
89     RegisterID allocatorReg = regT1;
90     RegisterID scratchReg = regT2;
91
92     move(TrustedImmPtr(allocator), allocatorReg);
93     if (allocator)
94         addSlowCase(Jump());
95     JumpList slowCases;
96     emitAllocateJSObject(resultReg, allocator, allocatorReg, TrustedImmPtr(structure), TrustedImmPtr(0), scratchReg, slowCases);
97     emitInitializeInlineStorage(resultReg, structure->inlineCapacity());
98     addSlowCase(slowCases);
99     emitPutVirtualRegister(currentInstruction[1].u.operand);
100 }
101
102 void JIT::emitSlow_op_new_object(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
103 {
104     linkSlowCase(iter);
105     linkSlowCase(iter);
106     int dst = currentInstruction[1].u.operand;
107     Structure* structure = currentInstruction[3].u.objectAllocationProfile->structure();
108     callOperation(operationNewObject, structure);
109     emitStoreCell(dst, returnValueGPR);
110 }
111
112 void JIT::emit_op_overrides_has_instance(Instruction* currentInstruction)
113 {
114     int dst = currentInstruction[1].u.operand;
115     int constructor = currentInstruction[2].u.operand;
116     int hasInstanceValue = currentInstruction[3].u.operand;
117
118     emitGetVirtualRegister(hasInstanceValue, regT0);
119
120     // We only take the fast path below when hasInstanceValue is the default Function.prototype[Symbol.hasInstance], since then we know what it would do.
121     Jump customhasInstanceValue = branchPtr(NotEqual, regT0, TrustedImmPtr(m_codeBlock->globalObject()->functionProtoHasInstanceSymbolFunction()));
122
123     emitGetVirtualRegister(constructor, regT0);
124
125     // Check that constructor 'ImplementsDefaultHasInstance' i.e. the object is not a C-API user nor a bound function.
126     test8(Zero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(ImplementsDefaultHasInstance), regT0);
127     emitTagBool(regT0);
128     Jump done = jump();
129
130     customhasInstanceValue.link(this);
131     move(TrustedImm32(ValueTrue), regT0);
132
133     done.link(this);
134     emitPutVirtualRegister(dst);
135 }
136
137 void JIT::emit_op_instanceof(Instruction* currentInstruction)
138 {
139     int dst = currentInstruction[1].u.operand;
140     int value = currentInstruction[2].u.operand;
141     int proto = currentInstruction[3].u.operand;
142
143     // Load the operands value and proto into registers.
144     // regT0 is left free so it can hold the result.
145     emitGetVirtualRegister(value, regT2);
146     emitGetVirtualRegister(proto, regT1);
147
148     // Check that value and proto are cells. baseVal must be a cell - this is checked by the get_by_id for Symbol.hasInstance.
149     emitJumpSlowCaseIfNotJSCell(regT2, value);
150     emitJumpSlowCaseIfNotJSCell(regT1, proto);
151
152     // Check that prototype is an object
153     addSlowCase(emitJumpIfCellNotObject(regT1));
154     
155     // Optimistically load the result true, and start looping.
156     // Initially, regT1 still contains proto and regT2 still contains value.
157     // As we loop regT2 will be updated with its prototype, recursively walking the prototype chain.
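    // Roughly, this emits the equivalent of the following sketch (with the ProxyObject check
    // bailing to the slow path instead):
    //
    //     result = true;
    //     do {
    //         value = value's prototype (loaded from its Structure);
    //         if (value == proto)
    //             goto done;                  // isInstance
    //     } while (value is still a cell);
    //     result = false;                     // hit null without finding proto
    //     done: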
158     move(TrustedImm64(JSValue::encode(jsBoolean(true))), regT0);
159     Label loop(this);
160
161     addSlowCase(branch8(Equal, Address(regT2, JSCell::typeInfoTypeOffset()), TrustedImm32(ProxyObjectType)));
162
163     // Load the prototype of the object in regT2.  If this is equal to regT1 - WIN!
164     // Otherwise, check if we've hit null - if we have then drop out of the loop, if not go again.
165     emitLoadStructure(*vm(), regT2, regT2, regT3);
166     load64(Address(regT2, Structure::prototypeOffset()), regT2);
167     Jump isInstance = branchPtr(Equal, regT2, regT1);
168     emitJumpIfJSCell(regT2).linkTo(loop, this);
169
170     // We get here either by dropping out of the loop, or if value was not an Object.  Result is false.
171     move(TrustedImm64(JSValue::encode(jsBoolean(false))), regT0);
172
173     // isInstance jumps right down to here, to skip setting the result to false (it has already set true).
174     isInstance.link(this);
175     emitPutVirtualRegister(dst);
176 }
177
178 void JIT::emit_op_instanceof_custom(Instruction*)
179 {
180     // This always goes to slow path since we expect it to be rare.
181     addSlowCase(jump());
182 }
183     
184 void JIT::emit_op_is_empty(Instruction* currentInstruction)
185 {
186     int dst = currentInstruction[1].u.operand;
187     int value = currentInstruction[2].u.operand;
188
189     emitGetVirtualRegister(value, regT0);
190     compare64(Equal, regT0, TrustedImm32(JSValue::encode(JSValue())), regT0);
191
192     emitTagBool(regT0);
193     emitPutVirtualRegister(dst);
194 }
195
196 void JIT::emit_op_is_undefined(Instruction* currentInstruction)
197 {
198     int dst = currentInstruction[1].u.operand;
199     int value = currentInstruction[2].u.operand;
200     
201     emitGetVirtualRegister(value, regT0);
202     Jump isCell = emitJumpIfJSCell(regT0);
203
204     compare64(Equal, regT0, TrustedImm32(ValueUndefined), regT0);
205     Jump done = jump();
206     
207     isCell.link(this);
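    // For cells, the only way to compare equal to undefined is the MasqueradesAsUndefined path
    // (e.g. document.all), and such an object only masquerades within its own global object,
    // hence the Structure::globalObject comparison below.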
208     Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
209     move(TrustedImm32(0), regT0);
210     Jump notMasqueradesAsUndefined = jump();
211
212     isMasqueradesAsUndefined.link(this);
213     emitLoadStructure(*vm(), regT0, regT1, regT2);
214     move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
215     loadPtr(Address(regT1, Structure::globalObjectOffset()), regT1);
216     comparePtr(Equal, regT0, regT1, regT0);
217
218     notMasqueradesAsUndefined.link(this);
219     done.link(this);
220     emitTagBool(regT0);
221     emitPutVirtualRegister(dst);
222 }
223
224 void JIT::emit_op_is_boolean(Instruction* currentInstruction)
225 {
226     int dst = currentInstruction[1].u.operand;
227     int value = currentInstruction[2].u.operand;
228     
229     emitGetVirtualRegister(value, regT0);
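    // Booleans encode as ValueFalse/ValueTrue, which differ only in their low bit. Xor-ing with
    // ValueFalse therefore leaves 0 or 1 for a boolean, while any other value keeps some higher
    // bit set, which the test64 against ~1 detects.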
230     xor64(TrustedImm32(static_cast<int32_t>(ValueFalse)), regT0);
231     test64(Zero, regT0, TrustedImm32(static_cast<int32_t>(~1)), regT0);
232     emitTagBool(regT0);
233     emitPutVirtualRegister(dst);
234 }
235
236 void JIT::emit_op_is_number(Instruction* currentInstruction)
237 {
238     int dst = currentInstruction[1].u.operand;
239     int value = currentInstruction[2].u.operand;
240     
241     emitGetVirtualRegister(value, regT0);
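    // tagTypeNumberRegister holds the TagTypeNumber mask (the top 16 bits of the encoding); every
    // int32 has all of those bits set and every boxed double has at least one set, so a non-zero
    // result from the test identifies a number.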
242     test64(NonZero, regT0, tagTypeNumberRegister, regT0);
243     emitTagBool(regT0);
244     emitPutVirtualRegister(dst);
245 }
246
247 void JIT::emit_op_is_cell_with_type(Instruction* currentInstruction)
248 {
249     int dst = currentInstruction[1].u.operand;
250     int value = currentInstruction[2].u.operand;
251     int type = currentInstruction[3].u.operand;
252
253     emitGetVirtualRegister(value, regT0);
254     Jump isNotCell = emitJumpIfNotJSCell(regT0);
255
256     compare8(Equal, Address(regT0, JSCell::typeInfoTypeOffset()), TrustedImm32(type), regT0);
257     emitTagBool(regT0);
258     Jump done = jump();
259
260     isNotCell.link(this);
261     move(TrustedImm32(ValueFalse), regT0);
262
263     done.link(this);
264     emitPutVirtualRegister(dst);
265 }
266
267 void JIT::emit_op_is_object(Instruction* currentInstruction)
268 {
269     int dst = currentInstruction[1].u.operand;
270     int value = currentInstruction[2].u.operand;
271
272     emitGetVirtualRegister(value, regT0);
273     Jump isNotCell = emitJumpIfNotJSCell(regT0);
274
275     compare8(AboveOrEqual, Address(regT0, JSCell::typeInfoTypeOffset()), TrustedImm32(ObjectType), regT0);
276     emitTagBool(regT0);
277     Jump done = jump();
278
279     isNotCell.link(this);
280     move(TrustedImm32(ValueFalse), regT0);
281
282     done.link(this);
283     emitPutVirtualRegister(dst);
284 }
285
286 void JIT::emit_op_ret(Instruction* currentInstruction)
287 {
288     ASSERT(callFrameRegister != regT1);
289     ASSERT(regT1 != returnValueGPR);
290     ASSERT(returnValueGPR != callFrameRegister);
291
292     // Return the result in returnValueGPR.
293     emitGetVirtualRegister(currentInstruction[1].u.operand, returnValueGPR);
294
295     checkStackPointerAlignment();
296     emitRestoreCalleeSaves();
297     emitFunctionEpilogue();
298     ret();
299 }
300
301 void JIT::emit_op_to_primitive(Instruction* currentInstruction)
302 {
303     int dst = currentInstruction[1].u.operand;
304     int src = currentInstruction[2].u.operand;
305
306     emitGetVirtualRegister(src, regT0);
307     
308     Jump isImm = emitJumpIfNotJSCell(regT0);
309     addSlowCase(emitJumpIfCellObject(regT0));
310     isImm.link(this);
311
312     if (dst != src)
313         emitPutVirtualRegister(dst);
314
315 }
316
317 void JIT::emit_op_set_function_name(Instruction* currentInstruction)
318 {
319     emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
320     emitGetVirtualRegister(currentInstruction[2].u.operand, regT1);
321     callOperation(operationSetFunctionName, regT0, regT1);
322 }
323
324 void JIT::emit_op_strcat(Instruction* currentInstruction)
325 {
326     JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_strcat);
327     slowPathCall.call();
328 }
329
330 void JIT::emit_op_not(Instruction* currentInstruction)
331 {
332     emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);
333
334     // Invert against JSValue(false); if the value was tagged as a boolean, then all bits will be
335     // clear other than the low bit (which will be 0 or 1 for false or true inputs respectively).
336     // Then invert against JSValue(true), which will add the tag back in, and flip the low bit.
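    // Worked through (assuming the usual JSVALUE64 boolean encoding): false ^ ValueFalse == 0 and
    // true ^ ValueFalse == 1; anything with bits set outside the low bit takes the slow case; the
    // final xor with ValueTrue turns 0 into true and 1 into false.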
337     xor64(TrustedImm32(static_cast<int32_t>(ValueFalse)), regT0);
338     addSlowCase(branchTestPtr(NonZero, regT0, TrustedImm32(static_cast<int32_t>(~1))));
339     xor64(TrustedImm32(static_cast<int32_t>(ValueTrue)), regT0);
340
341     emitPutVirtualRegister(currentInstruction[1].u.operand);
342 }
343
344 void JIT::emit_op_jfalse(Instruction* currentInstruction)
345 {
346     unsigned target = currentInstruction[2].u.operand;
347
348     GPRReg value = regT0;
349     GPRReg result = regT1;
350     GPRReg scratch = regT2;
351     bool shouldCheckMasqueradesAsUndefined = true;
352
353     emitGetVirtualRegister(currentInstruction[1].u.operand, value);
354     emitConvertValueToBoolean(*vm(), JSValueRegs(value), result, scratch, fpRegT0, fpRegT1, shouldCheckMasqueradesAsUndefined, m_codeBlock->globalObject());
355
356     addJump(branchTest32(Zero, result), target);
357 }
358
359 void JIT::emit_op_jeq_null(Instruction* currentInstruction)
360 {
361     int src = currentInstruction[1].u.operand;
362     unsigned target = currentInstruction[2].u.operand;
363
364     emitGetVirtualRegister(src, regT0);
365     Jump isImmediate = emitJumpIfNotJSCell(regT0);
366
367     // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
368     Jump isNotMasqueradesAsUndefined = branchTest8(Zero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
369     emitLoadStructure(*vm(), regT0, regT2, regT1);
370     move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
371     addJump(branchPtr(Equal, Address(regT2, Structure::globalObjectOffset()), regT0), target);
372     Jump masqueradesGlobalObjectIsForeign = jump();
373
374     // Now handle the immediate cases - undefined & null
375     isImmediate.link(this);
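    // Clearing TagBitUndefined maps the undefined encoding onto the null encoding, so the single
    // compare against jsNull() below matches both undefined and null.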
376     and64(TrustedImm32(~TagBitUndefined), regT0);
377     addJump(branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsNull()))), target);            
378
379     isNotMasqueradesAsUndefined.link(this);
380     masqueradesGlobalObjectIsForeign.link(this);
381 }
382 void JIT::emit_op_jneq_null(Instruction* currentInstruction)
383 {
384     int src = currentInstruction[1].u.operand;
385     unsigned target = currentInstruction[2].u.operand;
386
387     emitGetVirtualRegister(src, regT0);
388     Jump isImmediate = emitJumpIfNotJSCell(regT0);
389
390     // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
391     addJump(branchTest8(Zero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined)), target);
392     emitLoadStructure(*vm(), regT0, regT2, regT1);
393     move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
394     addJump(branchPtr(NotEqual, Address(regT2, Structure::globalObjectOffset()), regT0), target);
395     Jump wasNotImmediate = jump();
396
397     // Now handle the immediate cases - undefined & null
398     isImmediate.link(this);
399     and64(TrustedImm32(~TagBitUndefined), regT0);
400     addJump(branch64(NotEqual, regT0, TrustedImm64(JSValue::encode(jsNull()))), target);            
401
402     wasNotImmediate.link(this);
403 }
404
405 void JIT::emit_op_jneq_ptr(Instruction* currentInstruction)
406 {
407     int src = currentInstruction[1].u.operand;
408     Special::Pointer ptr = currentInstruction[2].u.specialPointer;
409     unsigned target = currentInstruction[3].u.operand;
410     
411     emitGetVirtualRegister(src, regT0);
412     CCallHelpers::Jump equal = branchPtr(Equal, regT0, TrustedImmPtr(actualPointerFor(m_codeBlock, ptr)));
413     store32(TrustedImm32(1), &currentInstruction[4].u.operand);
414     addJump(jump(), target);
415     equal.link(this);
416 }
417
418 void JIT::emit_op_eq(Instruction* currentInstruction)
419 {
420     emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
421     emitJumpSlowCaseIfNotInt(regT0, regT1, regT2);
422     compare32(Equal, regT1, regT0, regT0);
423     emitTagBool(regT0);
424     emitPutVirtualRegister(currentInstruction[1].u.operand);
425 }
426
427 void JIT::emit_op_jtrue(Instruction* currentInstruction)
428 {
429     unsigned target = currentInstruction[2].u.operand;
430
431     GPRReg value = regT0;
432     GPRReg result = regT1;
433     GPRReg scratch = regT2;
434     bool shouldCheckMasqueradesAsUndefined = true;
435     emitGetVirtualRegister(currentInstruction[1].u.operand, value);
436     emitConvertValueToBoolean(*vm(), JSValueRegs(value), result, scratch, fpRegT0, fpRegT1, shouldCheckMasqueradesAsUndefined, m_codeBlock->globalObject());
437     addJump(branchTest32(NonZero, result), target);
438 }
439
440 void JIT::emit_op_neq(Instruction* currentInstruction)
441 {
442     emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
443     emitJumpSlowCaseIfNotInt(regT0, regT1, regT2);
444     compare32(NotEqual, regT1, regT0, regT0);
445     emitTagBool(regT0);
446
447     emitPutVirtualRegister(currentInstruction[1].u.operand);
448
449 }
450
451 void JIT::emit_op_throw(Instruction* currentInstruction)
452 {
453     ASSERT(regT0 == returnValueGPR);
454     copyCalleeSavesToVMEntryFrameCalleeSavesBuffer(*vm());
455     emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
456     callOperationNoExceptionCheck(operationThrow, regT0);
457     jumpToExceptionHandler(*vm());
458 }
459
460 void JIT::emit_op_push_with_scope(Instruction* currentInstruction)
461 {
462     JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_push_with_scope);
463     slowPathCall.call();
464 }
465
466 void JIT::compileOpStrictEq(Instruction* currentInstruction, CompileOpStrictEqType type)
467 {
468     int dst = currentInstruction[1].u.operand;
469     int src1 = currentInstruction[2].u.operand;
470     int src2 = currentInstruction[3].u.operand;
471
472     emitGetVirtualRegisters(src1, regT0, src2, regT1);
473     
474     // Jump slow if both are cells (to cover strings).
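    // Cells have no tag bits set, so or64-ing the two operands yields a cell-looking value only
    // when both are cells; mixing a cell with an immediate keeps at least one tag bit set.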
475     move(regT0, regT2);
476     or64(regT1, regT2);
477     addSlowCase(emitJumpIfJSCell(regT2));
478     
479     // Jump slow if either is a double. First test if it's an integer, which is fine, and then test
480     // if it's a double.
481     Jump leftOK = emitJumpIfInt(regT0);
482     addSlowCase(emitJumpIfNumber(regT0));
483     leftOK.link(this);
484     Jump rightOK = emitJumpIfInt(regT1);
485     addSlowCase(emitJumpIfNumber(regT1));
486     rightOK.link(this);
487
488     if (type == OpStrictEq)
489         compare64(Equal, regT1, regT0, regT0);
490     else
491         compare64(NotEqual, regT1, regT0, regT0);
492     emitTagBool(regT0);
493
494     emitPutVirtualRegister(dst);
495 }
496
497 void JIT::emit_op_stricteq(Instruction* currentInstruction)
498 {
499     compileOpStrictEq(currentInstruction, OpStrictEq);
500 }
501
502 void JIT::emit_op_nstricteq(Instruction* currentInstruction)
503 {
504     compileOpStrictEq(currentInstruction, OpNStrictEq);
505 }
506
507 void JIT::emit_op_to_number(Instruction* currentInstruction)
508 {
509     int dstVReg = currentInstruction[1].u.operand;
510     int srcVReg = currentInstruction[2].u.operand;
511     emitGetVirtualRegister(srcVReg, regT0);
512     
513     addSlowCase(emitJumpIfNotNumber(regT0));
514
515     emitValueProfilingSite();
516     if (srcVReg != dstVReg)
517         emitPutVirtualRegister(dstVReg);
518 }
519
520 void JIT::emit_op_to_string(Instruction* currentInstruction)
521 {
522     int srcVReg = currentInstruction[2].u.operand;
523     emitGetVirtualRegister(srcVReg, regT0);
524
525     addSlowCase(emitJumpIfNotJSCell(regT0));
526     addSlowCase(branch8(NotEqual, Address(regT0, JSCell::typeInfoTypeOffset()), TrustedImm32(StringType)));
527
528     emitPutVirtualRegister(currentInstruction[1].u.operand);
529 }
530
531 void JIT::emit_op_catch(Instruction* currentInstruction)
532 {
533     restoreCalleeSavesFromVMEntryFrameCalleeSavesBuffer(*vm());
534
535     move(TrustedImmPtr(m_vm), regT3);
536     load64(Address(regT3, VM::callFrameForCatchOffset()), callFrameRegister);
537     storePtr(TrustedImmPtr(nullptr), Address(regT3, VM::callFrameForCatchOffset()));
538
539     addPtr(TrustedImm32(stackPointerOffsetFor(codeBlock()) * sizeof(Register)), callFrameRegister, stackPointerRegister);
540
541     callOperationNoExceptionCheck(operationCheckIfExceptionIsUncatchableAndNotifyProfiler);
542     Jump isCatchableException = branchTest32(Zero, returnValueGPR);
543     jumpToExceptionHandler(*vm());
544     isCatchableException.link(this);
545
546     move(TrustedImmPtr(m_vm), regT3);
547     load64(Address(regT3, VM::exceptionOffset()), regT0);
548     store64(TrustedImm64(JSValue::encode(JSValue())), Address(regT3, VM::exceptionOffset()));
549     emitPutVirtualRegister(currentInstruction[1].u.operand);
550
551     load64(Address(regT0, Exception::valueOffset()), regT0);
552     emitPutVirtualRegister(currentInstruction[2].u.operand);
553 }
554
555 void JIT::emit_op_assert(Instruction* currentInstruction)
556 {
557     JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_assert);
558     slowPathCall.call();
559 }
560
561 void JIT::emit_op_create_lexical_environment(Instruction* currentInstruction)
562 {
563     JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_create_lexical_environment);
564     slowPathCall.call();
565 }
566
567 void JIT::emit_op_get_parent_scope(Instruction* currentInstruction)
568 {
569     int currentScope = currentInstruction[2].u.operand;
570     emitGetVirtualRegister(currentScope, regT0);
571     loadPtr(Address(regT0, JSScope::offsetOfNext()), regT0);
572     emitStoreCell(currentInstruction[1].u.operand, regT0);
573 }
574
575 void JIT::emit_op_switch_imm(Instruction* currentInstruction)
576 {
577     size_t tableIndex = currentInstruction[1].u.operand;
578     unsigned defaultOffset = currentInstruction[2].u.operand;
579     unsigned scrutinee = currentInstruction[3].u.operand;
580
581     // create jump table for switch destinations, track this switch statement.
582     SimpleJumpTable* jumpTable = &m_codeBlock->switchJumpTable(tableIndex);
583     m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Immediate));
584     jumpTable->ensureCTITable();
585
586     emitGetVirtualRegister(scrutinee, regT0);
587     callOperation(operationSwitchImmWithUnknownKeyType, regT0, tableIndex);
588     jump(returnValueGPR);
589 }
590
591 void JIT::emit_op_switch_char(Instruction* currentInstruction)
592 {
593     size_t tableIndex = currentInstruction[1].u.operand;
594     unsigned defaultOffset = currentInstruction[2].u.operand;
595     unsigned scrutinee = currentInstruction[3].u.operand;
596
597     // create jump table for switch destinations, track this switch statement.
598     SimpleJumpTable* jumpTable = &m_codeBlock->switchJumpTable(tableIndex);
599     m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Character));
600     jumpTable->ensureCTITable();
601
602     emitGetVirtualRegister(scrutinee, regT0);
603     callOperation(operationSwitchCharWithUnknownKeyType, regT0, tableIndex);
604     jump(returnValueGPR);
605 }
606
607 void JIT::emit_op_switch_string(Instruction* currentInstruction)
608 {
609     size_t tableIndex = currentInstruction[1].u.operand;
610     unsigned defaultOffset = currentInstruction[2].u.operand;
611     unsigned scrutinee = currentInstruction[3].u.operand;
612
613     // create jump table for switch destinations, track this switch statement.
614     StringJumpTable* jumpTable = &m_codeBlock->stringSwitchJumpTable(tableIndex);
615     m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset));
616
617     emitGetVirtualRegister(scrutinee, regT0);
618     callOperation(operationSwitchStringWithUnknownKeyType, regT0, tableIndex);
619     jump(returnValueGPR);
620 }
621
622 void JIT::emit_op_debug(Instruction* currentInstruction)
623 {
624     load32(codeBlock()->debuggerRequestsAddress(), regT0);
625     Jump noDebuggerRequests = branchTest32(Zero, regT0);
626     callOperation(operationDebug, currentInstruction[1].u.operand);
627     noDebuggerRequests.link(this);
628 }
629
630 void JIT::emit_op_eq_null(Instruction* currentInstruction)
631 {
632     int dst = currentInstruction[1].u.operand;
633     int src1 = currentInstruction[2].u.operand;
634
635     emitGetVirtualRegister(src1, regT0);
636     Jump isImmediate = emitJumpIfNotJSCell(regT0);
637
638     Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
639     move(TrustedImm32(0), regT0);
640     Jump wasNotMasqueradesAsUndefined = jump();
641
642     isMasqueradesAsUndefined.link(this);
643     emitLoadStructure(*vm(), regT0, regT2, regT1);
644     move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
645     loadPtr(Address(regT2, Structure::globalObjectOffset()), regT2);
646     comparePtr(Equal, regT0, regT2, regT0);
647     Jump wasNotImmediate = jump();
648
649     isImmediate.link(this);
650
651     and64(TrustedImm32(~TagBitUndefined), regT0);
652     compare64(Equal, regT0, TrustedImm32(ValueNull), regT0);
653
654     wasNotImmediate.link(this);
655     wasNotMasqueradesAsUndefined.link(this);
656
657     emitTagBool(regT0);
658     emitPutVirtualRegister(dst);
659
660 }
661
662 void JIT::emit_op_neq_null(Instruction* currentInstruction)
663 {
664     int dst = currentInstruction[1].u.operand;
665     int src1 = currentInstruction[2].u.operand;
666
667     emitGetVirtualRegister(src1, regT0);
668     Jump isImmediate = emitJumpIfNotJSCell(regT0);
669
670     Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
671     move(TrustedImm32(1), regT0);
672     Jump wasNotMasqueradesAsUndefined = jump();
673
674     isMasqueradesAsUndefined.link(this);
675     emitLoadStructure(*vm(), regT0, regT2, regT1);
676     move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
677     loadPtr(Address(regT2, Structure::globalObjectOffset()), regT2);
678     comparePtr(NotEqual, regT0, regT2, regT0);
679     Jump wasNotImmediate = jump();
680
681     isImmediate.link(this);
682
683     and64(TrustedImm32(~TagBitUndefined), regT0);
684     compare64(NotEqual, regT0, TrustedImm32(ValueNull), regT0);
685
686     wasNotImmediate.link(this);
687     wasNotMasqueradesAsUndefined.link(this);
688
689     emitTagBool(regT0);
690     emitPutVirtualRegister(dst);
691 }
692
693 void JIT::emit_op_enter(Instruction*)
694 {
695     // Even though CTI doesn't use them, we initialize our constant
696     // registers to zap stale pointers, to avoid unnecessarily prolonging
697     // object lifetime and increasing GC pressure.
698     size_t count = m_codeBlock->m_numVars;
699     for (size_t j = CodeBlock::llintBaselineCalleeSaveSpaceAsVirtualRegisters(); j < count; ++j)
700         emitInitRegister(virtualRegisterForLocal(j).offset());
701
702     emitWriteBarrier(m_codeBlock);
703
704     emitEnterOptimizationCheck();
705 }
706
707 void JIT::emit_op_get_scope(Instruction* currentInstruction)
708 {
709     int dst = currentInstruction[1].u.operand;
710     emitGetFromCallFrameHeaderPtr(CallFrameSlot::callee, regT0);
711     loadPtr(Address(regT0, JSFunction::offsetOfScopeChain()), regT0);
712     emitStoreCell(dst, regT0);
713 }
714
715 void JIT::emit_op_to_this(Instruction* currentInstruction)
716 {
717     WriteBarrierBase<Structure>* cachedStructure = &currentInstruction[2].u.structure;
718     emitGetVirtualRegister(currentInstruction[1].u.operand, regT1);
719
720     emitJumpSlowCaseIfNotJSCell(regT1);
721
722     addSlowCase(branch8(NotEqual, Address(regT1, JSCell::typeInfoTypeOffset()), TrustedImm32(FinalObjectType)));
723     loadPtr(cachedStructure, regT2);
724     addSlowCase(branchTestPtr(Zero, regT2));
725     load32(Address(regT2, Structure::structureIDOffset()), regT2);
726     addSlowCase(branch32(NotEqual, Address(regT1, JSCell::structureIDOffset()), regT2));
727 }
728
729 void JIT::emit_op_create_this(Instruction* currentInstruction)
730 {
731     int callee = currentInstruction[2].u.operand;
732     WriteBarrierBase<JSCell>* cachedFunction = &currentInstruction[4].u.jsCell;
733     RegisterID calleeReg = regT0;
734     RegisterID rareDataReg = regT4;
735     RegisterID resultReg = regT0;
736     RegisterID allocatorReg = regT1;
737     RegisterID structureReg = regT2;
738     RegisterID cachedFunctionReg = regT4;
739     RegisterID scratchReg = regT3;
740
741     emitGetVirtualRegister(callee, calleeReg);
742     addSlowCase(branch8(NotEqual, Address(calleeReg, JSCell::typeInfoTypeOffset()), TrustedImm32(JSFunctionType)));
743     loadPtr(Address(calleeReg, JSFunction::offsetOfRareData()), rareDataReg);
744     addSlowCase(branchTestPtr(Zero, rareDataReg));
745     loadPtr(Address(rareDataReg, FunctionRareData::offsetOfObjectAllocationProfile() + ObjectAllocationProfile::offsetOfAllocator()), allocatorReg);
746     loadPtr(Address(rareDataReg, FunctionRareData::offsetOfObjectAllocationProfile() + ObjectAllocationProfile::offsetOfStructure()), structureReg);
747     addSlowCase(branchTestPtr(Zero, allocatorReg));
748
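    // If this site has already seen multiple callee objects, the cached-function check is skipped;
    // otherwise the callee must match the cached function, or we fall back to the slow path.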
749     loadPtr(cachedFunction, cachedFunctionReg);
750     Jump hasSeenMultipleCallees = branchPtr(Equal, cachedFunctionReg, TrustedImmPtr(JSCell::seenMultipleCalleeObjects()));
751     addSlowCase(branchPtr(NotEqual, calleeReg, cachedFunctionReg));
752     hasSeenMultipleCallees.link(this);
753
754     JumpList slowCases;
755     emitAllocateJSObject(resultReg, nullptr, allocatorReg, structureReg, TrustedImmPtr(0), scratchReg, slowCases);
756     emitGetVirtualRegister(callee, scratchReg);
757     loadPtr(Address(scratchReg, JSFunction::offsetOfRareData()), scratchReg);
758     load32(Address(scratchReg, FunctionRareData::offsetOfObjectAllocationProfile() + ObjectAllocationProfile::offsetOfInlineCapacity()), scratchReg);
759     emitInitializeInlineStorage(resultReg, scratchReg);
760     addSlowCase(slowCases);
761     emitPutVirtualRegister(currentInstruction[1].u.operand);
762 }
763
764 void JIT::emitSlow_op_create_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
765 {
766     linkSlowCase(iter); // Callee::m_type != JSFunctionType.
767     linkSlowCase(iter); // doesn't have rare data
768     linkSlowCase(iter); // doesn't have an allocation profile
769     linkSlowCase(iter); // allocation failed (no allocator)
770     linkSlowCase(iter); // allocation failed (allocator empty)
771     linkSlowCase(iter); // cached function didn't match
772
773     JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_create_this);
774     slowPathCall.call();
775 }
776
777 void JIT::emit_op_check_tdz(Instruction* currentInstruction)
778 {
779     emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
780     addSlowCase(branchTest64(Zero, regT0));
781 }
782
783 void JIT::emitSlow_op_check_tdz(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
784 {
785     linkSlowCase(iter);
786     JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_throw_tdz_error);
787     slowPathCall.call();
788 }
789
790
791 // Slow cases
792
793 void JIT::emitSlow_op_to_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
794 {
795     linkSlowCase(iter);
796     linkSlowCase(iter);
797     linkSlowCase(iter);
798     linkSlowCase(iter);
799
800     JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_to_this);
801     slowPathCall.call();
802 }
803
804 void JIT::emitSlow_op_to_primitive(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
805 {
806     linkSlowCase(iter);
807
808     JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_to_primitive);
809     slowPathCall.call();
810 }
811
812 void JIT::emitSlow_op_not(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
813 {
814     linkSlowCase(iter);
815     
816     JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_not);
817     slowPathCall.call();
818 }
819
820 void JIT::emitSlow_op_eq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
821 {
822     linkSlowCase(iter);
823     callOperation(operationCompareEq, regT0, regT1);
824     emitTagBool(returnValueGPR);
825     emitPutVirtualRegister(currentInstruction[1].u.operand, returnValueGPR);
826 }
827
828 void JIT::emitSlow_op_neq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
829 {
830     linkSlowCase(iter);
831     callOperation(operationCompareEq, regT0, regT1);
832     xor32(TrustedImm32(0x1), regT0);
833     emitTagBool(returnValueGPR);
834     emitPutVirtualRegister(currentInstruction[1].u.operand, returnValueGPR);
835 }
836
837 void JIT::emitSlow_op_stricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
838 {
839     linkSlowCase(iter);
840     linkSlowCase(iter);
841     linkSlowCase(iter);
842     JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_stricteq);
843     slowPathCall.call();
844 }
845
846 void JIT::emitSlow_op_nstricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
847 {
848     linkSlowCase(iter);
849     linkSlowCase(iter);
850     linkSlowCase(iter);
851     JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_nstricteq);
852     slowPathCall.call();
853 }
854
855 void JIT::emitSlow_op_instanceof(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
856 {
857     int dst = currentInstruction[1].u.operand;
858     int value = currentInstruction[2].u.operand;
859     int proto = currentInstruction[3].u.operand;
860
861     linkSlowCaseIfNotJSCell(iter, value);
862     linkSlowCaseIfNotJSCell(iter, proto);
863     linkSlowCase(iter);
864     linkSlowCase(iter);
865     emitGetVirtualRegister(value, regT0);
866     emitGetVirtualRegister(proto, regT1);
867     callOperation(operationInstanceOf, dst, regT0, regT1);
868 }
869
870 void JIT::emitSlow_op_instanceof_custom(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
871 {
872     int dst = currentInstruction[1].u.operand;
873     int value = currentInstruction[2].u.operand;
874     int constructor = currentInstruction[3].u.operand;
875     int hasInstanceValue = currentInstruction[4].u.operand;
876
877     linkSlowCase(iter);
878     emitGetVirtualRegister(value, regT0);
879     emitGetVirtualRegister(constructor, regT1);
880     emitGetVirtualRegister(hasInstanceValue, regT2);
881     callOperation(operationInstanceOfCustom, regT0, regT1, regT2);
882     emitTagBool(returnValueGPR);
883     emitPutVirtualRegister(dst, returnValueGPR);
884 }
885
886 void JIT::emitSlow_op_to_number(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
887 {
888     linkSlowCase(iter);
889
890     JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_to_number);
891     slowPathCall.call();
892 }
893
894 void JIT::emitSlow_op_to_string(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
895 {
896     linkSlowCase(iter); // Not JSCell.
897     linkSlowCase(iter); // Not JSString.
898
899     JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_to_string);
900     slowPathCall.call();
901 }
902
903 #endif // USE(JSVALUE64)
904
905 void JIT::emit_op_loop_hint(Instruction*)
906 {
907     // Emit the JIT optimization check: 
908     if (canBeOptimized()) {
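        // The execution counter counts up towards zero; once the add makes it non-negative we take
        // the slow case, which may tier up to an optimizing JIT.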
909         addSlowCase(branchAdd32(PositiveOrZero, TrustedImm32(Options::executionCounterIncrementForLoop()),
910             AbsoluteAddress(m_codeBlock->addressOfJITExecuteCounter())));
911     }
912 }
913
914 void JIT::emitSlow_op_loop_hint(Instruction*, Vector<SlowCaseEntry>::iterator& iter)
915 {
916 #if ENABLE(DFG_JIT)
917     // Emit the slow path for the JIT optimization check:
918     if (canBeOptimized()) {
919         linkSlowCase(iter);
920
921         copyCalleeSavesFromFrameOrRegisterToVMEntryFrameCalleeSavesBuffer(*vm());
922
923         callOperation(operationOptimize, m_bytecodeOffset);
924         Jump noOptimizedEntry = branchTestPtr(Zero, returnValueGPR);
925         if (!ASSERT_DISABLED) {
926             Jump ok = branchPtr(MacroAssembler::Above, returnValueGPR, TrustedImmPtr(bitwise_cast<void*>(static_cast<intptr_t>(1000))));
927             abortWithReason(JITUnreasonableLoopHintJumpTarget);
928             ok.link(this);
929         }
930         jump(returnValueGPR);
931         noOptimizedEntry.link(this);
932
933         emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_loop_hint));
934     }
935 #else
936     UNUSED_PARAM(iter);
937 #endif
938 }
939
940 void JIT::emit_op_throw_static_error(Instruction* currentInstruction)
941 {
942     JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_throw_static_error);
943     slowPathCall.call();
944 }
945
946 void JIT::emit_op_check_traps(Instruction*)
947 {
948     addSlowCase(branchTest8(NonZero, AbsoluteAddress(m_vm->needTrapHandlingAddress())));
949 }
950
951 void JIT::emit_op_nop(Instruction*)
952 {
953 }
954
955 void JIT::emitSlow_op_check_traps(Instruction*, Vector<SlowCaseEntry>::iterator& iter)
956 {
957     linkSlowCase(iter);
958     callOperation(operationHandleTraps);
959 }
960
961 void JIT::emit_op_new_regexp(Instruction* currentInstruction)
962 {
963     callOperation(operationNewRegexp, currentInstruction[1].u.operand, m_codeBlock->regexp(currentInstruction[2].u.operand));
964 }
965
966 void JIT::emitNewFuncCommon(Instruction* currentInstruction)
967 {
968     Jump lazyJump;
969     int dst = currentInstruction[1].u.operand;
970
971 #if USE(JSVALUE64)
972     emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);
973 #else
974     emitLoadPayload(currentInstruction[2].u.operand, regT0);
975 #endif
976     FunctionExecutable* funcExec = m_codeBlock->functionDecl(currentInstruction[3].u.operand);
977
978     OpcodeID opcodeID = Interpreter::getOpcodeID(currentInstruction->u.opcode);
979     if (opcodeID == op_new_func)
980         callOperation(operationNewFunction, dst, regT0, funcExec);
981     else if (opcodeID == op_new_generator_func)
982         callOperation(operationNewGeneratorFunction, dst, regT0, funcExec);
983     else {
984         ASSERT(opcodeID == op_new_async_func);
985         callOperation(operationNewAsyncFunction, dst, regT0, funcExec);
986     }
987 }
988
989 void JIT::emit_op_new_func(Instruction* currentInstruction)
990 {
991     emitNewFuncCommon(currentInstruction);
992 }
993
994 void JIT::emit_op_new_generator_func(Instruction* currentInstruction)
995 {
996     emitNewFuncCommon(currentInstruction);
997 }
998
999 void JIT::emit_op_new_async_func(Instruction* currentInstruction)
1000 {
1001     emitNewFuncCommon(currentInstruction);
1002 }
1003
1004 void JIT::emitNewFuncExprCommon(Instruction* currentInstruction)
1005 {
1006     Jump notUndefinedScope;
1007     int dst = currentInstruction[1].u.operand;
1008 #if USE(JSVALUE64)
1009     emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);
1010     notUndefinedScope = branch64(NotEqual, regT0, TrustedImm64(JSValue::encode(jsUndefined())));
1011     store64(TrustedImm64(JSValue::encode(jsUndefined())), Address(callFrameRegister, sizeof(Register) * dst));
1012 #else
1013     emitLoadPayload(currentInstruction[2].u.operand, regT0);
1014     notUndefinedScope = branch32(NotEqual, tagFor(currentInstruction[2].u.operand), TrustedImm32(JSValue::UndefinedTag));
1015     emitStore(dst, jsUndefined());
1016 #endif
1017     Jump done = jump();
1018     notUndefinedScope.link(this);
1019         
1020     FunctionExecutable* function = m_codeBlock->functionExpr(currentInstruction[3].u.operand);
1021     OpcodeID opcodeID = Interpreter::getOpcodeID(currentInstruction->u.opcode);
1022
1023     if (opcodeID == op_new_func_exp)
1024         callOperation(operationNewFunction, dst, regT0, function);
1025     else if (opcodeID == op_new_generator_func_exp)
1026         callOperation(operationNewGeneratorFunction, dst, regT0, function);
1027     else {
1028         ASSERT(opcodeID == op_new_async_func_exp);
1029         callOperation(operationNewAsyncFunction, dst, regT0, function);
1030     }
1031
1032     done.link(this);
1033 }
1034
1035 void JIT::emit_op_new_func_exp(Instruction* currentInstruction)
1036 {
1037     emitNewFuncExprCommon(currentInstruction);
1038 }
1039
1040 void JIT::emit_op_new_generator_func_exp(Instruction* currentInstruction)
1041 {
1042     emitNewFuncExprCommon(currentInstruction);
1043 }
1044
1045 void JIT::emit_op_new_async_func_exp(Instruction* currentInstruction)
1046 {
1047     emitNewFuncExprCommon(currentInstruction);
1048 }
1049
1050 void JIT::emit_op_new_array(Instruction* currentInstruction)
1051 {
1052     int dst = currentInstruction[1].u.operand;
1053     int valuesIndex = currentInstruction[2].u.operand;
1054     int size = currentInstruction[3].u.operand;
1055     addPtr(TrustedImm32(valuesIndex * sizeof(Register)), callFrameRegister, regT0);
1056     callOperation(operationNewArrayWithProfile, dst,
1057         currentInstruction[4].u.arrayAllocationProfile, regT0, size);
1058 }
1059
1060 void JIT::emit_op_new_array_with_size(Instruction* currentInstruction)
1061 {
1062     int dst = currentInstruction[1].u.operand;
1063     int sizeIndex = currentInstruction[2].u.operand;
1064 #if USE(JSVALUE64)
1065     emitGetVirtualRegister(sizeIndex, regT0);
1066     callOperation(operationNewArrayWithSizeAndProfile, dst,
1067         currentInstruction[3].u.arrayAllocationProfile, regT0);
1068 #else
1069     emitLoad(sizeIndex, regT1, regT0);
1070     callOperation(operationNewArrayWithSizeAndProfile, dst,
1071         currentInstruction[3].u.arrayAllocationProfile, regT1, regT0);
1072 #endif
1073 }
1074
1075 void JIT::emit_op_new_array_buffer(Instruction* currentInstruction)
1076 {
1077     int dst = currentInstruction[1].u.operand;
1078     int valuesIndex = currentInstruction[2].u.operand;
1079     int size = currentInstruction[3].u.operand;
1080     const JSValue* values = codeBlock()->constantBuffer(valuesIndex);
1081     callOperation(operationNewArrayBufferWithProfile, dst, currentInstruction[4].u.arrayAllocationProfile, values, size);
1082 }
1083
1084 void JIT::emit_op_new_array_with_spread(Instruction* currentInstruction)
1085 {
1086     JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_new_array_with_spread);
1087     slowPathCall.call();
1088 }
1089
1090 void JIT::emit_op_spread(Instruction* currentInstruction)
1091 {
1092     JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_spread);
1093     slowPathCall.call();
1094 }
1095
1096 #if USE(JSVALUE64)
1097 void JIT::emit_op_has_structure_property(Instruction* currentInstruction)
1098 {
1099     int dst = currentInstruction[1].u.operand;
1100     int base = currentInstruction[2].u.operand;
1101     int enumerator = currentInstruction[4].u.operand;
1102
1103     emitGetVirtualRegister(base, regT0);
1104     emitGetVirtualRegister(enumerator, regT1);
1105     emitJumpSlowCaseIfNotJSCell(regT0, base);
1106
1107     load32(Address(regT0, JSCell::structureIDOffset()), regT0);
1108     addSlowCase(branch32(NotEqual, regT0, Address(regT1, JSPropertyNameEnumerator::cachedStructureIDOffset())));
1109     
1110     move(TrustedImm64(JSValue::encode(jsBoolean(true))), regT0);
1111     emitPutVirtualRegister(dst);
1112 }
1113
1114 void JIT::privateCompileHasIndexedProperty(ByValInfo* byValInfo, ReturnAddressPtr returnAddress, JITArrayMode arrayMode)
1115 {
1116     Instruction* currentInstruction = m_codeBlock->instructions().begin() + byValInfo->bytecodeIndex;
1117     
1118     PatchableJump badType;
1119     
1120     // FIXME: Add support for other types like TypedArrays and Arguments.
1121     // See https://bugs.webkit.org/show_bug.cgi?id=135033 and https://bugs.webkit.org/show_bug.cgi?id=135034.
1122     JumpList slowCases = emitLoadForArrayMode(currentInstruction, arrayMode, badType);
1123     move(TrustedImm64(JSValue::encode(jsBoolean(true))), regT0);
1124     Jump done = jump();
1125
1126     LinkBuffer patchBuffer(*this, m_codeBlock);
1127     
1128     patchBuffer.link(badType, CodeLocationLabel(MacroAssemblerCodePtr::createFromExecutableAddress(returnAddress.value())).labelAtOffset(byValInfo->returnAddressToSlowPath));
1129     patchBuffer.link(slowCases, CodeLocationLabel(MacroAssemblerCodePtr::createFromExecutableAddress(returnAddress.value())).labelAtOffset(byValInfo->returnAddressToSlowPath));
1130     
1131     patchBuffer.link(done, byValInfo->badTypeJump.labelAtOffset(byValInfo->badTypeJumpToDone));
1132     
1133     byValInfo->stubRoutine = FINALIZE_CODE_FOR_STUB(
1134         m_codeBlock, patchBuffer,
1135         ("Baseline has_indexed_property stub for %s, return point %p", toCString(*m_codeBlock).data(), returnAddress.value()));
1136     
1137     MacroAssembler::repatchJump(byValInfo->badTypeJump, CodeLocationLabel(byValInfo->stubRoutine->code().code()));
1138     MacroAssembler::repatchCall(CodeLocationCall(MacroAssemblerCodePtr(returnAddress)), FunctionPtr(operationHasIndexedPropertyGeneric));
1139 }
1140
1141 void JIT::emit_op_has_indexed_property(Instruction* currentInstruction)
1142 {
1143     int dst = currentInstruction[1].u.operand;
1144     int base = currentInstruction[2].u.operand;
1145     int property = currentInstruction[3].u.operand;
1146     ArrayProfile* profile = currentInstruction[4].u.arrayProfile;
1147     ByValInfo* byValInfo = m_codeBlock->addByValInfo();
1148     
1149     emitGetVirtualRegisters(base, regT0, property, regT1);
1150
1151     // This is technically incorrect - we're zero-extending an int32. On the hot path this doesn't matter.
1152     // We check the value as if it were a uint32 against m_vectorLength - which will always fail if the
1153     // number was negative, since m_vectorLength is always less than intmax (the total allocation
1154     // size is always less than 4Gb). As such, zero extending will have been correct (and extending the value
1155     // to 64 bits is necessary since it's used in the address calculation). We zero extend rather than sign
1156     // extend since it makes it easier to re-tag the value in the slow case.
1157     zeroExtend32ToPtr(regT1, regT1);
1158
1159     emitJumpSlowCaseIfNotJSCell(regT0, base);
1160     emitArrayProfilingSiteWithCell(regT0, regT2, profile);
1161     and32(TrustedImm32(IndexingShapeMask), regT2);
1162
1163     JITArrayMode mode = chooseArrayMode(profile);
1164     PatchableJump badType;
1165
1166     // FIXME: Add support for other types like TypedArrays and Arguments.
1167     // See https://bugs.webkit.org/show_bug.cgi?id=135033 and https://bugs.webkit.org/show_bug.cgi?id=135034.
1168     JumpList slowCases = emitLoadForArrayMode(currentInstruction, mode, badType);
1169     
1170     move(TrustedImm64(JSValue::encode(jsBoolean(true))), regT0);
1171
1172     addSlowCase(badType);
1173     addSlowCase(slowCases);
1174     
1175     Label done = label();
1176     
1177     emitPutVirtualRegister(dst);
1178
1179     Label nextHotPath = label();
1180     
1181     m_byValCompilationInfo.append(ByValCompilationInfo(byValInfo, m_bytecodeOffset, PatchableJump(), badType, mode, profile, done, nextHotPath));
1182 }
1183
1184 void JIT::emitSlow_op_has_indexed_property(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1185 {
1186     int dst = currentInstruction[1].u.operand;
1187     int base = currentInstruction[2].u.operand;
1188     int property = currentInstruction[3].u.operand;
1189     ByValInfo* byValInfo = m_byValCompilationInfo[m_byValInstructionIndex].byValInfo;
1190     
1191     linkSlowCaseIfNotJSCell(iter, base); // base cell check
1192     linkSlowCase(iter); // base array check
1193     linkSlowCase(iter); // vector length check
1194     linkSlowCase(iter); // empty value
1195     
1196     Label slowPath = label();
1197     
1198     emitGetVirtualRegister(base, regT0);
1199     emitGetVirtualRegister(property, regT1);
1200     Call call = callOperation(operationHasIndexedPropertyDefault, dst, regT0, regT1, byValInfo);
1201
1202     m_byValCompilationInfo[m_byValInstructionIndex].slowPathTarget = slowPath;
1203     m_byValCompilationInfo[m_byValInstructionIndex].returnAddress = call;
1204     m_byValInstructionIndex++;
1205 }
1206
1207 void JIT::emit_op_get_direct_pname(Instruction* currentInstruction)
1208 {
1209     int dst = currentInstruction[1].u.operand;
1210     int base = currentInstruction[2].u.operand;
1211     int index = currentInstruction[4].u.operand;
1212     int enumerator = currentInstruction[5].u.operand;
1213
1214     // Check that base is a cell
1215     emitGetVirtualRegister(base, regT0);
1216     emitJumpSlowCaseIfNotJSCell(regT0, base);
1217
1218     // Check the structure
1219     emitGetVirtualRegister(enumerator, regT2);
1220     load32(Address(regT0, JSCell::structureIDOffset()), regT1);
1221     addSlowCase(branch32(NotEqual, regT1, Address(regT2, JSPropertyNameEnumerator::cachedStructureIDOffset())));
1222
1223     // Compute the offset
1224     emitGetVirtualRegister(index, regT1);
1225     // If index is less than the enumerator's cached inline storage, then it's an inline access
1226     Jump outOfLineAccess = branch32(AboveOrEqual, regT1, Address(regT2, JSPropertyNameEnumerator::cachedInlineCapacityOffset()));
1227     addPtr(TrustedImm32(JSObject::offsetOfInlineStorage()), regT0);
1228     signExtend32ToPtr(regT1, regT1);
1229     load64(BaseIndex(regT0, regT1, TimesEight), regT0);
1230     
1231     Jump done = jump();
1232
1233     // Otherwise it's out of line
1234     outOfLineAccess.link(this);
1235     loadPtr(Address(regT0, JSObject::butterflyOffset()), regT0);
1236     sub32(Address(regT2, JSPropertyNameEnumerator::cachedInlineCapacityOffset()), regT1);
1237     neg32(regT1);
1238     signExtend32ToPtr(regT1, regT1);
1239     int32_t offsetOfFirstProperty = static_cast<int32_t>(offsetInButterfly(firstOutOfLineOffset)) * sizeof(EncodedJSValue);
1240     load64(BaseIndex(regT0, regT1, TimesEight, offsetOfFirstProperty), regT0);
1241     
1242     done.link(this);
1243     emitValueProfilingSite();
1244     emitPutVirtualRegister(dst, regT0);
1245 }
1246
1247 void JIT::emitSlow_op_get_direct_pname(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1248 {
1249     int base = currentInstruction[2].u.operand;
1250     linkSlowCaseIfNotJSCell(iter, base);
1251     linkSlowCase(iter);
1252
1253     JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_get_direct_pname);
1254     slowPathCall.call();
1255 }
1256
1257 void JIT::emit_op_enumerator_structure_pname(Instruction* currentInstruction)
1258 {
1259     int dst = currentInstruction[1].u.operand;
1260     int enumerator = currentInstruction[2].u.operand;
1261     int index = currentInstruction[3].u.operand;
1262
1263     emitGetVirtualRegister(index, regT0);
1264     emitGetVirtualRegister(enumerator, regT1);
1265     Jump inBounds = branch32(Below, regT0, Address(regT1, JSPropertyNameEnumerator::endStructurePropertyIndexOffset()));
1266
1267     move(TrustedImm64(JSValue::encode(jsNull())), regT0);
1268
1269     Jump done = jump();
1270     inBounds.link(this);
1271
1272     loadPtr(Address(regT1, JSPropertyNameEnumerator::cachedPropertyNamesVectorOffset()), regT1);
1273     signExtend32ToPtr(regT0, regT0);
1274     load64(BaseIndex(regT1, regT0, TimesEight), regT0);
1275
1276     done.link(this);
1277     emitPutVirtualRegister(dst);
1278 }
1279
1280 void JIT::emit_op_enumerator_generic_pname(Instruction* currentInstruction)
1281 {
1282     int dst = currentInstruction[1].u.operand;
1283     int enumerator = currentInstruction[2].u.operand;
1284     int index = currentInstruction[3].u.operand;
1285
1286     emitGetVirtualRegister(index, regT0);
1287     emitGetVirtualRegister(enumerator, regT1);
1288     Jump inBounds = branch32(Below, regT0, Address(regT1, JSPropertyNameEnumerator::endGenericPropertyIndexOffset()));
1289
1290     move(TrustedImm64(JSValue::encode(jsNull())), regT0);
1291
1292     Jump done = jump();
1293     inBounds.link(this);
1294
1295     loadPtr(Address(regT1, JSPropertyNameEnumerator::cachedPropertyNamesVectorOffset()), regT1);
1296     signExtend32ToPtr(regT0, regT0);
1297     load64(BaseIndex(regT1, regT0, TimesEight), regT0);
1298     
1299     done.link(this);
1300     emitPutVirtualRegister(dst);
1301 }
1302
1303 void JIT::emit_op_profile_type(Instruction* currentInstruction)
1304 {
1305     TypeLocation* cachedTypeLocation = currentInstruction[2].u.location;
1306     int valueToProfile = currentInstruction[1].u.operand;
1307
1308     emitGetVirtualRegister(valueToProfile, regT0);
1309
1310     JumpList jumpToEnd;
1311
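    // An encoded value of zero is the empty JSValue (e.g. a not-yet-initialized TDZ register), so
    // there is nothing to profile.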
1312     jumpToEnd.append(branchTest64(Zero, regT0));
1313
1314     // Compile in a predictive type check, if possible, to see if we can skip writing to the log.
1315     // These typechecks are inlined to match those of the 64-bit JSValue type checks.
1316     if (cachedTypeLocation->m_lastSeenType == TypeUndefined)
1317         jumpToEnd.append(branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsUndefined()))));
1318     else if (cachedTypeLocation->m_lastSeenType == TypeNull)
1319         jumpToEnd.append(branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsNull()))));
1320     else if (cachedTypeLocation->m_lastSeenType == TypeBoolean) {
1321         move(regT0, regT1);
1322         and64(TrustedImm32(~1), regT1);
1323         jumpToEnd.append(branch64(Equal, regT1, TrustedImm64(ValueFalse)));
1324     } else if (cachedTypeLocation->m_lastSeenType == TypeAnyInt)
1325         jumpToEnd.append(emitJumpIfInt(regT0));
1326     else if (cachedTypeLocation->m_lastSeenType == TypeNumber)
1327         jumpToEnd.append(emitJumpIfNumber(regT0));
1328     else if (cachedTypeLocation->m_lastSeenType == TypeString) {
1329         Jump isNotCell = emitJumpIfNotJSCell(regT0);
1330         jumpToEnd.append(branch8(Equal, Address(regT0, JSCell::typeInfoTypeOffset()), TrustedImm32(StringType)));
1331         isNotCell.link(this);
1332     }
1333
1334     // Load the type profiling log into T2.
1335     TypeProfilerLog* cachedTypeProfilerLog = m_vm->typeProfilerLog();
1336     move(TrustedImmPtr(cachedTypeProfilerLog), regT2);
1337     // Load the next log entry into T1.
1338     loadPtr(Address(regT2, TypeProfilerLog::currentLogEntryOffset()), regT1);
1339
1340     // Store the JSValue onto the log entry.
1341     store64(regT0, Address(regT1, TypeProfilerLog::LogEntry::valueOffset()));
1342
1343     // Store the structureID of the cell if T0 is a cell, otherwise, store 0 on the log entry.
1344     Jump notCell = emitJumpIfNotJSCell(regT0);
1345     load32(Address(regT0, JSCell::structureIDOffset()), regT0);
1346     store32(regT0, Address(regT1, TypeProfilerLog::LogEntry::structureIDOffset()));
1347     Jump skipIsCell = jump();
1348     notCell.link(this);
1349     store32(TrustedImm32(0), Address(regT1, TypeProfilerLog::LogEntry::structureIDOffset()));
1350     skipIsCell.link(this);
1351
1352     // Store the typeLocation on the log entry.
1353     move(TrustedImmPtr(cachedTypeLocation), regT0);
1354     store64(regT0, Address(regT1, TypeProfilerLog::LogEntry::locationOffset()));
1355
1356     // Increment the current log entry.
1357     addPtr(TrustedImm32(sizeof(TypeProfilerLog::LogEntry)), regT1);
1358     store64(regT1, Address(regT2, TypeProfilerLog::currentLogEntryOffset()));
1359     Jump skipClearLog = branchPtr(NotEqual, regT1, TrustedImmPtr(cachedTypeProfilerLog->logEndPtr()));
1360     // Clear the log if we're at the end of the log.
1361     callOperation(operationProcessTypeProfilerLog);
1362     skipClearLog.link(this);
1363
1364     jumpToEnd.link(this);
1365 }
1366
1367 void JIT::emit_op_log_shadow_chicken_prologue(Instruction* currentInstruction)
1368 {
1369     updateTopCallFrame();
1370     static_assert(nonArgGPR0 != regT0 && nonArgGPR0 != regT2, "we will have problems if nonArgGPR0 aliases regT0 or regT2.");
1371     GPRReg shadowPacketReg = regT0;
1372     GPRReg scratch1Reg = nonArgGPR0; // This must be a non-argument register.
1373     GPRReg scratch2Reg = regT2;
1374     ensureShadowChickenPacket(*vm(), shadowPacketReg, scratch1Reg, scratch2Reg);
1375     emitGetVirtualRegister(currentInstruction[1].u.operand, regT3);
1376     logShadowChickenProloguePacket(shadowPacketReg, scratch1Reg, regT3);
1377 }
1378
1379 void JIT::emit_op_log_shadow_chicken_tail(Instruction* currentInstruction)
1380 {
1381     updateTopCallFrame();
1382     static_assert(nonArgGPR0 != regT0 && nonArgGPR0 != regT2, "we will have problems if nonArgGPR0 aliases regT0 or regT2.");
1383     GPRReg shadowPacketReg = regT0;
1384     GPRReg scratch1Reg = nonArgGPR0; // This must be a non-argument register.
1385     GPRReg scratch2Reg = regT2;
1386     ensureShadowChickenPacket(*vm(), shadowPacketReg, scratch1Reg, scratch2Reg);
1387     emitGetVirtualRegister(currentInstruction[1].u.operand, regT2);
1388     emitGetVirtualRegister(currentInstruction[2].u.operand, regT3);
1389     logShadowChickenTailPacket(shadowPacketReg, JSValueRegs(regT2), regT3, m_codeBlock, CallSiteIndex(m_bytecodeOffset));
1390 }
1391
1392 #endif // USE(JSVALUE64)
1393
1394 void JIT::emit_op_get_enumerable_length(Instruction* currentInstruction)
1395 {
1396     JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_get_enumerable_length);
1397     slowPathCall.call();
1398 }
1399
1400 void JIT::emitSlow_op_has_structure_property(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1401 {
1402     linkSlowCase(iter);
1403     linkSlowCase(iter);
1404
1405     JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_has_structure_property);
1406     slowPathCall.call();
1407 }
1408
1409 void JIT::emit_op_has_generic_property(Instruction* currentInstruction)
1410 {
1411     JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_has_generic_property);
1412     slowPathCall.call();
1413 }
1414
1415 void JIT::emit_op_get_property_enumerator(Instruction* currentInstruction)
1416 {
1417     JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_get_property_enumerator);
1418     slowPathCall.call();
1419 }
1420
1421 void JIT::emit_op_to_index_string(Instruction* currentInstruction)
1422 {
1423     JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_to_index_string);
1424     slowPathCall.call();
1425 }
1426
1427 void JIT::emit_op_profile_control_flow(Instruction* currentInstruction)
1428 {
1429     BasicBlockLocation* basicBlockLocation = currentInstruction[1].u.basicBlockLocation;
1430 #if USE(JSVALUE64)
1431     basicBlockLocation->emitExecuteCode(*this);
1432 #else
1433     basicBlockLocation->emitExecuteCode(*this, regT0);
1434 #endif
1435 }
1436
1437 void JIT::emit_op_create_direct_arguments(Instruction* currentInstruction)
1438 {
1439     JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_create_direct_arguments);
1440     slowPathCall.call();
1441 }
1442
1443 void JIT::emit_op_create_scoped_arguments(Instruction* currentInstruction)
1444 {
1445     JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_create_scoped_arguments);
1446     slowPathCall.call();
1447 }
1448
1449 void JIT::emit_op_create_cloned_arguments(Instruction* currentInstruction)
1450 {
1451     JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_create_cloned_arguments);
1452     slowPathCall.call();
1453 }
1454
1455 void JIT::emit_op_argument_count(Instruction* currentInstruction)
1456 {
1457     int dst = currentInstruction[1].u.operand;
1458     load32(payloadFor(CallFrameSlot::argumentCount), regT0);
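    // The argument count slot includes |this|, so subtract one to get the caller-supplied argument count.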
1459     sub32(TrustedImm32(1), regT0);
1460     JSValueRegs result = JSValueRegs::withTwoAvailableRegs(regT0, regT1);
1461     boxInt32(regT0, result);
1462     emitPutVirtualRegister(dst, result);
1463 }
1464
1465 void JIT::emit_op_create_rest(Instruction* currentInstruction)
1466 {
1467     JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_create_rest);
1468     slowPathCall.call();
1469 }
1470
1471 void JIT::emit_op_get_rest_length(Instruction* currentInstruction)
1472 {
1473     int dst = currentInstruction[1].u.operand;
1474     unsigned numParamsToSkip = currentInstruction[2].u.unsignedValue;
1475     load32(payloadFor(CallFrameSlot::argumentCount), regT0);
1476     sub32(TrustedImm32(1), regT0);
1477     Jump zeroLength = branch32(LessThanOrEqual, regT0, Imm32(numParamsToSkip));
1478     sub32(Imm32(numParamsToSkip), regT0);
1479 #if USE(JSVALUE64)
1480     boxInt32(regT0, JSValueRegs(regT0));
1481 #endif
1482     Jump done = jump();
1483
1484     zeroLength.link(this);
1485 #if USE(JSVALUE64)
1486     move(TrustedImm64(JSValue::encode(jsNumber(0))), regT0);
1487 #else
1488     move(TrustedImm32(0), regT0);
1489 #endif
1490
1491     done.link(this);
1492 #if USE(JSVALUE64)
1493     emitPutVirtualRegister(dst, regT0);
1494 #else
1495     move(TrustedImm32(JSValue::Int32Tag), regT1);
1496     emitPutVirtualRegister(dst, JSValueRegs(regT1, regT0));
1497 #endif
1498 }
1499
1500 void JIT::emit_op_get_argument(Instruction* currentInstruction)
1501 {
1502     int dst = currentInstruction[1].u.operand;
1503     int index = currentInstruction[2].u.operand;
1504 #if USE(JSVALUE64)
1505     JSValueRegs resultRegs(regT0);
1506 #else
1507     JSValueRegs resultRegs(regT1, regT0);
1508 #endif
1509
1510     load32(payloadFor(CallFrameSlot::argumentCount), regT2);
1511     Jump argumentOutOfBounds = branch32(LessThanOrEqual, regT2, TrustedImm32(index));
1512     loadValue(addressFor(CallFrameSlot::thisArgument + index), resultRegs);
1513     Jump done = jump();
1514
1515     argumentOutOfBounds.link(this);
1516     moveValue(jsUndefined(), resultRegs);
1517
1518     done.link(this);
1519     emitValueProfilingSite();
1520     emitPutVirtualRegister(dst, resultRegs);
1521 }
1522
1523 void JIT::emit_op_unreachable(Instruction* currentInstruction)
1524 {
1525     JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_unreachable);
1526     slowPathCall.call();
1527 }
1528
1529 } // namespace JSC
1530
1531 #endif // ENABLE(JIT)