[JSC] Remove per-host-function CTI stub in 32bit environment
Source/JavaScriptCore/jit/JITOpcodes.cpp
/*
 * Copyright (C) 2009-2017 Apple Inc. All rights reserved.
 * Copyright (C) 2010 Patrick Gansterer <paroga@paroga.com>
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"

#if ENABLE(JIT)
#include "JIT.h"

#include "BasicBlockLocation.h"
#include "BytecodeStructs.h"
#include "Exception.h"
#include "Heap.h"
#include "InterpreterInlines.h"
#include "JITInlines.h"
#include "JSArray.h"
#include "JSCell.h"
#include "JSFunction.h"
#include "JSPropertyNameEnumerator.h"
#include "LinkBuffer.h"
#include "MaxFrameExtentForSlowPathCall.h"
#include "SlowPathCall.h"
#include "ThunkGenerators.h"
#include "TypeLocation.h"
#include "TypeProfilerLog.h"
#include "VirtualRegister.h"
#include "Watchdog.h"

namespace JSC {

#if USE(JSVALUE64)

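// A short refresher on the JSVALUE64 encoding that the fast paths below rely
// on (JSValue.h is the authoritative reference; this is only a summary):
//
//     cells:   a pointer; the top 16 bits and the immediate tag bits are zero
//     doubles: the raw double bits plus DoubleEncodeOffset (1 << 48)
//     int32:   TagTypeNumber (0xffff000000000000) | zero-extended payload
//     false:   0x06        true:      0x07
//     null:    0x02        undefined: 0x0a
//     empty:   0x00 (JSValue() - also used as the TDZ marker)
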
void JIT::emit_op_mov(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_end(Instruction* currentInstruction)
{
    RELEASE_ASSERT(returnValueGPR != callFrameRegister);
    emitGetVirtualRegister(currentInstruction[1].u.operand, returnValueGPR);
    emitRestoreCalleeSaves();
    emitFunctionEpilogue();
    ret();
}

void JIT::emit_op_jmp(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[1].u.operand;
    addJump(jump(), target);
}

void JIT::emit_op_new_object(Instruction* currentInstruction)
{
    Structure* structure = currentInstruction[3].u.objectAllocationProfile->structure();
    size_t allocationSize = JSFinalObject::allocationSize(structure->inlineCapacity());
    MarkedAllocator* allocator = subspaceFor<JSFinalObject>(*m_vm)->allocatorFor(allocationSize);

    RegisterID resultReg = regT0;
    RegisterID allocatorReg = regT1;
    RegisterID scratchReg = regT2;

    move(TrustedImmPtr(allocator), allocatorReg);
    // The unset Jump() records a placeholder slow-case entry (linkSlowCase()
    // skips entries whose jump was never set), keeping the entry count in
    // step with the two linkSlowCase() calls in emitSlow_op_new_object.
    if (allocator)
        addSlowCase(Jump());
    JumpList slowCases;
    emitAllocateJSObject(resultReg, allocator, allocatorReg, TrustedImmPtr(structure), TrustedImmPtr(0), scratchReg, slowCases);
    emitInitializeInlineStorage(resultReg, structure->inlineCapacity());
    addSlowCase(slowCases);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}
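
// The allocation fast path in a sketch (the real sequence is emitted by
// emitAllocateJSObject / AssemblyHelpers::emitAllocate; the names below are
// illustrative, not actual MarkedAllocator API):
//
//     if (cell = allocator->takeFromFreeListOrBumpRange()) // no call, no lock
//         initialize structure, butterfly, and inline storage
//     else
//         goto slow case -> operationNewObject(structure)  // full GC-aware allocation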

void JIT::emitSlow_op_new_object(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    int dst = currentInstruction[1].u.operand;
    Structure* structure = currentInstruction[3].u.objectAllocationProfile->structure();
    callOperation(operationNewObject, structure);
    emitStoreCell(dst, returnValueGPR);
}

void JIT::emit_op_overrides_has_instance(Instruction* currentInstruction)
{
    auto& bytecode = *reinterpret_cast<OpOverridesHasInstance*>(currentInstruction);
    int dst = bytecode.dst();
    int constructor = bytecode.constructor();
    int hasInstanceValue = bytecode.hasInstanceValue();

    emitGetVirtualRegister(hasInstanceValue, regT0);

    // We don't jump if we know what Symbol.hasInstance would do.
    Jump customhasInstanceValue = branchPtr(NotEqual, regT0, TrustedImmPtr(m_codeBlock->globalObject()->functionProtoHasInstanceSymbolFunction()));

    emitGetVirtualRegister(constructor, regT0);

    // Check that the constructor has the 'ImplementsDefaultHasInstance' type-info flag, i.e. that the object is not a C-API user nor a bound function.
    test8(Zero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(ImplementsDefaultHasInstance), regT0);
    emitTagBool(regT0);
    Jump done = jump();

    customhasInstanceValue.link(this);
    move(TrustedImm32(ValueTrue), regT0);

    done.link(this);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_instanceof(Instruction* currentInstruction)
{
    auto& bytecode = *reinterpret_cast<OpInstanceof*>(currentInstruction);
    int dst = bytecode.dst();
    int value = bytecode.value();
    int proto = bytecode.prototype();

    // Load the operands (baseVal, proto, and value respectively) into registers.
    // We use regT0 for baseVal since we will be done with this first, and we can then use it for the result.
    emitGetVirtualRegister(value, regT2);
    emitGetVirtualRegister(proto, regT1);

    // Check that value and proto are cells. baseVal must be a cell - this is checked by the get_by_id for Symbol.hasInstance.
    emitJumpSlowCaseIfNotJSCell(regT2, value);
    emitJumpSlowCaseIfNotJSCell(regT1, proto);

    // Check that prototype is an object.
    addSlowCase(emitJumpIfCellNotObject(regT1));

    // Optimistically load the result true, and start looping.
    // Initially, regT1 still contains proto and regT2 still contains value.
    // As we loop regT2 will be updated with its prototype, recursively walking the prototype chain.
    move(TrustedImm64(JSValue::encode(jsBoolean(true))), regT0);
    Label loop(this);

    addSlowCase(branch8(Equal, Address(regT2, JSCell::typeInfoTypeOffset()), TrustedImm32(ProxyObjectType)));

    // Load the prototype of the object in regT2.  If this is equal to regT1 - WIN!
    // Otherwise, check if we've hit null - if we have then drop out of the loop, if not go again.
    emitLoadStructure(*vm(), regT2, regT4, regT3);
    load64(Address(regT4, Structure::prototypeOffset()), regT4);
    auto hasMonoProto = branchTest64(NonZero, regT4);
    load64(Address(regT2, offsetRelativeToBase(knownPolyProtoOffset)), regT4);
    hasMonoProto.link(this);
    move(regT4, regT2);
    Jump isInstance = branchPtr(Equal, regT2, regT1);
    emitJumpIfJSCell(regT2).linkTo(loop, this);

    // We get here either by dropping out of the loop, or if value was not an Object.  Result is false.
    move(TrustedImm64(JSValue::encode(jsBoolean(false))), regT0);

    // isInstance jumps right down to here, to skip setting the result to false (it has already set true).
    isInstance.link(this);
    emitPutVirtualRegister(dst);
}
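
// What the loop above computes, as a pseudo-C++ sketch (the real code also
// handles poly-proto objects inline and punts ProxyObjects to the slow path;
// prototypeOf() here stands in for the Structure/poly-proto loads):
//
//     JSValue v = value;
//     while (true) {
//         v = prototypeOf(v);
//         if (v == proto)
//             return jsBoolean(true);
//         if (!v.isCell()) // hit null at the end of the chain
//             return jsBoolean(false);
//     }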

void JIT::emit_op_instanceof_custom(Instruction*)
{
    // This always goes to slow path since we expect it to be rare.
    addSlowCase(jump());
}

void JIT::emit_op_is_empty(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
    compare64(Equal, regT0, TrustedImm32(JSValue::encode(JSValue())), regT0);

    emitTagBool(regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_is_undefined(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
    Jump isCell = emitJumpIfJSCell(regT0);

    compare64(Equal, regT0, TrustedImm32(ValueUndefined), regT0);
    Jump done = jump();

    isCell.link(this);
    Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    move(TrustedImm32(0), regT0);
    Jump notMasqueradesAsUndefined = jump();

    isMasqueradesAsUndefined.link(this);
    emitLoadStructure(*vm(), regT0, regT1, regT2);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    loadPtr(Address(regT1, Structure::globalObjectOffset()), regT1);
    comparePtr(Equal, regT0, regT1, regT0);

    notMasqueradesAsUndefined.link(this);
    done.link(this);
    emitTagBool(regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_is_boolean(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
    xor64(TrustedImm32(static_cast<int32_t>(ValueFalse)), regT0);
    test64(Zero, regT0, TrustedImm32(static_cast<int32_t>(~1)), regT0);
    emitTagBool(regT0);
    emitPutVirtualRegister(dst);
}
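
// Worked example of the check above - booleans are the only values within
// one bit of ValueFalse (0x06):
//
//     false: 0x06 ^ 0x06 = 0x00; 0x00 & ~1 == 0 -> is a boolean
//     true:  0x07 ^ 0x06 = 0x01; 0x01 & ~1 == 0 -> is a boolean
//     null:  0x02 ^ 0x06 = 0x04; 0x04 & ~1 != 0 -> not a boolean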

void JIT::emit_op_is_number(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
    test64(NonZero, regT0, tagTypeNumberRegister, regT0);
    emitTagBool(regT0);
    emitPutVirtualRegister(dst);
}
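
// Why the single test64 suffices: tagTypeNumberRegister holds TagTypeNumber
// (0xffff000000000000). Int32s set all sixteen top bits, and doubles are
// stored offset by 1 << 48, so every number sets at least one of them; cells
// and the other immediates (booleans, null, undefined) set none. A NonZero
// test against this mask therefore answers "is this a number?" exactly.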

void JIT::emit_op_is_cell_with_type(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;
    int type = currentInstruction[3].u.operand;

    emitGetVirtualRegister(value, regT0);
    Jump isNotCell = emitJumpIfNotJSCell(regT0);

    compare8(Equal, Address(regT0, JSCell::typeInfoTypeOffset()), TrustedImm32(type), regT0);
    emitTagBool(regT0);
    Jump done = jump();

    isNotCell.link(this);
    move(TrustedImm32(ValueFalse), regT0);

    done.link(this);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_is_object(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
    Jump isNotCell = emitJumpIfNotJSCell(regT0);

    compare8(AboveOrEqual, Address(regT0, JSCell::typeInfoTypeOffset()), TrustedImm32(ObjectType), regT0);
    emitTagBool(regT0);
    Jump done = jump();

    isNotCell.link(this);
    move(TrustedImm32(ValueFalse), regT0);

    done.link(this);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_ret(Instruction* currentInstruction)
{
    ASSERT(callFrameRegister != regT1);
    ASSERT(regT1 != returnValueGPR);
    ASSERT(returnValueGPR != callFrameRegister);

    // Return the result in %eax.
    emitGetVirtualRegister(currentInstruction[1].u.operand, returnValueGPR);

    checkStackPointerAlignment();
    emitRestoreCalleeSaves();
    emitFunctionEpilogue();
    ret();
}

void JIT::emit_op_to_primitive(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);

    Jump isImm = emitJumpIfNotJSCell(regT0);
    addSlowCase(emitJumpIfCellObject(regT0));
    isImm.link(this);

    if (dst != src)
        emitPutVirtualRegister(dst);
}

void JIT::emit_op_set_function_name(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT1);
    callOperation(operationSetFunctionName, regT0, regT1);
}

void JIT::emit_op_strcat(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_strcat);
    slowPathCall.call();
}

void JIT::emit_op_not(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);

    // Invert against JSValue(false); if the value was tagged as a boolean, then all bits will be
    // clear other than the low bit (which will be 0 or 1 for false or true inputs respectively).
    // Then invert against JSValue(true), which will add the tag back in, and flip the low bit.
    xor64(TrustedImm32(static_cast<int32_t>(ValueFalse)), regT0);
    addSlowCase(branchTestPtr(NonZero, regT0, TrustedImm32(static_cast<int32_t>(~1))));
    xor64(TrustedImm32(static_cast<int32_t>(ValueTrue)), regT0);

    emitPutVirtualRegister(currentInstruction[1].u.operand);
}
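
// Worked example of the double-xor, using the encodings above:
//
//     !false: 0x06 ^ 0x06 = 0x00 -> passes the (& ~1) check -> 0x00 ^ 0x07 = 0x07 (true)
//     !true:  0x07 ^ 0x06 = 0x01 -> passes the (& ~1) check -> 0x01 ^ 0x07 = 0x06 (false)
//     !5:     0xffff000000000005 ^ 0x06 fails the (& ~1) check -> slow path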

void JIT::emit_op_jfalse(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[2].u.operand;

    GPRReg value = regT0;
    GPRReg result = regT1;
    GPRReg scratch = regT2;
    bool shouldCheckMasqueradesAsUndefined = true;

    emitGetVirtualRegister(currentInstruction[1].u.operand, value);
    emitConvertValueToBoolean(*vm(), JSValueRegs(value), result, scratch, fpRegT0, fpRegT1, shouldCheckMasqueradesAsUndefined, m_codeBlock->globalObject());

    addJump(branchTest32(Zero, result), target);
}

void JIT::emit_op_jeq_null(Instruction* currentInstruction)
{
    int src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
    Jump isNotMasqueradesAsUndefined = branchTest8(Zero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    emitLoadStructure(*vm(), regT0, regT2, regT1);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    addJump(branchPtr(Equal, Address(regT2, Structure::globalObjectOffset()), regT0), target);
    Jump masqueradesGlobalObjectIsForeign = jump();

    // Now handle the immediate cases - undefined & null.
    isImmediate.link(this);
    and64(TrustedImm32(~TagBitUndefined), regT0);
    addJump(branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsNull()))), target);

    isNotMasqueradesAsUndefined.link(this);
    masqueradesGlobalObjectIsForeign.link(this);
}

void JIT::emit_op_jneq_null(Instruction* currentInstruction)
{
    int src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
    addJump(branchTest8(Zero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined)), target);
    emitLoadStructure(*vm(), regT0, regT2, regT1);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    addJump(branchPtr(NotEqual, Address(regT2, Structure::globalObjectOffset()), regT0), target);
    Jump wasNotImmediate = jump();

    // Now handle the immediate cases - undefined & null.
    isImmediate.link(this);
    and64(TrustedImm32(~TagBitUndefined), regT0);
    addJump(branch64(NotEqual, regT0, TrustedImm64(JSValue::encode(jsNull()))), target);

    wasNotImmediate.link(this);
}
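
// Both null-compare branches above exploit that undefined (0x0a) and null
// (0x02) differ only in TagBitUndefined (0x08):
//
//     undefined & ~TagBitUndefined = 0x0a & ~0x08 = 0x02 = JSValue::encode(jsNull())
//     null      & ~TagBitUndefined = 0x02 & ~0x08 = 0x02 = JSValue::encode(jsNull())
//
// so one masked compare against jsNull() covers both immediates.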

void JIT::emit_op_jneq_ptr(Instruction* currentInstruction)
{
    int src = currentInstruction[1].u.operand;
    Special::Pointer ptr = currentInstruction[2].u.specialPointer;
    unsigned target = currentInstruction[3].u.operand;

    emitGetVirtualRegister(src, regT0);
    CCallHelpers::Jump equal = branchPtr(Equal, regT0, TrustedImmPtr(actualPointerFor(m_codeBlock, ptr)));
    store32(TrustedImm32(1), &currentInstruction[4].u.operand);
    addJump(jump(), target);
    equal.link(this);
}

void JIT::emit_op_eq(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotInt(regT0, regT1, regT2);
    compare32(Equal, regT1, regT0, regT0);
    emitTagBool(regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_jtrue(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[2].u.operand;

    GPRReg value = regT0;
    GPRReg result = regT1;
    GPRReg scratch = regT2;
    bool shouldCheckMasqueradesAsUndefined = true;
    emitGetVirtualRegister(currentInstruction[1].u.operand, value);
    emitConvertValueToBoolean(*vm(), JSValueRegs(value), result, scratch, fpRegT0, fpRegT1, shouldCheckMasqueradesAsUndefined, m_codeBlock->globalObject());
    addJump(branchTest32(NonZero, result), target);
}

void JIT::emit_op_neq(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotInt(regT0, regT1, regT2);
    compare32(NotEqual, regT1, regT0, regT0);
    emitTagBool(regT0);

    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_throw(Instruction* currentInstruction)
{
    ASSERT(regT0 == returnValueGPR);
    copyCalleeSavesToEntryFrameCalleeSavesBuffer(vm()->topEntryFrame);
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
    callOperationNoExceptionCheck(operationThrow, regT0);
    jumpToExceptionHandler(*vm());
}

void JIT::emit_op_push_with_scope(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_push_with_scope);
    slowPathCall.call();
}

void JIT::compileOpStrictEq(Instruction* currentInstruction, CompileOpStrictEqType type)
{
    int dst = currentInstruction[1].u.operand;
    int src1 = currentInstruction[2].u.operand;
    int src2 = currentInstruction[3].u.operand;

    emitGetVirtualRegisters(src1, regT0, src2, regT1);

    // Jump slow if both are cells (to cover strings).
    move(regT0, regT2);
    or64(regT1, regT2);
    addSlowCase(emitJumpIfJSCell(regT2));

    // Jump slow if either is a double. First test if it's an integer, which is fine, and then test
    // if it's a double.
    Jump leftOK = emitJumpIfInt(regT0);
    addSlowCase(emitJumpIfNumber(regT0));
    leftOK.link(this);
    Jump rightOK = emitJumpIfInt(regT1);
    addSlowCase(emitJumpIfNumber(regT1));
    rightOK.link(this);

    if (type == OpStrictEq)
        compare64(Equal, regT1, regT0, regT0);
    else
        compare64(NotEqual, regT1, regT0, regT0);
    emitTagBool(regT0);

    emitPutVirtualRegister(dst);
}
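
// Why these particular slow cases: if both operands are cells they might be
// strings, and strict equality on strings compares contents rather than
// pointers; and the same number can carry two encodings (int32 1 vs double
// 1.0), so a raw 64-bit compare is only sound once doubles are excluded.
// Every remaining combination - int32s, booleans, null, undefined, or a cell
// against an immediate - has a canonical bit pattern, so compare64 is exact.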

void JIT::emit_op_stricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpStrictEq);
}

void JIT::emit_op_nstricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpNStrictEq);
}

void JIT::emit_op_to_number(Instruction* currentInstruction)
{
    int dstVReg = currentInstruction[1].u.operand;
    int srcVReg = currentInstruction[2].u.operand;
    emitGetVirtualRegister(srcVReg, regT0);

    addSlowCase(emitJumpIfNotNumber(regT0));

    emitValueProfilingSite();
    if (srcVReg != dstVReg)
        emitPutVirtualRegister(dstVReg);
}

void JIT::emit_op_to_string(Instruction* currentInstruction)
{
    int srcVReg = currentInstruction[2].u.operand;
    emitGetVirtualRegister(srcVReg, regT0);

    addSlowCase(emitJumpIfNotJSCell(regT0));
    addSlowCase(branch8(NotEqual, Address(regT0, JSCell::typeInfoTypeOffset()), TrustedImm32(StringType)));

    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_catch(Instruction* currentInstruction)
{
    restoreCalleeSavesFromEntryFrameCalleeSavesBuffer(vm()->topEntryFrame);

    move(TrustedImmPtr(m_vm), regT3);
    load64(Address(regT3, VM::callFrameForCatchOffset()), callFrameRegister);
    storePtr(TrustedImmPtr(nullptr), Address(regT3, VM::callFrameForCatchOffset()));

    addPtr(TrustedImm32(stackPointerOffsetFor(codeBlock()) * sizeof(Register)), callFrameRegister, stackPointerRegister);

    callOperationNoExceptionCheck(operationCheckIfExceptionIsUncatchableAndNotifyProfiler);
    Jump isCatchableException = branchTest32(Zero, returnValueGPR);
    jumpToExceptionHandler(*vm());
    isCatchableException.link(this);

    move(TrustedImmPtr(m_vm), regT3);
    load64(Address(regT3, VM::exceptionOffset()), regT0);
    store64(TrustedImm64(JSValue::encode(JSValue())), Address(regT3, VM::exceptionOffset()));
    emitPutVirtualRegister(currentInstruction[1].u.operand);

    load64(Address(regT0, Exception::valueOffset()), regT0);
    emitPutVirtualRegister(currentInstruction[2].u.operand);

#if ENABLE(DFG_JIT)
    // FIXME: consider inline caching the process of doing OSR entry, including
    // argument type proofs, storing locals to the buffer, etc
    // https://bugs.webkit.org/show_bug.cgi?id=175598

    ValueProfileAndOperandBuffer* buffer = static_cast<ValueProfileAndOperandBuffer*>(currentInstruction[3].u.pointer);
    if (buffer || !shouldEmitProfiling())
        callOperation(operationTryOSREnterAtCatch, m_bytecodeOffset);
    else
        callOperation(operationTryOSREnterAtCatchAndValueProfile, m_bytecodeOffset);
    auto skipOSREntry = branchTestPtr(Zero, returnValueGPR);
    emitRestoreCalleeSaves();
    jump(returnValueGPR);
    skipOSREntry.link(this);
    if (buffer && shouldEmitProfiling()) {
        buffer->forEach([&] (ValueProfileAndOperand& profile) {
            JSValueRegs regs(regT0);
            emitGetVirtualRegister(profile.m_operand, regs);
            emitValueProfilingSite(profile.m_profile);
        });
    }
#endif // ENABLE(DFG_JIT)
}
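
// In outline, op_catch: restore the callee saves and call frame that were
// stashed when the exception unwound, bail back to the unwinding machinery if
// the exception is uncatchable (e.g. a termination), clear vm.exception and
// write the Exception object and its wrapped value into the two catch
// operands, then (under DFG_JIT) offer the tier-up machinery a chance to
// OSR-enter optimized code at this catch, value-profiling the live operands
// when that entry does not fire.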

void JIT::emit_op_assert(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_assert);
    slowPathCall.call();
}

void JIT::emit_op_identity_with_profile(Instruction*)
{
    // We don't need to do anything here...
}

void JIT::emit_op_create_lexical_environment(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_create_lexical_environment);
    slowPathCall.call();
}

void JIT::emit_op_get_parent_scope(Instruction* currentInstruction)
{
    int currentScope = currentInstruction[2].u.operand;
    emitGetVirtualRegister(currentScope, regT0);
    loadPtr(Address(regT0, JSScope::offsetOfNext()), regT0);
    emitStoreCell(currentInstruction[1].u.operand, regT0);
}

void JIT::emit_op_switch_imm(Instruction* currentInstruction)
{
    size_t tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->switchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Immediate));
    jumpTable->ensureCTITable();

    emitGetVirtualRegister(scrutinee, regT0);
    callOperation(operationSwitchImmWithUnknownKeyType, regT0, tableIndex);
    jump(returnValueGPR);
}

void JIT::emit_op_switch_char(Instruction* currentInstruction)
{
    size_t tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->switchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Character));
    jumpTable->ensureCTITable();

    emitGetVirtualRegister(scrutinee, regT0);
    callOperation(operationSwitchCharWithUnknownKeyType, regT0, tableIndex);
    jump(returnValueGPR);
}

void JIT::emit_op_switch_string(Instruction* currentInstruction)
{
    size_t tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    StringJumpTable* jumpTable = &m_codeBlock->stringSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset));

    emitGetVirtualRegister(scrutinee, regT0);
    callOperation(operationSwitchStringWithUnknownKeyType, regT0, tableIndex);
    jump(returnValueGPR);
}

void JIT::emit_op_debug(Instruction* currentInstruction)
{
    load32(codeBlock()->debuggerRequestsAddress(), regT0);
    Jump noDebuggerRequests = branchTest32(Zero, regT0);
    callOperation(operationDebug, currentInstruction[1].u.operand);
    noDebuggerRequests.link(this);
}

void JIT::emit_op_eq_null(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src1 = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src1, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    move(TrustedImm32(0), regT0);
    Jump wasNotMasqueradesAsUndefined = jump();

    isMasqueradesAsUndefined.link(this);
    emitLoadStructure(*vm(), regT0, regT2, regT1);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    loadPtr(Address(regT2, Structure::globalObjectOffset()), regT2);
    comparePtr(Equal, regT0, regT2, regT0);
    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    and64(TrustedImm32(~TagBitUndefined), regT0);
    compare64(Equal, regT0, TrustedImm32(ValueNull), regT0);

    wasNotImmediate.link(this);
    wasNotMasqueradesAsUndefined.link(this);

    emitTagBool(regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_neq_null(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src1 = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src1, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    move(TrustedImm32(1), regT0);
    Jump wasNotMasqueradesAsUndefined = jump();

    isMasqueradesAsUndefined.link(this);
    emitLoadStructure(*vm(), regT0, regT2, regT1);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    loadPtr(Address(regT2, Structure::globalObjectOffset()), regT2);
    comparePtr(NotEqual, regT0, regT2, regT0);
    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    and64(TrustedImm32(~TagBitUndefined), regT0);
    compare64(NotEqual, regT0, TrustedImm32(ValueNull), regT0);

    wasNotImmediate.link(this);
    wasNotMasqueradesAsUndefined.link(this);

    emitTagBool(regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_enter(Instruction*)
{
    // Even though CTI doesn't use them, we initialize our constant
    // registers to zap stale pointers, to avoid unnecessarily prolonging
    // object lifetime and increasing GC pressure.
    size_t count = m_codeBlock->m_numVars;
    for (size_t j = CodeBlock::llintBaselineCalleeSaveSpaceAsVirtualRegisters(); j < count; ++j)
        emitInitRegister(virtualRegisterForLocal(j).offset());

    emitWriteBarrier(m_codeBlock);

    emitEnterOptimizationCheck();
}

void JIT::emit_op_get_scope(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    emitGetFromCallFrameHeaderPtr(CallFrameSlot::callee, regT0);
    loadPtr(Address(regT0, JSFunction::offsetOfScopeChain()), regT0);
    emitStoreCell(dst, regT0);
}

void JIT::emit_op_to_this(Instruction* currentInstruction)
{
    WriteBarrierBase<Structure>* cachedStructure = &currentInstruction[2].u.structure;
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT1);

    emitJumpSlowCaseIfNotJSCell(regT1);

    addSlowCase(branch8(NotEqual, Address(regT1, JSCell::typeInfoTypeOffset()), TrustedImm32(FinalObjectType)));
    loadPtr(cachedStructure, regT2);
    addSlowCase(branchTestPtr(Zero, regT2));
    load32(Address(regT2, Structure::structureIDOffset()), regT2);
    addSlowCase(branch32(NotEqual, Address(regT1, JSCell::structureIDOffset()), regT2));
}

void JIT::emit_op_create_this(Instruction* currentInstruction)
{
    int callee = currentInstruction[2].u.operand;
    WriteBarrierBase<JSCell>* cachedFunction = &currentInstruction[4].u.jsCell;
    RegisterID calleeReg = regT0;
    RegisterID rareDataReg = regT4;
    RegisterID resultReg = regT0;
    RegisterID allocatorReg = regT1;
    RegisterID structureReg = regT2;
    RegisterID cachedFunctionReg = regT4;
    RegisterID scratchReg = regT3;

    emitGetVirtualRegister(callee, calleeReg);
    addSlowCase(branch8(NotEqual, Address(calleeReg, JSCell::typeInfoTypeOffset()), TrustedImm32(JSFunctionType)));
    loadPtr(Address(calleeReg, JSFunction::offsetOfRareData()), rareDataReg);
    addSlowCase(branchTestPtr(Zero, rareDataReg));
    loadPtr(Address(rareDataReg, FunctionRareData::offsetOfObjectAllocationProfile() + ObjectAllocationProfile::offsetOfAllocator()), allocatorReg);
    loadPtr(Address(rareDataReg, FunctionRareData::offsetOfObjectAllocationProfile() + ObjectAllocationProfile::offsetOfStructure()), structureReg);
    addSlowCase(branchTestPtr(Zero, allocatorReg));

    loadPtr(cachedFunction, cachedFunctionReg);
    Jump hasSeenMultipleCallees = branchPtr(Equal, cachedFunctionReg, TrustedImmPtr(JSCell::seenMultipleCalleeObjects()));
    addSlowCase(branchPtr(NotEqual, calleeReg, cachedFunctionReg));
    hasSeenMultipleCallees.link(this);

    JumpList slowCases;
    emitAllocateJSObject(resultReg, nullptr, allocatorReg, structureReg, TrustedImmPtr(0), scratchReg, slowCases);
    emitGetVirtualRegister(callee, scratchReg);
    loadPtr(Address(scratchReg, JSFunction::offsetOfRareData()), scratchReg);
    load32(Address(scratchReg, FunctionRareData::offsetOfObjectAllocationProfile() + ObjectAllocationProfile::offsetOfInlineCapacity()), scratchReg);
    emitInitializeInlineStorage(resultReg, scratchReg);
    addSlowCase(slowCases);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}
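
// The checks above amount to a one-element inline cache for 'new': the callee
// must be a JSFunction with rare data and an allocation profile, and it must
// match the callee cached in currentInstruction[4] (unless that slot holds
// seenMultipleCalleeObjects(), meaning the profile already covers several
// callees). When all of that holds, the object is allocated inline with the
// profiled structure and inline capacity; any miss defers to
// slow_path_create_this.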

void JIT::emitSlow_op_create_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter); // Callee::m_type != JSFunctionType.
    linkSlowCase(iter); // doesn't have rare data
    linkSlowCase(iter); // doesn't have an allocation profile
    linkSlowCase(iter); // allocation failed (no allocator)
    linkSlowCase(iter); // allocation failed (allocator empty)
    linkSlowCase(iter); // cached function didn't match

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_create_this);
    slowPathCall.call();
}

void JIT::emit_op_check_tdz(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
    addSlowCase(branchTest64(Zero, regT0));
}
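
// TDZ checks are a single test because uninitialized 'let'/'const' bindings
// hold the empty value, which encodes as all-zero bits; e.g. reading x in
// "{ x; let x = 1; }" loads 0x00 here, trips the Zero branch, and the slow
// path throws the ReferenceError.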

void JIT::emitSlow_op_check_tdz(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_throw_tdz_error);
    slowPathCall.call();
}

// Slow cases

void JIT::emitSlow_op_to_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_to_this);
    slowPathCall.call();
}

void JIT::emitSlow_op_to_primitive(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_to_primitive);
    slowPathCall.call();
}

void JIT::emitSlow_op_not(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_not);
    slowPathCall.call();
}

void JIT::emitSlow_op_eq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    callOperation(operationCompareEq, regT0, regT1);
    emitTagBool(returnValueGPR);
    emitPutVirtualRegister(currentInstruction[1].u.operand, returnValueGPR);
}

void JIT::emitSlow_op_neq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    callOperation(operationCompareEq, regT0, regT1);
    xor32(TrustedImm32(0x1), regT0);
    emitTagBool(returnValueGPR);
    emitPutVirtualRegister(currentInstruction[1].u.operand, returnValueGPR);
}

void JIT::emitSlow_op_stricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_stricteq);
    slowPathCall.call();
}

void JIT::emitSlow_op_nstricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_nstricteq);
    slowPathCall.call();
}

void JIT::emitSlow_op_instanceof(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    auto& bytecode = *reinterpret_cast<OpInstanceof*>(currentInstruction);
    int dst = bytecode.dst();
    int value = bytecode.value();
    int proto = bytecode.prototype();

    linkSlowCaseIfNotJSCell(iter, value);
    linkSlowCaseIfNotJSCell(iter, proto);
    linkSlowCase(iter);
    linkSlowCase(iter);
    emitGetVirtualRegister(value, regT0);
    emitGetVirtualRegister(proto, regT1);
    callOperation(operationInstanceOf, dst, regT0, regT1);
}

void JIT::emitSlow_op_instanceof_custom(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    auto& bytecode = *reinterpret_cast<OpInstanceofCustom*>(currentInstruction);
    int dst = bytecode.dst();
    int value = bytecode.value();
    int constructor = bytecode.constructor();
    int hasInstanceValue = bytecode.hasInstanceValue();

    linkSlowCase(iter);
    emitGetVirtualRegister(value, regT0);
    emitGetVirtualRegister(constructor, regT1);
    emitGetVirtualRegister(hasInstanceValue, regT2);
    callOperation(operationInstanceOfCustom, regT0, regT1, regT2);
    emitTagBool(returnValueGPR);
    emitPutVirtualRegister(dst, returnValueGPR);
}

void JIT::emitSlow_op_to_number(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_to_number);
    slowPathCall.call();
}

void JIT::emitSlow_op_to_string(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter); // Not JSCell.
    linkSlowCase(iter); // Not JSString.

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_to_string);
    slowPathCall.call();
}

#endif // USE(JSVALUE64)

void JIT::emit_op_loop_hint(Instruction*)
{
    // Emit the JIT optimization check:
    if (canBeOptimized()) {
        addSlowCase(branchAdd32(PositiveOrZero, TrustedImm32(Options::executionCounterIncrementForLoop()),
            AbsoluteAddress(m_codeBlock->addressOfJITExecuteCounter())));
    }
}

void JIT::emitSlow_op_loop_hint(Instruction*, Vector<SlowCaseEntry>::iterator& iter)
{
#if ENABLE(DFG_JIT)
    // Emit the slow path for the JIT optimization check:
    if (canBeOptimized()) {
        linkSlowCase(iter);

        copyCalleeSavesFromFrameOrRegisterToEntryFrameCalleeSavesBuffer(vm()->topEntryFrame);

        callOperation(operationOptimize, m_bytecodeOffset);
        Jump noOptimizedEntry = branchTestPtr(Zero, returnValueGPR);
        if (!ASSERT_DISABLED) {
            Jump ok = branchPtr(MacroAssembler::Above, returnValueGPR, TrustedImmPtr(bitwise_cast<void*>(static_cast<intptr_t>(1000))));
            abortWithReason(JITUnreasonableLoopHintJumpTarget);
            ok.link(this);
        }
        jump(returnValueGPR);
        noOptimizedEntry.link(this);

        emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_loop_hint));
    }
#else
    UNUSED_PARAM(iter);
#endif
}
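
// How the loop-hint counter works: the CodeBlock's execution counter starts
// out negative, and branchAdd32(PositiveOrZero, ...) bumps it by
// executionCounterIncrementForLoop() each iteration, so the slow path only
// triggers once the tier-up budget crosses zero. The slow path then asks
// operationOptimize for an optimized (DFG) entry point, jumps to it if one
// is ready, and otherwise resumes the baseline loop body via
// emitJumpSlowToHot.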

void JIT::emit_op_throw_static_error(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_throw_static_error);
    slowPathCall.call();
}

void JIT::emit_op_check_traps(Instruction*)
{
    addSlowCase(branchTest8(NonZero, AbsoluteAddress(m_vm->needTrapHandlingAddress())));
}

void JIT::emit_op_nop(Instruction*)
{
}

void JIT::emitSlow_op_check_traps(Instruction*, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    callOperation(operationHandleTraps);
}

void JIT::emit_op_new_regexp(Instruction* currentInstruction)
{
    callOperation(operationNewRegexp, currentInstruction[1].u.operand, m_codeBlock->regexp(currentInstruction[2].u.operand));
}

void JIT::emitNewFuncCommon(Instruction* currentInstruction)
{
    Jump lazyJump;
    int dst = currentInstruction[1].u.operand;

#if USE(JSVALUE64)
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);
#else
    emitLoadPayload(currentInstruction[2].u.operand, regT0);
#endif
    FunctionExecutable* funcExec = m_codeBlock->functionDecl(currentInstruction[3].u.operand);

    OpcodeID opcodeID = Interpreter::getOpcodeID(currentInstruction->u.opcode);
    if (opcodeID == op_new_func)
        callOperation(operationNewFunction, dst, regT0, funcExec);
    else if (opcodeID == op_new_generator_func)
        callOperation(operationNewGeneratorFunction, dst, regT0, funcExec);
    else if (opcodeID == op_new_async_func)
        callOperation(operationNewAsyncFunction, dst, regT0, funcExec);
    else {
        ASSERT(opcodeID == op_new_async_generator_func);
        callOperation(operationNewAsyncGeneratorFunction, dst, regT0, funcExec);
    }
}

void JIT::emit_op_new_func(Instruction* currentInstruction)
{
    emitNewFuncCommon(currentInstruction);
}

void JIT::emit_op_new_generator_func(Instruction* currentInstruction)
{
    emitNewFuncCommon(currentInstruction);
}

void JIT::emit_op_new_async_generator_func(Instruction* currentInstruction)
{
    emitNewFuncCommon(currentInstruction);
}

void JIT::emit_op_new_async_func(Instruction* currentInstruction)
{
    emitNewFuncCommon(currentInstruction);
}

void JIT::emitNewFuncExprCommon(Instruction* currentInstruction)
{
    Jump notUndefinedScope;
    int dst = currentInstruction[1].u.operand;
#if USE(JSVALUE64)
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);
    notUndefinedScope = branch64(NotEqual, regT0, TrustedImm64(JSValue::encode(jsUndefined())));
    store64(TrustedImm64(JSValue::encode(jsUndefined())), Address(callFrameRegister, sizeof(Register) * dst));
#else
    emitLoadPayload(currentInstruction[2].u.operand, regT0);
    notUndefinedScope = branch32(NotEqual, tagFor(currentInstruction[2].u.operand), TrustedImm32(JSValue::UndefinedTag));
    emitStore(dst, jsUndefined());
#endif
    Jump done = jump();
    notUndefinedScope.link(this);

    FunctionExecutable* function = m_codeBlock->functionExpr(currentInstruction[3].u.operand);
    OpcodeID opcodeID = Interpreter::getOpcodeID(currentInstruction->u.opcode);

    if (opcodeID == op_new_func_exp)
        callOperation(operationNewFunction, dst, regT0, function);
    else if (opcodeID == op_new_generator_func_exp)
        callOperation(operationNewGeneratorFunction, dst, regT0, function);
    else if (opcodeID == op_new_async_func_exp)
        callOperation(operationNewAsyncFunction, dst, regT0, function);
    else {
        ASSERT(opcodeID == op_new_async_generator_func_exp);
        callOperation(operationNewAsyncGeneratorFunction, dst, regT0, function);
    }

    done.link(this);
}

void JIT::emit_op_new_func_exp(Instruction* currentInstruction)
{
    emitNewFuncExprCommon(currentInstruction);
}

void JIT::emit_op_new_generator_func_exp(Instruction* currentInstruction)
{
    emitNewFuncExprCommon(currentInstruction);
}

void JIT::emit_op_new_async_func_exp(Instruction* currentInstruction)
{
    emitNewFuncExprCommon(currentInstruction);
}

void JIT::emit_op_new_async_generator_func_exp(Instruction* currentInstruction)
{
    emitNewFuncExprCommon(currentInstruction);
}

void JIT::emit_op_new_array(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int valuesIndex = currentInstruction[2].u.operand;
    int size = currentInstruction[3].u.operand;
    addPtr(TrustedImm32(valuesIndex * sizeof(Register)), callFrameRegister, regT0);
    callOperation(operationNewArrayWithProfile, dst,
        currentInstruction[4].u.arrayAllocationProfile, regT0, size);
}

void JIT::emit_op_new_array_with_size(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int sizeIndex = currentInstruction[2].u.operand;
#if USE(JSVALUE64)
    emitGetVirtualRegister(sizeIndex, regT0);
    callOperation(operationNewArrayWithSizeAndProfile, dst,
        currentInstruction[3].u.arrayAllocationProfile, regT0);
#else
    emitLoad(sizeIndex, regT1, regT0);
    callOperation(operationNewArrayWithSizeAndProfile, dst,
        currentInstruction[3].u.arrayAllocationProfile, regT1, regT0);
#endif
}

void JIT::emit_op_new_array_buffer(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int valuesIndex = currentInstruction[2].u.operand;
    int size = currentInstruction[3].u.operand;
    const JSValue* values = codeBlock()->constantBuffer(valuesIndex);
    callOperation(operationNewArrayBufferWithProfile, dst, currentInstruction[4].u.arrayAllocationProfile, values, size);
}

void JIT::emit_op_new_array_with_spread(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_new_array_with_spread);
    slowPathCall.call();
}

void JIT::emit_op_spread(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_spread);
    slowPathCall.call();
}

#if USE(JSVALUE64)
void JIT::emit_op_has_structure_property(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int enumerator = currentInstruction[4].u.operand;

    emitGetVirtualRegister(base, regT0);
    emitGetVirtualRegister(enumerator, regT1);
    emitJumpSlowCaseIfNotJSCell(regT0, base);

    load32(Address(regT0, JSCell::structureIDOffset()), regT0);
    addSlowCase(branch32(NotEqual, regT0, Address(regT1, JSPropertyNameEnumerator::cachedStructureIDOffset())));

    move(TrustedImm64(JSValue::encode(jsBoolean(true))), regT0);
    emitPutVirtualRegister(dst);
}

void JIT::privateCompileHasIndexedProperty(ByValInfo* byValInfo, ReturnAddressPtr returnAddress, JITArrayMode arrayMode)
{
    Instruction* currentInstruction = m_codeBlock->instructions().begin() + byValInfo->bytecodeIndex;

    PatchableJump badType;

    // FIXME: Add support for other types like TypedArrays and Arguments.
    // See https://bugs.webkit.org/show_bug.cgi?id=135033 and https://bugs.webkit.org/show_bug.cgi?id=135034.
    JumpList slowCases = emitLoadForArrayMode(currentInstruction, arrayMode, badType);
    move(TrustedImm64(JSValue::encode(jsBoolean(true))), regT0);
    Jump done = jump();

    LinkBuffer patchBuffer(*this, m_codeBlock);

    patchBuffer.link(badType, CodeLocationLabel(MacroAssemblerCodePtr::createFromExecutableAddress(returnAddress.value())).labelAtOffset(byValInfo->returnAddressToSlowPath));
    patchBuffer.link(slowCases, CodeLocationLabel(MacroAssemblerCodePtr::createFromExecutableAddress(returnAddress.value())).labelAtOffset(byValInfo->returnAddressToSlowPath));

    patchBuffer.link(done, byValInfo->badTypeJump.labelAtOffset(byValInfo->badTypeJumpToDone));

    byValInfo->stubRoutine = FINALIZE_CODE_FOR_STUB(
        m_codeBlock, patchBuffer,
        ("Baseline has_indexed_property stub for %s, return point %p", toCString(*m_codeBlock).data(), returnAddress.value()));

    MacroAssembler::repatchJump(byValInfo->badTypeJump, CodeLocationLabel(byValInfo->stubRoutine->code().code()));
    MacroAssembler::repatchCall(CodeLocationCall(MacroAssemblerCodePtr(returnAddress)), FunctionPtr(operationHasIndexedPropertyGeneric));
}

void JIT::emit_op_has_indexed_property(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int property = currentInstruction[3].u.operand;
    ArrayProfile* profile = currentInstruction[4].u.arrayProfile;
    ByValInfo* byValInfo = m_codeBlock->addByValInfo();

    emitGetVirtualRegisters(base, regT0, property, regT1);

    // This is technically incorrect - we're zero-extending an int32. On the hot path this doesn't matter.
    // We check the value as if it was a uint32 against the m_vectorLength - which will always fail if
    // number was signed since m_vectorLength is always less than intmax (since the total allocation
    // size is always less than 4Gb). As such zero extending will have been correct (and extending the value
    // to 64-bits is necessary since it's used in the address calculation). We zero extend rather than sign
    // extending since it makes it easier to re-tag the value in the slow case.
    zeroExtend32ToPtr(regT1, regT1);

    emitJumpSlowCaseIfNotJSCell(regT0, base);
    emitArrayProfilingSiteWithCell(regT0, regT2, profile);
    and32(TrustedImm32(IndexingShapeMask), regT2);

    JITArrayMode mode = chooseArrayMode(profile);
    PatchableJump badType;

    // FIXME: Add support for other types like TypedArrays and Arguments.
    // See https://bugs.webkit.org/show_bug.cgi?id=135033 and https://bugs.webkit.org/show_bug.cgi?id=135034.
    JumpList slowCases = emitLoadForArrayMode(currentInstruction, mode, badType);

    move(TrustedImm64(JSValue::encode(jsBoolean(true))), regT0);

    addSlowCase(badType);
    addSlowCase(slowCases);

    Label done = label();

    emitPutVirtualRegister(dst);

    Label nextHotPath = label();

    m_byValCompilationInfo.append(ByValCompilationInfo(byValInfo, m_bytecodeOffset, PatchableJump(), badType, mode, profile, done, nextHotPath));
}

void JIT::emitSlow_op_has_indexed_property(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int property = currentInstruction[3].u.operand;
    ByValInfo* byValInfo = m_byValCompilationInfo[m_byValInstructionIndex].byValInfo;

    linkSlowCaseIfNotJSCell(iter, base); // base cell check
    linkSlowCase(iter); // base array check
    linkSlowCase(iter); // vector length check
    linkSlowCase(iter); // empty value

    Label slowPath = label();

    emitGetVirtualRegister(base, regT0);
    emitGetVirtualRegister(property, regT1);
    Call call = callOperation(operationHasIndexedPropertyDefault, dst, regT0, regT1, byValInfo);

    m_byValCompilationInfo[m_byValInstructionIndex].slowPathTarget = slowPath;
    m_byValCompilationInfo[m_byValInstructionIndex].returnAddress = call;
    m_byValInstructionIndex++;
}

void JIT::emit_op_get_direct_pname(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int index = currentInstruction[4].u.operand;
    int enumerator = currentInstruction[5].u.operand;

    // Check that base is a cell.
    emitGetVirtualRegister(base, regT0);
    emitJumpSlowCaseIfNotJSCell(regT0, base);

    // Check the structure.
    emitGetVirtualRegister(enumerator, regT2);
    load32(Address(regT0, JSCell::structureIDOffset()), regT1);
    addSlowCase(branch32(NotEqual, regT1, Address(regT2, JSPropertyNameEnumerator::cachedStructureIDOffset())));

    // Compute the offset.
    emitGetVirtualRegister(index, regT1);
    // If index is less than the enumerator's cached inline capacity, then it's an inline access.
    Jump outOfLineAccess = branch32(AboveOrEqual, regT1, Address(regT2, JSPropertyNameEnumerator::cachedInlineCapacityOffset()));
    addPtr(TrustedImm32(JSObject::offsetOfInlineStorage()), regT0);
    signExtend32ToPtr(regT1, regT1);
    load64(BaseIndex(regT0, regT1, TimesEight), regT0);

    Jump done = jump();

    // Otherwise it's out of line.
    outOfLineAccess.link(this);
    loadPtr(Address(regT0, JSObject::butterflyOffset()), regT0);
    sub32(Address(regT2, JSPropertyNameEnumerator::cachedInlineCapacityOffset()), regT1);
    neg32(regT1);
    signExtend32ToPtr(regT1, regT1);
    int32_t offsetOfFirstProperty = static_cast<int32_t>(offsetInButterfly(firstOutOfLineOffset)) * sizeof(EncodedJSValue);
    load64(BaseIndex(regT0, regT1, TimesEight, offsetOfFirstProperty), regT0);

    done.link(this);
    emitValueProfilingSite();
    emitPutVirtualRegister(dst, regT0);
}
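
// The property storage layout behind the two paths above, in pseudo-C++:
//
//     if (index < inlineCapacity)
//         result = inlineStorage[index]; // properties inside the object itself
//     else
//         result = butterfly[offsetInButterfly(firstOutOfLineOffset) - (index - inlineCapacity)];
//
// Out-of-line properties grow downward from the butterfly pointer, which is
// what the sub32/neg32 pair and the negative offsetOfFirstProperty compute.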

void JIT::emitSlow_op_get_direct_pname(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int base = currentInstruction[2].u.operand;
    linkSlowCaseIfNotJSCell(iter, base);
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_get_direct_pname);
    slowPathCall.call();
}

void JIT::emit_op_enumerator_structure_pname(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int enumerator = currentInstruction[2].u.operand;
    int index = currentInstruction[3].u.operand;

    emitGetVirtualRegister(index, regT0);
    emitGetVirtualRegister(enumerator, regT1);
    Jump inBounds = branch32(Below, regT0, Address(regT1, JSPropertyNameEnumerator::endStructurePropertyIndexOffset()));

    move(TrustedImm64(JSValue::encode(jsNull())), regT0);

    Jump done = jump();
    inBounds.link(this);

    loadPtr(Address(regT1, JSPropertyNameEnumerator::cachedPropertyNamesVectorOffset()), regT1);
    signExtend32ToPtr(regT0, regT0);
    load64(BaseIndex(regT1, regT0, TimesEight), regT0);

    done.link(this);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_enumerator_generic_pname(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int enumerator = currentInstruction[2].u.operand;
    int index = currentInstruction[3].u.operand;

    emitGetVirtualRegister(index, regT0);
    emitGetVirtualRegister(enumerator, regT1);
    Jump inBounds = branch32(Below, regT0, Address(regT1, JSPropertyNameEnumerator::endGenericPropertyIndexOffset()));

    move(TrustedImm64(JSValue::encode(jsNull())), regT0);

    Jump done = jump();
    inBounds.link(this);

    loadPtr(Address(regT1, JSPropertyNameEnumerator::cachedPropertyNamesVectorOffset()), regT1);
    signExtend32ToPtr(regT0, regT0);
    load64(BaseIndex(regT1, regT0, TimesEight), regT0);

    done.link(this);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_profile_type(Instruction* currentInstruction)
{
    TypeLocation* cachedTypeLocation = currentInstruction[2].u.location;
    int valueToProfile = currentInstruction[1].u.operand;

    emitGetVirtualRegister(valueToProfile, regT0);

    JumpList jumpToEnd;

    jumpToEnd.append(branchTest64(Zero, regT0));

    // Compile in a predictive type check, if possible, to see if we can skip writing to the log.
    // These typechecks are inlined to match those of the 64-bit JSValue type checks.
    if (cachedTypeLocation->m_lastSeenType == TypeUndefined)
        jumpToEnd.append(branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsUndefined()))));
    else if (cachedTypeLocation->m_lastSeenType == TypeNull)
        jumpToEnd.append(branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsNull()))));
    else if (cachedTypeLocation->m_lastSeenType == TypeBoolean) {
        move(regT0, regT1);
        and64(TrustedImm32(~1), regT1);
        jumpToEnd.append(branch64(Equal, regT1, TrustedImm64(ValueFalse)));
    } else if (cachedTypeLocation->m_lastSeenType == TypeAnyInt)
        jumpToEnd.append(emitJumpIfInt(regT0));
    else if (cachedTypeLocation->m_lastSeenType == TypeNumber)
        jumpToEnd.append(emitJumpIfNumber(regT0));
    else if (cachedTypeLocation->m_lastSeenType == TypeString) {
        Jump isNotCell = emitJumpIfNotJSCell(regT0);
        jumpToEnd.append(branch8(Equal, Address(regT0, JSCell::typeInfoTypeOffset()), TrustedImm32(StringType)));
        isNotCell.link(this);
    }

    // Load the type profiling log into T2.
    TypeProfilerLog* cachedTypeProfilerLog = m_vm->typeProfilerLog();
    move(TrustedImmPtr(cachedTypeProfilerLog), regT2);
    // Load the next log entry into T1.
    loadPtr(Address(regT2, TypeProfilerLog::currentLogEntryOffset()), regT1);

    // Store the JSValue onto the log entry.
    store64(regT0, Address(regT1, TypeProfilerLog::LogEntry::valueOffset()));

    // Store the structureID of the cell if T0 is a cell, otherwise, store 0 on the log entry.
    Jump notCell = emitJumpIfNotJSCell(regT0);
    load32(Address(regT0, JSCell::structureIDOffset()), regT0);
    store32(regT0, Address(regT1, TypeProfilerLog::LogEntry::structureIDOffset()));
    Jump skipIsCell = jump();
    notCell.link(this);
    store32(TrustedImm32(0), Address(regT1, TypeProfilerLog::LogEntry::structureIDOffset()));
    skipIsCell.link(this);

    // Store the typeLocation on the log entry.
    move(TrustedImmPtr(cachedTypeLocation), regT0);
    store64(regT0, Address(regT1, TypeProfilerLog::LogEntry::locationOffset()));

    // Increment the current log entry.
    addPtr(TrustedImm32(sizeof(TypeProfilerLog::LogEntry)), regT1);
    store64(regT1, Address(regT2, TypeProfilerLog::currentLogEntryOffset()));
    Jump skipClearLog = branchPtr(NotEqual, regT1, TrustedImmPtr(cachedTypeProfilerLog->logEndPtr()));
    // Clear the log if we're at the end of the log.
    callOperation(operationProcessTypeProfilerLog);
    skipClearLog.link(this);

    jumpToEnd.link(this);
}
1413
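// ShadowChicken keeps a side log of prologue and tail-call packets so that a
// shadow stack (including frames deleted by tail calls) can be reconstructed,
// e.g. for the debugger. The prologue packet records the current frame along
// with the scope loaded from operand 1.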
void JIT::emit_op_log_shadow_chicken_prologue(Instruction* currentInstruction)
{
    updateTopCallFrame();
    static_assert(nonArgGPR0 != regT0 && nonArgGPR0 != regT2, "we will have problems if nonArgGPR0 is regT0 or regT2.");
    GPRReg shadowPacketReg = regT0;
    GPRReg scratch1Reg = nonArgGPR0; // This must be a non-argument register.
    GPRReg scratch2Reg = regT2;
    ensureShadowChickenPacket(*vm(), shadowPacketReg, scratch1Reg, scratch2Reg);
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT3);
    logShadowChickenProloguePacket(shadowPacketReg, scratch1Reg, regT3);
}

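// The tail packet additionally captures the this value (operand 1) and the
// scope (operand 2), plus the code block and call-site index, since the tail
// call is about to reuse the current frame.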
void JIT::emit_op_log_shadow_chicken_tail(Instruction* currentInstruction)
{
    updateTopCallFrame();
    static_assert(nonArgGPR0 != regT0 && nonArgGPR0 != regT2, "we will have problems if nonArgGPR0 is regT0 or regT2.");
    GPRReg shadowPacketReg = regT0;
    GPRReg scratch1Reg = nonArgGPR0; // This must be a non-argument register.
    GPRReg scratch2Reg = regT2;
    ensureShadowChickenPacket(*vm(), shadowPacketReg, scratch1Reg, scratch2Reg);
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT2);
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT3);
    logShadowChickenTailPacket(shadowPacketReg, JSValueRegs(regT2), regT3, m_codeBlock, CallSiteIndex(m_bytecodeOffset));
}

#endif // USE(JSVALUE64)

void JIT::emit_op_get_enumerable_length(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_get_enumerable_length);
    slowPathCall.call();
}

void JIT::emitSlow_op_has_structure_property(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_has_structure_property);
    slowPathCall.call();
}

void JIT::emit_op_has_generic_property(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_has_generic_property);
    slowPathCall.call();
}

void JIT::emit_op_get_property_enumerator(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_get_property_enumerator);
    slowPathCall.call();
}

void JIT::emit_op_to_index_string(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_to_index_string);
    slowPathCall.call();
}

void JIT::emit_op_profile_control_flow(Instruction* currentInstruction)
{
    BasicBlockLocation* basicBlockLocation = currentInstruction[1].u.basicBlockLocation;
#if USE(JSVALUE64)
    basicBlockLocation->emitExecuteCode(*this);
#else
    basicBlockLocation->emitExecuteCode(*this, regT0);
#endif
}

void JIT::emit_op_create_direct_arguments(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_create_direct_arguments);
    slowPathCall.call();
}

void JIT::emit_op_create_scoped_arguments(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_create_scoped_arguments);
    slowPathCall.call();
}

void JIT::emit_op_create_cloned_arguments(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_create_cloned_arguments);
    slowPathCall.call();
}

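// CallFrameSlot::argumentCount includes |this|, so the count is decremented
// by one before being boxed as the int32 result.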
void JIT::emit_op_argument_count(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    load32(payloadFor(CallFrameSlot::argumentCount), regT0);
    sub32(TrustedImm32(1), regT0);
    JSValueRegs result = JSValueRegs::withTwoAvailableRegs(regT0, regT1);
    boxInt32(regT0, result);
    emitPutVirtualRegister(dst, result);
}

void JIT::emit_op_create_rest(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_create_rest);
    slowPathCall.call();
}

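// The rest length is max(argumentCount - 1 - numParamsToSkip, 0): one slot is
// subtracted for |this|, and a non-positive remainder takes the zero-length path.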
void JIT::emit_op_get_rest_length(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    unsigned numParamsToSkip = currentInstruction[2].u.unsignedValue;
    load32(payloadFor(CallFrameSlot::argumentCount), regT0);
    sub32(TrustedImm32(1), regT0);
    Jump zeroLength = branch32(LessThanOrEqual, regT0, Imm32(numParamsToSkip));
    sub32(Imm32(numParamsToSkip), regT0);
#if USE(JSVALUE64)
    boxInt32(regT0, JSValueRegs(regT0));
#endif
    Jump done = jump();

    zeroLength.link(this);
#if USE(JSVALUE64)
    move(TrustedImm64(JSValue::encode(jsNumber(0))), regT0);
#else
    move(TrustedImm32(0), regT0);
#endif

    done.link(this);
#if USE(JSVALUE64)
    emitPutVirtualRegister(dst, regT0);
#else
    move(TrustedImm32(JSValue::Int32Tag), regT1);
    emitPutVirtualRegister(dst, JSValueRegs(regT1, regT0));
#endif
}

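// op_get_argument performs a bounds-checked read of the argument at |index|
// (1-based; slot 0 holds |this|) and yields jsUndefined() when the index is
// beyond the frame's argument count.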
void JIT::emit_op_get_argument(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int index = currentInstruction[2].u.operand;
#if USE(JSVALUE64)
    JSValueRegs resultRegs(regT0);
#else
    JSValueRegs resultRegs(regT1, regT0);
#endif

    load32(payloadFor(CallFrameSlot::argumentCount), regT2);
    Jump argumentOutOfBounds = branch32(LessThanOrEqual, regT2, TrustedImm32(index));
    loadValue(addressFor(CallFrameSlot::thisArgument + index), resultRegs);
    Jump done = jump();

    argumentOutOfBounds.link(this);
    moveValue(jsUndefined(), resultRegs);

    done.link(this);
    emitValueProfilingSite();
    emitPutVirtualRegister(dst, resultRegs);
}

void JIT::emit_op_unreachable(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_unreachable);
    slowPathCall.call();
}

} // namespace JSC

#endif // ENABLE(JIT)