JSC should have InstanceOf inline caching
[WebKit-https.git] / Source / JavaScriptCore / jit / JIT.h
1 /*
2  * Copyright (C) 2008-2018 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #pragma once
27
28 #if ENABLE(JIT)
29
30 // We've run into some problems where changing the size of the class JIT leads to
31 // performance fluctuations. Try forcing alignment in an attempt to stabilize this.
32 #if COMPILER(GCC_OR_CLANG)
33 #define JIT_CLASS_ALIGNMENT alignas(32)
34 #else
35 #define JIT_CLASS_ALIGNMENT
36 #endif
37
38 #define ASSERT_JIT_OFFSET(actual, expected) ASSERT_WITH_MESSAGE(actual == expected, "JIT Offset \"%s\" should be %d, not %d.\n", #expected, static_cast<int>(expected), static_cast<int>(actual));
39
40 #include "CodeBlock.h"
41 #include "CommonSlowPaths.h"
42 #include "JITDisassembler.h"
43 #include "JITInlineCacheGenerator.h"
44 #include "JITMathIC.h"
45 #include "JSInterfaceJIT.h"
46 #include "PCToCodeOriginMap.h"
47 #include "UnusedPointer.h"
48
49 namespace JSC {
50
51     enum OpcodeID : unsigned;
52
53     class ArrayAllocationProfile;
54     class CallLinkInfo;
55     class CodeBlock;
56     class FunctionExecutable;
57     class JIT;
58     class Identifier;
59     class Interpreter;
60     class BlockDirectory;
61     class Register;
62     class StructureChain;
63     class StructureStubInfo;
64
65     struct Instruction;
66     struct OperandTypes;
67     struct SimpleJumpTable;
68     struct StringJumpTable;
69
70     struct CallRecord {
71         MacroAssembler::Call from;
72         unsigned bytecodeOffset;
73         FunctionPtr<OperationPtrTag> callee;
74
75         CallRecord()
76         {
77         }
78
79         CallRecord(MacroAssembler::Call from, unsigned bytecodeOffset, FunctionPtr<OperationPtrTag> callee)
80             : from(from)
81             , bytecodeOffset(bytecodeOffset)
82             , callee(callee)
83         {
84         }
85     };
86
87     struct JumpTable {
88         MacroAssembler::Jump from;
89         unsigned toBytecodeOffset;
90
91         JumpTable(MacroAssembler::Jump f, unsigned t)
92             : from(f)
93             , toBytecodeOffset(t)
94         {
95         }
96     };
97
98     struct SlowCaseEntry {
99         MacroAssembler::Jump from;
100         unsigned to;
101         
102         SlowCaseEntry(MacroAssembler::Jump f, unsigned t)
103             : from(f)
104             , to(t)
105         {
106         }
107     };
108
109     struct SwitchRecord {
110         enum Type {
111             Immediate,
112             Character,
113             String
114         };
115
116         Type type;
117
118         union {
119             SimpleJumpTable* simpleJumpTable;
120             StringJumpTable* stringJumpTable;
121         } jumpTable;
122
123         unsigned bytecodeOffset;
124         unsigned defaultOffset;
125
126         SwitchRecord(SimpleJumpTable* jumpTable, unsigned bytecodeOffset, unsigned defaultOffset, Type type)
127             : type(type)
128             , bytecodeOffset(bytecodeOffset)
129             , defaultOffset(defaultOffset)
130         {
131             this->jumpTable.simpleJumpTable = jumpTable;
132         }
133
134         SwitchRecord(StringJumpTable* jumpTable, unsigned bytecodeOffset, unsigned defaultOffset)
135             : type(String)
136             , bytecodeOffset(bytecodeOffset)
137             , defaultOffset(defaultOffset)
138         {
139             this->jumpTable.stringJumpTable = jumpTable;
140         }
141     };
142
143     struct ByValCompilationInfo {
144         ByValCompilationInfo() { }
145         
146         ByValCompilationInfo(ByValInfo* byValInfo, unsigned bytecodeIndex, MacroAssembler::PatchableJump notIndexJump, MacroAssembler::PatchableJump badTypeJump, JITArrayMode arrayMode, ArrayProfile* arrayProfile, MacroAssembler::Label doneTarget, MacroAssembler::Label nextHotPathTarget)
147             : byValInfo(byValInfo)
148             , bytecodeIndex(bytecodeIndex)
149             , notIndexJump(notIndexJump)
150             , badTypeJump(badTypeJump)
151             , arrayMode(arrayMode)
152             , arrayProfile(arrayProfile)
153             , doneTarget(doneTarget)
154             , nextHotPathTarget(nextHotPathTarget)
155         {
156         }
157
158         ByValInfo* byValInfo;
159         unsigned bytecodeIndex;
160         MacroAssembler::PatchableJump notIndexJump;
161         MacroAssembler::PatchableJump badTypeJump;
162         JITArrayMode arrayMode;
163         ArrayProfile* arrayProfile;
164         MacroAssembler::Label doneTarget;
165         MacroAssembler::Label nextHotPathTarget;
166         MacroAssembler::Label slowPathTarget;
167         MacroAssembler::Call returnAddress;
168     };
169
170     struct CallCompilationInfo {
171         MacroAssembler::DataLabelPtr hotPathBegin;
172         MacroAssembler::Call hotPathOther;
173         MacroAssembler::Call callReturnLocation;
174         CallLinkInfo* callLinkInfo;
175     };
176
177     void ctiPatchCallByReturnAddress(ReturnAddressPtr, FunctionPtr<CFunctionPtrTag> newCalleeFunction);
178
179     class JIT_CLASS_ALIGNMENT JIT : private JSInterfaceJIT {
180         friend class JITSlowPathCall;
181         friend class JITStubCall;
182
183         using MacroAssembler::Jump;
184         using MacroAssembler::JumpList;
185         using MacroAssembler::Label;
186
187         static const uintptr_t patchGetByIdDefaultStructure = unusedPointer;
188         static const int patchGetByIdDefaultOffset = 0;
189         // Magic number - initial offset cannot be representable as a signed 8bit value, or the X86Assembler
190         // will compress the displacement, and we may not be able to fit a patched offset.
191         static const int patchPutByIdDefaultOffset = 256;
192
193     public:
194         JIT(VM*, CodeBlock* = 0, unsigned loopOSREntryBytecodeOffset = 0);
195         ~JIT();
196
197         void compileWithoutLinking(JITCompilationEffort);
198         CompilationResult link();
199
200         void doMainThreadPreparationBeforeCompile();
201         
202         static CompilationResult compile(VM* vm, CodeBlock* codeBlock, JITCompilationEffort effort, unsigned bytecodeOffset = 0)
203         {
204             return JIT(vm, codeBlock, bytecodeOffset).privateCompile(effort);
205         }
206         
207         static void compileGetByVal(VM* vm, CodeBlock* codeBlock, ByValInfo* byValInfo, ReturnAddressPtr returnAddress, JITArrayMode arrayMode)
208         {
209             JIT jit(vm, codeBlock);
210             jit.m_bytecodeOffset = byValInfo->bytecodeIndex;
211             jit.privateCompileGetByVal(byValInfo, returnAddress, arrayMode);
212         }
213
214         static void compileGetByValWithCachedId(VM* vm, CodeBlock* codeBlock, ByValInfo* byValInfo, ReturnAddressPtr returnAddress, const Identifier& propertyName)
215         {
216             JIT jit(vm, codeBlock);
217             jit.m_bytecodeOffset = byValInfo->bytecodeIndex;
218             jit.privateCompileGetByValWithCachedId(byValInfo, returnAddress, propertyName);
219         }
220
221         static void compilePutByVal(VM* vm, CodeBlock* codeBlock, ByValInfo* byValInfo, ReturnAddressPtr returnAddress, JITArrayMode arrayMode)
222         {
223             JIT jit(vm, codeBlock);
224             jit.m_bytecodeOffset = byValInfo->bytecodeIndex;
225             jit.privateCompilePutByVal(byValInfo, returnAddress, arrayMode);
226         }
227         
228         static void compileDirectPutByVal(VM* vm, CodeBlock* codeBlock, ByValInfo* byValInfo, ReturnAddressPtr returnAddress, JITArrayMode arrayMode)
229         {
230             JIT jit(vm, codeBlock);
231             jit.m_bytecodeOffset = byValInfo->bytecodeIndex;
232             jit.privateCompilePutByVal(byValInfo, returnAddress, arrayMode);
233         }
234
235         static void compilePutByValWithCachedId(VM* vm, CodeBlock* codeBlock, ByValInfo* byValInfo, ReturnAddressPtr returnAddress, PutKind putKind, const Identifier& propertyName)
236         {
237             JIT jit(vm, codeBlock);
238             jit.m_bytecodeOffset = byValInfo->bytecodeIndex;
239             jit.privateCompilePutByValWithCachedId(byValInfo, returnAddress, putKind, propertyName);
240         }
241
242         static void compileHasIndexedProperty(VM* vm, CodeBlock* codeBlock, ByValInfo* byValInfo, ReturnAddressPtr returnAddress, JITArrayMode arrayMode)
243         {
244             JIT jit(vm, codeBlock);
245             jit.m_bytecodeOffset = byValInfo->bytecodeIndex;
246             jit.privateCompileHasIndexedProperty(byValInfo, returnAddress, arrayMode);
247         }
248
249         static unsigned frameRegisterCountFor(CodeBlock*);
250         static int stackPointerOffsetFor(CodeBlock*);
251
252         JS_EXPORT_PRIVATE static HashMap<CString, Seconds> compileTimeStats();
253         JS_EXPORT_PRIVATE static Seconds totalCompileTime();
254
255     private:
256         void privateCompileMainPass();
257         void privateCompileLinkPass();
258         void privateCompileSlowCases();
259         CompilationResult privateCompile(JITCompilationEffort);
260         
261         void privateCompileGetByVal(ByValInfo*, ReturnAddressPtr, JITArrayMode);
262         void privateCompileGetByValWithCachedId(ByValInfo*, ReturnAddressPtr, const Identifier&);
263         void privateCompilePutByVal(ByValInfo*, ReturnAddressPtr, JITArrayMode);
264         void privateCompilePutByValWithCachedId(ByValInfo*, ReturnAddressPtr, PutKind, const Identifier&);
265
266         void privateCompileHasIndexedProperty(ByValInfo*, ReturnAddressPtr, JITArrayMode);
267
268         void privateCompilePatchGetArrayLength(ReturnAddressPtr returnAddress);
269
270         // Add a call out from JIT code, without an exception check.
271         Call appendCall(const FunctionPtr<CFunctionPtrTag> function)
272         {
273             Call functionCall = call(OperationPtrTag);
274             m_calls.append(CallRecord(functionCall, m_bytecodeOffset, function.retagged<OperationPtrTag>()));
275             return functionCall;
276         }
277
278 #if OS(WINDOWS) && CPU(X86_64)
279         Call appendCallWithSlowPathReturnType(const FunctionPtr<CFunctionPtrTag> function)
280         {
281             Call functionCall = callWithSlowPathReturnType(OperationPtrTag);
282             m_calls.append(CallRecord(functionCall, m_bytecodeOffset, function.retagged<OperationPtrTag>()));
283             return functionCall;
284         }
285 #endif
286
287         void exceptionCheck(Jump jumpToHandler)
288         {
289             m_exceptionChecks.append(jumpToHandler);
290         }
291
292         void exceptionCheck()
293         {
294             m_exceptionChecks.append(emitExceptionCheck(*vm()));
295         }
296
297         void exceptionCheckWithCallFrameRollback()
298         {
299             m_exceptionChecksWithCallFrameRollback.append(emitExceptionCheck(*vm()));
300         }
301
302         void privateCompileExceptionHandlers();
303
304         void addSlowCase(Jump);
305         void addSlowCase(const JumpList&);
306         void addSlowCase();
307         void addJump(Jump, int);
308         void emitJumpSlowToHot(Jump, int);
309
310         void compileOpCall(OpcodeID, Instruction*, unsigned callLinkInfoIndex);
311         void compileOpCallSlowCase(OpcodeID, Instruction*, Vector<SlowCaseEntry>::iterator&, unsigned callLinkInfoIndex);
312         void compileSetupVarargsFrame(OpcodeID, Instruction*, CallLinkInfo*);
313         void compileCallEval(Instruction*);
314         void compileCallEvalSlowCase(Instruction*, Vector<SlowCaseEntry>::iterator&);
315         void emitPutCallResult(Instruction*);
316
317         enum class CompileOpStrictEqType { StrictEq, NStrictEq };
318         void compileOpStrictEq(Instruction*, CompileOpStrictEqType);
319         void compileOpStrictEqJump(Instruction*, CompileOpStrictEqType);
320         enum class CompileOpEqType { Eq, NEq };
321         void compileOpEqJumpSlow(Vector<SlowCaseEntry>::iterator&, CompileOpEqType, int jumpTarget);
322         bool isOperandConstantDouble(int src);
323         
324         void emitLoadDouble(int index, FPRegisterID value);
325         void emitLoadInt32ToDouble(int index, FPRegisterID value);
326
327         enum WriteBarrierMode { UnconditionalWriteBarrier, ShouldFilterBase, ShouldFilterValue, ShouldFilterBaseAndValue };
328         // value register in write barrier is used before any scratch registers
329         // so may safely be the same as either of the scratch registers.
330         void emitWriteBarrier(unsigned owner, unsigned value, WriteBarrierMode);
331         void emitWriteBarrier(JSCell* owner, unsigned value, WriteBarrierMode);
332         void emitWriteBarrier(JSCell* owner);
333
334         // This assumes that the value to profile is in regT0 and that regT3 is available for
335         // scratch.
336         void emitValueProfilingSite(ValueProfile&);
337         void emitValueProfilingSite(unsigned bytecodeOffset);
338         void emitValueProfilingSite();
339         void emitArrayProfilingSiteWithCell(RegisterID cell, RegisterID indexingType, ArrayProfile*);
340         void emitArrayProfilingSiteForBytecodeIndexWithCell(RegisterID cell, RegisterID indexingType, unsigned bytecodeIndex);
341         void emitArrayProfileStoreToHoleSpecialCase(ArrayProfile*);
342         void emitArrayProfileOutOfBoundsSpecialCase(ArrayProfile*);
343         
344         JITArrayMode chooseArrayMode(ArrayProfile*);
345         
346         // Property is in regT1, base is in regT0. regT2 contains indexing type.
347         // Property is int-checked and zero extended. Base is cell checked.
348         // Structure is already profiled. Returns the slow cases. Fall-through
349         // case contains result in regT0, and it is not yet profiled.
350         JumpList emitInt32Load(Instruction* instruction, PatchableJump& badType) { return emitContiguousLoad(instruction, badType, Int32Shape); }
351         JumpList emitDoubleLoad(Instruction*, PatchableJump& badType);
352         JumpList emitContiguousLoad(Instruction*, PatchableJump& badType, IndexingType expectedShape = ContiguousShape);
353         JumpList emitArrayStorageLoad(Instruction*, PatchableJump& badType);
354         JumpList emitLoadForArrayMode(Instruction*, JITArrayMode, PatchableJump& badType);
355
356         JumpList emitInt32GetByVal(Instruction* instruction, PatchableJump& badType) { return emitContiguousGetByVal(instruction, badType, Int32Shape); }
357         JumpList emitDoubleGetByVal(Instruction*, PatchableJump& badType);
358         JumpList emitContiguousGetByVal(Instruction*, PatchableJump& badType, IndexingType expectedShape = ContiguousShape);
359         JumpList emitArrayStorageGetByVal(Instruction*, PatchableJump& badType);
360         JumpList emitDirectArgumentsGetByVal(Instruction*, PatchableJump& badType);
361         JumpList emitScopedArgumentsGetByVal(Instruction*, PatchableJump& badType);
362         JumpList emitIntTypedArrayGetByVal(Instruction*, PatchableJump& badType, TypedArrayType);
363         JumpList emitFloatTypedArrayGetByVal(Instruction*, PatchableJump& badType, TypedArrayType);
364         
365         // Property is in regT1, base is in regT0. regT2 contains indexing type.
366         // The value to store is not yet loaded. Property is int-checked and
367         // zero-extended. Base is cell checked. Structure is already profiled.
368         // Returns the slow cases.
369         JumpList emitInt32PutByVal(Instruction* currentInstruction, PatchableJump& badType)
370         {
371             return emitGenericContiguousPutByVal(currentInstruction, badType, Int32Shape);
372         }
373         JumpList emitDoublePutByVal(Instruction* currentInstruction, PatchableJump& badType)
374         {
375             return emitGenericContiguousPutByVal(currentInstruction, badType, DoubleShape);
376         }
377         JumpList emitContiguousPutByVal(Instruction* currentInstruction, PatchableJump& badType)
378         {
379             return emitGenericContiguousPutByVal(currentInstruction, badType);
380         }
381         JumpList emitGenericContiguousPutByVal(Instruction*, PatchableJump& badType, IndexingType indexingShape = ContiguousShape);
382         JumpList emitArrayStoragePutByVal(Instruction*, PatchableJump& badType);
383         JumpList emitIntTypedArrayPutByVal(Instruction*, PatchableJump& badType, TypedArrayType);
384         JumpList emitFloatTypedArrayPutByVal(Instruction*, PatchableJump& badType, TypedArrayType);
385
386         // Identifier check helper for GetByVal and PutByVal.
387         void emitByValIdentifierCheck(ByValInfo*, RegisterID cell, RegisterID scratch, const Identifier&, JumpList& slowCases);
388
389         JITGetByIdGenerator emitGetByValWithCachedId(ByValInfo*, Instruction*, const Identifier&, Jump& fastDoneCase, Jump& slowDoneCase, JumpList& slowCases);
390         JITPutByIdGenerator emitPutByValWithCachedId(ByValInfo*, Instruction*, PutKind, const Identifier&, JumpList& doneCases, JumpList& slowCases);
391
392         enum FinalObjectMode { MayBeFinal, KnownNotFinal };
393
394         void emitGetVirtualRegister(int src, JSValueRegs dst);
395         void emitPutVirtualRegister(int dst, JSValueRegs src);
396
397         int32_t getOperandConstantInt(int src);
398         double getOperandConstantDouble(int src);
399
400 #if USE(JSVALUE32_64)
401         bool getOperandConstantInt(int op1, int op2, int& op, int32_t& constant);
402
403         void emitLoadTag(int index, RegisterID tag);
404         void emitLoadPayload(int index, RegisterID payload);
405
406         void emitLoad(const JSValue& v, RegisterID tag, RegisterID payload);
407         void emitLoad(int index, RegisterID tag, RegisterID payload, RegisterID base = callFrameRegister);
408         void emitLoad2(int index1, RegisterID tag1, RegisterID payload1, int index2, RegisterID tag2, RegisterID payload2);
409
410         void emitStore(int index, RegisterID tag, RegisterID payload, RegisterID base = callFrameRegister);
411         void emitStore(int index, const JSValue constant, RegisterID base = callFrameRegister);
412         void emitStoreInt32(int index, RegisterID payload, bool indexIsInt32 = false);
413         void emitStoreInt32(int index, TrustedImm32 payload, bool indexIsInt32 = false);
414         void emitStoreCell(int index, RegisterID payload, bool indexIsCell = false);
415         void emitStoreBool(int index, RegisterID payload, bool indexIsBool = false);
416         void emitStoreDouble(int index, FPRegisterID value);
417
418         void emitJumpSlowCaseIfNotJSCell(int virtualRegisterIndex);
419         void emitJumpSlowCaseIfNotJSCell(int virtualRegisterIndex, RegisterID tag);
420
421         void compileGetByIdHotPath(const Identifier*);
422
423         // Arithmetic opcode helpers
424         void emitBinaryDoubleOp(OpcodeID, int dst, int op1, int op2, OperandTypes, JumpList& notInt32Op1, JumpList& notInt32Op2, bool op1IsInRegisters = true, bool op2IsInRegisters = true);
425
426 #else // USE(JSVALUE32_64)
427         void emitGetVirtualRegister(int src, RegisterID dst);
428         void emitGetVirtualRegister(VirtualRegister src, RegisterID dst);
429         void emitGetVirtualRegisters(int src1, RegisterID dst1, int src2, RegisterID dst2);
430         void emitGetVirtualRegisters(VirtualRegister src1, RegisterID dst1, VirtualRegister src2, RegisterID dst2);
431         void emitPutVirtualRegister(int dst, RegisterID from = regT0);
432         void emitPutVirtualRegister(VirtualRegister dst, RegisterID from = regT0);
433         void emitStoreCell(int dst, RegisterID payload, bool /* only used in JSValue32_64 */ = false)
434         {
435             emitPutVirtualRegister(dst, payload);
436         }
437         void emitStoreCell(VirtualRegister dst, RegisterID payload)
438         {
439             emitPutVirtualRegister(dst, payload);
440         }
441
442         Jump emitJumpIfBothJSCells(RegisterID, RegisterID, RegisterID);
443         void emitJumpSlowCaseIfJSCell(RegisterID);
444         void emitJumpSlowCaseIfNotJSCell(RegisterID);
445         void emitJumpSlowCaseIfNotJSCell(RegisterID, int VReg);
446         Jump emitJumpIfNotInt(RegisterID, RegisterID, RegisterID scratch);
447         PatchableJump emitPatchableJumpIfNotInt(RegisterID);
448         void emitJumpSlowCaseIfNotInt(RegisterID);
449         void emitJumpSlowCaseIfNotNumber(RegisterID);
450         void emitJumpSlowCaseIfNotInt(RegisterID, RegisterID, RegisterID scratch);
451
452         void emitTagBool(RegisterID);
453
454         void compileGetByIdHotPath(int baseVReg, const Identifier*);
455
456 #endif // USE(JSVALUE32_64)
457
458         void emit_compareAndJump(OpcodeID, int op1, int op2, unsigned target, RelationalCondition);
459         void emit_compareUnsigned(int dst, int op1, int op2, RelationalCondition);
460         void emit_compareUnsignedAndJump(int op1, int op2, unsigned target, RelationalCondition);
461         void emit_compareAndJumpSlow(int op1, int op2, unsigned target, DoubleCondition, size_t (JIT_OPERATION *operation)(ExecState*, EncodedJSValue, EncodedJSValue), bool invert, Vector<SlowCaseEntry>::iterator&);
462         
463         void assertStackPointerOffset();
464
465         void emit_op_add(Instruction*);
466         void emit_op_bitand(Instruction*);
467         void emit_op_bitor(Instruction*);
468         void emit_op_bitxor(Instruction*);
469         void emit_op_call(Instruction*);
470         void emit_op_tail_call(Instruction*);
471         void emit_op_call_eval(Instruction*);
472         void emit_op_call_varargs(Instruction*);
473         void emit_op_tail_call_varargs(Instruction*);
474         void emit_op_tail_call_forward_arguments(Instruction*);
475         void emit_op_construct_varargs(Instruction*);
476         void emit_op_catch(Instruction*);
477         void emit_op_construct(Instruction*);
478         void emit_op_create_this(Instruction*);
479         void emit_op_to_this(Instruction*);
480         void emit_op_get_argument(Instruction*);
481         void emit_op_argument_count(Instruction*);
482         void emit_op_get_rest_length(Instruction*);
483         void emit_op_check_tdz(Instruction*);
484         void emit_op_identity_with_profile(Instruction*);
485         void emit_op_debug(Instruction*);
486         void emit_op_del_by_id(Instruction*);
487         void emit_op_del_by_val(Instruction*);
488         void emit_op_div(Instruction*);
489         void emit_op_end(Instruction*);
490         void emit_op_enter(Instruction*);
491         void emit_op_get_scope(Instruction*);
492         void emit_op_eq(Instruction*);
493         void emit_op_eq_null(Instruction*);
494         void emit_op_below(Instruction*);
495         void emit_op_beloweq(Instruction*);
496         void emit_op_try_get_by_id(Instruction*);
497         void emit_op_get_by_id(Instruction*);
498         void emit_op_get_by_id_with_this(Instruction*);
499         void emit_op_get_by_id_direct(Instruction*);
500         void emit_op_get_arguments_length(Instruction*);
501         void emit_op_get_by_val(Instruction*);
502         void emit_op_get_argument_by_val(Instruction*);
503         void emit_op_init_lazy_reg(Instruction*);
504         void emit_op_overrides_has_instance(Instruction*);
505         void emit_op_instanceof(Instruction*);
506         void emit_op_instanceof_custom(Instruction*);
507         void emit_op_is_empty(Instruction*);
508         void emit_op_is_undefined(Instruction*);
509         void emit_op_is_boolean(Instruction*);
510         void emit_op_is_number(Instruction*);
511         void emit_op_is_object(Instruction*);
512         void emit_op_is_cell_with_type(Instruction*);
513         void emit_op_jeq_null(Instruction*);
514         void emit_op_jfalse(Instruction*);
515         void emit_op_jmp(Instruction*);
516         void emit_op_jneq_null(Instruction*);
517         void emit_op_jneq_ptr(Instruction*);
518         void emit_op_jless(Instruction*);
519         void emit_op_jlesseq(Instruction*);
520         void emit_op_jgreater(Instruction*);
521         void emit_op_jgreatereq(Instruction*);
522         void emit_op_jnless(Instruction*);
523         void emit_op_jnlesseq(Instruction*);
524         void emit_op_jngreater(Instruction*);
525         void emit_op_jngreatereq(Instruction*);
526         void emit_op_jeq(Instruction*);
527         void emit_op_jneq(Instruction*);
528         void emit_op_jstricteq(Instruction*);
529         void emit_op_jnstricteq(Instruction*);
530         void emit_op_jbelow(Instruction*);
531         void emit_op_jbeloweq(Instruction*);
532         void emit_op_jtrue(Instruction*);
533         void emit_op_loop_hint(Instruction*);
534         void emit_op_check_traps(Instruction*);
535         void emit_op_nop(Instruction*);
536         void emit_op_super_sampler_begin(Instruction*);
537         void emit_op_super_sampler_end(Instruction*);
538         void emit_op_lshift(Instruction*);
539         void emit_op_mod(Instruction*);
540         void emit_op_mov(Instruction*);
541         void emit_op_mul(Instruction*);
542         void emit_op_negate(Instruction*);
543         void emit_op_neq(Instruction*);
544         void emit_op_neq_null(Instruction*);
545         void emit_op_new_array(Instruction*);
546         void emit_op_new_array_with_size(Instruction*);
547         void emit_op_new_func(Instruction*);
548         void emit_op_new_func_exp(Instruction*);
549         void emit_op_new_generator_func(Instruction*);
550         void emit_op_new_generator_func_exp(Instruction*);
551         void emit_op_new_async_func(Instruction*);
552         void emit_op_new_async_func_exp(Instruction*);
553         void emit_op_new_async_generator_func(Instruction*);
554         void emit_op_new_async_generator_func_exp(Instruction*);
555         void emit_op_new_object(Instruction*);
556         void emit_op_new_regexp(Instruction*);
557         void emit_op_not(Instruction*);
558         void emit_op_nstricteq(Instruction*);
559         void emit_op_dec(Instruction*);
560         void emit_op_inc(Instruction*);
561         void emit_op_profile_type(Instruction*);
562         void emit_op_profile_control_flow(Instruction*);
563         void emit_op_get_parent_scope(Instruction*);
564         void emit_op_put_by_id(Instruction*);
565         void emit_op_put_by_val(Instruction*);
566         void emit_op_put_getter_by_id(Instruction*);
567         void emit_op_put_setter_by_id(Instruction*);
568         void emit_op_put_getter_setter_by_id(Instruction*);
569         void emit_op_put_getter_by_val(Instruction*);
570         void emit_op_put_setter_by_val(Instruction*);
571         void emit_op_ret(Instruction*);
572         void emit_op_rshift(Instruction*);
573         void emit_op_set_function_name(Instruction*);
574         void emit_op_stricteq(Instruction*);
575         void emit_op_sub(Instruction*);
576         void emit_op_switch_char(Instruction*);
577         void emit_op_switch_imm(Instruction*);
578         void emit_op_switch_string(Instruction*);
579         void emit_op_tear_off_arguments(Instruction*);
580         void emit_op_throw(Instruction*);
581         void emit_op_to_number(Instruction*);
582         void emit_op_to_string(Instruction*);
583         void emit_op_to_object(Instruction*);
584         void emit_op_to_primitive(Instruction*);
585         void emit_op_unexpected_load(Instruction*);
586         void emit_op_unsigned(Instruction*);
587         void emit_op_urshift(Instruction*);
588         void emit_op_has_structure_property(Instruction*);
589         void emit_op_has_indexed_property(Instruction*);
590         void emit_op_get_direct_pname(Instruction*);
591         void emit_op_enumerator_structure_pname(Instruction*);
592         void emit_op_enumerator_generic_pname(Instruction*);
593         void emit_op_log_shadow_chicken_prologue(Instruction*);
594         void emit_op_log_shadow_chicken_tail(Instruction*);
595
596         void emitSlow_op_add(Instruction*, Vector<SlowCaseEntry>::iterator&);
597         void emitSlow_op_call(Instruction*, Vector<SlowCaseEntry>::iterator&);
598         void emitSlow_op_tail_call(Instruction*, Vector<SlowCaseEntry>::iterator&);
599         void emitSlow_op_call_eval(Instruction*, Vector<SlowCaseEntry>::iterator&);
600         void emitSlow_op_call_varargs(Instruction*, Vector<SlowCaseEntry>::iterator&);
601         void emitSlow_op_tail_call_varargs(Instruction*, Vector<SlowCaseEntry>::iterator&);
        // Out-of-line ("slow case") generators. Each emitSlow_op_* emits the
        // slow-path code for the like-named opcode, consuming that opcode's
        // entries from the slow-case list through the iterator argument.
        void emitSlow_op_tail_call_forward_arguments(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_construct_varargs(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_construct(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_eq(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_get_callee(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_try_get_by_id(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_get_by_id(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_get_by_id_with_this(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_get_by_id_direct(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_get_arguments_length(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_get_by_val(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_get_argument_by_val(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_instanceof(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_instanceof_custom(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_jless(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_jlesseq(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_jgreater(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_jgreatereq(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_jnless(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_jnlesseq(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_jngreater(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_jngreatereq(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_jeq(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_jneq(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_jstricteq(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_jnstricteq(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_jtrue(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_loop_hint(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_check_traps(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_mod(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_mul(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_negate(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_neq(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_new_object(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_put_by_id(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_put_by_val(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_sub(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_has_indexed_property(Instruction*, Vector<SlowCaseEntry>::iterator&);
640
        // Scope and arguments access opcodes, with their slow paths.
        void emit_op_resolve_scope(Instruction*);
        void emit_op_get_from_scope(Instruction*);
        void emit_op_put_to_scope(Instruction*);
        void emit_op_get_from_arguments(Instruction*);
        void emit_op_put_to_arguments(Instruction*);
        void emitSlow_op_get_from_scope(Instruction*, Vector<SlowCaseEntry>::iterator&);
        void emitSlow_op_put_to_scope(Instruction*, Vector<SlowCaseEntry>::iterator&);

        void emitSlowCaseCall(Instruction*, Vector<SlowCaseEntry>::iterator&, SlowPathFunction);

        // Shared emitters for the signed/unsigned right-shift opcodes; the
        // isUnsigned flag selects between them.
        void emitRightShift(Instruction*, bool isUnsigned);
        void emitRightShiftSlowCase(Instruction*, Vector<SlowCaseEntry>::iterator&, bool isUnsigned);

        // Common code shared by the new_func* / new_func_exp* opcode families.
        void emitNewFuncCommon(Instruction*);
        void emitNewFuncExprCommon(Instruction*);
        void emitVarInjectionCheck(bool needsVarInjectionChecks);
        void emitResolveClosure(int dst, int scope, bool needsVarInjectionChecks, unsigned depth);
        void emitLoadWithStructureCheck(int scope, Structure** structureSlot);
        // Variable loads: on 64-bit platforms a JSValue fits one GPR; on 32-bit
        // it is split into separate tag and payload registers.
#if USE(JSVALUE64)
        void emitGetVarFromPointer(JSValue* operand, GPRReg);
        void emitGetVarFromIndirectPointer(JSValue** operand, GPRReg);
#else
        void emitGetVarFromIndirectPointer(JSValue** operand, GPRReg tag, GPRReg payload);
        void emitGetVarFromPointer(JSValue* operand, GPRReg tag, GPRReg payload);
#endif
        void emitGetClosureVar(int scope, uintptr_t operand);
        void emitNotifyWrite(WatchpointSet*);
        void emitNotifyWrite(GPRReg pointerToSet);
        void emitPutGlobalVariable(JSValue* operand, int value, WatchpointSet*);
        void emitPutGlobalVariableIndirect(JSValue** addressOfOperand, int value, WatchpointSet**);
        void emitPutClosureVar(int scope, uintptr_t operand, int value, WatchpointSet*);

        void emitInitRegister(int dst);

        void emitPutIntToCallFrameHeader(RegisterID from, int entry);

        // Constant-operand helpers; |src| is a virtual register index.
        JSValue getConstantOperand(int src);
        bool isOperandConstantInt(int src);
        bool isOperandConstantChar(int src);

        // Arithmetic inline cache (math IC) emitters: fast paths are generated
        // inline; slow paths are given profiled/repatching operation functions.
        template <typename Generator, typename ProfiledFunction, typename NonProfiledFunction>
        void emitMathICFast(JITUnaryMathIC<Generator>*, Instruction*, ProfiledFunction, NonProfiledFunction);
        template <typename Generator, typename ProfiledFunction, typename NonProfiledFunction>
        void emitMathICFast(JITBinaryMathIC<Generator>*, Instruction*, ProfiledFunction, NonProfiledFunction);

        template <typename Generator, typename ProfiledRepatchFunction, typename ProfiledFunction, typename RepatchFunction>
        void emitMathICSlow(JITBinaryMathIC<Generator>*, Instruction*, ProfiledRepatchFunction, ProfiledFunction, RepatchFunction);
        template <typename Generator, typename ProfiledRepatchFunction, typename ProfiledFunction, typename RepatchFunction>
        void emitMathICSlow(JITUnaryMathIC<Generator>*, Instruction*, ProfiledRepatchFunction, ProfiledFunction, RepatchFunction);
691         Jump getSlowCase(Vector<SlowCaseEntry>::iterator& iter)
692         {
693             return iter++->from;
694         }
695         void linkSlowCase(Vector<SlowCaseEntry>::iterator& iter)
696         {
697             if (iter->from.isSet())
698                 iter->from.link(this);
699             ++iter;
700         }
        // Consumes a slow-case entry that was recorded only as a placeholder;
        // such an entry must never carry a real jump.
        void linkDummySlowCase(Vector<SlowCaseEntry>::iterator& iter)
        {
            ASSERT(!iter->from.isSet());
            ++iter;
        }
        // Presumably links the slow case only when the operand in
        // |virtualRegisterIndex| is not statically known to be a JSCell —
        // confirm against the implementation in JIT*.cpp.
        void linkSlowCaseIfNotJSCell(Vector<SlowCaseEntry>::iterator&, int virtualRegisterIndex);
        // Links every slow-case entry recorded for |bytecodeOffset|, advancing
        // the iterator past them.
        void linkAllSlowCasesForBytecodeOffset(Vector<SlowCaseEntry>& slowCases,
            Vector<SlowCaseEntry>::iterator&, unsigned bytecodeOffset);
        // Links all slow cases recorded for the bytecode currently being
        // compiled (m_bytecodeOffset) and advances the iterator past them.
        void linkAllSlowCases(Vector<SlowCaseEntry>::iterator& iter)
        {
            linkAllSlowCasesForBytecodeOffset(m_slowCases, iter, m_bytecodeOffset);
        }
714
        // Low-level call emitters used by the callOperation() templates below.
        MacroAssembler::Call appendCallWithExceptionCheck(const FunctionPtr<CFunctionPtrTag>);
#if OS(WINDOWS) && CPU(X86_64)
        // Win64 ABI: operations returning SlowPathReturnType (wider than 64
        // bits) need a dedicated call sequence.
        MacroAssembler::Call appendCallWithExceptionCheckAndSlowPathReturnType(const FunctionPtr<CFunctionPtrTag>);
#endif
        MacroAssembler::Call appendCallWithCallFrameRollbackOnException(const FunctionPtr<CFunctionPtrTag>);
        MacroAssembler::Call appendCallWithExceptionCheckSetJSValueResult(const FunctionPtr<CFunctionPtrTag>, int);
        MacroAssembler::Call appendCallWithExceptionCheckSetJSValueResultWithProfile(const FunctionPtr<CFunctionPtrTag>, int);
722         
723         template<typename OperationType, typename... Args>
724         std::enable_if_t<FunctionTraits<OperationType>::hasResult, MacroAssembler::Call>
725         callOperation(OperationType operation, int result, Args... args)
726         {
727             setupArguments<OperationType>(args...);
728             return appendCallWithExceptionCheckSetJSValueResult(operation, result);
729         }
730
731 #if OS(WINDOWS) && CPU(X86_64)
732         template<typename OperationType, typename... Args>
733         std::enable_if_t<std::is_same<typename FunctionTraits<OperationType>::ResultType, SlowPathReturnType>::value, MacroAssembler::Call>
734         callOperation(OperationType operation, Args... args)
735         {
736             setupArguments<OperationType>(args...);
737             return appendCallWithExceptionCheckAndSlowPathReturnType(operation);
738         }
739
740         template<typename Type>
741         static constexpr bool is64BitType() { return sizeof(Type) <= 8; }
742
743         template<>
744         static constexpr bool is64BitType<void>() { return true; }
745
746         template<typename OperationType, typename... Args>
747         std::enable_if_t<!std::is_same<typename FunctionTraits<OperationType>::ResultType, SlowPathReturnType>::value, MacroAssembler::Call>
748         callOperation(OperationType operation, Args... args)
749         {
750             static_assert(is64BitType<typename FunctionTraits<OperationType>::ResultType>(), "Win64 cannot use standard call when return type is larger than 64 bits.");
751             setupArguments<OperationType>(args...);
752             return appendCallWithExceptionCheck(operation);
753         }
754 #else // OS(WINDOWS) && CPU(X86_64)
755         template<typename OperationType, typename... Args>
756         MacroAssembler::Call callOperation(OperationType operation, Args... args)
757         {
758             setupArguments<OperationType>(args...);
759             return appendCallWithExceptionCheck(operation);
760         }
761 #endif // OS(WINDOWS) && CPU(X86_64)
762
763         template<typename OperationType, typename... Args>
764         std::enable_if_t<FunctionTraits<OperationType>::hasResult, MacroAssembler::Call>
765         callOperationWithProfile(OperationType operation, int result, Args... args)
766         {
767             setupArguments<OperationType>(args...);
768             return appendCallWithExceptionCheckSetJSValueResultWithProfile(operation, result);
769         }
770
771         template<typename OperationType, typename... Args>
772         MacroAssembler::Call callOperationWithResult(OperationType operation, JSValueRegs resultRegs, Args... args)
773         {
774             setupArguments<OperationType>(args...);
775             auto result = appendCallWithExceptionCheck(operation);
776             setupResults(resultRegs);
777             return result;
778         }
779
780         template<typename OperationType, typename... Args>
781         MacroAssembler::Call callOperationNoExceptionCheck(OperationType operation, Args... args)
782         {
783             setupArguments<OperationType>(args...);
784             updateTopCallFrame();
785             return appendCall(operation);
786         }
787
788         template<typename OperationType, typename... Args>
789         MacroAssembler::Call callOperationWithCallFrameRollbackOnException(OperationType operation, Args... args)
790         {
791             setupArguments<OperationType>(args...);
792             return appendCallWithCallFrameRollbackOnException(operation);
793         }
794
        // Shared fast-path emitter for the bitwise binary ops, parameterized by
        // the snippet generator that produces the inline arithmetic.
        template<typename SnippetGenerator>
        void emitBitBinaryOpFastPath(Instruction* currentInstruction);

        void emitRightShiftFastPath(Instruction* currentInstruction, OpcodeID);

        // Emits a structure check on the cell in |reg|; the returned jump is
        // taken when the structure does not match.
        Jump checkStructure(RegisterID reg, Structure* structure);

        void updateTopCallFrame();

        // Raw calls with no argument setup or exception check; if no target is
        // given, the call is emitted unlinked and patched later.
        Call emitNakedCall(CodePtr<NoPtrTag> function = CodePtr<NoPtrTag>());
        Call emitNakedTailCall(CodePtr<NoPtrTag> function = CodePtr<NoPtrTag>());

        // Loads the character value of a single character string into dst.
        void emitLoadCharacterString(RegisterID src, RegisterID dst, JumpList& failures);
        
#if ENABLE(DFG_JIT)
        void emitEnterOptimizationCheck();
#else
        // No optimizing tier: the check is a no-op.
        void emitEnterOptimizationCheck() { }
#endif

#ifndef NDEBUG
        void printBytecodeOperandTypes(int src1, int src2);
#endif

#if ENABLE(SAMPLING_FLAGS)
        void setSamplingFlag(int32_t);
        void clearSamplingFlag(int32_t);
#endif

#if ENABLE(SAMPLING_COUNTERS)
        void emitCount(AbstractSamplingCounter&, int32_t = 1);
#endif

#if ENABLE(OPCODE_SAMPLING)
        void sampleInstruction(Instruction*, bool = false);
#endif

#if ENABLE(CODEBLOCK_SAMPLING)
        void sampleCodeBlock(CodeBlock*);
#else
        void sampleCodeBlock(CodeBlock*) {}
#endif

#if ENABLE(DFG_JIT)
        // Cached flags computed at compile start; see the m_canBeOptimized etc.
        // members below.
        bool canBeOptimized() { return m_canBeOptimized; }
        bool canBeOptimizedOrInlined() { return m_canBeOptimizedOrInlined; }
        bool shouldEmitProfiling() { return m_shouldEmitProfiling; }
#else
        bool canBeOptimized() { return false; }
        bool canBeOptimizedOrInlined() { return false; }
        // Enables use of value profiler with tiered compilation turned off,
        // in which case all code gets profiled.
        bool shouldEmitProfiling() { return false; }
#endif

        static bool reportCompileTimes();
        static bool computeCompileTimes();
        
        // If you need to check the value of an instruction multiple times and the instruction is
        // part of a LLInt inline cache, then you want to use this. It will give you the value of
        // the instruction at the start of JITing.
        Instruction* copiedInstruction(Instruction*);
858
        Interpreter* m_interpreter;

        // Snapshot of the bytecode stream owned by the JIT; read back through
        // copiedInstruction().
        PoisonedRefCountedArray<CodeBlockPoison, Instruction> m_instructions;

        Vector<CallRecord> m_calls;
        Vector<Label> m_labels;
        // Per-access-site inline cache generators; presumably consumed in
        // emission order via the matching *Index cursors below — confirm in the
        // slow-path/link code.
        Vector<JITGetByIdGenerator> m_getByIds;
        Vector<JITGetByIdWithThisGenerator> m_getByIdsWithThis;
        Vector<JITPutByIdGenerator> m_putByIds;
        Vector<JITInstanceOfGenerator> m_instanceOfs;
        Vector<ByValCompilationInfo> m_byValCompilationInfo;
        Vector<CallCompilationInfo> m_callCompilationInfo;
        Vector<JumpTable> m_jmpTable;

        // Offset of the bytecode currently being compiled; used when linking
        // slow cases (see linkAllSlowCases()).
        unsigned m_bytecodeOffset;
        Vector<SlowCaseEntry> m_slowCases;
        Vector<SwitchRecord> m_switches;

        JumpList m_exceptionChecks;
        JumpList m_exceptionChecksWithCallFrameRollback;
        Label m_exceptionHandler;

        // Cursors into the generator vectors above.
        unsigned m_getByIdIndex;
        unsigned m_getByIdWithThisIndex;
        unsigned m_putByIdIndex;
        unsigned m_instanceOfIndex;
        unsigned m_byValInstructionIndex;
        unsigned m_callLinkInfoIndex;
        
        Label m_arityCheck;
        std::unique_ptr<LinkBuffer> m_linkBuffer;

        std::unique_ptr<JITDisassembler> m_disassembler;
        RefPtr<Profiler::Compilation> m_compilation;
        static CodeRef<JITThunkPtrTag> stringGetByValStubGenerator(VM*);

        PCToCodeOriginMapBuilder m_pcToCodeOriginMapBuilder;

        // Per-instruction math IC bookkeeping. NOTE(review): the void* values
        // look like type-erased math IC pointers — confirm against the
        // emitMathICFast/emitMathICSlow implementations.
        HashMap<Instruction*, void*> m_instructionToMathIC;
        HashMap<Instruction*, MathICGenerationState> m_instructionToMathICGenerationState;

        // Flags exposed via canBeOptimized()/canBeOptimizedOrInlined()/
        // shouldEmitProfiling() above.
        bool m_canBeOptimized;
        bool m_canBeOptimizedOrInlined;
        bool m_shouldEmitProfiling;
        bool m_shouldUseIndexMasking;
        unsigned m_loopOSREntryBytecodeOffset { 0 };
905     };
906
907 } // namespace JSC
908
909 #endif // ENABLE(JIT)