[JSC] Shrink the Math inline caches some more
[WebKit-https.git] / Source / JavaScriptCore / jit / JIT.h
1 /*
2  * Copyright (C) 2008, 2012-2016 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #ifndef JIT_h
27 #define JIT_h
28
29 #if ENABLE(JIT)
30
31 // We've run into some problems where changing the size of the class JIT leads to
32 // performance fluctuations.  Try forcing alignment in an attempt to stabilize this.
33 #if COMPILER(GCC_OR_CLANG)
34 #define JIT_CLASS_ALIGNMENT __attribute__ ((aligned (32)))
35 #else
36 #define JIT_CLASS_ALIGNMENT
37 #endif
38
39 #define ASSERT_JIT_OFFSET(actual, expected) ASSERT_WITH_MESSAGE(actual == expected, "JIT Offset \"%s\" should be %d, not %d.\n", #expected, static_cast<int>(expected), static_cast<int>(actual));
40
41 #include "CodeBlock.h"
42 #include "CompactJITCodeMap.h"
43 #include "JITDisassembler.h"
44 #include "JITInlineCacheGenerator.h"
45 #include "JITMathIC.h"
46 #include "JSInterfaceJIT.h"
47 #include "PCToCodeOriginMap.h"
48 #include "UnusedPointer.h"
49
50 namespace JSC {
51
52     enum OpcodeID : unsigned;
53
54     class ArrayAllocationProfile;
55     class CallLinkInfo;
56     class CodeBlock;
57     class FunctionExecutable;
58     class JIT;
59     class Identifier;
60     class Interpreter;
61     class MarkedAllocator;
62     class Register;
63     class StructureChain;
64     class StructureStubInfo;
65
66     struct Instruction;
67     struct OperandTypes;
68     struct SimpleJumpTable;
69     struct StringJumpTable;
70
71     struct CallRecord {
72         MacroAssembler::Call from;
73         unsigned bytecodeOffset;
74         void* to;
75
76         CallRecord()
77         {
78         }
79
80         CallRecord(MacroAssembler::Call from, unsigned bytecodeOffset, void* to = 0)
81             : from(from)
82             , bytecodeOffset(bytecodeOffset)
83             , to(to)
84         {
85         }
86     };
87
88     struct JumpTable {
89         MacroAssembler::Jump from;
90         unsigned toBytecodeOffset;
91
92         JumpTable(MacroAssembler::Jump f, unsigned t)
93             : from(f)
94             , toBytecodeOffset(t)
95         {
96         }
97     };
98
99     struct SlowCaseEntry {
100         MacroAssembler::Jump from;
101         unsigned to;
102         
103         SlowCaseEntry(MacroAssembler::Jump f, unsigned t)
104             : from(f)
105             , to(t)
106         {
107         }
108     };
109
110     struct SwitchRecord {
111         enum Type {
112             Immediate,
113             Character,
114             String
115         };
116
117         Type type;
118
119         union {
120             SimpleJumpTable* simpleJumpTable;
121             StringJumpTable* stringJumpTable;
122         } jumpTable;
123
124         unsigned bytecodeOffset;
125         unsigned defaultOffset;
126
127         SwitchRecord(SimpleJumpTable* jumpTable, unsigned bytecodeOffset, unsigned defaultOffset, Type type)
128             : type(type)
129             , bytecodeOffset(bytecodeOffset)
130             , defaultOffset(defaultOffset)
131         {
132             this->jumpTable.simpleJumpTable = jumpTable;
133         }
134
135         SwitchRecord(StringJumpTable* jumpTable, unsigned bytecodeOffset, unsigned defaultOffset)
136             : type(String)
137             , bytecodeOffset(bytecodeOffset)
138             , defaultOffset(defaultOffset)
139         {
140             this->jumpTable.stringJumpTable = jumpTable;
141         }
142     };
143
144     struct ByValCompilationInfo {
145         ByValCompilationInfo() { }
146         
147         ByValCompilationInfo(ByValInfo* byValInfo, unsigned bytecodeIndex, MacroAssembler::PatchableJump notIndexJump, MacroAssembler::PatchableJump badTypeJump, JITArrayMode arrayMode, ArrayProfile* arrayProfile, MacroAssembler::Label doneTarget, MacroAssembler::Label nextHotPathTarget)
148             : byValInfo(byValInfo)
149             , bytecodeIndex(bytecodeIndex)
150             , notIndexJump(notIndexJump)
151             , badTypeJump(badTypeJump)
152             , arrayMode(arrayMode)
153             , arrayProfile(arrayProfile)
154             , doneTarget(doneTarget)
155             , nextHotPathTarget(nextHotPathTarget)
156         {
157         }
158
159         ByValInfo* byValInfo;
160         unsigned bytecodeIndex;
161         MacroAssembler::PatchableJump notIndexJump;
162         MacroAssembler::PatchableJump badTypeJump;
163         JITArrayMode arrayMode;
164         ArrayProfile* arrayProfile;
165         MacroAssembler::Label doneTarget;
166         MacroAssembler::Label nextHotPathTarget;
167         MacroAssembler::Label slowPathTarget;
168         MacroAssembler::Call returnAddress;
169     };
170
171     struct CallCompilationInfo {
172         MacroAssembler::DataLabelPtr hotPathBegin;
173         MacroAssembler::Call hotPathOther;
174         MacroAssembler::Call callReturnLocation;
175         CallLinkInfo* callLinkInfo;
176     };
177
178     void ctiPatchCallByReturnAddress(ReturnAddressPtr, FunctionPtr newCalleeFunction);
179
180     class JIT : private JSInterfaceJIT {
181         friend class JITSlowPathCall;
182         friend class JITStubCall;
183
184         using MacroAssembler::Jump;
185         using MacroAssembler::JumpList;
186         using MacroAssembler::Label;
187
188         static const uintptr_t patchGetByIdDefaultStructure = unusedPointer;
189         static const int patchGetByIdDefaultOffset = 0;
190         // Magic number - initial offset cannot be representable as a signed 8bit value, or the X86Assembler
191         // will compress the displacement, and we may not be able to fit a patched offset.
192         static const int patchPutByIdDefaultOffset = 256;
193
194     public:
195         JIT(VM*, CodeBlock* = 0);
196         ~JIT();
197
198         void compileWithoutLinking(JITCompilationEffort);
199         CompilationResult link();
200
201         void doMainThreadPreparationBeforeCompile();
202         
203         static CompilationResult compile(VM* vm, CodeBlock* codeBlock, JITCompilationEffort effort)
204         {
205             return JIT(vm, codeBlock).privateCompile(effort);
206         }
207         
208         static void compileGetByVal(VM* vm, CodeBlock* codeBlock, ByValInfo* byValInfo, ReturnAddressPtr returnAddress, JITArrayMode arrayMode)
209         {
210             JIT jit(vm, codeBlock);
211             jit.m_bytecodeOffset = byValInfo->bytecodeIndex;
212             jit.privateCompileGetByVal(byValInfo, returnAddress, arrayMode);
213         }
214
215         static void compileGetByValWithCachedId(VM* vm, CodeBlock* codeBlock, ByValInfo* byValInfo, ReturnAddressPtr returnAddress, const Identifier& propertyName)
216         {
217             JIT jit(vm, codeBlock);
218             jit.m_bytecodeOffset = byValInfo->bytecodeIndex;
219             jit.privateCompileGetByValWithCachedId(byValInfo, returnAddress, propertyName);
220         }
221
222         static void compilePutByVal(VM* vm, CodeBlock* codeBlock, ByValInfo* byValInfo, ReturnAddressPtr returnAddress, JITArrayMode arrayMode)
223         {
224             JIT jit(vm, codeBlock);
225             jit.m_bytecodeOffset = byValInfo->bytecodeIndex;
226             jit.privateCompilePutByVal(byValInfo, returnAddress, arrayMode);
227         }
228         
229         static void compileDirectPutByVal(VM* vm, CodeBlock* codeBlock, ByValInfo* byValInfo, ReturnAddressPtr returnAddress, JITArrayMode arrayMode)
230         {
231             JIT jit(vm, codeBlock);
232             jit.m_bytecodeOffset = byValInfo->bytecodeIndex;
233             jit.privateCompilePutByVal(byValInfo, returnAddress, arrayMode);
234         }
235
236         static void compilePutByValWithCachedId(VM* vm, CodeBlock* codeBlock, ByValInfo* byValInfo, ReturnAddressPtr returnAddress, PutKind putKind, const Identifier& propertyName)
237         {
238             JIT jit(vm, codeBlock);
239             jit.m_bytecodeOffset = byValInfo->bytecodeIndex;
240             jit.privateCompilePutByValWithCachedId(byValInfo, returnAddress, putKind, propertyName);
241         }
242
243         static void compileHasIndexedProperty(VM* vm, CodeBlock* codeBlock, ByValInfo* byValInfo, ReturnAddressPtr returnAddress, JITArrayMode arrayMode)
244         {
245             JIT jit(vm, codeBlock);
246             jit.m_bytecodeOffset = byValInfo->bytecodeIndex;
247             jit.privateCompileHasIndexedProperty(byValInfo, returnAddress, arrayMode);
248         }
249
250         static CodeRef compileCTINativeCall(VM*, NativeFunction);
251
252         static unsigned frameRegisterCountFor(CodeBlock*);
253         static int stackPointerOffsetFor(CodeBlock*);
254
255         JS_EXPORT_PRIVATE static HashMap<CString, double> compileTimeStats();
256
257     private:
258         void privateCompileMainPass();
259         void privateCompileLinkPass();
260         void privateCompileSlowCases();
261         CompilationResult privateCompile(JITCompilationEffort);
262         
263         void privateCompileGetByVal(ByValInfo*, ReturnAddressPtr, JITArrayMode);
264         void privateCompileGetByValWithCachedId(ByValInfo*, ReturnAddressPtr, const Identifier&);
265         void privateCompilePutByVal(ByValInfo*, ReturnAddressPtr, JITArrayMode);
266         void privateCompilePutByValWithCachedId(ByValInfo*, ReturnAddressPtr, PutKind, const Identifier&);
267
268         void privateCompileHasIndexedProperty(ByValInfo*, ReturnAddressPtr, JITArrayMode);
269
270         Label privateCompileCTINativeCall(VM*, bool isConstruct = false);
271         CodeRef privateCompileCTINativeCall(VM*, NativeFunction);
272         void privateCompilePatchGetArrayLength(ReturnAddressPtr returnAddress);
273
274         // Add a call out from JIT code, without an exception check.
275         Call appendCall(const FunctionPtr& function)
276         {
277             Call functionCall = call();
278             m_calls.append(CallRecord(functionCall, m_bytecodeOffset, function.value()));
279             return functionCall;
280         }
281
282 #if OS(WINDOWS) && CPU(X86_64)
283         Call appendCallWithSlowPathReturnType(const FunctionPtr& function)
284         {
285             Call functionCall = callWithSlowPathReturnType();
286             m_calls.append(CallRecord(functionCall, m_bytecodeOffset, function.value()));
287             return functionCall;
288         }
289 #endif
290
291         void exceptionCheck(Jump jumpToHandler)
292         {
293             m_exceptionChecks.append(jumpToHandler);
294         }
295
296         void exceptionCheck()
297         {
298             m_exceptionChecks.append(emitExceptionCheck());
299         }
300
301         void exceptionCheckWithCallFrameRollback()
302         {
303             m_exceptionChecksWithCallFrameRollback.append(emitExceptionCheck());
304         }
305
306         void privateCompileExceptionHandlers();
307
308         void addSlowCase(Jump);
309         void addSlowCase(const JumpList&);
310         void addSlowCase();
311         void addJump(Jump, int);
312         void emitJumpSlowToHot(Jump, int);
313
314         void compileOpCall(OpcodeID, Instruction*, unsigned callLinkInfoIndex);
315         void compileOpCallSlowCase(OpcodeID, Instruction*, Vector<SlowCaseEntry>::iterator&, unsigned callLinkInfoIndex);
316         void compileSetupVarargsFrame(OpcodeID, Instruction*, CallLinkInfo*);
317         void compileCallEval(Instruction*);
318         void compileCallEvalSlowCase(Instruction*, Vector<SlowCaseEntry>::iterator&);
319         void emitPutCallResult(Instruction*);
320
321         enum CompileOpStrictEqType { OpStrictEq, OpNStrictEq };
322         void compileOpStrictEq(Instruction* instruction, CompileOpStrictEqType type);
323         bool isOperandConstantDouble(int src);
324         
325         void emitLoadDouble(int index, FPRegisterID value);
326         void emitLoadInt32ToDouble(int index, FPRegisterID value);
327         Jump emitJumpIfCellObject(RegisterID cellReg);
328         Jump emitJumpIfCellNotObject(RegisterID cellReg);
329
330         enum WriteBarrierMode { UnconditionalWriteBarrier, ShouldFilterBase, ShouldFilterValue, ShouldFilterBaseAndValue };
331         // value register in write barrier is used before any scratch registers
332         // so may safely be the same as either of the scratch registers.
333         void emitWriteBarrier(unsigned owner, unsigned value, WriteBarrierMode);
334         void emitWriteBarrier(JSCell* owner, unsigned value, WriteBarrierMode);
335         void emitWriteBarrier(JSCell* owner);
336
337         // This assumes that the value to profile is in regT0 and that regT3 is available for
338         // scratch.
339         void emitValueProfilingSite(ValueProfile*);
340         void emitValueProfilingSite(unsigned bytecodeOffset);
341         void emitValueProfilingSite();
342         void emitArrayProfilingSiteWithCell(RegisterID cell, RegisterID indexingType, ArrayProfile*);
343         void emitArrayProfilingSiteForBytecodeIndexWithCell(RegisterID cell, RegisterID indexingType, unsigned bytecodeIndex);
344         void emitArrayProfileStoreToHoleSpecialCase(ArrayProfile*);
345         void emitArrayProfileOutOfBoundsSpecialCase(ArrayProfile*);
346         
347         JITArrayMode chooseArrayMode(ArrayProfile*);
348         
349         // Property is in regT1, base is in regT0. regT2 contains indexing type.
350         // Property is int-checked and zero extended. Base is cell checked.
351         // Structure is already profiled. Returns the slow cases. Fall-through
352         // case contains result in regT0, and it is not yet profiled.
353         JumpList emitInt32Load(Instruction* instruction, PatchableJump& badType) { return emitContiguousLoad(instruction, badType, Int32Shape); }
354         JumpList emitDoubleLoad(Instruction*, PatchableJump& badType);
355         JumpList emitContiguousLoad(Instruction*, PatchableJump& badType, IndexingType expectedShape = ContiguousShape);
356         JumpList emitArrayStorageLoad(Instruction*, PatchableJump& badType);
357         JumpList emitLoadForArrayMode(Instruction*, JITArrayMode, PatchableJump& badType);
358
359         JumpList emitInt32GetByVal(Instruction* instruction, PatchableJump& badType) { return emitContiguousGetByVal(instruction, badType, Int32Shape); }
360         JumpList emitDoubleGetByVal(Instruction*, PatchableJump& badType);
361         JumpList emitContiguousGetByVal(Instruction*, PatchableJump& badType, IndexingType expectedShape = ContiguousShape);
362         JumpList emitArrayStorageGetByVal(Instruction*, PatchableJump& badType);
363         JumpList emitDirectArgumentsGetByVal(Instruction*, PatchableJump& badType);
364         JumpList emitScopedArgumentsGetByVal(Instruction*, PatchableJump& badType);
365         JumpList emitIntTypedArrayGetByVal(Instruction*, PatchableJump& badType, TypedArrayType);
366         JumpList emitFloatTypedArrayGetByVal(Instruction*, PatchableJump& badType, TypedArrayType);
367         
368         // Property is in regT1, base is in regT0. regT2 contains indexing type.
369         // The value to store is not yet loaded. Property is int-checked and
370         // zero-extended. Base is cell checked. Structure is already profiled.
371         // returns the slow cases.
372         JumpList emitInt32PutByVal(Instruction* currentInstruction, PatchableJump& badType)
373         {
374             return emitGenericContiguousPutByVal(currentInstruction, badType, Int32Shape);
375         }
376         JumpList emitDoublePutByVal(Instruction* currentInstruction, PatchableJump& badType)
377         {
378             return emitGenericContiguousPutByVal(currentInstruction, badType, DoubleShape);
379         }
380         JumpList emitContiguousPutByVal(Instruction* currentInstruction, PatchableJump& badType)
381         {
382             return emitGenericContiguousPutByVal(currentInstruction, badType);
383         }
384         JumpList emitGenericContiguousPutByVal(Instruction*, PatchableJump& badType, IndexingType indexingShape = ContiguousShape);
385         JumpList emitArrayStoragePutByVal(Instruction*, PatchableJump& badType);
386         JumpList emitIntTypedArrayPutByVal(Instruction*, PatchableJump& badType, TypedArrayType);
387         JumpList emitFloatTypedArrayPutByVal(Instruction*, PatchableJump& badType, TypedArrayType);
388
389         // Identifier check helper for GetByVal and PutByVal.
390         void emitByValIdentifierCheck(ByValInfo*, RegisterID cell, RegisterID scratch, const Identifier&, JumpList& slowCases);
391
392         JITGetByIdGenerator emitGetByValWithCachedId(ByValInfo*, Instruction*, const Identifier&, Jump& fastDoneCase, Jump& slowDoneCase, JumpList& slowCases);
393         JITPutByIdGenerator emitPutByValWithCachedId(ByValInfo*, Instruction*, PutKind, const Identifier&, JumpList& doneCases, JumpList& slowCases);
394
395         enum FinalObjectMode { MayBeFinal, KnownNotFinal };
396
397         void emitGetVirtualRegister(int src, JSValueRegs dst);
398         void emitPutVirtualRegister(int dst, JSValueRegs src);
399
400         int32_t getOperandConstantInt(int src);
401         double getOperandConstantDouble(int src);
402
403 #if USE(JSVALUE32_64)
404         bool getOperandConstantInt(int op1, int op2, int& op, int32_t& constant);
405
406         void emitLoadTag(int index, RegisterID tag);
407         void emitLoadPayload(int index, RegisterID payload);
408
409         void emitLoad(const JSValue& v, RegisterID tag, RegisterID payload);
410         void emitLoad(int index, RegisterID tag, RegisterID payload, RegisterID base = callFrameRegister);
411         void emitLoad2(int index1, RegisterID tag1, RegisterID payload1, int index2, RegisterID tag2, RegisterID payload2);
412
413         void emitStore(int index, RegisterID tag, RegisterID payload, RegisterID base = callFrameRegister);
414         void emitStore(int index, const JSValue constant, RegisterID base = callFrameRegister);
415         void emitStoreInt32(int index, RegisterID payload, bool indexIsInt32 = false);
416         void emitStoreInt32(int index, TrustedImm32 payload, bool indexIsInt32 = false);
417         void emitStoreCell(int index, RegisterID payload, bool indexIsCell = false);
418         void emitStoreBool(int index, RegisterID payload, bool indexIsBool = false);
419         void emitStoreDouble(int index, FPRegisterID value);
420
421         void emitJumpSlowCaseIfNotJSCell(int virtualRegisterIndex);
422         void emitJumpSlowCaseIfNotJSCell(int virtualRegisterIndex, RegisterID tag);
423
424         void compileGetByIdHotPath(const Identifier*);
425
426         // Arithmetic opcode helpers
427         void emitBinaryDoubleOp(OpcodeID, int dst, int op1, int op2, OperandTypes, JumpList& notInt32Op1, JumpList& notInt32Op2, bool op1IsInRegisters = true, bool op2IsInRegisters = true);
428
429 #else // USE(JSVALUE32_64)
430         void emitGetVirtualRegister(int src, RegisterID dst);
431         void emitGetVirtualRegister(VirtualRegister src, RegisterID dst);
432         void emitGetVirtualRegisters(int src1, RegisterID dst1, int src2, RegisterID dst2);
433         void emitGetVirtualRegisters(VirtualRegister src1, RegisterID dst1, VirtualRegister src2, RegisterID dst2);
434         void emitPutVirtualRegister(int dst, RegisterID from = regT0);
435         void emitPutVirtualRegister(VirtualRegister dst, RegisterID from = regT0);
436         void emitStoreCell(int dst, RegisterID payload, bool /* only used in JSValue32_64 */ = false)
437         {
438             emitPutVirtualRegister(dst, payload);
439         }
440         void emitStoreCell(VirtualRegister dst, RegisterID payload)
441         {
442             emitPutVirtualRegister(dst, payload);
443         }
444
445         Jump emitJumpIfJSCell(RegisterID);
446         Jump emitJumpIfBothJSCells(RegisterID, RegisterID, RegisterID);
447         void emitJumpSlowCaseIfJSCell(RegisterID);
448         void emitJumpSlowCaseIfNotJSCell(RegisterID);
449         void emitJumpSlowCaseIfNotJSCell(RegisterID, int VReg);
450         Jump emitJumpIfInt(RegisterID);
451         Jump emitJumpIfNotInt(RegisterID);
452         Jump emitJumpIfNotInt(RegisterID, RegisterID, RegisterID scratch);
453         PatchableJump emitPatchableJumpIfNotInt(RegisterID);
454         void emitJumpSlowCaseIfNotInt(RegisterID);
455         void emitJumpSlowCaseIfNotNumber(RegisterID);
456         void emitJumpSlowCaseIfNotInt(RegisterID, RegisterID, RegisterID scratch);
457
458         void emitTagBool(RegisterID);
459
460         void compileGetByIdHotPath(int baseVReg, const Identifier*);
461
462 #endif // USE(JSVALUE32_64)
463
464         void emit_compareAndJump(OpcodeID, int op1, int op2, unsigned target, RelationalCondition);
465         void emit_compareAndJumpSlow(int op1, int op2, unsigned target, DoubleCondition, size_t (JIT_OPERATION *operation)(ExecState*, EncodedJSValue, EncodedJSValue), bool invert, Vector<SlowCaseEntry>::iterator&);
466         
467         void assertStackPointerOffset();
468
469         void emit_op_add(Instruction*);
470         void emit_op_bitand(Instruction*);
471         void emit_op_bitor(Instruction*);
472         void emit_op_bitxor(Instruction*);
473         void emit_op_call(Instruction*);
474         void emit_op_tail_call(Instruction*);
475         void emit_op_call_eval(Instruction*);
476         void emit_op_call_varargs(Instruction*);
477         void emit_op_tail_call_varargs(Instruction*);
478         void emit_op_tail_call_forward_arguments(Instruction*);
479         void emit_op_construct_varargs(Instruction*);
480         void emit_op_catch(Instruction*);
481         void emit_op_construct(Instruction*);
482         void emit_op_create_this(Instruction*);
483         void emit_op_to_this(Instruction*);
484         void emit_op_create_direct_arguments(Instruction*);
485         void emit_op_create_scoped_arguments(Instruction*);
486         void emit_op_create_cloned_arguments(Instruction*);
487         void emit_op_argument_count(Instruction*);
488         void emit_op_create_rest(Instruction*);
489         void emit_op_get_rest_length(Instruction*);
490         void emit_op_check_tdz(Instruction*);
491         void emit_op_assert(Instruction*);
492         void emit_op_debug(Instruction*);
493         void emit_op_del_by_id(Instruction*);
494         void emit_op_del_by_val(Instruction*);
495         void emit_op_div(Instruction*);
496         void emit_op_end(Instruction*);
497         void emit_op_enter(Instruction*);
498         void emit_op_get_scope(Instruction*);
499         void emit_op_eq(Instruction*);
500         void emit_op_eq_null(Instruction*);
501         void emit_op_try_get_by_id(Instruction*);
502         void emit_op_get_by_id(Instruction*);
503         void emit_op_get_by_id_with_this(Instruction*);
504         void emit_op_get_by_val_with_this(Instruction*);
505         void emit_op_get_arguments_length(Instruction*);
506         void emit_op_get_by_val(Instruction*);
507         void emit_op_get_argument_by_val(Instruction*);
508         void emit_op_init_lazy_reg(Instruction*);
509         void emit_op_overrides_has_instance(Instruction*);
510         void emit_op_instanceof(Instruction*);
511         void emit_op_instanceof_custom(Instruction*);
512         void emit_op_is_empty(Instruction*);
513         void emit_op_is_undefined(Instruction*);
514         void emit_op_is_boolean(Instruction*);
515         void emit_op_is_number(Instruction*);
516         void emit_op_is_object(Instruction*);
517         void emit_op_is_cell_with_type(Instruction*);
518         void emit_op_jeq_null(Instruction*);
519         void emit_op_jfalse(Instruction*);
520         void emit_op_jmp(Instruction*);
521         void emit_op_jneq_null(Instruction*);
522         void emit_op_jneq_ptr(Instruction*);
523         void emit_op_jless(Instruction*);
524         void emit_op_jlesseq(Instruction*);
525         void emit_op_jgreater(Instruction*);
526         void emit_op_jgreatereq(Instruction*);
527         void emit_op_jnless(Instruction*);
528         void emit_op_jnlesseq(Instruction*);
529         void emit_op_jngreater(Instruction*);
530         void emit_op_jngreatereq(Instruction*);
531         void emit_op_jtrue(Instruction*);
532         void emit_op_loop_hint(Instruction*);
533         void emit_op_watchdog(Instruction*);
534         void emit_op_lshift(Instruction*);
535         void emit_op_mod(Instruction*);
536         void emit_op_mov(Instruction*);
537         void emit_op_mul(Instruction*);
538         void emit_op_negate(Instruction*);
539         void emit_op_neq(Instruction*);
540         void emit_op_neq_null(Instruction*);
541         void emit_op_new_array(Instruction*);
542         void emit_op_new_array_with_size(Instruction*);
543         void emit_op_new_array_buffer(Instruction*);
544         void emit_op_new_func(Instruction*);
545         void emit_op_new_func_exp(Instruction*);
546         void emit_op_new_generator_func(Instruction*);
547         void emit_op_new_generator_func_exp(Instruction*);
548         void emit_op_new_object(Instruction*);
549         void emit_op_new_regexp(Instruction*);
550         void emit_op_not(Instruction*);
551         void emit_op_nstricteq(Instruction*);
552         void emit_op_dec(Instruction*);
553         void emit_op_inc(Instruction*);
554         void emit_op_pow(Instruction*);
555         void emit_op_profile_type(Instruction*);
556         void emit_op_profile_control_flow(Instruction*);
557         void emit_op_push_with_scope(Instruction*);
558         void emit_op_create_lexical_environment(Instruction*);
559         void emit_op_get_parent_scope(Instruction*);
560         void emit_op_put_by_id(Instruction*);
561         void emit_op_put_by_id_with_this(Instruction*);
562         void emit_op_put_by_index(Instruction*);
563         void emit_op_put_by_val(Instruction*);
564         void emit_op_put_by_val_with_this(Instruction*);
565         void emit_op_put_getter_by_id(Instruction*);
566         void emit_op_put_setter_by_id(Instruction*);
567         void emit_op_put_getter_setter_by_id(Instruction*);
568         void emit_op_put_getter_by_val(Instruction*);
569         void emit_op_put_setter_by_val(Instruction*);
570         void emit_op_ret(Instruction*);
571         void emit_op_rshift(Instruction*);
572         void emit_op_set_function_name(Instruction*);
573         void emit_op_strcat(Instruction*);
574         void emit_op_stricteq(Instruction*);
575         void emit_op_sub(Instruction*);
576         void emit_op_switch_char(Instruction*);
577         void emit_op_switch_imm(Instruction*);
578         void emit_op_switch_string(Instruction*);
579         void emit_op_tear_off_arguments(Instruction*);
580         void emit_op_throw(Instruction*);
581         void emit_op_throw_static_error(Instruction*);
582         void emit_op_to_number(Instruction*);
583         void emit_op_to_string(Instruction*);
584         void emit_op_to_primitive(Instruction*);
585         void emit_op_unexpected_load(Instruction*);
586         void emit_op_unsigned(Instruction*);
587         void emit_op_urshift(Instruction*);
588         void emit_op_get_enumerable_length(Instruction*);
589         void emit_op_has_generic_property(Instruction*);
590         void emit_op_has_structure_property(Instruction*);
591         void emit_op_has_indexed_property(Instruction*);
592         void emit_op_get_direct_pname(Instruction*);
593         void emit_op_get_property_enumerator(Instruction*);
594         void emit_op_enumerator_structure_pname(Instruction*);
595         void emit_op_enumerator_generic_pname(Instruction*);
596         void emit_op_to_index_string(Instruction*);
597         void emit_op_log_shadow_chicken_prologue(Instruction*);
598         void emit_op_log_shadow_chicken_tail(Instruction*);
599
600         void emitSlow_op_add(Instruction*, Vector<SlowCaseEntry>::iterator&);
601         void emitSlow_op_bitand(Instruction*, Vector<SlowCaseEntry>::iterator&);
602         void emitSlow_op_bitor(Instruction*, Vector<SlowCaseEntry>::iterator&);
603         void emitSlow_op_bitxor(Instruction*, Vector<SlowCaseEntry>::iterator&);
604         void emitSlow_op_call(Instruction*, Vector<SlowCaseEntry>::iterator&);
605         void emitSlow_op_tail_call(Instruction*, Vector<SlowCaseEntry>::iterator&);
606         void emitSlow_op_call_eval(Instruction*, Vector<SlowCaseEntry>::iterator&);
607         void emitSlow_op_call_varargs(Instruction*, Vector<SlowCaseEntry>::iterator&);
608         void emitSlow_op_tail_call_varargs(Instruction*, Vector<SlowCaseEntry>::iterator&);
609         void emitSlow_op_tail_call_forward_arguments(Instruction*, Vector<SlowCaseEntry>::iterator&);
610         void emitSlow_op_construct_varargs(Instruction*, Vector<SlowCaseEntry>::iterator&);
611         void emitSlow_op_construct(Instruction*, Vector<SlowCaseEntry>::iterator&);
612         void emitSlow_op_to_this(Instruction*, Vector<SlowCaseEntry>::iterator&);
613         void emitSlow_op_create_this(Instruction*, Vector<SlowCaseEntry>::iterator&);
614         void emitSlow_op_check_tdz(Instruction*, Vector<SlowCaseEntry>::iterator&);
615         void emitSlow_op_div(Instruction*, Vector<SlowCaseEntry>::iterator&);
616         void emitSlow_op_eq(Instruction*, Vector<SlowCaseEntry>::iterator&);
617         void emitSlow_op_get_callee(Instruction*, Vector<SlowCaseEntry>::iterator&);
618         void emitSlow_op_try_get_by_id(Instruction*, Vector<SlowCaseEntry>::iterator&);
619         void emitSlow_op_get_by_id(Instruction*, Vector<SlowCaseEntry>::iterator&);
620         void emitSlow_op_get_arguments_length(Instruction*, Vector<SlowCaseEntry>::iterator&);
621         void emitSlow_op_get_by_val(Instruction*, Vector<SlowCaseEntry>::iterator&);
622         void emitSlow_op_get_argument_by_val(Instruction*, Vector<SlowCaseEntry>::iterator&);
623         void emitSlow_op_instanceof(Instruction*, Vector<SlowCaseEntry>::iterator&);
624         void emitSlow_op_instanceof_custom(Instruction*, Vector<SlowCaseEntry>::iterator&);
625         void emitSlow_op_jless(Instruction*, Vector<SlowCaseEntry>::iterator&);
626         void emitSlow_op_jlesseq(Instruction*, Vector<SlowCaseEntry>::iterator&);
627         void emitSlow_op_jgreater(Instruction*, Vector<SlowCaseEntry>::iterator&);
628         void emitSlow_op_jgreatereq(Instruction*, Vector<SlowCaseEntry>::iterator&);
629         void emitSlow_op_jnless(Instruction*, Vector<SlowCaseEntry>::iterator&);
630         void emitSlow_op_jnlesseq(Instruction*, Vector<SlowCaseEntry>::iterator&);
631         void emitSlow_op_jngreater(Instruction*, Vector<SlowCaseEntry>::iterator&);
632         void emitSlow_op_jngreatereq(Instruction*, Vector<SlowCaseEntry>::iterator&);
633         void emitSlow_op_jtrue(Instruction*, Vector<SlowCaseEntry>::iterator&);
634         void emitSlow_op_loop_hint(Instruction*, Vector<SlowCaseEntry>::iterator&);
635         void emitSlow_op_watchdog(Instruction*, Vector<SlowCaseEntry>::iterator&);
636         void emitSlow_op_lshift(Instruction*, Vector<SlowCaseEntry>::iterator&);
637         void emitSlow_op_mod(Instruction*, Vector<SlowCaseEntry>::iterator&);
638         void emitSlow_op_mul(Instruction*, Vector<SlowCaseEntry>::iterator&);
639         void emitSlow_op_negate(Instruction*, Vector<SlowCaseEntry>::iterator&);
640         void emitSlow_op_neq(Instruction*, Vector<SlowCaseEntry>::iterator&);
641         void emitSlow_op_new_object(Instruction*, Vector<SlowCaseEntry>::iterator&);
642         void emitSlow_op_not(Instruction*, Vector<SlowCaseEntry>::iterator&);
643         void emitSlow_op_nstricteq(Instruction*, Vector<SlowCaseEntry>::iterator&);
644         void emitSlow_op_dec(Instruction*, Vector<SlowCaseEntry>::iterator&);
645         void emitSlow_op_inc(Instruction*, Vector<SlowCaseEntry>::iterator&);
646         void emitSlow_op_put_by_id(Instruction*, Vector<SlowCaseEntry>::iterator&);
647         void emitSlow_op_put_by_val(Instruction*, Vector<SlowCaseEntry>::iterator&);
648         void emitSlow_op_rshift(Instruction*, Vector<SlowCaseEntry>::iterator&);
649         void emitSlow_op_stricteq(Instruction*, Vector<SlowCaseEntry>::iterator&);
650         void emitSlow_op_sub(Instruction*, Vector<SlowCaseEntry>::iterator&);
651         void emitSlow_op_to_number(Instruction*, Vector<SlowCaseEntry>::iterator&);
652         void emitSlow_op_to_string(Instruction*, Vector<SlowCaseEntry>::iterator&);
653         void emitSlow_op_to_primitive(Instruction*, Vector<SlowCaseEntry>::iterator&);
654         void emitSlow_op_unsigned(Instruction*, Vector<SlowCaseEntry>::iterator&);
655         void emitSlow_op_urshift(Instruction*, Vector<SlowCaseEntry>::iterator&);
656         void emitSlow_op_has_indexed_property(Instruction*, Vector<SlowCaseEntry>::iterator&);
657         void emitSlow_op_has_structure_property(Instruction*, Vector<SlowCaseEntry>::iterator&);
658         void emitSlow_op_get_direct_pname(Instruction*, Vector<SlowCaseEntry>::iterator&);
659
660         void emit_op_resolve_scope(Instruction*);
661         void emit_op_get_from_scope(Instruction*);
662         void emit_op_put_to_scope(Instruction*);
663         void emit_op_get_from_arguments(Instruction*);
664         void emit_op_put_to_arguments(Instruction*);
665         void emitSlow_op_resolve_scope(Instruction*, Vector<SlowCaseEntry>::iterator&);
666         void emitSlow_op_get_from_scope(Instruction*, Vector<SlowCaseEntry>::iterator&);
667         void emitSlow_op_put_to_scope(Instruction*, Vector<SlowCaseEntry>::iterator&);
668
669         void emitRightShift(Instruction*, bool isUnsigned);
670         void emitRightShiftSlowCase(Instruction*, Vector<SlowCaseEntry>::iterator&, bool isUnsigned);
671
672         void emitNewFuncCommon(Instruction*);
673         void emitNewFuncExprCommon(Instruction*);
674         void emitVarInjectionCheck(bool needsVarInjectionChecks);
675         void emitResolveClosure(int dst, int scope, bool needsVarInjectionChecks, unsigned depth);
676         void emitLoadWithStructureCheck(int scope, Structure** structureSlot);
677 #if USE(JSVALUE64)
678         void emitGetVarFromPointer(JSValue* operand, GPRReg);
679         void emitGetVarFromIndirectPointer(JSValue** operand, GPRReg);
680 #else
681         void emitGetVarFromIndirectPointer(JSValue** operand, GPRReg tag, GPRReg payload);
682         void emitGetVarFromPointer(JSValue* operand, GPRReg tag, GPRReg payload);
683 #endif
684         void emitGetClosureVar(int scope, uintptr_t operand);
685         void emitNotifyWrite(WatchpointSet*);
686         void emitNotifyWrite(GPRReg pointerToSet);
687         void emitPutGlobalVariable(JSValue* operand, int value, WatchpointSet*);
688         void emitPutGlobalVariableIndirect(JSValue** addressOfOperand, int value, WatchpointSet**);
689         void emitPutClosureVar(int scope, uintptr_t operand, int value, WatchpointSet*);
690
691         void emitInitRegister(int dst);
692
693         void emitPutIntToCallFrameHeader(RegisterID from, int entry);
694
695         JSValue getConstantOperand(int src);
696         bool isOperandConstantInt(int src);
697         bool isOperandConstantChar(int src);
698
            // Fast-path emission for arithmetic ops backed by a math inline
            // cache (unary and binary JITMathIC flavors; see m_instructionToMathIC
            // below). Presumably the IC inlines a generated snippet and falls
            // back to the profiled/non-profiled operation — confirm in JIT.cpp.
699         template <typename Generator, typename ProfiledFunction, typename NonProfiledFunction>
700         void emitMathICFast(JITUnaryMathIC<Generator>*, Instruction*, ProfiledFunction, NonProfiledFunction);
701         template <typename Generator, typename ProfiledFunction, typename NonProfiledFunction>
702         void emitMathICFast(JITBinaryMathIC<Generator>*, Instruction*, ProfiledFunction, NonProfiledFunction);
703
            // Slow-path emission for the same ICs. Takes both a repatching and a
            // plain profiled operation, so the slow path can regenerate the IC.
704         template <typename Generator, typename ProfiledRepatchFunction, typename ProfiledFunction, typename RepatchFunction>
705         void emitMathICSlow(JITBinaryMathIC<Generator>*, Instruction*, ProfiledRepatchFunction, ProfiledFunction, RepatchFunction);
706         template <typename Generator, typename ProfiledRepatchFunction, typename ProfiledFunction, typename RepatchFunction>
707         void emitMathICSlow(JITUnaryMathIC<Generator>*, Instruction*, ProfiledRepatchFunction, ProfiledFunction, RepatchFunction);
708
709         Jump getSlowCase(Vector<SlowCaseEntry>::iterator& iter)
710         {
711             return iter++->from;
712         }
713         void linkSlowCase(Vector<SlowCaseEntry>::iterator& iter)
714         {
715             if (iter->from.isSet())
716                 iter->from.link(this);
717             ++iter;
718         }
719         void linkDummySlowCase(Vector<SlowCaseEntry>::iterator& iter)
720         {
721             ASSERT(!iter->from.isSet());
722             ++iter;
723         }
724         void linkSlowCaseIfNotJSCell(Vector<SlowCaseEntry>::iterator&, int virtualRegisterIndex);
725         void linkAllSlowCasesForBytecodeOffset(Vector<SlowCaseEntry>& slowCases,
726             Vector<SlowCaseEntry>::iterator&, unsigned bytecodeOffset);
727
728         MacroAssembler::Call appendCallWithExceptionCheck(const FunctionPtr&);
729 #if OS(WINDOWS) && CPU(X86_64)
730         MacroAssembler::Call appendCallWithExceptionCheckAndSlowPathReturnType(const FunctionPtr&);
731 #endif
732         MacroAssembler::Call appendCallWithCallFrameRollbackOnException(const FunctionPtr&);
733         MacroAssembler::Call appendCallWithExceptionCheckSetJSValueResult(const FunctionPtr&, int);
734         MacroAssembler::Call appendCallWithExceptionCheckSetJSValueResultWithProfile(const FunctionPtr&, int);
735         
736         enum WithProfileTag { WithProfile };
737         
738         MacroAssembler::Call callOperation(C_JITOperation_E);
739         MacroAssembler::Call callOperation(C_JITOperation_EO, GPRReg);
740         MacroAssembler::Call callOperation(C_JITOperation_EL, GPRReg);
741         MacroAssembler::Call callOperation(C_JITOperation_EL, TrustedImmPtr);
742         MacroAssembler::Call callOperation(C_JITOperation_ESt, Structure*);
743         MacroAssembler::Call callOperation(C_JITOperation_EZ, int32_t);
744         MacroAssembler::Call callOperation(Z_JITOperation_EJZZ, GPRReg, int32_t, int32_t);
745         MacroAssembler::Call callOperation(J_JITOperation_E, int);
746         MacroAssembler::Call callOperation(J_JITOperation_EAapJ, int, ArrayAllocationProfile*, GPRReg);
747         MacroAssembler::Call callOperation(J_JITOperation_EAapJcpZ, int, ArrayAllocationProfile*, GPRReg, int32_t);
748         MacroAssembler::Call callOperation(J_JITOperation_EAapJcpZ, int, ArrayAllocationProfile*, const JSValue*, int32_t);
749         MacroAssembler::Call callOperation(J_JITOperation_EC, int, JSCell*);
750         MacroAssembler::Call callOperation(V_JITOperation_EC, JSCell*);
751         MacroAssembler::Call callOperation(J_JITOperation_EJ, int, GPRReg);
752         MacroAssembler::Call callOperation(J_JITOperation_EJ, JSValueRegs, JSValueRegs);
753 #if USE(JSVALUE64)
754         MacroAssembler::Call callOperation(J_JITOperation_ESsiJI, int, StructureStubInfo*, GPRReg, UniquedStringImpl*);
755         MacroAssembler::Call callOperation(WithProfileTag, J_JITOperation_ESsiJI, int, StructureStubInfo*, GPRReg, UniquedStringImpl*);
756 #else
757         MacroAssembler::Call callOperation(J_JITOperation_ESsiJI, int, StructureStubInfo*, GPRReg, GPRReg, UniquedStringImpl*);
758         MacroAssembler::Call callOperation(WithProfileTag, J_JITOperation_ESsiJI, int, StructureStubInfo*, GPRReg, GPRReg, UniquedStringImpl*);
759 #endif
760         MacroAssembler::Call callOperation(J_JITOperation_EJI, int, GPRReg, UniquedStringImpl*);
761         MacroAssembler::Call callOperation(J_JITOperation_EJJ, int, GPRReg, GPRReg);
762         MacroAssembler::Call callOperation(J_JITOperation_EJArp, JSValueRegs, JSValueRegs, ArithProfile*);
763         MacroAssembler::Call callOperation(J_JITOperation_EJJArp, JSValueRegs, JSValueRegs, JSValueRegs, ArithProfile*);
764         MacroAssembler::Call callOperation(J_JITOperation_EJJ, JSValueRegs, JSValueRegs, JSValueRegs);
765         MacroAssembler::Call callOperation(J_JITOperation_EJMic, JSValueRegs, JSValueRegs, TrustedImmPtr);
766         MacroAssembler::Call callOperation(J_JITOperation_EJJMic, JSValueRegs, JSValueRegs, JSValueRegs, TrustedImmPtr);
767         MacroAssembler::Call callOperation(J_JITOperation_EJJAp, int, GPRReg, GPRReg, ArrayProfile*);
768         MacroAssembler::Call callOperation(J_JITOperation_EJJBy, int, GPRReg, GPRReg, ByValInfo*);
769         MacroAssembler::Call callOperation(Z_JITOperation_EJOJ, GPRReg, GPRReg, GPRReg);
770         MacroAssembler::Call callOperation(C_JITOperation_EJsc, GPRReg);
771         MacroAssembler::Call callOperation(J_JITOperation_EJscC, int, GPRReg, JSCell*);
772         MacroAssembler::Call callOperation(J_JITOperation_EJscCJ, int, GPRReg, JSCell*, GPRReg);
773         MacroAssembler::Call callOperation(C_JITOperation_EJscZ, GPRReg, int32_t);
774         MacroAssembler::Call callOperation(C_JITOperation_EJscZ, int, GPRReg, int32_t);
775 #if USE(JSVALUE64)
776         MacroAssembler::Call callOperation(WithProfileTag, J_JITOperation_EJJ, int, GPRReg, GPRReg);
777 #else
778         MacroAssembler::Call callOperation(WithProfileTag, J_JITOperation_EJJ, int, GPRReg, GPRReg, GPRReg, GPRReg);
779 #endif
780         MacroAssembler::Call callOperation(J_JITOperation_EP, int, void*);
781         MacroAssembler::Call callOperation(WithProfileTag, J_JITOperation_EPc, int, Instruction*);
782         MacroAssembler::Call callOperation(J_JITOperation_EPc, int, Instruction*);
783         MacroAssembler::Call callOperation(J_JITOperation_EZ, int, int32_t);
784         MacroAssembler::Call callOperation(J_JITOperation_EZZ, int, int32_t, int32_t);
785         MacroAssembler::Call callOperation(P_JITOperation_E);
786         MacroAssembler::Call callOperation(P_JITOperation_EJS, GPRReg, size_t);
787         MacroAssembler::Call callOperation(S_JITOperation_ECC, RegisterID, RegisterID);
788         MacroAssembler::Call callOperation(S_JITOperation_EJ, RegisterID);
789         MacroAssembler::Call callOperation(S_JITOperation_EJI, GPRReg, UniquedStringImpl*);
790         MacroAssembler::Call callOperation(S_JITOperation_EJJ, RegisterID, RegisterID);
791         MacroAssembler::Call callOperation(S_JITOperation_EOJss, RegisterID, RegisterID);
792         MacroAssembler::Call callOperation(Sprt_JITOperation_EZ, int32_t);
793         MacroAssembler::Call callOperation(V_JITOperation_E);
794         MacroAssembler::Call callOperation(V_JITOperation_EC, RegisterID);
795         MacroAssembler::Call callOperation(V_JITOperation_ECC, RegisterID, RegisterID);
796         MacroAssembler::Call callOperation(V_JITOperation_ECIZC, RegisterID, UniquedStringImpl*, int32_t, RegisterID);
797         MacroAssembler::Call callOperation(V_JITOperation_ECIZCC, RegisterID, UniquedStringImpl*, int32_t, RegisterID, RegisterID);
798 #if USE(JSVALUE64)
799         MacroAssembler::Call callOperation(V_JITOperation_ECJZC, RegisterID, RegisterID, int32_t, RegisterID);
800 #else
801         MacroAssembler::Call callOperation(V_JITOperation_ECJZC, RegisterID, RegisterID, RegisterID, int32_t, RegisterID);
802 #endif
803         MacroAssembler::Call callOperation(J_JITOperation_EE, RegisterID);
804         MacroAssembler::Call callOperation(V_JITOperation_EZSymtabJ, int, SymbolTable*, RegisterID);
805         MacroAssembler::Call callOperation(J_JITOperation_EZSymtabJ, int, SymbolTable*, RegisterID);
806         MacroAssembler::Call callOperation(V_JITOperation_EJ, RegisterID);
807         MacroAssembler::Call callOperationNoExceptionCheck(Z_JITOperation_E);
808 #if USE(JSVALUE64)
809         MacroAssembler::Call callOperationNoExceptionCheck(V_JITOperation_EJ, RegisterID);
810 #else
811         MacroAssembler::Call callOperationNoExceptionCheck(V_JITOperation_EJ, RegisterID, RegisterID);
812 #endif
813 #if USE(JSVALUE64)
814         MacroAssembler::Call callOperation(F_JITOperation_EFJZZ, RegisterID, RegisterID, int32_t, RegisterID);
815         MacroAssembler::Call callOperation(V_JITOperation_ESsiJJI, StructureStubInfo*, RegisterID, RegisterID, UniquedStringImpl*);
816         MacroAssembler::Call callOperation(V_JITOperation_ECIZJJ, RegisterID, UniquedStringImpl*, int32_t, RegisterID, RegisterID);
817         MacroAssembler::Call callOperation(V_JITOperation_ECJ, RegisterID, RegisterID);
818 #else
819         MacroAssembler::Call callOperation(V_JITOperation_ESsiJJI, StructureStubInfo*, RegisterID, RegisterID, RegisterID, RegisterID, UniquedStringImpl*);
820         MacroAssembler::Call callOperation(V_JITOperation_ECJ, RegisterID, RegisterID, RegisterID);
821 #endif
822         MacroAssembler::Call callOperation(V_JITOperation_EJJJ, RegisterID, RegisterID, RegisterID);
823         MacroAssembler::Call callOperation(V_JITOperation_EJJJAp, RegisterID, RegisterID, RegisterID, ArrayProfile*);
824         MacroAssembler::Call callOperation(V_JITOperation_EJJJBy, RegisterID, RegisterID, RegisterID, ByValInfo*);
825         MacroAssembler::Call callOperation(V_JITOperation_EJZJ, RegisterID, int32_t, RegisterID);
826         MacroAssembler::Call callOperation(V_JITOperation_EJZ, RegisterID, int32_t);
827         MacroAssembler::Call callOperation(V_JITOperation_EPc, Instruction*);
828         MacroAssembler::Call callOperation(V_JITOperation_EZ, int32_t);
829         MacroAssembler::Call callOperation(V_JITOperation_EZJ, int, GPRReg);
830         MacroAssembler::Call callOperationWithCallFrameRollbackOnException(J_JITOperation_E);
831         MacroAssembler::Call callOperationWithCallFrameRollbackOnException(V_JITOperation_ECb, CodeBlock*);
832         MacroAssembler::Call callOperationWithCallFrameRollbackOnException(Z_JITOperation_E);
833 #if USE(JSVALUE32_64)
834         MacroAssembler::Call callOperation(F_JITOperation_EFJZZ, RegisterID, RegisterID, RegisterID, int32_t, RegisterID);
835         MacroAssembler::Call callOperation(Z_JITOperation_EJZZ, GPRReg, GPRReg, int32_t, int32_t);
836         MacroAssembler::Call callOperation(J_JITOperation_EAapJ, int, ArrayAllocationProfile*, GPRReg, GPRReg);
837         MacroAssembler::Call callOperation(J_JITOperation_EJ, int, GPRReg, GPRReg);
838         MacroAssembler::Call callOperation(J_JITOperation_EJI, int, GPRReg, GPRReg, UniquedStringImpl*);
839         MacroAssembler::Call callOperation(J_JITOperation_EJJ, int, GPRReg, GPRReg, GPRReg, GPRReg);
840         MacroAssembler::Call callOperation(Z_JITOperation_EJOJ, GPRReg, GPRReg, GPRReg, GPRReg, GPRReg);
841         MacroAssembler::Call callOperation(J_JITOperation_EJJAp, int, GPRReg, GPRReg, GPRReg, GPRReg, ArrayProfile*);
842         MacroAssembler::Call callOperation(J_JITOperation_EJJBy, int, GPRReg, GPRReg, GPRReg, GPRReg, ByValInfo*);
843         MacroAssembler::Call callOperation(P_JITOperation_EJS, GPRReg, GPRReg, size_t);
844         MacroAssembler::Call callOperation(S_JITOperation_EJ, RegisterID, RegisterID);
845         MacroAssembler::Call callOperation(S_JITOperation_EJI, GPRReg, GPRReg, UniquedStringImpl*);
846         MacroAssembler::Call callOperation(S_JITOperation_EJJ, RegisterID, RegisterID, RegisterID, RegisterID);
847         MacroAssembler::Call callOperation(V_JITOperation_EZSymtabJ, int, SymbolTable*, RegisterID, RegisterID);
848         MacroAssembler::Call callOperation(V_JITOperation_EJ, RegisterID, RegisterID);
849         MacroAssembler::Call callOperation(V_JITOperation_EJJJ, RegisterID, RegisterID, RegisterID, RegisterID, RegisterID, RegisterID);
850         MacroAssembler::Call callOperation(V_JITOperation_EJJJAp, RegisterID, RegisterID, RegisterID, RegisterID, RegisterID, RegisterID, ArrayProfile*);
851         MacroAssembler::Call callOperation(V_JITOperation_EJJJBy, RegisterID, RegisterID, RegisterID, RegisterID, RegisterID, RegisterID, ByValInfo*);
852         MacroAssembler::Call callOperation(V_JITOperation_EJZ, RegisterID, RegisterID, int32_t);
853         MacroAssembler::Call callOperation(V_JITOperation_EJZJ, RegisterID, RegisterID, int32_t, RegisterID, RegisterID);
854         MacroAssembler::Call callOperation(V_JITOperation_EZJ, int32_t, RegisterID, RegisterID);
855         MacroAssembler::Call callOperation(J_JITOperation_EJscCJ, int, GPRReg, JSCell*, GPRReg, GPRReg);
856 #endif
857
858         template<typename SnippetGenerator>
859         void emitBitBinaryOpFastPath(Instruction* currentInstruction);
860
861         void emitRightShiftFastPath(Instruction* currentInstruction, OpcodeID);
862
863         Jump checkStructure(RegisterID reg, Structure* structure);
864
865         void updateTopCallFrame();
866
867         Call emitNakedCall(CodePtr function = CodePtr());
868         Call emitNakedTailCall(CodePtr function = CodePtr());
869
870         // Loads the character value of a single character string into dst.
871         void emitLoadCharacterString(RegisterID src, RegisterID dst, JumpList& failures);
872         
873 #if ENABLE(DFG_JIT)
            // Hook emitted on function entry to decide whether to tier up to
            // the DFG — presumably via execution counters; see JIT.cpp.
874         void emitEnterOptimizationCheck();
875 #else
            // No upper tier without the DFG, so this compiles away entirely.
876         void emitEnterOptimizationCheck() { }
877 #endif
878
879 #ifndef NDEBUG
880         void printBytecodeOperandTypes(int src1, int src2);
881 #endif
882
883 #if ENABLE(SAMPLING_FLAGS)
884         void setSamplingFlag(int32_t);
885         void clearSamplingFlag(int32_t);
886 #endif
887
888 #if ENABLE(SAMPLING_COUNTERS)
889         void emitCount(AbstractSamplingCounter&, int32_t = 1);
890 #endif
891
892 #if ENABLE(OPCODE_SAMPLING)
893         void sampleInstruction(Instruction*, bool = false);
894 #endif
895
896 #if ENABLE(CODEBLOCK_SAMPLING)
897         void sampleCodeBlock(CodeBlock*);
898 #else
            // Sampling disabled: inline empty body so call sites need no #ifs.
899         void sampleCodeBlock(CodeBlock*) {}
900 #endif
901
902 #if ENABLE(DFG_JIT)
            // Accessors over the per-compilation flags cached in the
            // m_canBeOptimized / m_canBeOptimizedOrInlined /
            // m_shouldEmitProfiling members declared at the end of this class.
903         bool canBeOptimized() { return m_canBeOptimized; }
904         bool canBeOptimizedOrInlined() { return m_canBeOptimizedOrInlined; }
905         bool shouldEmitProfiling() { return m_shouldEmitProfiling; }
906 #else
            // Without the DFG nothing can tier up or inline.
907         bool canBeOptimized() { return false; }
908         bool canBeOptimizedOrInlined() { return false; }
909         // Enables use of value profiler with tiered compilation turned off,
910         // in which case all code gets profiled.
911         bool shouldEmitProfiling() { return false; }
912 #endif
913
914         static bool reportCompileTimes();
915         static bool computeCompileTimes();
916         
917         // If you need to check the value of an instruction multiple times and the instruction is
918         // part of a LLInt inline cache, then you want to use this. It will give you the value of
919         // the instruction at the start of JITing.
920         Instruction* copiedInstruction(Instruction*);
921
922         Interpreter* m_interpreter;
923         
924         RefCountedArray<Instruction> m_instructions;
925
926         Vector<CallRecord> m_calls;
927         Vector<Label> m_labels;
928         Vector<JITGetByIdGenerator> m_getByIds;
929         Vector<JITPutByIdGenerator> m_putByIds;
930         Vector<ByValCompilationInfo> m_byValCompilationInfo;
931         Vector<CallCompilationInfo> m_callCompilationInfo;
932         Vector<JumpTable> m_jmpTable;
933
934         unsigned m_bytecodeOffset;
935         Vector<SlowCaseEntry> m_slowCases;
936         Vector<SwitchRecord> m_switches;
937
938         JumpList m_exceptionChecks;
939         JumpList m_exceptionChecksWithCallFrameRollback;
940         Label m_exceptionHandler;
941
942         unsigned m_getByIdIndex;
943         unsigned m_putByIdIndex;
944         unsigned m_byValInstructionIndex;
945         unsigned m_callLinkInfoIndex;
946         
947         Label m_arityCheck;
948         std::unique_ptr<LinkBuffer> m_linkBuffer;
949
950         std::unique_ptr<JITDisassembler> m_disassembler;
951         RefPtr<Profiler::Compilation> m_compilation;
952         WeakRandom m_randomGenerator;
953         static CodeRef stringGetByValStubGenerator(VM*);
954
955         PCToCodeOriginMapBuilder m_pcToCodeOriginMapBuilder;
956
957         HashMap<Instruction*, void*> m_instructionToMathIC;
958         HashMap<Instruction*, MathICGenerationState> m_instructionToMathICGenerationState;
959
960         bool m_canBeOptimized;
961         bool m_canBeOptimizedOrInlined;
962         bool m_shouldEmitProfiling;
963     } JIT_CLASS_ALIGNMENT;
964
965 } // namespace JSC
966
967 #endif // ENABLE(JIT)
968
969 #endif // JIT_h