Implement fast path for op_new_array in the baseline JIT
[WebKit-https.git] / Source / JavaScriptCore / jit / JIT.h
1 /*
2  * Copyright (C) 2008 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #ifndef JIT_h
27 #define JIT_h
28
29 #if ENABLE(JIT)
30
31 // Verbose logging of code generation
32 #define ENABLE_JIT_VERBOSE 0
33 // Verbose logging for OSR-related code.
34 #define ENABLE_JIT_VERBOSE_OSR 0
35
36 // We've run into some problems where changing the size of the class JIT leads to
37 // performance fluctuations.  Try forcing alignment in an attempt to stabilize this.
38 #if COMPILER(GCC)
39 #define JIT_CLASS_ALIGNMENT __attribute__ ((aligned (32)))
40 #else
41 #define JIT_CLASS_ALIGNMENT
42 #endif
43
44 #define ASSERT_JIT_OFFSET_UNUSED(variable, actual, expected) ASSERT_WITH_MESSAGE_UNUSED(variable, actual == expected, "JIT Offset \"%s\" should be %d, not %d.\n", #expected, static_cast<int>(expected), static_cast<int>(actual));
45 #define ASSERT_JIT_OFFSET(actual, expected) ASSERT_WITH_MESSAGE(actual == expected, "JIT Offset \"%s\" should be %d, not %d.\n", #expected, static_cast<int>(expected), static_cast<int>(actual));
46
47 #include "CodeBlock.h"
48 #include "CompactJITCodeMap.h"
49 #include "Interpreter.h"
50 #include "JSInterfaceJIT.h"
51 #include "Opcode.h"
52 #include "Profiler.h"
53 #include <bytecode/SamplingTool.h>
54
55 namespace JSC {
56
57     class CodeBlock;
58     class FunctionExecutable;
59     class JIT;
60     class JSPropertyNameIterator;
61     class Interpreter;
62     class Register;
63     class RegisterFile;
64     class ScopeChainNode;
65     class StructureChain;
66
67     struct CallLinkInfo;
68     struct Instruction;
69     struct OperandTypes;
70     struct PolymorphicAccessStructureList;
71     struct SimpleJumpTable;
72     struct StringJumpTable;
73     struct StructureStubInfo;
74
75     struct CallRecord {
        // Record of a call emitted during code generation, kept so the link
        // pass can bind the call site to its destination.
76         MacroAssembler::Call from;
        // Bytecode offset the call was emitted for.
77         unsigned bytecodeOffset;
        // Destination of the call; 0 until/unless a target is supplied.
78         void* to;
79
        // Default constructor leaves members unset; records built this way
        // carry no meaningful data until assigned.
80         CallRecord()
81         {
82         }
83
84         CallRecord(MacroAssembler::Call from, unsigned bytecodeOffset, void* to = 0)
85             : from(from)
86             , bytecodeOffset(bytecodeOffset)
87             , to(to)
88         {
89         }
90     };
91
92     struct JumpTable {
        // A forward jump that must later be linked to a bytecode target.
93         MacroAssembler::Jump from;
        // Bytecode offset the jump should land on once linked.
94         unsigned toBytecodeOffset;
95
96         JumpTable(MacroAssembler::Jump f, unsigned t)
97             : from(f)
98             , toBytecodeOffset(t)
99         {
100         }
101     };
102
103     struct SlowCaseEntry {
        // Jump taken from the fast path into slow-case code.
104         MacroAssembler::Jump from;
        // Bytecode offset the slow case belongs to.
105         unsigned to;
        // Extra disambiguator when one bytecode has several slow cases
        // (defaults to 0 when unused).
106         unsigned hint;
107         
108         SlowCaseEntry(MacroAssembler::Jump f, unsigned t, unsigned h = 0)
109             : from(f)
110             , to(t)
111             , hint(h)
112         {
113         }
114     };
115
116     struct SwitchRecord {
        // Describes one switch statement encountered during compilation so the
        // link pass can populate the matching jump table.
117         enum Type {
118             Immediate,
119             Character,
120             String
121         };
122
        // Discriminates which member of the union below is active.
123         Type type;
124
        // Tagged union: simpleJumpTable for Immediate/Character switches,
        // stringJumpTable for String switches (see the two constructors).
125         union {
126             SimpleJumpTable* simpleJumpTable;
127             StringJumpTable* stringJumpTable;
128         } jumpTable;
129
130         unsigned bytecodeOffset;
131         unsigned defaultOffset;
132
        // Constructor for Immediate/Character switches; caller supplies which.
133         SwitchRecord(SimpleJumpTable* jumpTable, unsigned bytecodeOffset, unsigned defaultOffset, Type type)
134             : type(type)
135             , bytecodeOffset(bytecodeOffset)
136             , defaultOffset(defaultOffset)
137         {
138             this->jumpTable.simpleJumpTable = jumpTable;
139         }
140
        // Constructor for String switches; type is fixed to String.
141         SwitchRecord(StringJumpTable* jumpTable, unsigned bytecodeOffset, unsigned defaultOffset)
142             : type(String)
143             , bytecodeOffset(bytecodeOffset)
144             , defaultOffset(defaultOffset)
145         {
146             this->jumpTable.stringJumpTable = jumpTable;
147         }
148     };
149
150     struct PropertyStubCompilationInfo {
        // Bookkeeping for one property-access stub emitted by the JIT.
151         unsigned bytecodeIndex;
152         MacroAssembler::Call callReturnLocation;
153         MacroAssembler::Label hotPathBegin;
154         
155 #if !ASSERT_DISABLED
        // In debug builds, poison bytecodeIndex so an unset record is
        // detectable by assertions.
156         PropertyStubCompilationInfo()
157             : bytecodeIndex(std::numeric_limits<unsigned>::max())
158         {
159         }
160 #endif
161     };
162
163     struct StructureStubCompilationInfo {
        // Bookkeeping for one call-linking stub: labels/calls recorded during
        // the hot path plus the call type and originating bytecode index.
164         MacroAssembler::DataLabelPtr hotPathBegin;
165         MacroAssembler::Call hotPathOther;
166         MacroAssembler::Call callReturnLocation;
167         CallLinkInfo::CallType callType;
168         unsigned bytecodeIndex;
169     };
170
171     struct MethodCallCompilationInfo {
        // Bookkeeping for one method-check site; ties the structure-compare
        // data label to its bytecode index and property-access record.
172         MethodCallCompilationInfo(unsigned bytecodeIndex, unsigned propertyAccessIndex)
173             : bytecodeIndex(bytecodeIndex)
174             , propertyAccessIndex(propertyAccessIndex)
175         {
176         }
177
178         unsigned bytecodeIndex;
179         MacroAssembler::DataLabelPtr structureToCompare;
        // Index of the associated property-access compilation record.
180         unsigned propertyAccessIndex;
181     };
182
183     // Near calls can only be patched to other JIT code, regular calls can be patched to JIT code or relinked to stub functions.
184     void ctiPatchNearCallByReturnAddress(CodeBlock* codeblock, ReturnAddressPtr returnAddress, MacroAssemblerCodePtr newCalleeFunction);
185     void ctiPatchCallByReturnAddress(CodeBlock* codeblock, ReturnAddressPtr returnAddress, MacroAssemblerCodePtr newCalleeFunction);
186     void ctiPatchCallByReturnAddress(CodeBlock* codeblock, ReturnAddressPtr returnAddress, FunctionPtr newCalleeFunction);
187
188     class JIT : private JSInterfaceJIT {
189         friend class JITStubCall;
190
191         using MacroAssembler::Jump;
192         using MacroAssembler::JumpList;
193         using MacroAssembler::Label;
194
195         static const int patchGetByIdDefaultStructure = -1;
196         static const int patchGetByIdDefaultOffset = 0;
197         // Magic number - initial offset cannot be representable as a signed 8bit value, or the X86Assembler
198         // will compress the displacement, and we may not be able to fit a patched offset.
199         static const int patchPutByIdDefaultOffset = 256;
200
201     public:
202         static JITCode compile(JSGlobalData* globalData, CodeBlock* codeBlock, CodePtr* functionEntryArityCheck = 0)
203         {
204             return JIT(globalData, codeBlock).privateCompile(functionEntryArityCheck);
205         }
206
        // Compiles a get_by_id stub specialized for a single prototype access;
        // the stub's bytecode index seeds the fresh JIT's m_bytecodeOffset.
207         static void compileGetByIdProto(JSGlobalData* globalData, CallFrame* callFrame, CodeBlock* codeBlock, StructureStubInfo* stubInfo, Structure* structure, Structure* prototypeStructure, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset, ReturnAddressPtr returnAddress)
208         {
209             JIT jit(globalData, codeBlock);
210             jit.m_bytecodeOffset = stubInfo->bytecodeIndex;
211             jit.privateCompileGetByIdProto(stubInfo, structure, prototypeStructure, ident, slot, cachedOffset, returnAddress, callFrame);
212         }
213
        // Adds one case (at currentIndex) to a polymorphic self-access
        // get_by_id stub list.
214         static void compileGetByIdSelfList(JSGlobalData* globalData, CodeBlock* codeBlock, StructureStubInfo* stubInfo, PolymorphicAccessStructureList* polymorphicStructures, int currentIndex, Structure* structure, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset)
215         {
216             JIT jit(globalData, codeBlock);
217             jit.m_bytecodeOffset = stubInfo->bytecodeIndex;
218             jit.privateCompileGetByIdSelfList(stubInfo, polymorphicStructures, currentIndex, structure, ident, slot, cachedOffset);
219         }
        // Adds one prototype-access case (at currentIndex) to a polymorphic
        // get_by_id stub list.
220         static void compileGetByIdProtoList(JSGlobalData* globalData, CallFrame* callFrame, CodeBlock* codeBlock, StructureStubInfo* stubInfo, PolymorphicAccessStructureList* prototypeStructureList, int currentIndex, Structure* structure, Structure* prototypeStructure, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset)
221         {
222             JIT jit(globalData, codeBlock);
223             jit.m_bytecodeOffset = stubInfo->bytecodeIndex;
224             jit.privateCompileGetByIdProtoList(stubInfo, prototypeStructureList, currentIndex, structure, prototypeStructure, ident, slot, cachedOffset, callFrame);
225         }
        // Adds one prototype-chain case (walking 'count' links of 'chain') to
        // a polymorphic get_by_id stub list.
226         static void compileGetByIdChainList(JSGlobalData* globalData, CallFrame* callFrame, CodeBlock* codeBlock, StructureStubInfo* stubInfo, PolymorphicAccessStructureList* prototypeStructureList, int currentIndex, Structure* structure, StructureChain* chain, size_t count, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset)
227         {
228             JIT jit(globalData, codeBlock);
229             jit.m_bytecodeOffset = stubInfo->bytecodeIndex;
230             jit.privateCompileGetByIdChainList(stubInfo, prototypeStructureList, currentIndex, structure, chain, count, ident, slot, cachedOffset, callFrame);
231         }
232
        // Compiles a monomorphic get_by_id stub that walks 'count' links of
        // the prototype chain before loading the cached property.
233         static void compileGetByIdChain(JSGlobalData* globalData, CallFrame* callFrame, CodeBlock* codeBlock, StructureStubInfo* stubInfo, Structure* structure, StructureChain* chain, size_t count, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset, ReturnAddressPtr returnAddress)
234         {
235             JIT jit(globalData, codeBlock);
236             jit.m_bytecodeOffset = stubInfo->bytecodeIndex;
237             jit.privateCompileGetByIdChain(stubInfo, structure, chain, count, ident, slot, cachedOffset, returnAddress, callFrame);
238         }
239         
        // Compiles a put_by_id stub for a structure transition (oldStructure ->
        // newStructure); 'direct' distinguishes put_by_id_direct semantics.
240         static void compilePutByIdTransition(JSGlobalData* globalData, CodeBlock* codeBlock, StructureStubInfo* stubInfo, Structure* oldStructure, Structure* newStructure, size_t cachedOffset, StructureChain* chain, ReturnAddressPtr returnAddress, bool direct)
241         {
242             JIT jit(globalData, codeBlock);
243             jit.m_bytecodeOffset = stubInfo->bytecodeIndex;
244             jit.privateCompilePutByIdTransition(stubInfo, oldStructure, newStructure, cachedOffset, chain, returnAddress, direct);
245         }
246
247         static PassRefPtr<ExecutableMemoryHandle> compileCTIMachineTrampolines(JSGlobalData* globalData, TrampolineStructure *trampolines)
248         {
249             if (!globalData->canUseJIT())
250                 return 0;
251             JIT jit(globalData, 0);
252             return jit.privateCompileCTIMachineTrampolines(globalData, trampolines);
253         }
254
255         static CodeRef compileCTINativeCall(JSGlobalData* globalData, NativeFunction func)
256         {
257             if (!globalData->canUseJIT())
258                 return CodeRef();
259             JIT jit(globalData, 0);
260             return jit.privateCompileCTINativeCall(globalData, func);
261         }
262
263         static void resetPatchGetById(RepatchBuffer&, StructureStubInfo*);
264         static void resetPatchPutById(RepatchBuffer&, StructureStubInfo*);
265         static void patchGetByIdSelf(CodeBlock* codeblock, StructureStubInfo*, Structure*, size_t cachedOffset, ReturnAddressPtr returnAddress);
266         static void patchPutByIdReplace(CodeBlock* codeblock, StructureStubInfo*, Structure*, size_t cachedOffset, ReturnAddressPtr returnAddress, bool direct);
267         static void patchMethodCallProto(JSGlobalData&, CodeBlock* codeblock, MethodCallLinkInfo&, JSObject*, Structure*, JSObject*, ReturnAddressPtr);
268
269         static void compilePatchGetArrayLength(JSGlobalData* globalData, CodeBlock* codeBlock, ReturnAddressPtr returnAddress)
270         {
271             JIT jit(globalData, codeBlock);
272             return jit.privateCompilePatchGetArrayLength(returnAddress);
273         }
274
275         static void linkFor(JSFunction* callee, CodeBlock* callerCodeBlock, CodeBlock* calleeCodeBlock, CodePtr, CallLinkInfo*, JSGlobalData*, CodeSpecializationKind);
276
277     private:
278         struct JSRInfo {
            // Pairs the data label holding a jump-subroutine return address
            // with the label it should point at after linking.
279             DataLabelPtr storeLocation;
280             Label target;
281
282             JSRInfo(DataLabelPtr storeLocation, Label targetLocation)
283                 : storeLocation(storeLocation)
284                 , target(targetLocation)
285             {
286             }
287         };
288
289         JIT(JSGlobalData*, CodeBlock* = 0);
290
291         void privateCompileMainPass();
292         void privateCompileLinkPass();
293         void privateCompileSlowCases();
294         JITCode privateCompile(CodePtr* functionEntryArityCheck);
295         void privateCompileGetByIdProto(StructureStubInfo*, Structure*, Structure* prototypeStructure, const Identifier&, const PropertySlot&, size_t cachedOffset, ReturnAddressPtr returnAddress, CallFrame* callFrame);
296         void privateCompileGetByIdSelfList(StructureStubInfo*, PolymorphicAccessStructureList*, int, Structure*, const Identifier&, const PropertySlot&, size_t cachedOffset);
297         void privateCompileGetByIdProtoList(StructureStubInfo*, PolymorphicAccessStructureList*, int, Structure*, Structure* prototypeStructure, const Identifier&, const PropertySlot&, size_t cachedOffset, CallFrame* callFrame);
298         void privateCompileGetByIdChainList(StructureStubInfo*, PolymorphicAccessStructureList*, int, Structure*, StructureChain* chain, size_t count, const Identifier&, const PropertySlot&, size_t cachedOffset, CallFrame* callFrame);
299         void privateCompileGetByIdChain(StructureStubInfo*, Structure*, StructureChain*, size_t count, const Identifier&, const PropertySlot&, size_t cachedOffset, ReturnAddressPtr returnAddress, CallFrame* callFrame);
300         void privateCompilePutByIdTransition(StructureStubInfo*, Structure*, Structure*, size_t cachedOffset, StructureChain*, ReturnAddressPtr returnAddress, bool direct);
301
302         PassRefPtr<ExecutableMemoryHandle> privateCompileCTIMachineTrampolines(JSGlobalData*, TrampolineStructure*);
303         Label privateCompileCTINativeCall(JSGlobalData*, bool isConstruct = false);
304         CodeRef privateCompileCTINativeCall(JSGlobalData*, NativeFunction);
305         void privateCompilePatchGetArrayLength(ReturnAddressPtr returnAddress);
306
307         static bool isDirectPutById(StructureStubInfo*);
308
309         void addSlowCase(Jump);
310         void addSlowCase(JumpList);
311         void addSlowCase();
312         void addJump(Jump, int);
313         void emitJumpSlowToHot(Jump, int);
314
315         void compileOpCall(OpcodeID, Instruction*, unsigned callLinkInfoIndex);
316         void compileOpCallSlowCase(OpcodeID, Instruction*, Vector<SlowCaseEntry>::iterator&, unsigned callLinkInfoIndex);
317         void compileLoadVarargs(Instruction*);
318         void compileCallEval();
319         void compileCallEvalSlowCase(Vector<SlowCaseEntry>::iterator&);
320
321         enum CompileOpStrictEqType { OpStrictEq, OpNStrictEq };
322         void compileOpStrictEq(Instruction* instruction, CompileOpStrictEqType type);
323         bool isOperandConstantImmediateDouble(unsigned src);
324         
325         void emitLoadDouble(int index, FPRegisterID value);
326         void emitLoadInt32ToDouble(int index, FPRegisterID value);
327         Jump emitJumpIfNotObject(RegisterID structureReg);
328         Jump emitJumpIfNotType(RegisterID baseReg, RegisterID scratchReg, JSType);
329
330         void testPrototype(JSValue, JumpList& failureCases);
331
332         enum WriteBarrierMode { UnconditionalWriteBarrier, ShouldFilterImmediates };
333         // value register in write barrier is used before any scratch registers
334         // so may safely be the same as either of the scratch registers.
335         void emitWriteBarrier(RegisterID owner, RegisterID valueTag, RegisterID scratch, RegisterID scratch2, WriteBarrierMode, WriteBarrierUseKind);
336         void emitWriteBarrier(JSCell* owner, RegisterID value, RegisterID scratch, WriteBarrierMode, WriteBarrierUseKind);
337
338         template<typename ClassType, bool destructor, typename StructureType> void emitAllocateBasicJSObject(StructureType, RegisterID result, RegisterID storagePtr);
339         void emitAllocateBasicStorage(size_t, RegisterID result, RegisterID storagePtr);
340         template<typename T> void emitAllocateJSFinalObject(T structure, RegisterID result, RegisterID storagePtr);
341         void emitAllocateJSFunction(FunctionExecutable*, RegisterID scopeChain, RegisterID result, RegisterID storagePtr);
342         void emitAllocateJSArray(unsigned valuesRegister, unsigned length, RegisterID cellResult, RegisterID storageResult, RegisterID storagePtr);
343         
344 #if ENABLE(VALUE_PROFILER)
345         // This assumes that the value to profile is in regT0 and that regT3 is available for
346         // scratch.
347         void emitValueProfilingSite(ValueProfile*);
348         void emitValueProfilingSite(unsigned bytecodeOffset);
349         void emitValueProfilingSite();
350 #else
351         void emitValueProfilingSite(unsigned) { }
352         void emitValueProfilingSite() { }
353 #endif
354
355 #if USE(JSVALUE32_64)
356         bool getOperandConstantImmediateInt(unsigned op1, unsigned op2, unsigned& op, int32_t& constant);
357
358         void emitLoadTag(int index, RegisterID tag);
359         void emitLoadPayload(int index, RegisterID payload);
360
361         void emitLoad(const JSValue& v, RegisterID tag, RegisterID payload);
362         void emitLoad(int index, RegisterID tag, RegisterID payload, RegisterID base = callFrameRegister);
363         void emitLoad2(int index1, RegisterID tag1, RegisterID payload1, int index2, RegisterID tag2, RegisterID payload2);
364
365         void emitStore(int index, RegisterID tag, RegisterID payload, RegisterID base = callFrameRegister);
366         void emitStore(int index, const JSValue constant, RegisterID base = callFrameRegister);
367         void emitStoreInt32(int index, RegisterID payload, bool indexIsInt32 = false);
368         void emitStoreInt32(int index, TrustedImm32 payload, bool indexIsInt32 = false);
369         void emitStoreAndMapInt32(int index, RegisterID tag, RegisterID payload, bool indexIsInt32, size_t opcodeLength);
370         void emitStoreCell(int index, RegisterID payload, bool indexIsCell = false);
371         void emitStoreBool(int index, RegisterID payload, bool indexIsBool = false);
372         void emitStoreDouble(int index, FPRegisterID value);
373
374         bool isLabeled(unsigned bytecodeOffset);
375         void map(unsigned bytecodeOffset, int virtualRegisterIndex, RegisterID tag, RegisterID payload);
376         void unmap(RegisterID);
377         void unmap();
378         bool isMapped(int virtualRegisterIndex);
379         bool getMappedPayload(int virtualRegisterIndex, RegisterID& payload);
380         bool getMappedTag(int virtualRegisterIndex, RegisterID& tag);
381
382         void emitJumpSlowCaseIfNotJSCell(int virtualRegisterIndex);
383         void emitJumpSlowCaseIfNotJSCell(int virtualRegisterIndex, RegisterID tag);
384
385         void compileGetByIdHotPath();
386         void compileGetByIdSlowCase(int resultVReg, int baseVReg, Identifier* ident, Vector<SlowCaseEntry>::iterator& iter, bool isMethodCheck = false);
387         void compileGetDirectOffset(RegisterID base, RegisterID resultTag, RegisterID resultPayload, size_t cachedOffset);
388         void compileGetDirectOffset(JSObject* base, RegisterID resultTag, RegisterID resultPayload, size_t cachedOffset);
389         void compileGetDirectOffset(RegisterID base, RegisterID resultTag, RegisterID resultPayload, RegisterID offset);
390         void compilePutDirectOffset(RegisterID base, RegisterID valueTag, RegisterID valuePayload, size_t cachedOffset);
391
392         // Arithmetic opcode helpers
393         void emitAdd32Constant(unsigned dst, unsigned op, int32_t constant, ResultType opType);
394         void emitSub32Constant(unsigned dst, unsigned op, int32_t constant, ResultType opType);
395         void emitBinaryDoubleOp(OpcodeID, unsigned dst, unsigned op1, unsigned op2, OperandTypes, JumpList& notInt32Op1, JumpList& notInt32Op2, bool op1IsInRegisters = true, bool op2IsInRegisters = true);
396
397 #if CPU(X86)
398         // These architecture specific value are used to enable patching - see comment on op_put_by_id.
399         static const int patchOffsetPutByIdStructure = 7;
400         static const int patchOffsetPutByIdPropertyMapOffset1 = 22;
401         static const int patchOffsetPutByIdPropertyMapOffset2 = 28;
402         // These architecture specific value are used to enable patching - see comment on op_get_by_id.
403         static const int patchOffsetGetByIdStructure = 7;
404         static const int patchOffsetGetByIdBranchToSlowCase = 13;
405         static const int patchOffsetGetByIdPropertyMapOffset1 = 19;
406         static const int patchOffsetGetByIdPropertyMapOffset2 = 22;
407         static const int patchOffsetGetByIdPutResult = 22;
408 #if ENABLE(OPCODE_SAMPLING)
409         static const int patchOffsetGetByIdSlowCaseCall = 44;
410 #else
411         static const int patchOffsetGetByIdSlowCaseCall = 40;
412 #endif
413         static const int patchOffsetOpCallCompareToJump = 6;
414
415         static const int patchOffsetMethodCheckProtoObj = 11;
416         static const int patchOffsetMethodCheckProtoStruct = 18;
417         static const int patchOffsetMethodCheckPutFunction = 29;
418 #elif CPU(ARM_TRADITIONAL)
419         // These architecture specific value are used to enable patching - see comment on op_put_by_id.
420         static const int patchOffsetPutByIdStructure = 4;
421         static const int patchOffsetPutByIdPropertyMapOffset1 = 20;
422         static const int patchOffsetPutByIdPropertyMapOffset2 = 28;
423         // These architecture specific value are used to enable patching - see comment on op_get_by_id.
424         static const int patchOffsetGetByIdStructure = 4;
425         static const int patchOffsetGetByIdBranchToSlowCase = 16;
426         static const int patchOffsetGetByIdPropertyMapOffset1 = 20;
427         static const int patchOffsetGetByIdPropertyMapOffset2 = 28;
428         static const int patchOffsetGetByIdPutResult = 36;
429 #if ENABLE(OPCODE_SAMPLING)
430         #error "OPCODE_SAMPLING is not yet supported"
431 #else
432         static const int patchOffsetGetByIdSlowCaseCall = 48;
433 #endif
434         static const int patchOffsetOpCallCompareToJump = 12;
435
436         static const int patchOffsetMethodCheckProtoObj = 12;
437         static const int patchOffsetMethodCheckProtoStruct = 20;
438         static const int patchOffsetMethodCheckPutFunction = 32;
439
440         // sequenceOpCall
441         static const int sequenceOpCallInstructionSpace = 12;
442         static const int sequenceOpCallConstantSpace = 2;
443         // sequenceMethodCheck
444         static const int sequenceMethodCheckInstructionSpace = 40;
445         static const int sequenceMethodCheckConstantSpace = 6;
446         // sequenceGetByIdHotPath
447         static const int sequenceGetByIdHotPathInstructionSpace = 36;
448         static const int sequenceGetByIdHotPathConstantSpace = 4;
449         // sequenceGetByIdSlowCase
450         static const int sequenceGetByIdSlowCaseInstructionSpace = 56;
451         static const int sequenceGetByIdSlowCaseConstantSpace = 3;
452         // sequencePutById
453         static const int sequencePutByIdInstructionSpace = 36;
454         static const int sequencePutByIdConstantSpace = 4;
455 #elif CPU(ARM_THUMB2)
456         // These architecture specific value are used to enable patching - see comment on op_put_by_id.
457         static const int patchOffsetPutByIdStructure = 10;
458         static const int patchOffsetPutByIdPropertyMapOffset1 = 36;
459         static const int patchOffsetPutByIdPropertyMapOffset2 = 48;
460         // These architecture specific value are used to enable patching - see comment on op_get_by_id.
461         static const int patchOffsetGetByIdStructure = 10;
462         static const int patchOffsetGetByIdBranchToSlowCase = 26;
463         static const int patchOffsetGetByIdPropertyMapOffset1 = 28;
464         static const int patchOffsetGetByIdPropertyMapOffset2 = 30;
465         static const int patchOffsetGetByIdPutResult = 32;
466 #if ENABLE(OPCODE_SAMPLING)
467         #error "OPCODE_SAMPLING is not yet supported"
468 #else
469         static const int patchOffsetGetByIdSlowCaseCall = 48;
470 #endif
471         static const int patchOffsetOpCallCompareToJump = 16;
472
473         static const int patchOffsetMethodCheckProtoObj = 24;
474         static const int patchOffsetMethodCheckProtoStruct = 34;
475         static const int patchOffsetMethodCheckPutFunction = 58;
476
477         // sequenceOpCall
478         static const int sequenceOpCallInstructionSpace = 12;
479         static const int sequenceOpCallConstantSpace = 2;
480         // sequenceMethodCheck
481         static const int sequenceMethodCheckInstructionSpace = 40;
482         static const int sequenceMethodCheckConstantSpace = 6;
483         // sequenceGetByIdHotPath
484         static const int sequenceGetByIdHotPathInstructionSpace = 36;
485         static const int sequenceGetByIdHotPathConstantSpace = 4;
486         // sequenceGetByIdSlowCase
487         static const int sequenceGetByIdSlowCaseInstructionSpace = 40;
488         static const int sequenceGetByIdSlowCaseConstantSpace = 2;
489         // sequencePutById
490         static const int sequencePutByIdInstructionSpace = 36;
491         static const int sequencePutByIdConstantSpace = 4;
492 #elif CPU(MIPS)
493 #if WTF_MIPS_ISA(1)
494         static const int patchOffsetPutByIdStructure = 16;
495         static const int patchOffsetPutByIdPropertyMapOffset1 = 56;
496         static const int patchOffsetPutByIdPropertyMapOffset2 = 72;
497         static const int patchOffsetGetByIdStructure = 16;
498         static const int patchOffsetGetByIdBranchToSlowCase = 48;
499         static const int patchOffsetGetByIdPropertyMapOffset1 = 56;
500         static const int patchOffsetGetByIdPropertyMapOffset2 = 76;
501         static const int patchOffsetGetByIdPutResult = 96;
502 #if ENABLE(OPCODE_SAMPLING)
503         #error "OPCODE_SAMPLING is not yet supported"
504 #else
505         static const int patchOffsetGetByIdSlowCaseCall = 64;
506 #endif
507         static const int patchOffsetOpCallCompareToJump = 32;
508         static const int patchOffsetMethodCheckProtoObj = 32;
509         static const int patchOffsetMethodCheckProtoStruct = 56;
510         static const int patchOffsetMethodCheckPutFunction = 88;
511 #else // WTF_MIPS_ISA(1)
512         static const int patchOffsetPutByIdStructure = 12;
513         static const int patchOffsetPutByIdPropertyMapOffset1 = 48;
514         static const int patchOffsetPutByIdPropertyMapOffset2 = 64;
515         static const int patchOffsetGetByIdStructure = 12;
516         static const int patchOffsetGetByIdBranchToSlowCase = 44;
517         static const int patchOffsetGetByIdPropertyMapOffset1 = 48;
518         static const int patchOffsetGetByIdPropertyMapOffset2 = 64;
519         static const int patchOffsetGetByIdPutResult = 80;
520 #if ENABLE(OPCODE_SAMPLING)
521         #error "OPCODE_SAMPLING is not yet supported"
522 #else
523         static const int patchOffsetGetByIdSlowCaseCall = 64;
524 #endif
525         static const int patchOffsetOpCallCompareToJump = 32;
526         static const int patchOffsetMethodCheckProtoObj = 32;
527         static const int patchOffsetMethodCheckProtoStruct = 52;
528         static const int patchOffsetMethodCheckPutFunction = 84;
529 #endif
530 #elif CPU(SH4)
531        // These architecture specific value are used to enable patching - see comment on op_put_by_id.
532         static const int patchOffsetGetByIdStructure = 6;
533         static const int patchOffsetPutByIdPropertyMapOffset = 24;
534         static const int patchOffsetPutByIdStructure = 6;
535         // These architecture specific value are used to enable patching - see comment on op_get_by_id.
536         static const int patchOffsetGetByIdBranchToSlowCase = 10;
537         static const int patchOffsetGetByIdPropertyMapOffset = 24;
538         static const int patchOffsetGetByIdPutResult = 24;
539
540         // sequenceOpCall
541         static const int sequenceOpCallInstructionSpace = 12;
542         static const int sequenceOpCallConstantSpace = 2;
543         // sequenceMethodCheck
544         static const int sequenceMethodCheckInstructionSpace = 40;
545         static const int sequenceMethodCheckConstantSpace = 6;
546         // sequenceGetByIdHotPath
547         static const int sequenceGetByIdHotPathInstructionSpace = 36;
548         static const int sequenceGetByIdHotPathConstantSpace = 5;
549         // sequenceGetByIdSlowCase
550         static const int sequenceGetByIdSlowCaseInstructionSpace = 30;
551         static const int sequenceGetByIdSlowCaseConstantSpace = 3;
552         // sequencePutById
553         static const int sequencePutByIdInstructionSpace = 36;
554         static const int sequencePutByIdConstantSpace = 5;
555
556         static const int patchOffsetGetByIdPropertyMapOffset1 = 20;
557         static const int patchOffsetGetByIdPropertyMapOffset2 = 22;
558
559         static const int patchOffsetPutByIdPropertyMapOffset1 = 20;
560         static const int patchOffsetPutByIdPropertyMapOffset2 = 26;
561
562 #if ENABLE(OPCODE_SAMPLING)
563         static const int patchOffsetGetByIdSlowCaseCall = 0; // FIXME
564 #else
565         static const int patchOffsetGetByIdSlowCaseCall = 26;
566 #endif
567         static const int patchOffsetOpCallCompareToJump = 4;
568
569         static const int patchOffsetMethodCheckProtoObj = 12;
570         static const int patchOffsetMethodCheckProtoStruct = 20;
571         static const int patchOffsetMethodCheckPutFunction = 32;
572 #else
573 #error "JSVALUE32_64 not supported on this platform."
574 #endif
575
576 #else // USE(JSVALUE32_64)
577         void emitGetVirtualRegister(int src, RegisterID dst);
578         void emitGetVirtualRegisters(int src1, RegisterID dst1, int src2, RegisterID dst2);
579         void emitPutVirtualRegister(unsigned dst, RegisterID from = regT0);
        // JSVALUE64 variant: a cell is a full JSValue, so storing the payload
        // is just a virtual-register store; the trailing bool exists only to
        // mirror the JSVALUE32_64 overload's signature and is ignored.
580         void emitStoreCell(unsigned dst, RegisterID payload, bool /* only used in JSValue32_64 */ = false)
581         {
582             emitPutVirtualRegister(dst, payload);
583         }
584
585         int32_t getConstantOperandImmediateInt(unsigned src);
586
587         void killLastResultRegister();
588
589         Jump emitJumpIfJSCell(RegisterID);
590         Jump emitJumpIfBothJSCells(RegisterID, RegisterID, RegisterID);
591         void emitJumpSlowCaseIfJSCell(RegisterID);
592         Jump emitJumpIfNotJSCell(RegisterID);
593         void emitJumpSlowCaseIfNotJSCell(RegisterID);
594         void emitJumpSlowCaseIfNotJSCell(RegisterID, int VReg);
595 #if USE(JSVALUE32_64)
596         JIT::Jump emitJumpIfImmediateNumber(RegisterID reg)
597         {
598             return emitJumpIfImmediateInteger(reg);
599         }
600         
601         JIT::Jump emitJumpIfNotImmediateNumber(RegisterID reg)
602         {
603             return emitJumpIfNotImmediateInteger(reg);
604         }
605 #endif
606         Jump emitJumpIfImmediateInteger(RegisterID);
607         Jump emitJumpIfNotImmediateInteger(RegisterID);
608         Jump emitJumpIfNotImmediateIntegers(RegisterID, RegisterID, RegisterID);
609         void emitJumpSlowCaseIfNotImmediateInteger(RegisterID);
610         void emitJumpSlowCaseIfNotImmediateNumber(RegisterID);
611         void emitJumpSlowCaseIfNotImmediateIntegers(RegisterID, RegisterID, RegisterID);
612
613 #if USE(JSVALUE32_64)
614         void emitFastArithDeTagImmediate(RegisterID);
615         Jump emitFastArithDeTagImmediateJumpIfZero(RegisterID);
616 #endif
617         void emitFastArithReTagImmediate(RegisterID src, RegisterID dest);
618         void emitFastArithIntToImmNoCheck(RegisterID src, RegisterID dest);
619
620         void emitTagAsBoolImmediate(RegisterID reg);
621         void compileBinaryArithOp(OpcodeID, unsigned dst, unsigned src1, unsigned src2, OperandTypes opi);
622 #if USE(JSVALUE64)
623         void compileBinaryArithOpSlowCase(OpcodeID, Vector<SlowCaseEntry>::iterator&, unsigned dst, unsigned src1, unsigned src2, OperandTypes, bool op1HasImmediateIntFastCase, bool op2HasImmediateIntFastCase);
624 #else
625         void compileBinaryArithOpSlowCase(OpcodeID, Vector<SlowCaseEntry>::iterator&, unsigned dst, unsigned src1, unsigned src2, OperandTypes);
626 #endif
627
628         void compileGetByIdHotPath(int baseVReg, Identifier*);
629         void compileGetByIdSlowCase(int resultVReg, int baseVReg, Identifier* ident, Vector<SlowCaseEntry>::iterator& iter, bool isMethodCheck = false);
630         void compileGetDirectOffset(RegisterID base, RegisterID result, size_t cachedOffset);
631         void compileGetDirectOffset(JSObject* base, RegisterID result, size_t cachedOffset);
632         void compileGetDirectOffset(RegisterID base, RegisterID result, RegisterID offset, RegisterID scratch);
633         void compilePutDirectOffset(RegisterID base, RegisterID value, size_t cachedOffset);
634
635 #if CPU(X86_64)
636         // These architecture specific value are used to enable patching - see comment on op_put_by_id.
637         static const int patchOffsetPutByIdStructure = 10;
638         static const int patchOffsetPutByIdPropertyMapOffset = 31;
639         // These architecture specific value are used to enable patching - see comment on op_get_by_id.
640         static const int patchOffsetGetByIdStructure = 10;
641         static const int patchOffsetGetByIdBranchToSlowCase = 20;
642         static const int patchOffsetGetByIdPropertyMapOffset = 28;
643         static const int patchOffsetGetByIdPutResult = 28;
644 #if ENABLE(OPCODE_SAMPLING)
645         static const int patchOffsetGetByIdSlowCaseCall = 72;
646 #else
647         static const int patchOffsetGetByIdSlowCaseCall = 62;
648 #endif
649         static const int patchOffsetOpCallCompareToJump = 9;
650
651         static const int patchOffsetMethodCheckProtoObj = 20;
652         static const int patchOffsetMethodCheckProtoStruct = 30;
653         static const int patchOffsetMethodCheckPutFunction = 50;
654 #elif CPU(X86)
655         // These architecture specific value are used to enable patching - see comment on op_put_by_id.
656         static const int patchOffsetPutByIdStructure = 7;
657         static const int patchOffsetPutByIdPropertyMapOffset = 22;
658         // These architecture specific value are used to enable patching - see comment on op_get_by_id.
659         static const int patchOffsetGetByIdStructure = 7;
660         static const int patchOffsetGetByIdBranchToSlowCase = 13;
661         static const int patchOffsetGetByIdPropertyMapOffset = 22;
662         static const int patchOffsetGetByIdPutResult = 22;
663 #if ENABLE(OPCODE_SAMPLING)
664         static const int patchOffsetGetByIdSlowCaseCall = 33;
665 #else
666         static const int patchOffsetGetByIdSlowCaseCall = 23;
667 #endif
668         static const int patchOffsetOpCallCompareToJump = 6;
669
670         static const int patchOffsetMethodCheckProtoObj = 11;
671         static const int patchOffsetMethodCheckProtoStruct = 18;
672         static const int patchOffsetMethodCheckPutFunction = 29;
673 #elif CPU(ARM_THUMB2)
674         // These architecture specific value are used to enable patching - see comment on op_put_by_id.
675         static const int patchOffsetPutByIdStructure = 10;
676         static const int patchOffsetPutByIdPropertyMapOffset = 46;
677         // These architecture specific value are used to enable patching - see comment on op_get_by_id.
678         static const int patchOffsetGetByIdStructure = 10;
679         static const int patchOffsetGetByIdBranchToSlowCase = 26;
680         static const int patchOffsetGetByIdPropertyMapOffset = 46;
681         static const int patchOffsetGetByIdPutResult = 50;
682 #if ENABLE(OPCODE_SAMPLING)
683         static const int patchOffsetGetByIdSlowCaseCall = 0; // FIMXE
684 #else
685         static const int patchOffsetGetByIdSlowCaseCall = 28;
686 #endif
687         static const int patchOffsetOpCallCompareToJump = 16;
688
689         static const int patchOffsetMethodCheckProtoObj = 24;
690         static const int patchOffsetMethodCheckProtoStruct = 34;
691         static const int patchOffsetMethodCheckPutFunction = 58;
692 #elif CPU(ARM_TRADITIONAL)
693         // These architecture specific value are used to enable patching - see comment on op_put_by_id.
694         static const int patchOffsetPutByIdStructure = 4;
695         static const int patchOffsetPutByIdPropertyMapOffset = 20;
696         // These architecture specific value are used to enable patching - see comment on op_get_by_id.
697         static const int patchOffsetGetByIdStructure = 4;
698         static const int patchOffsetGetByIdBranchToSlowCase = 16;
699         static const int patchOffsetGetByIdPropertyMapOffset = 20;
700         static const int patchOffsetGetByIdPutResult = 28;
701 #if ENABLE(OPCODE_SAMPLING)
702         #error "OPCODE_SAMPLING is not yet supported"
703 #else
704         static const int patchOffsetGetByIdSlowCaseCall = 28;
705 #endif
706         static const int patchOffsetOpCallCompareToJump = 12;
707
708         static const int patchOffsetMethodCheckProtoObj = 12;
709         static const int patchOffsetMethodCheckProtoStruct = 20;
710         static const int patchOffsetMethodCheckPutFunction = 32;
711
712         // sequenceOpCall
713         static const int sequenceOpCallInstructionSpace = 12;
714         static const int sequenceOpCallConstantSpace = 2;
715         // sequenceMethodCheck
716         static const int sequenceMethodCheckInstructionSpace = 40;
717         static const int sequenceMethodCheckConstantSpace = 6;
718         // sequenceGetByIdHotPath
719         static const int sequenceGetByIdHotPathInstructionSpace = 28;
720         static const int sequenceGetByIdHotPathConstantSpace = 3;
721         // sequenceGetByIdSlowCase
722         static const int sequenceGetByIdSlowCaseInstructionSpace = 32;
723         static const int sequenceGetByIdSlowCaseConstantSpace = 2;
724         // sequencePutById
725         static const int sequencePutByIdInstructionSpace = 28;
726         static const int sequencePutByIdConstantSpace = 3;
727 #elif CPU(MIPS)
728 #if WTF_MIPS_ISA(1)
729         static const int patchOffsetPutByIdStructure = 16;
730         static const int patchOffsetPutByIdPropertyMapOffset = 68;
731         static const int patchOffsetGetByIdStructure = 16;
732         static const int patchOffsetGetByIdBranchToSlowCase = 48;
733         static const int patchOffsetGetByIdPropertyMapOffset = 68;
734         static const int patchOffsetGetByIdPutResult = 88;
735 #if ENABLE(OPCODE_SAMPLING)
736         #error "OPCODE_SAMPLING is not yet supported"
737 #else
738         static const int patchOffsetGetByIdSlowCaseCall = 40;
739 #endif
740         static const int patchOffsetOpCallCompareToJump = 32;
741         static const int patchOffsetMethodCheckProtoObj = 32;
742         static const int patchOffsetMethodCheckProtoStruct = 56;
743         static const int patchOffsetMethodCheckPutFunction = 88;
744 #else // WTF_MIPS_ISA(1)
745         static const int patchOffsetPutByIdStructure = 12;
746         static const int patchOffsetPutByIdPropertyMapOffset = 60;
747         static const int patchOffsetGetByIdStructure = 12;
748         static const int patchOffsetGetByIdBranchToSlowCase = 44;
749         static const int patchOffsetGetByIdPropertyMapOffset = 60;
750         static const int patchOffsetGetByIdPutResult = 76;
751 #if ENABLE(OPCODE_SAMPLING)
752         #error "OPCODE_SAMPLING is not yet supported"
753 #else
754         static const int patchOffsetGetByIdSlowCaseCall = 40;
755 #endif
756         static const int patchOffsetOpCallCompareToJump = 32;
757         static const int patchOffsetMethodCheckProtoObj = 32;
758         static const int patchOffsetMethodCheckProtoStruct = 52;
759         static const int patchOffsetMethodCheckPutFunction = 84;
760 #endif
761 #endif
762 #endif // USE(JSVALUE32_64)
763
764 #if (defined(ASSEMBLER_HAS_CONSTANT_POOL) && ASSEMBLER_HAS_CONSTANT_POOL)
765 #define BEGIN_UNINTERRUPTED_SEQUENCE(name) do { beginUninterruptedSequence(name ## InstructionSpace, name ## ConstantSpace); } while (false)
766 #define END_UNINTERRUPTED_SEQUENCE_FOR_PUT(name, dst) do { endUninterruptedSequence(name ## InstructionSpace, name ## ConstantSpace, dst); } while (false)
767 #define END_UNINTERRUPTED_SEQUENCE(name) END_UNINTERRUPTED_SEQUENCE_FOR_PUT(name, 0)
768
769         void beginUninterruptedSequence(int, int);
770         void endUninterruptedSequence(int, int, int);
771
772 #else
773 #define BEGIN_UNINTERRUPTED_SEQUENCE(name)  do { beginUninterruptedSequence(); } while (false)
774 #define END_UNINTERRUPTED_SEQUENCE(name)  do { endUninterruptedSequence(); } while (false)
775 #define END_UNINTERRUPTED_SEQUENCE_FOR_PUT(name, dst) do { endUninterruptedSequence(); } while (false)
776 #endif
777
778         void emit_compareAndJump(OpcodeID, unsigned op1, unsigned op2, unsigned target, RelationalCondition);
779         void emit_compareAndJumpSlow(unsigned op1, unsigned op2, unsigned target, DoubleCondition, int (JIT_STUB *stub)(STUB_ARGS_DECLARATION), bool invert, Vector<SlowCaseEntry>::iterator&);
780
781         void emit_op_add(Instruction*);
782         void emit_op_bitand(Instruction*);
783         void emit_op_bitnot(Instruction*);
784         void emit_op_bitor(Instruction*);
785         void emit_op_bitxor(Instruction*);
786         void emit_op_call(Instruction*);
787         void emit_op_call_eval(Instruction*);
788         void emit_op_call_varargs(Instruction*);
789         void emit_op_call_put_result(Instruction*);
790         void emit_op_catch(Instruction*);
791         void emit_op_construct(Instruction*);
792         void emit_op_get_callee(Instruction*);
793         void emit_op_create_this(Instruction*);
794         void emit_op_convert_this(Instruction*);
795         void emit_op_create_arguments(Instruction*);
796         void emit_op_debug(Instruction*);
797         void emit_op_del_by_id(Instruction*);
798         void emit_op_div(Instruction*);
799         void emit_op_end(Instruction*);
800         void emit_op_enter(Instruction*);
801         void emit_op_create_activation(Instruction*);
802         void emit_op_eq(Instruction*);
803         void emit_op_eq_null(Instruction*);
804         void emit_op_get_by_id(Instruction*);
805         void emit_op_get_arguments_length(Instruction*);
806         void emit_op_get_by_val(Instruction*);
807         void emit_op_get_argument_by_val(Instruction*);
808         void emit_op_get_by_pname(Instruction*);
809         void emit_op_get_global_var(Instruction*);
810         void emit_op_get_scoped_var(Instruction*);
811         void emit_op_init_lazy_reg(Instruction*);
812         void emit_op_check_has_instance(Instruction*);
813         void emit_op_instanceof(Instruction*);
814         void emit_op_jeq_null(Instruction*);
815         void emit_op_jfalse(Instruction*);
816         void emit_op_jmp(Instruction*);
817         void emit_op_jmp_scopes(Instruction*);
818         void emit_op_jneq_null(Instruction*);
819         void emit_op_jneq_ptr(Instruction*);
820         void emit_op_jless(Instruction*);
821         void emit_op_jlesseq(Instruction*);
822         void emit_op_jgreater(Instruction*);
823         void emit_op_jgreatereq(Instruction*);
824         void emit_op_jnless(Instruction*);
825         void emit_op_jnlesseq(Instruction*);
826         void emit_op_jngreater(Instruction*);
827         void emit_op_jngreatereq(Instruction*);
828         void emit_op_jsr(Instruction*);
829         void emit_op_jtrue(Instruction*);
830         void emit_op_loop(Instruction*);
831         void emit_op_loop_hint(Instruction*);
832         void emit_op_loop_if_less(Instruction*);
833         void emit_op_loop_if_lesseq(Instruction*);
834         void emit_op_loop_if_greater(Instruction*);
835         void emit_op_loop_if_greatereq(Instruction*);
836         void emit_op_loop_if_true(Instruction*);
837         void emit_op_loop_if_false(Instruction*);
838         void emit_op_lshift(Instruction*);
839         void emit_op_method_check(Instruction*);
840         void emit_op_mod(Instruction*);
841         void emit_op_mov(Instruction*);
842         void emit_op_mul(Instruction*);
843         void emit_op_negate(Instruction*);
844         void emit_op_neq(Instruction*);
845         void emit_op_neq_null(Instruction*);
846         void emit_op_new_array(Instruction*);
847         void emit_op_new_array_buffer(Instruction*);
848         void emit_op_new_func(Instruction*);
849         void emit_op_new_func_exp(Instruction*);
850         void emit_op_new_object(Instruction*);
851         void emit_op_new_regexp(Instruction*);
852         void emit_op_get_pnames(Instruction*);
853         void emit_op_next_pname(Instruction*);
854         void emit_op_not(Instruction*);
855         void emit_op_nstricteq(Instruction*);
856         void emit_op_pop_scope(Instruction*);
857         void emit_op_post_dec(Instruction*);
858         void emit_op_post_inc(Instruction*);
859         void emit_op_pre_dec(Instruction*);
860         void emit_op_pre_inc(Instruction*);
861         void emit_op_profile_did_call(Instruction*);
862         void emit_op_profile_will_call(Instruction*);
863         void emit_op_push_new_scope(Instruction*);
864         void emit_op_push_scope(Instruction*);
865         void emit_op_put_by_id(Instruction*);
866         void emit_op_put_by_index(Instruction*);
867         void emit_op_put_by_val(Instruction*);
868         void emit_op_put_getter_setter(Instruction*);
869         void emit_op_put_global_var(Instruction*);
870         void emit_op_put_scoped_var(Instruction*);
871         void emit_op_resolve(Instruction*);
872         void emit_op_resolve_base(Instruction*);
873         void emit_op_ensure_property_exists(Instruction*);
874         void emit_op_resolve_global(Instruction*, bool dynamic = false);
875         void emit_op_resolve_global_dynamic(Instruction*);
876         void emit_op_resolve_skip(Instruction*);
877         void emit_op_resolve_with_base(Instruction*);
878         void emit_op_resolve_with_this(Instruction*);
879         void emit_op_ret(Instruction*);
880         void emit_op_ret_object_or_this(Instruction*);
881         void emit_op_rshift(Instruction*);
882         void emit_op_sret(Instruction*);
883         void emit_op_strcat(Instruction*);
884         void emit_op_stricteq(Instruction*);
885         void emit_op_sub(Instruction*);
886         void emit_op_switch_char(Instruction*);
887         void emit_op_switch_imm(Instruction*);
888         void emit_op_switch_string(Instruction*);
889         void emit_op_tear_off_activation(Instruction*);
890         void emit_op_tear_off_arguments(Instruction*);
891         void emit_op_throw(Instruction*);
892         void emit_op_throw_reference_error(Instruction*);
893         void emit_op_to_jsnumber(Instruction*);
894         void emit_op_to_primitive(Instruction*);
895         void emit_op_unexpected_load(Instruction*);
896         void emit_op_urshift(Instruction*);
897 #if ENABLE(JIT_USE_SOFT_MODULO)
898         void softModulo();
899 #endif
900
901         void emitSlow_op_add(Instruction*, Vector<SlowCaseEntry>::iterator&);
902         void emitSlow_op_bitand(Instruction*, Vector<SlowCaseEntry>::iterator&);
903         void emitSlow_op_bitnot(Instruction*, Vector<SlowCaseEntry>::iterator&);
904         void emitSlow_op_bitor(Instruction*, Vector<SlowCaseEntry>::iterator&);
905         void emitSlow_op_bitxor(Instruction*, Vector<SlowCaseEntry>::iterator&);
906         void emitSlow_op_call(Instruction*, Vector<SlowCaseEntry>::iterator&);
907         void emitSlow_op_call_eval(Instruction*, Vector<SlowCaseEntry>::iterator&);
908         void emitSlow_op_call_varargs(Instruction*, Vector<SlowCaseEntry>::iterator&);
909         void emitSlow_op_construct(Instruction*, Vector<SlowCaseEntry>::iterator&);
910         void emitSlow_op_convert_this(Instruction*, Vector<SlowCaseEntry>::iterator&);
911         void emitSlow_op_create_this(Instruction*, Vector<SlowCaseEntry>::iterator&);
912         void emitSlow_op_div(Instruction*, Vector<SlowCaseEntry>::iterator&);
913         void emitSlow_op_eq(Instruction*, Vector<SlowCaseEntry>::iterator&);
914         void emitSlow_op_get_by_id(Instruction*, Vector<SlowCaseEntry>::iterator&);
915         void emitSlow_op_get_arguments_length(Instruction*, Vector<SlowCaseEntry>::iterator&);
916         void emitSlow_op_get_by_val(Instruction*, Vector<SlowCaseEntry>::iterator&);
917         void emitSlow_op_get_argument_by_val(Instruction*, Vector<SlowCaseEntry>::iterator&);
918         void emitSlow_op_get_by_pname(Instruction*, Vector<SlowCaseEntry>::iterator&);
919         void emitSlow_op_check_has_instance(Instruction*, Vector<SlowCaseEntry>::iterator&);
920         void emitSlow_op_instanceof(Instruction*, Vector<SlowCaseEntry>::iterator&);
921         void emitSlow_op_jfalse(Instruction*, Vector<SlowCaseEntry>::iterator&);
922         void emitSlow_op_jless(Instruction*, Vector<SlowCaseEntry>::iterator&);
923         void emitSlow_op_jlesseq(Instruction*, Vector<SlowCaseEntry>::iterator&);
924         void emitSlow_op_jgreater(Instruction*, Vector<SlowCaseEntry>::iterator&);
925         void emitSlow_op_jgreatereq(Instruction*, Vector<SlowCaseEntry>::iterator&);
926         void emitSlow_op_jnless(Instruction*, Vector<SlowCaseEntry>::iterator&);
927         void emitSlow_op_jnlesseq(Instruction*, Vector<SlowCaseEntry>::iterator&);
928         void emitSlow_op_jngreater(Instruction*, Vector<SlowCaseEntry>::iterator&);
929         void emitSlow_op_jngreatereq(Instruction*, Vector<SlowCaseEntry>::iterator&);
930         void emitSlow_op_jtrue(Instruction*, Vector<SlowCaseEntry>::iterator&);
931         void emitSlow_op_loop_if_less(Instruction*, Vector<SlowCaseEntry>::iterator&);
932         void emitSlow_op_loop_if_lesseq(Instruction*, Vector<SlowCaseEntry>::iterator&);
933         void emitSlow_op_loop_if_greater(Instruction*, Vector<SlowCaseEntry>::iterator&);
934         void emitSlow_op_loop_if_greatereq(Instruction*, Vector<SlowCaseEntry>::iterator&);
935         void emitSlow_op_loop_if_true(Instruction*, Vector<SlowCaseEntry>::iterator&);
936         void emitSlow_op_loop_if_false(Instruction*, Vector<SlowCaseEntry>::iterator&);
937         void emitSlow_op_lshift(Instruction*, Vector<SlowCaseEntry>::iterator&);
938         void emitSlow_op_method_check(Instruction*, Vector<SlowCaseEntry>::iterator&);
939         void emitSlow_op_mod(Instruction*, Vector<SlowCaseEntry>::iterator&);
940         void emitSlow_op_mul(Instruction*, Vector<SlowCaseEntry>::iterator&);
941         void emitSlow_op_negate(Instruction*, Vector<SlowCaseEntry>::iterator&);
942         void emitSlow_op_neq(Instruction*, Vector<SlowCaseEntry>::iterator&);
943         void emitSlow_op_new_object(Instruction*, Vector<SlowCaseEntry>::iterator&);
944         void emitSlow_op_not(Instruction*, Vector<SlowCaseEntry>::iterator&);
945         void emitSlow_op_nstricteq(Instruction*, Vector<SlowCaseEntry>::iterator&);
946         void emitSlow_op_post_dec(Instruction*, Vector<SlowCaseEntry>::iterator&);
947         void emitSlow_op_post_inc(Instruction*, Vector<SlowCaseEntry>::iterator&);
948         void emitSlow_op_pre_dec(Instruction*, Vector<SlowCaseEntry>::iterator&);
949         void emitSlow_op_pre_inc(Instruction*, Vector<SlowCaseEntry>::iterator&);
950         void emitSlow_op_put_by_id(Instruction*, Vector<SlowCaseEntry>::iterator&);
951         void emitSlow_op_put_by_val(Instruction*, Vector<SlowCaseEntry>::iterator&);
952         void emitSlow_op_resolve_global(Instruction*, Vector<SlowCaseEntry>::iterator&);
953         void emitSlow_op_resolve_global_dynamic(Instruction*, Vector<SlowCaseEntry>::iterator&);
954         void emitSlow_op_rshift(Instruction*, Vector<SlowCaseEntry>::iterator&);
955         void emitSlow_op_stricteq(Instruction*, Vector<SlowCaseEntry>::iterator&);
956         void emitSlow_op_sub(Instruction*, Vector<SlowCaseEntry>::iterator&);
957         void emitSlow_op_to_jsnumber(Instruction*, Vector<SlowCaseEntry>::iterator&);
958         void emitSlow_op_to_primitive(Instruction*, Vector<SlowCaseEntry>::iterator&);
959         void emitSlow_op_urshift(Instruction*, Vector<SlowCaseEntry>::iterator&);
960         void emitSlow_op_new_func(Instruction*, Vector<SlowCaseEntry>::iterator&);
961         void emitSlow_op_new_func_exp(Instruction*, Vector<SlowCaseEntry>::iterator&);
962         void emitSlow_op_new_array(Instruction*, Vector<SlowCaseEntry>::iterator&);
963         
964         void emitRightShift(Instruction*, bool isUnsigned);
965         void emitRightShiftSlowCase(Instruction*, Vector<SlowCaseEntry>::iterator&, bool isUnsigned);
966
967         /* This function is deprecated. */
968         void emitGetJITStubArg(unsigned argumentNumber, RegisterID dst);
969
970         void emitInitRegister(unsigned dst);
971
972         void emitPutToCallFrameHeader(RegisterID from, RegisterFile::CallFrameHeaderEntry entry);
973         void emitPutCellToCallFrameHeader(RegisterID from, RegisterFile::CallFrameHeaderEntry);
974         void emitPutIntToCallFrameHeader(RegisterID from, RegisterFile::CallFrameHeaderEntry);
975         void emitPutImmediateToCallFrameHeader(void* value, RegisterFile::CallFrameHeaderEntry entry);
976         void emitGetFromCallFrameHeaderPtr(RegisterFile::CallFrameHeaderEntry entry, RegisterID to, RegisterID from = callFrameRegister);
977         void emitGetFromCallFrameHeader32(RegisterFile::CallFrameHeaderEntry entry, RegisterID to, RegisterID from = callFrameRegister);
978
979         JSValue getConstantOperand(unsigned src);
980         bool isOperandConstantImmediateInt(unsigned src);
981         bool isOperandConstantImmediateChar(unsigned src);
982
983         bool atJumpTarget();
984
985         Jump getSlowCase(Vector<SlowCaseEntry>::iterator& iter)
986         {
987             return iter++->from;
988         }
989         void linkSlowCase(Vector<SlowCaseEntry>::iterator& iter)
990         {
991             iter->from.link(this);
992             ++iter;
993         }
994         void linkDummySlowCase(Vector<SlowCaseEntry>::iterator& iter)
995         {
996             ASSERT(!iter->from.isSet());
997             ++iter;
998         }
999         void linkSlowCaseIfNotJSCell(Vector<SlowCaseEntry>::iterator&, int virtualRegisterIndex);
1000
1001         Jump checkStructure(RegisterID reg, Structure* structure);
1002
1003         void restoreArgumentReference();
1004         void restoreArgumentReferenceForTrampoline();
1005         void updateTopCallFrame();
1006
1007         Call emitNakedCall(CodePtr function = CodePtr());
1008
1009         void preserveReturnAddressAfterCall(RegisterID);
1010         void restoreReturnAddressBeforeReturn(RegisterID);
1011         void restoreReturnAddressBeforeReturn(Address);
1012
1013         // Loads the character value of a single character string into dst.
1014         void emitLoadCharacterString(RegisterID src, RegisterID dst, JumpList& failures);
1015         
1016         enum OptimizationCheckKind { LoopOptimizationCheck, RetOptimizationCheck };
1017 #if ENABLE(DFG_JIT)
1018         void emitOptimizationCheck(OptimizationCheckKind);
1019 #else
1020         void emitOptimizationCheck(OptimizationCheckKind) { }
1021 #endif
1022         
1023         void emitTimeoutCheck();
1024 #ifndef NDEBUG
1025         void printBytecodeOperandTypes(unsigned src1, unsigned src2);
1026 #endif
1027
1028 #if ENABLE(SAMPLING_FLAGS)
1029         void setSamplingFlag(int32_t);
1030         void clearSamplingFlag(int32_t);
1031 #endif
1032
1033 #if ENABLE(SAMPLING_COUNTERS)
1034         void emitCount(AbstractSamplingCounter&, int32_t = 1);
1035 #endif
1036
1037 #if ENABLE(OPCODE_SAMPLING)
1038         void sampleInstruction(Instruction*, bool = false);
1039 #endif
1040
1041 #if ENABLE(CODEBLOCK_SAMPLING)
1042         void sampleCodeBlock(CodeBlock*);
1043 #else
1044         void sampleCodeBlock(CodeBlock*) {}
1045 #endif
1046
1047 #if ENABLE(DFG_JIT)
1048         bool canBeOptimized() { return m_canBeOptimized; }
1049         bool shouldEmitProfiling() { return m_canBeOptimized; }
1050 #else
1051         bool canBeOptimized() { return false; }
1052         // Enables use of value profiler with tiered compilation turned off,
1053         // in which case all code gets profiled.
1054         bool shouldEmitProfiling() { return true; }
1055 #endif
1056
1057         Interpreter* m_interpreter;
1058         JSGlobalData* m_globalData;
1059         CodeBlock* m_codeBlock;
1060
1061         Vector<CallRecord> m_calls;
1062         Vector<Label> m_labels;
1063         Vector<PropertyStubCompilationInfo> m_propertyAccessCompilationInfo;
1064         Vector<StructureStubCompilationInfo> m_callStructureStubCompilationInfo;
1065         Vector<MethodCallCompilationInfo> m_methodCallCompilationInfo;
1066         Vector<JumpTable> m_jmpTable;
1067
1068         unsigned m_bytecodeOffset;
1069         Vector<JSRInfo> m_jsrSites;
1070         Vector<SlowCaseEntry> m_slowCases;
1071         Vector<SwitchRecord> m_switches;
1072
1073         unsigned m_propertyAccessInstructionIndex;
1074         unsigned m_globalResolveInfoIndex;
1075         unsigned m_callLinkInfoIndex;
1076
1077 #if USE(JSVALUE32_64)
1078         unsigned m_jumpTargetIndex;
1079         unsigned m_mappedBytecodeOffset;
1080         int m_mappedVirtualRegisterIndex;
1081         RegisterID m_mappedTag;
1082         RegisterID m_mappedPayload;
1083 #else
1084         int m_lastResultBytecodeRegister;
1085 #endif
1086         unsigned m_jumpTargetsPosition;
1087
1088 #ifndef NDEBUG
1089 #if defined(ASSEMBLER_HAS_CONSTANT_POOL) && ASSEMBLER_HAS_CONSTANT_POOL
1090         Label m_uninterruptedInstructionSequenceBegin;
1091         int m_uninterruptedConstantSequenceBegin;
1092 #endif
1093 #endif
1094         WeakRandom m_randomGenerator;
1095         static CodeRef stringGetByValStubGenerator(JSGlobalData*);
1096
1097 #if ENABLE(VALUE_PROFILER)
1098         bool m_canBeOptimized;
1099 #endif
1100     } JIT_CLASS_ALIGNMENT;
1101
1102     inline void JIT::emit_op_loop(Instruction* currentInstruction)
1103     {
1104         emitTimeoutCheck();
1105         emit_op_jmp(currentInstruction);
1106     }
1107
1108     inline void JIT::emit_op_loop_hint(Instruction*)
1109     {
1110         emitOptimizationCheck(LoopOptimizationCheck);
1111     }
1112
1113     inline void JIT::emit_op_loop_if_true(Instruction* currentInstruction)
1114     {
1115         emitTimeoutCheck();
1116         emit_op_jtrue(currentInstruction);
1117     }
1118
1119     inline void JIT::emitSlow_op_loop_if_true(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1120     {
1121         emitSlow_op_jtrue(currentInstruction, iter);
1122     }
1123
1124     inline void JIT::emit_op_loop_if_false(Instruction* currentInstruction)
1125     {
1126         emitTimeoutCheck();
1127         emit_op_jfalse(currentInstruction);
1128     }
1129
1130     inline void JIT::emitSlow_op_loop_if_false(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1131     {
1132         emitSlow_op_jfalse(currentInstruction, iter);
1133     }
1134
1135     inline void JIT::emit_op_loop_if_less(Instruction* currentInstruction)
1136     {
1137         emitTimeoutCheck();
1138         emit_op_jless(currentInstruction);
1139     }
1140
1141     inline void JIT::emitSlow_op_loop_if_less(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1142     {
1143         emitSlow_op_jless(currentInstruction, iter);
1144     }
1145
1146     inline void JIT::emit_op_loop_if_lesseq(Instruction* currentInstruction)
1147     {
1148         emitTimeoutCheck();
1149         emit_op_jlesseq(currentInstruction);
1150     }
1151
1152     inline void JIT::emitSlow_op_loop_if_lesseq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1153     {
1154         emitSlow_op_jlesseq(currentInstruction, iter);
1155     }
1156
1157     inline void JIT::emit_op_loop_if_greater(Instruction* currentInstruction)
1158     {
1159         emitTimeoutCheck();
1160         emit_op_jgreater(currentInstruction);
1161     }
1162
1163     inline void JIT::emitSlow_op_loop_if_greater(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1164     {
1165         emitSlow_op_jgreater(currentInstruction, iter);
1166     }
1167
1168     inline void JIT::emit_op_loop_if_greatereq(Instruction* currentInstruction)
1169     {
1170         emitTimeoutCheck();
1171         emit_op_jgreatereq(currentInstruction);
1172     }
1173
1174     inline void JIT::emitSlow_op_loop_if_greatereq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1175     {
1176         emitSlow_op_jgreatereq(currentInstruction, iter);
1177     }
1178
1179 } // namespace JSC
1180
1181 #endif // ENABLE(JIT)
1182
1183 #endif // JIT_h