2008-09-15 Gavin Barraclough <barraclough@apple.com>
[WebKit-https.git] / JavaScriptCore / VM / CTI.h
/*
 * Copyright (C) 2008 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef CTI_h
#define CTI_h

#if ENABLE(CTI)

#define WTF_USE_CTI_REPATCH_PIC 1

#include "Opcode.h"
#include "RegisterFile.h"
#include <masm/X86Assembler.h>
#include <profiler/Profiler.h>
#include <wtf/AlwaysInline.h>
#include <wtf/Vector.h>

#if ENABLE(SAMPLING_TOOL)
#include "SamplingTool.h"
#endif

#if COMPILER(MSVC)
#define CTI_ARGS void** args
#define ARGS (args)
#else
#define CTI_ARGS void* args
#define ARGS (&args)
#endif
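
// CTI helper functions receive their arguments as an array of machine-word slots that
// the JIT-generated code writes to the stack before calling out. With MSVC the helper
// is handed a void** pointing at that slot array directly; with other compilers the
// helper takes a single void* parameter and ARGS expands to &args, so indexing past the
// parameter reaches the remaining slots in place on the stack. (See the trampoline and
// helper implementations for the exact layout.)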

#define CTI_ARGS_2ndResult 0x08

#define CTI_ARGS_code 0x0C
#define CTI_ARGS_exec 0x0D
#define CTI_ARGS_registerFile 0x0E
#define CTI_ARGS_r 0x0F
#define CTI_ARGS_scopeChain 0x10
#define CTI_ARGS_codeBlock 0x11
#define CTI_ARGS_exception 0x12
#define CTI_ARGS_profilerReference 0x13
#define ARG_exec ((ExecState*)(ARGS)[CTI_ARGS_exec])
#define ARG_registerFile ((RegisterFile*)(ARGS)[CTI_ARGS_registerFile])
#define ARG_r ((Register*)(ARGS)[CTI_ARGS_r])
#define ARG_scopeChain ((ScopeChainNode*)(ARGS)[CTI_ARGS_scopeChain])
#define ARG_codeBlock ((CodeBlock*)(ARGS)[CTI_ARGS_codeBlock])
#define ARG_exception ((JSValue**)(ARGS)[CTI_ARGS_exception])
#define ARG_profilerReference ((Profiler**)(ARGS)[CTI_ARGS_profilerReference])

#define ARG_setScopeChain(newScopeChain) (*(volatile ScopeChainNode**)&(ARGS)[CTI_ARGS_scopeChain] = newScopeChain)
#define ARG_setCodeBlock(newCodeBlock) (*(volatile CodeBlock**)&(ARGS)[CTI_ARGS_codeBlock] = newCodeBlock)
#define ARG_setR(newR) (*(volatile Register**)&(ARGS)[CTI_ARGS_r] = newR)
#define ARG_set2ndResult(new2ndResult) (*(volatile JSValue**)&(ARGS)[CTI_ARGS_2ndResult] = new2ndResult)

#define ARG_src1 ((JSValue*)((ARGS)[1]))
#define ARG_src2 ((JSValue*)((ARGS)[2]))
#define ARG_src3 ((JSValue*)((ARGS)[3]))
#define ARG_src4 ((JSValue*)((ARGS)[4]))
#define ARG_id1 ((Identifier*)((ARGS)[1]))
#define ARG_id2 ((Identifier*)((ARGS)[2]))
#define ARG_id3 ((Identifier*)((ARGS)[3]))
#define ARG_id4 ((Identifier*)((ARGS)[4]))
#define ARG_int1 ((int)((ARGS)[1]))
#define ARG_int2 ((int)((ARGS)[2]))
#define ARG_int3 ((int)((ARGS)[3]))
#define ARG_int4 ((int)((ARGS)[4]))
#define ARG_func1 ((FuncDeclNode*)((ARGS)[1]))
#define ARG_funcexp1 ((FuncExprNode*)((ARGS)[1]))
#define ARG_registers1 ((Register*)((ARGS)[1]))
#define ARG_regexp1 ((RegExp*)((ARGS)[1]))
#define ARG_pni1 ((JSPropertyNameIterator*)((ARGS)[1]))
#define ARG_instr4 ((Instruction*)((ARGS)[4]))
#define ARG_instr5 ((Instruction*)((ARGS)[5]))

#define CTI_RETURN_ADDRESS ((ARGS)[-1])
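
// On x86 the slot immediately below the argument area holds the return address pushed
// by the call into the helper, so CTI_RETURN_ADDRESS identifies the call site in the
// generated code. This is what the repatching functions declared below use to locate
// and rewrite calls.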

namespace JSC {

    class CodeBlock;
    class ExecState;
    class JSPropertyNameIterator;
    class JSValue;
    class Machine;
    class Register;
    class RegisterFile;
    class ScopeChainNode;
    class SimpleJumpTable;
    class StringJumpTable;
    class StructureIDChain;
    struct Instruction;

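    // Signatures for the C functions that JIT-generated code calls back into. The suffix
    // encodes the return type: _j returns a JSValue*, _p a JSPropertyNameIterator*,
    // _v nothing, _s a void*, and _b an int (in practice a boolean result).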
    typedef JSValue* (*CTIHelper_j)(CTI_ARGS);
    typedef JSPropertyNameIterator* (*CTIHelper_p)(CTI_ARGS);
    typedef void (*CTIHelper_v)(CTI_ARGS);
    typedef void* (*CTIHelper_s)(CTI_ARGS);
    typedef int (*CTIHelper_b)(CTI_ARGS);

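    // Records a call emitted in generated code (from) out to a C helper (to), together
    // with the bytecode index it was emitted for, so the call can be linked to its real
    // target once code generation is complete.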
    struct CallRecord {
        X86Assembler::JmpSrc from;
        void* to;
        unsigned opcodeIndex;

        CallRecord()
        {
        }

        CallRecord(X86Assembler::JmpSrc f, CTIHelper_j t, unsigned i)
            : from(f)
            , to((void*)t)
            , opcodeIndex(i)
        {
        }

        CallRecord(X86Assembler::JmpSrc f, CTIHelper_p t, unsigned i)
            : from(f)
            , to((void*)t)
            , opcodeIndex(i)
        {
        }

        CallRecord(X86Assembler::JmpSrc f, CTIHelper_v t, unsigned i)
            : from(f)
            , to((void*)t)
            , opcodeIndex(i)
        {
        }

        CallRecord(X86Assembler::JmpSrc f, CTIHelper_s t, unsigned i)
            : from(f)
            , to((void*)t)
            , opcodeIndex(i)
        {
        }

        CallRecord(X86Assembler::JmpSrc f, CTIHelper_b t, unsigned i)
            : from(f)
            , to((void*)t)
            , opcodeIndex(i)
        {
        }
    };

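    // A jump emitted in generated code (from) whose destination is still a bytecode
    // offset (to); it is resolved to a machine-code location once the target label is known.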
    struct JmpTable {
        X86Assembler::JmpSrc from;
        unsigned to;

        JmpTable(X86Assembler::JmpSrc f, unsigned t)
            : from(f)
            , to(t)
        {
        }
    };

    struct SlowCaseEntry {
        X86Assembler::JmpSrc from;
        unsigned to;
        unsigned hint;

        SlowCaseEntry(X86Assembler::JmpSrc f, unsigned t, unsigned h = 0)
            : from(f)
            , to(t)
            , hint(h)
        {
        }
    };

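    // Bookkeeping for a switch in the bytecode. m_type selects which member of the
    // m_jumpTable union is active: Immediate and Character switches use a SimpleJumpTable,
    // String switches use a StringJumpTable.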
    struct SwitchRecord {
        enum Type {
            Immediate,
            Character,
            String
        };

        Type m_type;

        union {
            SimpleJumpTable* m_simpleJumpTable;
            StringJumpTable* m_stringJumpTable;
        } m_jumpTable;

        unsigned m_opcodeIndex;
        unsigned m_defaultOffset;

        SwitchRecord(SimpleJumpTable* jumpTable, unsigned opcodeIndex, unsigned defaultOffset, Type type)
            : m_type(type)
            , m_opcodeIndex(opcodeIndex)
            , m_defaultOffset(defaultOffset)
        {
            m_jumpTable.m_simpleJumpTable = jumpTable;
        }

        SwitchRecord(StringJumpTable* jumpTable, unsigned opcodeIndex, unsigned defaultOffset)
            : m_type(String)
            , m_opcodeIndex(opcodeIndex)
            , m_defaultOffset(defaultOffset)
        {
            m_jumpTable.m_stringJumpTable = jumpTable;
        }
    };

    struct StructureStubCompilationInfo {
        X86Assembler::JmpSrc callReturnLocation;
        X86Assembler::JmpDst hotPathBegin;
    };

    extern "C" {
        JSValue* ctiTrampoline(void* code, ExecState* exec, RegisterFile* registerFile, Register* r, ScopeChainNode* scopeChain, CodeBlock* codeBlock, JSValue** exception, Profiler**);
        void ctiVMThrowTrampoline();
    }

    void ctiSetReturnAddress(void** where, void* what);
    void ctiRepatchCallByReturnAddress(void* where, void* what);
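
    // ctiRepatchCallByReturnAddress rewrites the target of the call instruction whose
    // return address is `where`. As a rough sketch of the idea only (the real definition
    // lives in the .cpp file, and this assumes the x86 `call rel32` encoding, where the
    // 32-bit relative displacement occupies the four bytes just before the return address):
    //
    //     void ctiRepatchCallByReturnAddress(void* where, void* what)
    //     {
    //         // The displacement is measured from the instruction following the call,
    //         // i.e. from the return address itself.
    //         reinterpret_cast<intptr_t*>(where)[-1] =
    //             reinterpret_cast<intptr_t>(what) - reinterpret_cast<intptr_t>(where);
    //     }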

    class CTI {
        static const int repatchGetByIdDefaultStructureID = -1;
        // Magic number - the initial offset must not be representable as a signed 8-bit value,
        // or the X86Assembler will compress the displacement and we may not be able to fit a
        // repatched offset.
        static const int repatchGetByIdDefaultOffset = 256;

        // These architecture-specific values are used to enable repatching - see the comment on op_put_by_id.
        static const int repatchOffsetPutByIdStructureID = 19;
        static const int repatchOffsetPutByIdPropertyMapOffset = 34;
        // These architecture-specific values are used to enable repatching - see the comment on op_get_by_id.
        static const int repatchOffsetGetByIdStructureID = 19;
        static const int repatchOffsetGetByIdBranchToSlowCase = 25;
        static const int repatchOffsetGetByIdPropertyMapOffset = 34;
#if ENABLE(SAMPLING_TOOL)
        static const int repatchOffsetGetByIdSlowCaseCall = 27;
#else
        static const int repatchOffsetGetByIdSlowCaseCall = 17;
#endif
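
        // For orientation, the repatchOffset* constants above are byte offsets into the
        // instruction sequence emitted for get_by_id / put_by_id, locating the operands
        // that are rewritten once a property access is cached. A very rough sketch of the
        // get_by_id fast path (illustrative pseudo-assembly with placeholder operand names;
        // the authoritative emission code and its offsets live next to the op_get_by_id
        // comment referenced above):
        //
        //     cmp  objectStructureID, repatchGetByIdDefaultStructureID   ; repatched to the cached StructureID
        //     jne  slowCase
        //     mov  result, propertyStorage[repatchGetByIdDefaultOffset]  ; repatched to the cached offset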

    public:
        static void compile(Machine* machine, ExecState* exec, CodeBlock* codeBlock)
        {
            CTI cti(machine, exec, codeBlock);
            cti.privateCompile();
        }

#if ENABLE(WREC)
        static void* compileRegExp(ExecState* exec, const UString& pattern, unsigned* numSubpatterns_ptr, const char** error_ptr, bool ignoreCase = false, bool multiline = false);
#endif

        static void compileGetByIdSelf(Machine* machine, ExecState* exec, CodeBlock* codeBlock, StructureID* structureID, size_t cachedOffset, void* returnAddress)
        {
            CTI cti(machine, exec, codeBlock);
            cti.privateCompileGetByIdSelf(structureID, cachedOffset, returnAddress);
        }

        static void compileGetByIdProto(Machine* machine, ExecState* exec, CodeBlock* codeBlock, StructureID* structureID, StructureID* prototypeStructureID, size_t cachedOffset, void* returnAddress)
        {
            CTI cti(machine, exec, codeBlock);
            cti.privateCompileGetByIdProto(structureID, prototypeStructureID, cachedOffset, returnAddress);
        }

        static void compileGetByIdChain(Machine* machine, ExecState* exec, CodeBlock* codeBlock, StructureID* structureID, StructureIDChain* chain, size_t count, size_t cachedOffset, void* returnAddress)
        {
            CTI cti(machine, exec, codeBlock);
            cti.privateCompileGetByIdChain(structureID, chain, count, cachedOffset, returnAddress);
        }

        static void compilePutByIdReplace(Machine* machine, ExecState* exec, CodeBlock* codeBlock, StructureID* structureID, size_t cachedOffset, void* returnAddress)
        {
            CTI cti(machine, exec, codeBlock);
            cti.privateCompilePutByIdReplace(structureID, cachedOffset, returnAddress);
        }

        static void compilePutByIdTransition(Machine* machine, ExecState* exec, CodeBlock* codeBlock, StructureID* oldStructureID, StructureID* newStructureID, size_t cachedOffset, StructureIDChain* sIDC, void* returnAddress)
        {
            CTI cti(machine, exec, codeBlock);
            cti.privateCompilePutByIdTransition(oldStructureID, newStructureID, cachedOffset, sIDC, returnAddress);
        }

        static void* compileArrayLengthTrampoline(Machine* machine, ExecState* exec, CodeBlock* codeBlock)
        {
            CTI cti(machine, exec, codeBlock);
            return cti.privateCompileArrayLengthTrampoline();
        }

        static void* compileStringLengthTrampoline(Machine* machine, ExecState* exec, CodeBlock* codeBlock)
        {
            CTI cti(machine, exec, codeBlock);
            return cti.privateCompileStringLengthTrampoline();
        }

        static void patchGetByIdSelf(CodeBlock* codeBlock, StructureID* structureID, size_t cachedOffset, void* returnAddress);
        static void patchPutByIdReplace(CodeBlock* codeBlock, StructureID* structureID, size_t cachedOffset, void* returnAddress);

        static void compilePatchGetArrayLength(Machine* machine, ExecState* exec, CodeBlock* codeBlock, void* returnAddress)
        {
            CTI cti(machine, exec, codeBlock);
            return cti.privateCompilePatchGetArrayLength(returnAddress);
        }

        inline static JSValue* execute(void* code, ExecState* exec, RegisterFile* registerFile, Register* r, ScopeChainNode* scopeChain, CodeBlock* codeBlock, JSValue** exception)
        {
            JSValue* value = ctiTrampoline(code, exec, registerFile, r, scopeChain, codeBlock, exception, Profiler::enabledProfilerReference());
#if ENABLE(SAMPLING_TOOL)
            currentOpcodeID = static_cast<OpcodeID>(-1);
#endif
            return value;
        }
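
        // Typical flow, sketched from the interface above (in practice the Machine drives
        // this; `generatedCode` is a placeholder name for wherever the caller keeps the
        // result of compilation):
        //
        //     CTI::compile(machine, exec, codeBlock);   // emits native code for codeBlock
        //     JSValue* result = CTI::execute(generatedCode, exec, registerFile, r,
        //                                    scopeChain, codeBlock, &exception);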

    private:
        CTI(Machine*, ExecState*, CodeBlock*);
        void privateCompileMainPass();
        void privateCompileLinkPass();
        void privateCompileSlowCases();
        void privateCompile();
        void privateCompileGetByIdSelf(StructureID*, size_t cachedOffset, void* returnAddress);
        void privateCompileGetByIdProto(StructureID*, StructureID* prototypeStructureID, size_t cachedOffset, void* returnAddress);
        void privateCompileGetByIdChain(StructureID*, StructureIDChain*, size_t count, size_t cachedOffset, void* returnAddress);
        void privateCompilePutByIdReplace(StructureID*, size_t cachedOffset, void* returnAddress);
        void privateCompilePutByIdTransition(StructureID*, StructureID*, size_t cachedOffset, StructureIDChain*, void* returnAddress);

        void* privateCompileArrayLengthTrampoline();
        void* privateCompileStringLengthTrampoline();
        void privateCompilePatchGetArrayLength(void* returnAddress);

        enum CompileOpCallType { OpCallNormal, OpCallEval, OpConstruct };
        void compileOpCall(Instruction* instruction, unsigned i, CompileOpCallType type = OpCallNormal);

        void emitGetArg(unsigned src, X86Assembler::RegisterID dst);
        void emitGetPutArg(unsigned src, unsigned offset, X86Assembler::RegisterID scratch);
        void emitPutArg(X86Assembler::RegisterID src, unsigned offset);
        void emitPutArgConstant(unsigned value, unsigned offset);
        void emitPutResult(unsigned dst, X86Assembler::RegisterID from = X86::eax);

        void emitPutCTIParam(X86Assembler::RegisterID from, unsigned name);
        void emitGetCTIParam(unsigned name, X86Assembler::RegisterID to);

        void emitPutToCallFrameHeader(X86Assembler::RegisterID from, RegisterFile::CallFrameHeaderEntry entry);
        void emitGetFromCallFrameHeader(RegisterFile::CallFrameHeaderEntry entry, X86Assembler::RegisterID to);

        JSValue* getConstantImmediateNumericArg(unsigned src);
        unsigned getDeTaggedConstantImmediate(JSValue* imm);

        void emitJumpSlowCaseIfIsJSCell(X86Assembler::RegisterID reg, unsigned opcodeIndex);
        void emitJumpSlowCaseIfNotJSCell(X86Assembler::RegisterID reg, unsigned opcodeIndex);
        void emitJumpSlowCaseIfNotImm(X86Assembler::RegisterID, unsigned opcodeIndex);
        void emitJumpSlowCaseIfNotImms(X86Assembler::RegisterID, X86Assembler::RegisterID, unsigned opcodeIndex);

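        // The emitFastArith* helpers convert between JSImmediate-encoded integers and raw
        // machine integers (see JSImmediate.h for the tagging scheme), with the OrSlowCase
        // variant jumping to the slow case when the result cannot be re-encoded.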
        void emitFastArithDeTagImmediate(X86Assembler::RegisterID);
        void emitFastArithReTagImmediate(X86Assembler::RegisterID);
        void emitFastArithPotentiallyReTagImmediate(X86Assembler::RegisterID);
        void emitFastArithImmToInt(X86Assembler::RegisterID);
        void emitFastArithIntToImmOrSlowCase(X86Assembler::RegisterID, unsigned opcodeIndex);
        void emitFastArithIntToImmNoCheck(X86Assembler::RegisterID);

        void emitDebugExceptionCheck();

        X86Assembler::JmpSrc emitCall(unsigned opcodeIndex, CTIHelper_j);
        X86Assembler::JmpSrc emitCall(unsigned opcodeIndex, CTIHelper_p);
        X86Assembler::JmpSrc emitCall(unsigned opcodeIndex, CTIHelper_b);
        X86Assembler::JmpSrc emitCall(unsigned opcodeIndex, CTIHelper_v);
        X86Assembler::JmpSrc emitCall(unsigned opcodeIndex, CTIHelper_s);

        void emitGetVariableObjectRegister(X86Assembler::RegisterID variableObject, int index, X86Assembler::RegisterID dst);
        void emitPutVariableObjectRegister(X86Assembler::RegisterID src, X86Assembler::RegisterID variableObject, int index);

        void emitSlowScriptCheck(unsigned opcodeIndex);
#ifndef NDEBUG
        void printOpcodeOperandTypes(unsigned src1, unsigned src2);
#endif

        X86Assembler m_jit;
        Machine* m_machine;
        ExecState* m_exec;
        CodeBlock* m_codeBlock;

        Vector<CallRecord> m_calls;
        Vector<X86Assembler::JmpDst> m_labels;
        Vector<StructureStubCompilationInfo> m_structureStubCompilationInfo;
        Vector<JmpTable> m_jmpTable;

        struct JSRInfo {
            X86Assembler::JmpDst addrPosition;
            X86Assembler::JmpDst target;

            JSRInfo(const X86Assembler::JmpDst& storeLocation, const X86Assembler::JmpDst& targetLocation)
                : addrPosition(storeLocation)
                , target(targetLocation)
            {
            }
        };

        Vector<JSRInfo> m_jsrSites;
        Vector<SlowCaseEntry> m_slowCases;
        Vector<SwitchRecord> m_switches;

        // This limit mirrors the pattern size limit set in PCRE.
        static const int MaxPatternSize = (1 << 16);
    };

} // namespace JSC

#endif // ENABLE(CTI)

#endif // CTI_h