/*
 * Copyright (C) 2011, 2013 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef AssemblyHelpers_h
#define AssemblyHelpers_h

#include <wtf/Platform.h>

#if ENABLE(JIT)

#include "CodeBlock.h"
#include "FPRInfo.h"
#include "GPRInfo.h"
#include "JITCode.h"
#include "MacroAssembler.h"
#include "VM.h"

namespace JSC {

typedef void (*V_DebugOperation_EPP)(ExecState*, void*, void*);

class AssemblyHelpers : public MacroAssembler {
public:
    AssemblyHelpers(VM* vm, CodeBlock* codeBlock)
        : m_vm(vm)
        , m_codeBlock(codeBlock)
        , m_baselineCodeBlock(codeBlock ? codeBlock->baselineVersion() : 0)
    {
        if (m_codeBlock) {
            ASSERT(m_baselineCodeBlock);
            ASSERT(!m_baselineCodeBlock->alternative());
            ASSERT(m_baselineCodeBlock->jitType() == JITCode::None || JITCode::isBaselineCode(m_baselineCodeBlock->jitType()));
        }
    }

    CodeBlock* codeBlock() { return m_codeBlock; }
    VM* vm() { return m_vm; }
    AssemblerType_T& assembler() { return m_assembler; }

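    // Per-CPU helpers for saving and restoring the return address around a call.
    // On x86/x86-64 the return address lives on the stack, so it is popped and
    // pushed; on ARM, MIPS and SH4 it lives in a link/return-address register.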
#if CPU(X86_64) || CPU(X86)
    void preserveReturnAddressAfterCall(GPRReg reg)
    {
        pop(reg);
    }

    void restoreReturnAddressBeforeReturn(GPRReg reg)
    {
        push(reg);
    }

    void restoreReturnAddressBeforeReturn(Address address)
    {
        push(address);
    }
#endif // CPU(X86_64) || CPU(X86)

#if CPU(ARM)
    ALWAYS_INLINE void preserveReturnAddressAfterCall(RegisterID reg)
    {
        move(linkRegister, reg);
    }

    ALWAYS_INLINE void restoreReturnAddressBeforeReturn(RegisterID reg)
    {
        move(reg, linkRegister);
    }

    ALWAYS_INLINE void restoreReturnAddressBeforeReturn(Address address)
    {
        loadPtr(address, linkRegister);
    }
#endif

#if CPU(MIPS)
    ALWAYS_INLINE void preserveReturnAddressAfterCall(RegisterID reg)
    {
        move(returnAddressRegister, reg);
    }

    ALWAYS_INLINE void restoreReturnAddressBeforeReturn(RegisterID reg)
    {
        move(reg, returnAddressRegister);
    }

    ALWAYS_INLINE void restoreReturnAddressBeforeReturn(Address address)
    {
        loadPtr(address, returnAddressRegister);
    }
#endif

#if CPU(SH4)
    ALWAYS_INLINE void preserveReturnAddressAfterCall(RegisterID reg)
    {
        m_assembler.stspr(reg);
    }

    ALWAYS_INLINE void restoreReturnAddressBeforeReturn(RegisterID reg)
    {
        m_assembler.ldspr(reg);
    }

    ALWAYS_INLINE void restoreReturnAddressBeforeReturn(Address address)
    {
        loadPtrLinkReg(address);
    }
#endif

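    // Accessors for slots in the current call frame header, addressed relative
    // to the call frame register.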
    void emitGetFromCallFrameHeaderPtr(JSStack::CallFrameHeaderEntry entry, GPRReg to)
    {
        loadPtr(Address(GPRInfo::callFrameRegister, entry * sizeof(Register)), to);
    }
    void emitPutToCallFrameHeader(GPRReg from, JSStack::CallFrameHeaderEntry entry)
    {
#if USE(JSVALUE64)
        store64(from, Address(GPRInfo::callFrameRegister, entry * sizeof(Register)));
#else
        store32(from, Address(GPRInfo::callFrameRegister, entry * sizeof(Register)));
#endif
    }

    void emitPutImmediateToCallFrameHeader(void* value, JSStack::CallFrameHeaderEntry entry)
    {
        storePtr(TrustedImmPtr(value), Address(GPRInfo::callFrameRegister, entry * sizeof(Register)));
    }

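    // Branches if the value is not a cell. On 64-bit this tests the value in reg
    // against the tag mask register; on 32-bit the caller passes the tag word in
    // reg and it is compared against CellTag.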
    Jump branchIfNotCell(GPRReg reg)
    {
#if USE(JSVALUE64)
        return branchTest64(MacroAssembler::NonZero, reg, GPRInfo::tagMaskRegister);
#else
        return branch32(MacroAssembler::NotEqual, reg, TrustedImm32(JSValue::CellTag));
#endif
    }

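    // Stack-slot addressing helpers: addressFor() yields the whole Register slot
    // for a virtual register, while tagFor()/payloadFor() address the tag and
    // payload halves of the boxed value in that slot.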
    static Address addressFor(VirtualRegister virtualRegister)
    {
        ASSERT(virtualRegister.isValid());
        return Address(GPRInfo::callFrameRegister, virtualRegister.offset() * sizeof(Register));
    }
    static Address addressFor(int operand)
    {
        return addressFor(static_cast<VirtualRegister>(operand));
    }

    static Address tagFor(VirtualRegister virtualRegister)
    {
        ASSERT(virtualRegister.isValid());
        return Address(GPRInfo::callFrameRegister, virtualRegister.offset() * sizeof(Register) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
    }
    static Address tagFor(int operand)
    {
        return tagFor(static_cast<VirtualRegister>(operand));
    }

    static Address payloadFor(VirtualRegister virtualRegister)
    {
        ASSERT(virtualRegister.isValid());
        return Address(GPRInfo::callFrameRegister, virtualRegister.offset() * sizeof(Register) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
    }
    static Address payloadFor(int operand)
    {
        return payloadFor(static_cast<VirtualRegister>(operand));
    }

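    // Branches if the structure's JSType is below ObjectType; object types are
    // ordered at or above ObjectType in the JSType enum.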
    Jump branchIfNotObject(GPRReg structureReg)
    {
        return branch8(Below, Address(structureReg, Structure::typeInfoTypeOffset()), TrustedImm32(ObjectType));
    }

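    // Picks a temporary GPR that is none of the (up to four) registers the
    // caller needs preserved.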
    static GPRReg selectScratchGPR(GPRReg preserve1 = InvalidGPRReg, GPRReg preserve2 = InvalidGPRReg, GPRReg preserve3 = InvalidGPRReg, GPRReg preserve4 = InvalidGPRReg)
    {
        if (preserve1 != GPRInfo::regT0 && preserve2 != GPRInfo::regT0 && preserve3 != GPRInfo::regT0 && preserve4 != GPRInfo::regT0)
            return GPRInfo::regT0;

        if (preserve1 != GPRInfo::regT1 && preserve2 != GPRInfo::regT1 && preserve3 != GPRInfo::regT1 && preserve4 != GPRInfo::regT1)
            return GPRInfo::regT1;

        if (preserve1 != GPRInfo::regT2 && preserve2 != GPRInfo::regT2 && preserve3 != GPRInfo::regT2 && preserve4 != GPRInfo::regT2)
            return GPRInfo::regT2;

        if (preserve1 != GPRInfo::regT3 && preserve2 != GPRInfo::regT3 && preserve3 != GPRInfo::regT3 && preserve4 != GPRInfo::regT3)
            return GPRInfo::regT3;

        return GPRInfo::regT4;
    }

    // Add a debug call. This call has no effect on JIT code execution state.
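    // Every GPR and FPR is spilled to a VM scratch buffer, the buffer is marked
    // active so the GC scans it for the duration of the call, the C function is
    // invoked, and then all registers are restored from the buffer.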
    void debugCall(V_DebugOperation_EPP function, void* argument)
    {
        size_t scratchSize = sizeof(EncodedJSValue) * (GPRInfo::numberOfRegisters + FPRInfo::numberOfRegisters);
        ScratchBuffer* scratchBuffer = m_vm->scratchBufferForSize(scratchSize);
        EncodedJSValue* buffer = static_cast<EncodedJSValue*>(scratchBuffer->dataBuffer());

        for (unsigned i = 0; i < GPRInfo::numberOfRegisters; ++i) {
#if USE(JSVALUE64)
            store64(GPRInfo::toRegister(i), buffer + i);
#else
            store32(GPRInfo::toRegister(i), buffer + i);
#endif
        }

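        // Save the FPRs after the GPR slots; regT0 was already saved above, so
        // it is free to hold the destination address for storeDouble().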
        for (unsigned i = 0; i < FPRInfo::numberOfRegisters; ++i) {
            move(TrustedImmPtr(buffer + GPRInfo::numberOfRegisters + i), GPRInfo::regT0);
            storeDouble(FPRInfo::toRegister(i), GPRInfo::regT0);
        }

        // Tell GC mark phase how much of the scratch buffer is active during call.
        move(TrustedImmPtr(scratchBuffer->activeLengthPtr()), GPRInfo::regT0);
        storePtr(TrustedImmPtr(scratchSize), GPRInfo::regT0);

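        // Marshal the three arguments (ExecState*, argument, buffer): in registers
        // on architectures with register calling conventions, on the stack for x86.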
#if CPU(X86_64) || CPU(ARM) || CPU(MIPS) || CPU(SH4)
        move(TrustedImmPtr(buffer), GPRInfo::argumentGPR2);
        move(TrustedImmPtr(argument), GPRInfo::argumentGPR1);
        move(GPRInfo::callFrameRegister, GPRInfo::argumentGPR0);
        GPRReg scratch = selectScratchGPR(GPRInfo::argumentGPR0, GPRInfo::argumentGPR1, GPRInfo::argumentGPR2);
#elif CPU(X86)
        poke(GPRInfo::callFrameRegister, 0);
        poke(TrustedImmPtr(argument), 1);
        poke(TrustedImmPtr(buffer), 2);
        GPRReg scratch = GPRInfo::regT0;
#else
#error "JIT not supported on this platform."
#endif
        move(TrustedImmPtr(reinterpret_cast<void*>(function)), scratch);
        call(scratch);

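        // The call has returned; mark the scratch buffer inactive again and
        // restore every FPR and GPR from it.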
        move(TrustedImmPtr(scratchBuffer->activeLengthPtr()), GPRInfo::regT0);
        storePtr(TrustedImmPtr(0), GPRInfo::regT0);

        for (unsigned i = 0; i < FPRInfo::numberOfRegisters; ++i) {
            move(TrustedImmPtr(buffer + GPRInfo::numberOfRegisters + i), GPRInfo::regT0);
            loadDouble(GPRInfo::regT0, FPRInfo::toRegister(i));
        }
        for (unsigned i = 0; i < GPRInfo::numberOfRegisters; ++i) {
#if USE(JSVALUE64)
            load64(buffer + i, GPRInfo::toRegister(i));
#else
            load32(buffer + i, GPRInfo::toRegister(i));
#endif
        }
    }

    // These methods JIT-generate dynamic, debug-only checks, akin to ASSERTs.
#if !ASSERT_DISABLED
    void jitAssertIsInt32(GPRReg);
    void jitAssertIsJSInt32(GPRReg);
    void jitAssertIsJSNumber(GPRReg);
    void jitAssertIsJSDouble(GPRReg);
    void jitAssertIsCell(GPRReg);
    void jitAssertHasValidCallFrame();
    void jitAssertIsNull(GPRReg);
#else
    void jitAssertIsInt32(GPRReg) { }
    void jitAssertIsJSInt32(GPRReg) { }
    void jitAssertIsJSNumber(GPRReg) { }
    void jitAssertIsJSDouble(GPRReg) { }
    void jitAssertIsCell(GPRReg) { }
    void jitAssertHasValidCallFrame() { }
    void jitAssertIsNull(GPRReg) { }
#endif

    // These methods convert between raw doubles and doubles boxed as JSValues.
#if USE(JSVALUE64)
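    // On 64-bit, a double is boxed by subtracting the TagTypeNumber constant from
    // its bit pattern and unboxed by adding it back, so no memory traffic is needed.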
    GPRReg boxDouble(FPRReg fpr, GPRReg gpr)
    {
        moveDoubleTo64(fpr, gpr);
        sub64(GPRInfo::tagTypeNumberRegister, gpr);
        jitAssertIsJSDouble(gpr);
        return gpr;
    }
    FPRReg unboxDouble(GPRReg gpr, FPRReg fpr)
    {
        jitAssertIsJSDouble(gpr);
        add64(GPRInfo::tagTypeNumberRegister, gpr);
        move64ToDouble(gpr, fpr);
        return fpr;
    }

    void boxInt52(GPRReg source, GPRReg target, GPRReg scratch, FPRReg fpScratch)
    {
        // Is it an int32?
        signExtend32ToPtr(source, scratch);
        Jump isInt32 = branch64(Equal, source, scratch);

        // It's not an int32; source still holds the full int64 value, so box it
        // as a double.
        convertInt64ToDouble(source, fpScratch);
        boxDouble(fpScratch, target);
        Jump done = jump();

        isInt32.link(this);
        zeroExtend32ToPtr(source, target);
        or64(GPRInfo::tagTypeNumberRegister, target);

        done.link(this);
    }
#endif

#if USE(JSVALUE32_64)
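    // On 32-bit, a boxed double is simply its tag/payload word pair, so boxing
    // and unboxing are register-pair moves.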
    void boxDouble(FPRReg fpr, GPRReg tagGPR, GPRReg payloadGPR)
    {
        moveDoubleToInts(fpr, payloadGPR, tagGPR);
    }
    void unboxDouble(GPRReg tagGPR, GPRReg payloadGPR, FPRReg fpr, FPRReg scratchFPR)
    {
        moveIntsToDouble(payloadGPR, tagGPR, fpr, scratchFPR);
    }
#endif

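    // Emits a check of the VM's pending-exception slot. NormalExceptionCheck
    // branches when an exception is set; InvertedExceptionCheck branches when
    // none is set.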
    enum ExceptionCheckKind { NormalExceptionCheck, InvertedExceptionCheck };
    Jump emitExceptionCheck(ExceptionCheckKind kind = NormalExceptionCheck)
    {
#if USE(JSVALUE64)
        return branchTest64(kind == NormalExceptionCheck ? NonZero : Zero, AbsoluteAddress(vm()->addressOfException()));
#elif USE(JSVALUE32_64)
        return branch32(kind == NormalExceptionCheck ? NotEqual : Equal, AbsoluteAddress(reinterpret_cast<char*>(vm()->addressOfException()) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), TrustedImm32(JSValue::EmptyValueTag));
#endif
    }

#if ENABLE(SAMPLING_COUNTERS)
    static void emitCount(MacroAssembler& jit, AbstractSamplingCounter& counter, int32_t increment = 1)
    {
        jit.add64(TrustedImm32(increment), AbsoluteAddress(counter.addressOfCounter()));
    }
    void emitCount(AbstractSamplingCounter& counter, int32_t increment = 1)
    {
        add64(TrustedImm32(increment), AbsoluteAddress(counter.addressOfCounter()));
    }
#endif

#if ENABLE(SAMPLING_FLAGS)
    void setSamplingFlag(int32_t);
    void clearSamplingFlag(int32_t flag);
#endif

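    // Helpers that map a CodeOrigin (possibly inside an inlined call frame) back
    // to the global object, strict-mode flag, baseline CodeBlock, and stack
    // offsets it corresponds to.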
    JSGlobalObject* globalObjectFor(CodeOrigin codeOrigin)
    {
        return codeBlock()->globalObjectFor(codeOrigin);
    }

    bool strictModeFor(CodeOrigin codeOrigin)
    {
        if (!codeOrigin.inlineCallFrame)
            return codeBlock()->isStrictMode();
        return jsCast<FunctionExecutable*>(codeOrigin.inlineCallFrame->executable.get())->isStrictMode();
    }

    ExecutableBase* executableFor(const CodeOrigin& codeOrigin);

    CodeBlock* baselineCodeBlockFor(const CodeOrigin& codeOrigin)
    {
        return baselineCodeBlockForOriginAndBaselineCodeBlock(codeOrigin, baselineCodeBlock());
    }

    CodeBlock* baselineCodeBlockFor(InlineCallFrame* inlineCallFrame)
    {
        if (!inlineCallFrame)
            return baselineCodeBlock();
        return baselineCodeBlockForInlineCallFrame(inlineCallFrame);
    }

    CodeBlock* baselineCodeBlock()
    {
        return m_baselineCodeBlock;
    }

    VirtualRegister baselineArgumentsRegisterFor(InlineCallFrame* inlineCallFrame)
    {
        if (!inlineCallFrame)
            return baselineCodeBlock()->argumentsRegister();

        return VirtualRegister(baselineCodeBlockForInlineCallFrame(
            inlineCallFrame)->argumentsRegister().offset() + inlineCallFrame->stackOffset);
    }

    VirtualRegister baselineArgumentsRegisterFor(const CodeOrigin& codeOrigin)
    {
        return baselineArgumentsRegisterFor(codeOrigin.inlineCallFrame);
    }

    SharedSymbolTable* symbolTableFor(const CodeOrigin& codeOrigin)
    {
        return baselineCodeBlockFor(codeOrigin)->symbolTable();
    }

    int offsetOfLocals(const CodeOrigin& codeOrigin)
    {
        if (!codeOrigin.inlineCallFrame)
            return 0;
        return codeOrigin.inlineCallFrame->stackOffset * sizeof(Register);
    }

    int offsetOfArgumentsIncludingThis(InlineCallFrame* inlineCallFrame)
    {
        if (!inlineCallFrame)
            return CallFrame::argumentOffsetIncludingThis(0) * sizeof(Register);
        if (inlineCallFrame->arguments.size() <= 1)
            return 0;
        ValueRecovery recovery = inlineCallFrame->arguments[1];
        RELEASE_ASSERT(recovery.technique() == DisplacedInJSStack);
        return (recovery.virtualRegister().offset() - 1) * sizeof(Register);
    }

    int offsetOfArgumentsIncludingThis(const CodeOrigin& codeOrigin)
    {
        return offsetOfArgumentsIncludingThis(codeOrigin.inlineCallFrame);
    }

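    // The write barrier currently generates no code; when WRITE_BARRIER_PROFILING
    // is enabled it only bumps the per-use-kind counter.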
    void writeBarrier(GPRReg owner, GPRReg scratch1, GPRReg scratch2, WriteBarrierUseKind useKind)
    {
        UNUSED_PARAM(owner);
        UNUSED_PARAM(scratch1);
        UNUSED_PARAM(scratch2);
        UNUSED_PARAM(useKind);
        ASSERT(owner != scratch1);
        ASSERT(owner != scratch2);
        ASSERT(scratch1 != scratch2);

#if ENABLE(WRITE_BARRIER_PROFILING)
        emitCount(WriteBarrierCounters::jitCounterFor(useKind));
#endif
    }

    Vector<BytecodeAndMachineOffset>& decodedCodeMapFor(CodeBlock*);

protected:
    VM* m_vm;
    CodeBlock* m_codeBlock;
    CodeBlock* m_baselineCodeBlock;

    HashMap<CodeBlock*, Vector<BytecodeAndMachineOffset> > m_decodedCodeMaps;
};

} // namespace JSC

#endif // ENABLE(JIT)

#endif // AssemblyHelpers_h