fourthTier: Change JSStack to grow from high to low addresses
[WebKit-https.git] / Source / JavaScriptCore / jit / JITCall.cpp
1 /*
2  * Copyright (C) 2008, 2013 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #include "config.h"
27
28 #if ENABLE(JIT)
29 #if USE(JSVALUE64)
30 #include "JIT.h"
31
32 #include "Arguments.h"
33 #include "CodeBlock.h"
34 #include "JITInlines.h"
35 #include "JITStubCall.h"
36 #include "JSArray.h"
37 #include "JSFunction.h"
38 #include "Interpreter.h"
39 #include "Operations.h"
40 #include "RepatchBuffer.h"
41 #include "ResultType.h"
42 #include "SamplingTool.h"
43 #include "ThunkGenerators.h"
44 #include <wtf/StringPrintStream.h>
45
46 #ifndef NDEBUG
47 #include <stdio.h>
48 #endif
49
50 using namespace std;
51
52 namespace JSC {
53
54 void JIT::emitPutCallResult(Instruction* instruction)
55 {
56     int dst = instruction[1].u.operand;
57     emitValueProfilingSite(regT4);
58     emitPutVirtualRegister(dst);
59     if (canBeOptimizedOrInlined()) {
60         // Make lastResultRegister tracking simpler in the DFG. This is needed because
61         // the DFG may have the SetLocal corresponding to this Call's return value in
62         // a different basic block, if inlining happened. The DFG isn't smart enough to
63         // track the baseline JIT's last result register across basic blocks.
64         killLastResultRegister();
65     }
66 }
67
// Sets up the outgoing call frame for op_call_varargs. Fast path: when the
// code block's lazily-created arguments object has not been materialized (and
// there is no SlowArguments mapping), the argument values are still live in
// this frame and are copied directly into the new frame. Otherwise the
// cti_op_load_varargs stub marshals the arguments.
void JIT::compileLoadVarargs(Instruction* instruction)
{
    int thisValue = instruction[3].u.operand;
    int arguments = instruction[4].u.operand;
    int firstFreeRegister = instruction[5].u.operand;

    killLastResultRegister();

    JumpList slowCase;
    JumpList end;
    // Fast path applies only when 'arguments' is this code block's own
    // arguments register and arguments access is not marked slow.
    bool canOptimize = m_codeBlock->usesArguments()
        && arguments == m_codeBlock->argumentsRegister()
        && !m_codeBlock->symbolTable()->slowArguments();

    if (canOptimize) {
        emitGetVirtualRegister(arguments, regT0);
        // An empty JSValue in the arguments register means the arguments
        // object was never created; any other value forces the slow path.
        slowCase.append(branch64(NotEqual, regT0, TrustedImm64(JSValue::encode(JSValue()))));

        emitGetFromCallFrameHeader32(JSStack::ArgumentCount, regT0);
        slowCase.append(branch32(Above, regT0, TrustedImm32(Arguments::MaxArguments + 1)));
        // regT0: argumentCountIncludingThis

        // Compute the new frame pointer:
        //   regT1 = callFrame + (firstFreeRegister - CallFrameHeaderSize - argumentCountIncludingThis) * sizeof(Register)
        // NOTE(review): with the stack growing high-to-low this presumably
        // lands below the current frame — depends on the sign convention of
        // the firstFreeRegister operand; confirm against the bytecode emitter.
        move(regT0, regT1);
        neg64(regT1);
        add64(TrustedImm32(firstFreeRegister - JSStack::CallFrameHeaderSize), regT1);
        lshift64(TrustedImm32(3), regT1);
        addPtr(callFrameRegister, regT1);
        // regT1: newCallFrame

        // Stack-overflow check: stack().end() is compared Above regT1, i.e.
        // the slow path is taken if the new frame would sit below the stack's
        // end address.
        slowCase.append(branchPtr(Above, AbsoluteAddress(m_vm->interpreter->stack().addressOfEnd()), regT1));

        // Initialize ArgumentCount (payload half of the slot).
        store32(regT0, Address(regT1, JSStack::ArgumentCount * static_cast<int>(sizeof(Register)) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));

        // Initialize 'this'.
        emitGetVirtualRegister(thisValue, regT2);
        store64(regT2, Address(regT1, CallFrame::thisArgumentOffset() * static_cast<int>(sizeof(Register))));

        // Copy the remaining arguments ('this' was stored above) from the
        // current frame to the new frame, highest index first. The Zero branch
        // handles the no-arguments case (count including 'this' was 1).
        signExtend32ToPtr(regT0, regT0);
        end.append(branchSub64(Zero, TrustedImm32(1), regT0));
        // regT0: argumentCount

        Label copyLoop = label();
        load64(BaseIndex(callFrameRegister, regT0, TimesEight, CallFrame::thisArgumentOffset() * static_cast<int>(sizeof(Register))), regT2);
        store64(regT2, BaseIndex(regT1, regT0, TimesEight, CallFrame::thisArgumentOffset() * static_cast<int>(sizeof(Register))));
        branchSub64(NonZero, TrustedImm32(1), regT0).linkTo(copyLoop, this);

        end.append(jump());
    }

    if (canOptimize)
        slowCase.link(this);

    // Slow path: let the runtime build the frame; the stub call leaves its
    // result in regT1 to match the fast path's newCallFrame register.
    JITStubCall stubCall(this, cti_op_load_varargs);
    stubCall.addArgument(thisValue, regT0);
    stubCall.addArgument(arguments, regT0);
    stubCall.addArgument(Imm32(firstFreeRegister));
    stubCall.call(regT1);

    if (canOptimize)
        end.link(this);
}
131
// Emitted inline for op_call_eval after compileOpCall has moved
// callFrameRegister to the new frame. The stub runs the eval; if the callee
// turns out not to be a genuine eval it returns the empty JSValue, and we
// branch to the slow case, which performs an ordinary call.
void JIT::compileCallEval(Instruction* instruction)
{
    JITStubCall stubCall(this, cti_op_call_eval); // Initializes ScopeChain; ReturnPC; CodeBlock.
    stubCall.call();
    addSlowCase(branch64(Equal, regT0, TrustedImm64(JSValue::encode(JSValue()))));
    // The eval executed in the callee frame; pop back to the caller frame by
    // reloading CallerFrame from the frame header.
    emitGetFromCallFrameHeaderPtr(JSStack::CallerFrame, callFrameRegister);

    sampleCodeBlock(m_codeBlock);
    
    emitPutCallResult(instruction);
}
143
// Slow case for op_call_eval: the callee was not the real eval, so perform a
// plain virtual call using the frame compileOpCall already constructed.
void JIT::compileCallEvalSlowCase(Instruction* instruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);

    // Reload the callee from the frame header into regT0, where the
    // virtual-call thunk presumably expects it — matches the register used by
    // compileOpCall's fast path.
    emitGetFromCallFrameHeader64(JSStack::Callee, regT0);
    emitNakedCall(m_vm->getCTIStub(virtualCallGenerator).code());

    sampleCodeBlock(m_codeBlock);
    
    emitPutCallResult(instruction);
}
155
// Emits the fast path for op_call / op_call_eval / op_call_varargs /
// op_construct: builds the callee frame, records the caller's return
// location, and (for linkable calls) emits the patchable callee check plus
// the near call that the link stubs later repatch.
void JIT::compileOpCall(OpcodeID opcodeID, Instruction* instruction, unsigned callLinkInfoIndex)
{
    int callee = instruction[2].u.operand;

    /* Caller always:
        - Updates callFrameRegister to callee callFrame.
        - Initializes ArgumentCount; CallerFrame; Callee.

       For a JS call:
        - Caller initializes ScopeChain.
        - Callee initializes ReturnPC; CodeBlock.
        - Callee restores callFrameRegister before return.

       For a non-JS call:
        - Caller initializes ScopeChain; ReturnPC; CodeBlock.
        - Caller restores callFrameRegister after return.
    */

    if (opcodeID == op_call_varargs)
        compileLoadVarargs(instruction);
    else {
        int argCount = instruction[3].u.operand;
        // The operand is negated: with the stack growing from high to low
        // addresses, the callee frame sits at a negative Register offset from
        // the current frame.
        int registerOffset = -instruction[4].u.operand;

        if (opcodeID == op_call && shouldEmitProfiling()) {
            // Record the structure of argument 0 (the 'this' slot) in the
            // array profile's last-seen-structure field; skip non-cells.
            emitGetVirtualRegister(registerOffset + CallFrame::argumentOffsetIncludingThis(0), regT0);
            Jump done = emitJumpIfNotJSCell(regT0);
            loadPtr(Address(regT0, JSCell::structureOffset()), regT0);
            storePtr(regT0, instruction[6].u.arrayProfile->addressOfLastSeenStructure());
            done.link(this);
        }
    
        addPtr(TrustedImm32(registerOffset * sizeof(Register)), callFrameRegister, regT1);
        store32(TrustedImm32(argCount), Address(regT1, JSStack::ArgumentCount * static_cast<int>(sizeof(Register)) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
    } // regT1 holds newCallFrame with ArgumentCount initialized.
    
    // Stash the return location, encoded as a bytecode offset, in the tag
    // half of the caller frame's ArgumentCount slot.
    uint32_t bytecodeOffset = instruction - m_codeBlock->instructions().begin();
    uint32_t locationBits = CallFrame::Location::encodeAsBytecodeOffset(bytecodeOffset);
    store32(TrustedImm32(locationBits), Address(callFrameRegister, JSStack::ArgumentCount * static_cast<int>(sizeof(Register)) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
    emitGetVirtualRegister(callee, regT0); // regT0 holds callee.

    // Link the new frame to its caller and record the callee, then make the
    // new frame current.
    store64(callFrameRegister, Address(regT1, JSStack::CallerFrame * static_cast<int>(sizeof(Register))));
    store64(regT0, Address(regT1, JSStack::Callee * static_cast<int>(sizeof(Register))));
    move(regT1, callFrameRegister);

    if (opcodeID == op_call_eval) {
        // Eval calls are never linked; no call-link info slot is consumed.
        compileCallEval(instruction);
        return;
    }

    // Patchable callee check: initially compares against 0 so the first
    // execution always takes the slow path, which links the call site.
    // The uninterrupted sequence keeps the patchable code layout fixed.
    DataLabelPtr addressOfLinkedFunctionCheck;
    BEGIN_UNINTERRUPTED_SEQUENCE(sequenceOpCall);
    Jump slowCase = branchPtrWithPatch(NotEqual, regT0, addressOfLinkedFunctionCheck, TrustedImmPtr(0));
    END_UNINTERRUPTED_SEQUENCE(sequenceOpCall);
    addSlowCase(slowCase);

    // Record everything needed to link/repatch this call site later.
    ASSERT(m_callStructureStubCompilationInfo.size() == callLinkInfoIndex);
    m_callStructureStubCompilationInfo.append(StructureStubCompilationInfo());
    m_callStructureStubCompilationInfo[callLinkInfoIndex].hotPathBegin = addressOfLinkedFunctionCheck;
    m_callStructureStubCompilationInfo[callLinkInfoIndex].callType = CallLinkInfo::callTypeFor(opcodeID);
    m_callStructureStubCompilationInfo[callLinkInfoIndex].bytecodeIndex = m_bytecodeOffset;

    // Install the callee's scope chain, then emit the (initially unlinked)
    // near call whose target the link stub will patch.
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_scope)), regT1);
    emitPutToCallFrameHeader(regT1, JSStack::ScopeChain);
    m_callStructureStubCompilationInfo[callLinkInfoIndex].hotPathOther = emitNakedCall();

    sampleCodeBlock(m_codeBlock);
    
    emitPutCallResult(instruction);
}
226
// Slow path shared by all call opcodes: for eval, defer to the eval slow
// case; otherwise call the linking thunk, which binds the call site.
void JIT::compileOpCallSlowCase(OpcodeID opcodeID, Instruction* instruction, Vector<SlowCaseEntry>::iterator& iter, unsigned callLinkInfoIndex)
{
    if (opcodeID == op_call_eval) {
        compileCallEvalSlowCase(instruction, iter);
        return;
    }

    linkSlowCase(iter);
    
    // Constructors link through a separate thunk from ordinary calls. The
    // return location of this naked call is what gets repatched later.
    m_callStructureStubCompilationInfo[callLinkInfoIndex].callReturnLocation = emitNakedCall(opcodeID == op_construct ? m_vm->getCTIStub(linkConstructGenerator).code() : m_vm->getCTIStub(linkCallGenerator).code());

    sampleCodeBlock(m_codeBlock);
    
    emitPutCallResult(instruction);
}
242
// Compiles a closure-call stub: a fast path that accepts any callee cell with
// the expected structure and executable (i.e. other closures of the same
// function), falling through to the virtual-call thunk otherwise. The hot
// path's patchable branch is then redirected into this stub.
void JIT::privateCompileClosureCall(CallLinkInfo* callLinkInfo, CodeBlock* calleeCodeBlock, Structure* expectedStructure, ExecutableBase* expectedExecutable, MacroAssemblerCodePtr codePtr)
{
    JumpList slowCases;
    
    // Guards: callee (in regT0) must be a cell, with the expected structure
    // and the expected executable.
    slowCases.append(branchTestPtr(NonZero, regT0, tagMaskRegister));
    slowCases.append(branchPtr(NotEqual, Address(regT0, JSCell::structureOffset()), TrustedImmPtr(expectedStructure)));
    slowCases.append(branchPtr(NotEqual, Address(regT0, JSFunction::offsetOfExecutable()), TrustedImmPtr(expectedExecutable)));
    
    // Install this particular closure's scope chain before entering the callee.
    loadPtr(Address(regT0, JSFunction::offsetOfScopeChain()), regT1);
    emitPutToCallFrameHeader(regT1, JSStack::ScopeChain);
    
    Call call = nearCall();
    Jump done = jump();
    
    // Slow path: restore the original return address, then jump to the
    // generic virtual-call thunk.
    slowCases.link(this);
    move(TrustedImmPtr(callLinkInfo->callReturnLocation.executableAddress()), regT2);
    restoreReturnAddressBeforeReturn(regT2);
    Jump slow = jump();
    
    LinkBuffer patchBuffer(*m_vm, this, m_codeBlock);
    
    // Bind the near call to the callee's code, the fast-path return to the
    // instruction after the original call, and the slow path to the thunk.
    patchBuffer.link(call, FunctionPtr(codePtr.executableAddress()));
    patchBuffer.link(done, callLinkInfo->hotPathOther.labelAtOffset(0));
    patchBuffer.link(slow, CodeLocationLabel(m_vm->getCTIStub(virtualCallGenerator).code()));
    
    RefPtr<ClosureCallStubRoutine> stubRoutine = adoptRef(new ClosureCallStubRoutine(
        FINALIZE_CODE(
            patchBuffer,
            ("Baseline closure call stub for %s, return point %p, target %p (%s)",
                toCString(*m_codeBlock).data(),
                callLinkInfo->hotPathOther.labelAtOffset(0).executableAddress(),
                codePtr.executableAddress(),
                toCString(pointerDump(calleeCodeBlock)).data())),
        *m_vm, m_codeBlock->ownerExecutable(), expectedStructure, expectedExecutable,
        callLinkInfo->codeOrigin));
    
    RepatchBuffer repatchBuffer(m_codeBlock);
    
    // Divert the hot path's patchable compare into this stub, and relink the
    // slow-path call site to go virtual from now on.
    repatchBuffer.replaceWithJump(
        RepatchBuffer::startOfBranchPtrWithPatchOnRegister(callLinkInfo->hotPathBegin),
        CodeLocationLabel(stubRoutine->code().code()));
    repatchBuffer.relink(callLinkInfo->callReturnLocation, m_vm->getCTIStub(virtualCallGenerator).code());
    
    // Keep the stub alive for as long as the call link info references it.
    callLinkInfo->stub = stubRoutine.release();
}
288
289 void JIT::emit_op_call(Instruction* currentInstruction)
290 {
291     compileOpCall(op_call, currentInstruction, m_callLinkInfoIndex++);
292 }
293
294 void JIT::emit_op_call_eval(Instruction* currentInstruction)
295 {
296     compileOpCall(op_call_eval, currentInstruction, m_callLinkInfoIndex);
297 }
298
299 void JIT::emit_op_call_varargs(Instruction* currentInstruction)
300 {
301     compileOpCall(op_call_varargs, currentInstruction, m_callLinkInfoIndex++);
302 }
303
304 void JIT::emit_op_construct(Instruction* currentInstruction)
305 {
306     compileOpCall(op_construct, currentInstruction, m_callLinkInfoIndex++);
307 }
308
309 void JIT::emitSlow_op_call(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
310 {
311     compileOpCallSlowCase(op_call, currentInstruction, iter, m_callLinkInfoIndex++);
312 }
313
314 void JIT::emitSlow_op_call_eval(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
315 {
316     compileOpCallSlowCase(op_call_eval, currentInstruction, iter, m_callLinkInfoIndex);
317 }
318  
319 void JIT::emitSlow_op_call_varargs(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
320 {
321     compileOpCallSlowCase(op_call_varargs, currentInstruction, iter, m_callLinkInfoIndex++);
322 }
323
324 void JIT::emitSlow_op_construct(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
325 {
326     compileOpCallSlowCase(op_construct, currentInstruction, iter, m_callLinkInfoIndex++);
327 }
328
329 } // namespace JSC
330
331 #endif // USE(JSVALUE64)
332 #endif // ENABLE(JIT)