JIT Engines use the wrong stack limit for stack checks
Source/JavaScriptCore/jit/JITCall.cpp
/*
 * Copyright (C) 2008, 2013 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"

#if ENABLE(JIT)
#if USE(JSVALUE64)
#include "JIT.h"

#include "Arguments.h"
#include "CodeBlock.h"
#include "Interpreter.h"
#include "JITInlines.h"
#include "JSArray.h"
#include "JSCInlines.h"
#include "JSFunction.h"
#include "LinkBuffer.h"
#include "RepatchBuffer.h"
#include "ResultType.h"
#include "SamplingTool.h"
#include "StackAlignment.h"
#include "ThunkGenerators.h"
#include <wtf/StringPrintStream.h>

namespace JSC {

void JIT::emitPutCallResult(Instruction* instruction)
{
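    // Record the call's result with the value profiler (the DFG uses these
    // profiles to speculate on result types), then store it to dst.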
    int dst = instruction[1].u.operand;
    emitValueProfilingSite();
    emitPutVirtualRegister(dst);
}

void JIT::compileLoadVarargs(Instruction* instruction)
{
    int thisValue = instruction[3].u.operand;
    int arguments = instruction[4].u.operand;
    int firstFreeRegister = instruction[5].u.operand;
    int firstVarArgOffset = instruction[6].u.operand;

    JumpList slowCase;
    JumpList end;
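    // op_call_varargs typically comes from forwarding calls of the form
    // f.apply(thisArg, arguments). The fast path below copies the arguments
    // straight out of this frame, which is only sound when the 'arguments'
    // operand is this code block's own arguments register and it was never
    // put into slow mode; the runtime check below additionally requires that
    // the arguments object was never actually materialized.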
    bool canOptimize = m_codeBlock->usesArguments()
        && arguments == m_codeBlock->argumentsRegister().offset()
        && !m_codeBlock->symbolTable()->slowArguments();

    if (canOptimize) {
        emitGetVirtualRegister(arguments, regT0);
        slowCase.append(branch64(NotEqual, regT0, TrustedImm64(JSValue::encode(JSValue()))));

        emitGetFromCallFrameHeader32(JSStack::ArgumentCount, regT0);
        if (firstVarArgOffset)
            sub32(TrustedImm32(firstVarArgOffset), regT0);
        slowCase.append(branch32(Above, regT0, TrustedImm32(Arguments::MaxArguments + 1)));
        // regT0: argumentCountIncludingThis
        move(regT0, regT1);
        add64(TrustedImm32(-firstFreeRegister + JSStack::CallFrameHeaderSize), regT1);
        // regT1 now has the required frame size in Register units
        // Round regT1 to next multiple of stackAlignmentRegisters()
        add64(TrustedImm32(stackAlignmentRegisters() - 1), regT1);
        and64(TrustedImm32(~(stackAlignmentRegisters() - 1)), regT1);
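        // Worked example (assuming stackAlignmentRegisters() == 2, i.e.
        // 16-byte stack alignment with 8-byte Registers): a raw size of
        // 7 Registers rounds to (7 + 1) & ~1 = 8, keeping the new frame
        // 16-byte aligned.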

        neg64(regT1);
        lshift64(TrustedImm32(3), regT1);
        addPtr(callFrameRegister, regT1);
        // regT1: newCallFrame

        slowCase.append(branchPtr(Above, AbsoluteAddress(m_vm->addressOfStackLimit()), regT1));
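        // Stack grows down: regT1 holds the prospective new call frame, which
        // must stay above the VM's stack limit. If the limit is above regT1
        // the frame does not fit, so we take the slow path, which can report
        // the stack overflow. Per the commit title, this comparison must use
        // the VM's stack limit, m_vm->addressOfStackLimit().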

        // Initialize ArgumentCount.
        store32(regT0, Address(regT1, JSStack::ArgumentCount * static_cast<int>(sizeof(Register)) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));

        // Initialize 'this'.
        emitGetVirtualRegister(thisValue, regT2);
        store64(regT2, Address(regT1, CallFrame::thisArgumentOffset() * static_cast<int>(sizeof(Register))));

        // Copy arguments.
        signExtend32ToPtr(regT0, regT0);
        end.append(branchSub64(Zero, TrustedImm32(1), regT0));
        // regT0: argumentCount

        Label copyLoop = label();
        load64(BaseIndex(callFrameRegister, regT0, TimesEight, (CallFrame::thisArgumentOffset() + firstVarArgOffset) * static_cast<int>(sizeof(Register))), regT2);
        store64(regT2, BaseIndex(regT1, regT0, TimesEight, CallFrame::thisArgumentOffset() * static_cast<int>(sizeof(Register))));
        branchSub64(NonZero, TrustedImm32(1), regT0).linkTo(copyLoop, this);
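        // The loop above walks regT0 from argumentCountIncludingThis - 1 down
        // to 1, copying each argument from the caller's frame into the new
        // frame; slot 0 ('this') was stored separately above. On the source
        // side, firstVarArgOffset skips any leading arguments the bytecode
        // asked to drop.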

        end.append(jump());
    }

    if (canOptimize)
        slowCase.link(this);

    emitGetVirtualRegister(arguments, regT1);
    callOperation(operationSizeFrameForVarargs, regT1, firstFreeRegister, firstVarArgOffset);
    move(returnValueGPR, stackPointerRegister);
    emitGetVirtualRegister(thisValue, regT1);
    emitGetVirtualRegister(arguments, regT2);
    callOperation(operationLoadVarargs, returnValueGPR, regT1, regT2, firstVarArgOffset);
    move(returnValueGPR, regT1);

    if (canOptimize)
        end.link(this);

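    // Both paths leave the new call frame in regT1. Park the stack pointer
    // just past the CallerFrameAndPC slots, which is the convention the
    // frame setup in compileOpCall relies on.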
    addPtr(TrustedImm32(sizeof(CallerFrameAndPC)), regT1, stackPointerRegister);
}

void JIT::compileCallEval(Instruction* instruction)
{
    addPtr(TrustedImm32(-static_cast<ptrdiff_t>(sizeof(CallerFrameAndPC))), stackPointerRegister, regT1);
    callOperationNoExceptionCheck(operationCallEval, regT1);

    Jump noException = emitExceptionCheck(InvertedExceptionCheck);
    addPtr(TrustedImm32(stackPointerOffsetFor(m_codeBlock) * sizeof(Register)), callFrameRegister, stackPointerRegister);
    exceptionCheck(jump());

    noException.link(this);
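    // operationCallEval returns the empty JSValue when the callee turns out
    // not to be the genuine eval; in that case take the slow path, which
    // performs an ordinary virtual call instead.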
    addSlowCase(branch64(Equal, regT0, TrustedImm64(JSValue::encode(JSValue()))));

    addPtr(TrustedImm32(stackPointerOffsetFor(m_codeBlock) * sizeof(Register)), callFrameRegister, stackPointerRegister);
    checkStackPointerAlignment();

    sampleCodeBlock(m_codeBlock);

    emitPutCallResult(instruction);
}

void JIT::compileCallEvalSlowCase(Instruction* instruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);

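    // The callee was not the genuine eval, so fall back to an ordinary
    // virtual call: reload the callee from the frame we already built (the
    // operation call clobbered our registers); the virtual call thunk
    // expects it in regT0.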
    load64(Address(stackPointerRegister, sizeof(Register) * JSStack::Callee - sizeof(CallerFrameAndPC)), regT0);
    emitNakedCall(m_vm->getCTIStub(virtualCallThunkGenerator).code());
    addPtr(TrustedImm32(stackPointerOffsetFor(m_codeBlock) * sizeof(Register)), callFrameRegister, stackPointerRegister);
    checkStackPointerAlignment();

    sampleCodeBlock(m_codeBlock);

    emitPutCallResult(instruction);
}

void JIT::compileOpCall(OpcodeID opcodeID, Instruction* instruction, unsigned callLinkInfoIndex)
{
    int callee = instruction[2].u.operand;

    /* Caller always:
        - Updates callFrameRegister to callee callFrame.
        - Initializes ArgumentCount; CallerFrame; Callee.

       For a JS call:
        - Caller initializes ScopeChain.
        - Callee initializes ReturnPC; CodeBlock.
        - Callee restores callFrameRegister before return.

       For a non-JS call:
        - Caller initializes ScopeChain; ReturnPC; CodeBlock.
        - Caller restores callFrameRegister after return.
    */
    COMPILE_ASSERT(OPCODE_LENGTH(op_call) == OPCODE_LENGTH(op_construct), call_and_construct_opcodes_must_be_same_length);
    COMPILE_ASSERT(OPCODE_LENGTH(op_call) == OPCODE_LENGTH(op_call_varargs), call_and_call_varargs_opcodes_must_be_same_length);
    if (opcodeID == op_call_varargs)
        compileLoadVarargs(instruction);
    else {
        int argCount = instruction[3].u.operand;
        int registerOffset = -instruction[4].u.operand;

        if (opcodeID == op_call && shouldEmitProfiling()) {
            emitGetVirtualRegister(registerOffset + CallFrame::argumentOffsetIncludingThis(0), regT0);
            Jump done = emitJumpIfNotJSCell(regT0);
            loadPtr(Address(regT0, JSCell::structureOffset()), regT0);
            storePtr(regT0, instruction[OPCODE_LENGTH(op_call) - 2].u.arrayProfile->addressOfLastSeenStructure());
            done.link(this);
        }

        addPtr(TrustedImm32(registerOffset * sizeof(Register) + sizeof(CallerFrameAndPC)), callFrameRegister, stackPointerRegister);
        store32(TrustedImm32(argCount), Address(stackPointerRegister, JSStack::ArgumentCount * static_cast<int>(sizeof(Register)) + PayloadOffset - sizeof(CallerFrameAndPC)));
    } // SP holds newCallFrame + sizeof(CallerFrameAndPC), with ArgumentCount initialized.

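    // Tag the caller's frame with the bytecode offset of this call site: it
    // lives in the tag half of the caller's ArgumentCount slot, and the
    // runtime uses it to recover the return location when walking or
    // unwinding the stack.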
    uint32_t bytecodeOffset = instruction - m_codeBlock->instructions().begin();
    uint32_t locationBits = CallFrame::Location::encodeAsBytecodeOffset(bytecodeOffset);
    store32(TrustedImm32(locationBits), Address(callFrameRegister, JSStack::ArgumentCount * static_cast<int>(sizeof(Register)) + TagOffset));
    emitGetVirtualRegister(callee, regT0); // regT0 holds callee.

    store64(regT0, Address(stackPointerRegister, JSStack::Callee * static_cast<int>(sizeof(Register)) - sizeof(CallerFrameAndPC)));

    if (opcodeID == op_call_eval) {
        compileCallEval(instruction);
        return;
    }

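    // Linked-call inline cache: emit a patchable compare of the callee
    // against a pointer that starts out null, so the first call always takes
    // the slow path. Linking repatches the constant with the expected
    // JSFunction, after which repeat calls with the same callee go straight
    // through the naked call below.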
    DataLabelPtr addressOfLinkedFunctionCheck;
    Jump slowCase = branchPtrWithPatch(NotEqual, regT0, addressOfLinkedFunctionCheck, TrustedImmPtr(0));
    addSlowCase(slowCase);

    ASSERT(m_callStructureStubCompilationInfo.size() == callLinkInfoIndex);
    m_callStructureStubCompilationInfo.append(StructureStubCompilationInfo());
    m_callStructureStubCompilationInfo[callLinkInfoIndex].hotPathBegin = addressOfLinkedFunctionCheck;
    m_callStructureStubCompilationInfo[callLinkInfoIndex].callType = CallLinkInfo::callTypeFor(opcodeID);
    m_callStructureStubCompilationInfo[callLinkInfoIndex].bytecodeIndex = m_bytecodeOffset;

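    // Per the contract above, the caller initializes the callee frame's
    // ScopeChain slot, here from the function object's scope.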
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_scope)), regT2);
    store64(regT2, Address(MacroAssembler::stackPointerRegister, JSStack::ScopeChain * sizeof(Register) - sizeof(CallerFrameAndPC)));

    m_callStructureStubCompilationInfo[callLinkInfoIndex].hotPathOther = emitNakedCall();

    addPtr(TrustedImm32(stackPointerOffsetFor(m_codeBlock) * sizeof(Register)), callFrameRegister, stackPointerRegister);
    checkStackPointerAlignment();

    sampleCodeBlock(m_codeBlock);

    emitPutCallResult(instruction);
}

void JIT::compileOpCallSlowCase(OpcodeID opcodeID, Instruction* instruction, Vector<SlowCaseEntry>::iterator& iter, unsigned callLinkInfoIndex)
{
    if (opcodeID == op_call_eval) {
        compileCallEvalSlowCase(instruction, iter);
        return;
    }

    linkSlowCase(iter);

    ThunkGenerator generator = linkThunkGeneratorFor(
        opcodeID == op_construct ? CodeForConstruct : CodeForCall,
        RegisterPreservationNotRequired);

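    // First execution goes through the lazy-linking thunk, which resolves
    // (and if necessary compiles) the callee, then links this call site so
    // later calls take the fast path.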
    m_callStructureStubCompilationInfo[callLinkInfoIndex].callReturnLocation = emitNakedCall(m_vm->getCTIStub(generator).code());

    addPtr(TrustedImm32(stackPointerOffsetFor(m_codeBlock) * sizeof(Register)), callFrameRegister, stackPointerRegister);
    checkStackPointerAlignment();

    sampleCodeBlock(m_codeBlock);

    emitPutCallResult(instruction);
}

void JIT::privateCompileClosureCall(CallLinkInfo* callLinkInfo, CodeBlock* calleeCodeBlock, Structure* expectedStructure, ExecutableBase* expectedExecutable, MacroAssemblerCodePtr codePtr)
{
    JumpList slowCases;

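    // Guard the closure-call specialization: the callee must be a cell (the
    // tag-mask test), must have the expected Structure, and must wrap the
    // expected executable. Any mismatch falls through to the virtual call
    // thunk.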
    slowCases.append(branchTestPtr(NonZero, regT0, tagMaskRegister));
    slowCases.append(branchPtr(NotEqual, Address(regT0, JSCell::structureOffset()), TrustedImmPtr(expectedStructure)));
    slowCases.append(branchPtr(NotEqual, Address(regT0, JSFunction::offsetOfExecutable()), TrustedImmPtr(expectedExecutable)));

    loadPtr(Address(regT0, JSFunction::offsetOfScopeChain()), regT1);
    emitPutToCallFrameHeader(regT1, JSStack::ScopeChain);

    Call call = nearCall();
    Jump done = jump();

    slowCases.link(this);
    move(TrustedImmPtr(callLinkInfo->callReturnLocation.executableAddress()), regT2);
    restoreReturnAddressBeforeReturn(regT2);
    Jump slow = jump();

    LinkBuffer patchBuffer(*m_vm, this, m_codeBlock);

    patchBuffer.link(call, FunctionPtr(codePtr.executableAddress()));
    patchBuffer.link(done, callLinkInfo->hotPathOther.labelAtOffset(0));
    patchBuffer.link(slow, CodeLocationLabel(m_vm->getCTIStub(virtualCallThunkGenerator).code()));

    RefPtr<ClosureCallStubRoutine> stubRoutine = adoptRef(new ClosureCallStubRoutine(
        FINALIZE_CODE(
            patchBuffer,
            ("Baseline closure call stub for %s, return point %p, target %p (%s)",
                toCString(*m_codeBlock).data(),
                callLinkInfo->hotPathOther.labelAtOffset(0).executableAddress(),
                codePtr.executableAddress(),
                toCString(pointerDump(calleeCodeBlock)).data())),
        *m_vm, m_codeBlock->ownerExecutable(), expectedStructure, expectedExecutable,
        callLinkInfo->codeOrigin));

    RepatchBuffer repatchBuffer(m_codeBlock);

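    // Install the stub: swap the hot path's patchable branch for a jump into
    // the stub, and point the slow-path call at the virtual call thunk so a
    // guard failure degrades to a generic virtual call.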
    repatchBuffer.replaceWithJump(
        RepatchBuffer::startOfBranchPtrWithPatchOnRegister(callLinkInfo->hotPathBegin),
        CodeLocationLabel(stubRoutine->code().code()));
    repatchBuffer.relink(callLinkInfo->callReturnLocation, m_vm->getCTIStub(virtualCallThunkGenerator).code());

    callLinkInfo->stub = stubRoutine.release();
}

void JIT::emit_op_call(Instruction* currentInstruction)
{
    compileOpCall(op_call, currentInstruction, m_callLinkInfoIndex++);
}

void JIT::emit_op_call_eval(Instruction* currentInstruction)
{
    compileOpCall(op_call_eval, currentInstruction, m_callLinkInfoIndex);
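    // Deliberately no increment: an eval call site does not allocate an entry
    // in m_callStructureStubCompilationInfo (likewise in
    // emitSlow_op_call_eval below).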
}

void JIT::emit_op_call_varargs(Instruction* currentInstruction)
{
    compileOpCall(op_call_varargs, currentInstruction, m_callLinkInfoIndex++);
}

void JIT::emit_op_construct(Instruction* currentInstruction)
{
    compileOpCall(op_construct, currentInstruction, m_callLinkInfoIndex++);
}

void JIT::emitSlow_op_call(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase(op_call, currentInstruction, iter, m_callLinkInfoIndex++);
}

void JIT::emitSlow_op_call_eval(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase(op_call_eval, currentInstruction, iter, m_callLinkInfoIndex);
}

void JIT::emitSlow_op_call_varargs(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase(op_call_varargs, currentInstruction, iter, m_callLinkInfoIndex++);
}

void JIT::emitSlow_op_construct(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase(op_construct, currentInstruction, iter, m_callLinkInfoIndex++);
}

} // namespace JSC

#endif // USE(JSVALUE64)
#endif // ENABLE(JIT)