/*
 * Copyright (C) 2008, 2013, 2014 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"

#if ENABLE(JIT)
#if USE(JSVALUE64)
#include "JIT.h"

#include "Arguments.h"
#include "CodeBlock.h"
#include "JITInlines.h"
#include "JSArray.h"
#include "JSFunction.h"
#include "Interpreter.h"
#include "JSCInlines.h"
#include "LinkBuffer.h"
#include "RepatchBuffer.h"
#include "ResultType.h"
#include "SamplingTool.h"
#include "StackAlignment.h"
#include "ThunkGenerators.h"
#include <wtf/StringPrintStream.h>

namespace JSC {

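// Store a call's return value (left in regT0 by the callee) into the
// destination virtual register named by operand 1, recording it at the value
// profiling site first so later tiers can see the result types.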
void JIT::emitPutCallResult(Instruction* instruction)
{
    int dst = instruction[1].u.operand;
    emitValueProfilingSite();
    emitPutVirtualRegister(dst);
}

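// Set up the callee frame for op_call_varargs / op_construct_varargs. When the
// 'arguments' operand is the code block's unmodified arguments register, we emit
// an inline fast path that sizes, allocates, and fills the new frame directly;
// if any of its checks fail, we fall through to the generic
// operationSizeFrameForVarargs / operationLoadVarargs slow path below.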
void JIT::compileLoadVarargs(Instruction* instruction)
{
    int thisValue = instruction[3].u.operand;
    int arguments = instruction[4].u.operand;
    int firstFreeRegister = instruction[5].u.operand;
    int firstVarArgOffset = instruction[6].u.operand;

    JumpList slowCase;
    JumpList end;
    bool canOptimize = m_codeBlock->usesArguments()
        && arguments == m_codeBlock->argumentsRegister().offset()
        && !m_codeBlock->symbolTable()->slowArguments();

    if (canOptimize) {
        emitGetVirtualRegister(arguments, regT0);
        slowCase.append(branch64(NotEqual, regT0, TrustedImm64(JSValue::encode(JSValue()))));

        emitGetFromCallFrameHeader32(JSStack::ArgumentCount, regT0);
        if (firstVarArgOffset) {
            Jump sufficientArguments = branch32(GreaterThan, regT0, TrustedImm32(firstVarArgOffset + 1));
            move(TrustedImm32(1), regT0);
            Jump endVarArgs = jump();
            sufficientArguments.link(this);
            sub32(TrustedImm32(firstVarArgOffset), regT0);
            endVarArgs.link(this);
        }
        slowCase.append(branch32(Above, regT0, TrustedImm32(Arguments::MaxArguments + 1)));
        // regT0: argumentCountIncludingThis
        move(regT0, regT1);
        add64(TrustedImm32(-firstFreeRegister + JSStack::CallFrameHeaderSize), regT1);
        // regT1 now has the required frame size in Register units
        // Round regT1 to next multiple of stackAlignmentRegisters()
        add64(TrustedImm32(stackAlignmentRegisters() - 1), regT1);
        and64(TrustedImm32(~(stackAlignmentRegisters() - 1)), regT1);
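        // The add/and pair above is the standard round-up idiom for a
        // power-of-two alignment: rounded = (n + (align - 1)) & ~(align - 1).
        // For example, assuming stackAlignmentRegisters() == 2 (16-byte
        // alignment with 8-byte Registers), a raw frame size of 7 Register
        // units becomes (7 + 1) & ~1 = 8.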

        neg64(regT1);
        lshift64(TrustedImm32(3), regT1);
        addPtr(callFrameRegister, regT1);
        // regT1: newCallFrame

        slowCase.append(branchPtr(Above, AbsoluteAddress(m_vm->addressOfStackLimit()), regT1));

        // Initialize ArgumentCount.
        store32(regT0, Address(regT1, JSStack::ArgumentCount * static_cast<int>(sizeof(Register)) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));

        // Initialize 'this'.
        emitGetVirtualRegister(thisValue, regT2);
        store64(regT2, Address(regT1, CallFrame::thisArgumentOffset() * static_cast<int>(sizeof(Register))));

        // Copy arguments.
        signExtend32ToPtr(regT0, regT0);
        end.append(branchSub64(Zero, TrustedImm32(1), regT0));
        // regT0: argumentCount

        Label copyLoop = label();
        load64(BaseIndex(callFrameRegister, regT0, TimesEight, (CallFrame::thisArgumentOffset() + firstVarArgOffset) * static_cast<int>(sizeof(Register))), regT2);
        store64(regT2, BaseIndex(regT1, regT0, TimesEight, CallFrame::thisArgumentOffset() * static_cast<int>(sizeof(Register))));
        branchSub64(NonZero, TrustedImm32(1), regT0).linkTo(copyLoop, this);

        end.append(jump());
    }

    if (canOptimize)
        slowCase.link(this);

    emitGetVirtualRegister(arguments, regT1);
    callOperation(operationSizeFrameForVarargs, regT1, firstFreeRegister, firstVarArgOffset);
    move(returnValueGPR, stackPointerRegister);
    emitGetVirtualRegister(thisValue, regT1);
    emitGetVirtualRegister(arguments, regT2);
    callOperation(operationLoadVarargs, returnValueGPR, regT1, regT2, firstVarArgOffset);
    move(returnValueGPR, regT1);

    if (canOptimize)
        end.link(this);

    addPtr(TrustedImm32(sizeof(CallerFrameAndPC)), regT1, stackPointerRegister);
}

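// op_call_eval hot path. The call itself is performed by operationCallEval in
// C++; a tagged-empty result diverts to the slow case (see the addSlowCase
// below), which dispatches the callee as an ordinary virtual call instead.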
void JIT::compileCallEval(Instruction* instruction)
{
    addPtr(TrustedImm32(-static_cast<ptrdiff_t>(sizeof(CallerFrameAndPC))), stackPointerRegister, regT1);
    callOperationNoExceptionCheck(operationCallEval, regT1);

    Jump noException = emitExceptionCheck(InvertedExceptionCheck);
    addPtr(TrustedImm32(stackPointerOffsetFor(m_codeBlock) * sizeof(Register)), callFrameRegister, stackPointerRegister);
    exceptionCheck(jump());

    noException.link(this);
    addSlowCase(branch64(Equal, regT0, TrustedImm64(JSValue::encode(JSValue()))));

    addPtr(TrustedImm32(stackPointerOffsetFor(m_codeBlock) * sizeof(Register)), callFrameRegister, stackPointerRegister);
    checkStackPointerAlignment();

    sampleCodeBlock(m_codeBlock);

    emitPutCallResult(instruction);
}

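// op_call_eval slow path: the callee was not handled as eval, so reload it
// from the frame we already materialized and go through the virtual call thunk.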
void JIT::compileCallEvalSlowCase(Instruction* instruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);

    load64(Address(stackPointerRegister, sizeof(Register) * JSStack::Callee - sizeof(CallerFrameAndPC)), regT0);
    move(TrustedImmPtr(&CallLinkInfo::dummy()), regT2);
    emitNakedCall(m_vm->getCTIStub(virtualCallThunkGenerator).code());
    addPtr(TrustedImm32(stackPointerOffsetFor(m_codeBlock) * sizeof(Register)), callFrameRegister, stackPointerRegister);
    checkStackPointerAlignment();

    sampleCodeBlock(m_codeBlock);

    emitPutCallResult(instruction);
}

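// Hot path shared by op_call, op_construct, and their varargs variants: set up
// the callee frame, record the return location and callee, and then either
// take the eval path or plant the patchable linked-call check
// (branchPtrWithPatch) that call linking later repoints at a known JSFunction.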
void JIT::compileOpCall(OpcodeID opcodeID, Instruction* instruction, unsigned callLinkInfoIndex)
{
    int callee = instruction[2].u.operand;

    /* Caller always:
        - Updates callFrameRegister to callee callFrame.
        - Initializes ArgumentCount; CallerFrame; Callee.

       For a JS call:
        - Caller initializes ScopeChain.
        - Callee initializes ReturnPC; CodeBlock.
        - Callee restores callFrameRegister before return.

       For a non-JS call:
        - Caller initializes ScopeChain; ReturnPC; CodeBlock.
        - Caller restores callFrameRegister after return.
    */
    COMPILE_ASSERT(OPCODE_LENGTH(op_call) == OPCODE_LENGTH(op_construct), call_and_construct_opcodes_must_be_same_length);
    COMPILE_ASSERT(OPCODE_LENGTH(op_call) == OPCODE_LENGTH(op_call_varargs), call_and_call_varargs_opcodes_must_be_same_length);
    COMPILE_ASSERT(OPCODE_LENGTH(op_call) == OPCODE_LENGTH(op_construct_varargs), call_and_construct_varargs_opcodes_must_be_same_length);
    if (opcodeID == op_call_varargs || opcodeID == op_construct_varargs)
        compileLoadVarargs(instruction);
    else {
        int argCount = instruction[3].u.operand;
        int registerOffset = -instruction[4].u.operand;

        if (opcodeID == op_call && shouldEmitProfiling()) {
            emitGetVirtualRegister(registerOffset + CallFrame::argumentOffsetIncludingThis(0), regT0);
            Jump done = emitJumpIfNotJSCell(regT0);
            load32(Address(regT0, JSCell::structureIDOffset()), regT0);
            store32(regT0, instruction[OPCODE_LENGTH(op_call) - 2].u.arrayProfile->addressOfLastSeenStructureID());
            done.link(this);
        }

        addPtr(TrustedImm32(registerOffset * sizeof(Register) + sizeof(CallerFrameAndPC)), callFrameRegister, stackPointerRegister);
        store32(TrustedImm32(argCount), Address(stackPointerRegister, JSStack::ArgumentCount * static_cast<int>(sizeof(Register)) + PayloadOffset - sizeof(CallerFrameAndPC)));
    } // SP holds newCallFrame + sizeof(CallerFrameAndPC), with ArgumentCount initialized.

    uint32_t bytecodeOffset = instruction - m_codeBlock->instructions().begin();
    uint32_t locationBits = CallFrame::Location::encodeAsBytecodeOffset(bytecodeOffset);
    store32(TrustedImm32(locationBits), Address(callFrameRegister, JSStack::ArgumentCount * static_cast<int>(sizeof(Register)) + TagOffset));
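    // The ArgumentCount header slot does double duty: its payload half holds
    // the argument count, while its tag half (written here) holds the caller's
    // bytecode offset encoded as a CallFrame::Location.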
    emitGetVirtualRegister(callee, regT0); // regT0 holds callee.

    store64(regT0, Address(stackPointerRegister, JSStack::Callee * static_cast<int>(sizeof(Register)) - sizeof(CallerFrameAndPC)));

    if (opcodeID == op_call_eval) {
        compileCallEval(instruction);
        return;
    }

    DataLabelPtr addressOfLinkedFunctionCheck;
    Jump slowCase = branchPtrWithPatch(NotEqual, regT0, addressOfLinkedFunctionCheck, TrustedImmPtr(0));
    addSlowCase(slowCase);

    ASSERT(m_callCompilationInfo.size() == callLinkInfoIndex);
    CallLinkInfo* info = m_codeBlock->addCallLinkInfo();
    info->callType = CallLinkInfo::callTypeFor(opcodeID);
    info->codeOrigin = CodeOrigin(m_bytecodeOffset);
    info->calleeGPR = regT0;
    m_callCompilationInfo.append(CallCompilationInfo());
    m_callCompilationInfo[callLinkInfoIndex].hotPathBegin = addressOfLinkedFunctionCheck;
    m_callCompilationInfo[callLinkInfoIndex].callLinkInfo = info;

    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_scope)), regT2);
    store64(regT2, Address(MacroAssembler::stackPointerRegister, JSStack::ScopeChain * sizeof(Register) - sizeof(CallerFrameAndPC)));

    m_callCompilationInfo[callLinkInfoIndex].hotPathOther = emitNakedCall();

    addPtr(TrustedImm32(stackPointerOffsetFor(m_codeBlock) * sizeof(Register)), callFrameRegister, stackPointerRegister);
    checkStackPointerAlignment();

    sampleCodeBlock(m_codeBlock);

    emitPutCallResult(instruction);
}

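// Slow path for an unlinked (or mispredicted) call: pass the CallLinkInfo in
// regT2 and call the link thunk, which links the call site on first use.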
void JIT::compileOpCallSlowCase(OpcodeID opcodeID, Instruction* instruction, Vector<SlowCaseEntry>::iterator& iter, unsigned callLinkInfoIndex)
{
    if (opcodeID == op_call_eval) {
        compileCallEvalSlowCase(instruction, iter);
        return;
    }

    linkSlowCase(iter);

    ThunkGenerator generator = linkThunkGeneratorFor(
        (opcodeID == op_construct || opcodeID == op_construct_varargs) ? CodeForConstruct : CodeForCall,
        RegisterPreservationNotRequired);

    move(TrustedImmPtr(m_callCompilationInfo[callLinkInfoIndex].callLinkInfo), regT2);
    m_callCompilationInfo[callLinkInfoIndex].callReturnLocation = emitNakedCall(m_vm->getCTIStub(generator).code());

    addPtr(TrustedImm32(stackPointerOffsetFor(m_codeBlock) * sizeof(Register)), callFrameRegister, stackPointerRegister);
    checkStackPointerAlignment();

    sampleCodeBlock(m_codeBlock);

    emitPutCallResult(instruction);
}

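// Build a closure call stub: a fast structure-and-executable check that calls
// the known target, jumping back to the virtual call thunk on mismatch. The
// finished stub is patched over the hot path's branchPtrWithPatch via a
// RepatchBuffer.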
void JIT::privateCompileClosureCall(CallLinkInfo* callLinkInfo, CodeBlock* calleeCodeBlock, Structure* expectedStructure, ExecutableBase* expectedExecutable, MacroAssemblerCodePtr codePtr)
{
    JumpList slowCases;

    slowCases.append(branchTestPtr(NonZero, regT0, tagMaskRegister));
    slowCases.append(branchStructure(NotEqual, Address(regT0, JSCell::structureIDOffset()), expectedStructure));
    slowCases.append(branchPtr(NotEqual, Address(regT0, JSFunction::offsetOfExecutable()), TrustedImmPtr(expectedExecutable)));

    loadPtr(Address(regT0, JSFunction::offsetOfScopeChain()), regT1);
    emitPutToCallFrameHeader(regT1, JSStack::ScopeChain);

    Call call = nearCall();
    Jump done = jump();

    slowCases.link(this);
    move(TrustedImmPtr(callLinkInfo->callReturnLocation.executableAddress()), regT2);
    restoreReturnAddressBeforeReturn(regT2);
    Jump slow = jump();

    LinkBuffer patchBuffer(*m_vm, *this, m_codeBlock);

    patchBuffer.link(call, FunctionPtr(codePtr.executableAddress()));
    patchBuffer.link(done, callLinkInfo->hotPathOther.labelAtOffset(0));
    patchBuffer.link(slow, CodeLocationLabel(m_vm->getCTIStub(virtualCallThunkGenerator).code()));

    RefPtr<ClosureCallStubRoutine> stubRoutine = adoptRef(new ClosureCallStubRoutine(
        FINALIZE_CODE(
            patchBuffer,
            ("Baseline closure call stub for %s, return point %p, target %p (%s)",
                toCString(*m_codeBlock).data(),
                callLinkInfo->hotPathOther.labelAtOffset(0).executableAddress(),
                codePtr.executableAddress(),
                toCString(pointerDump(calleeCodeBlock)).data())),
        *m_vm, m_codeBlock->ownerExecutable(), expectedStructure, expectedExecutable,
        callLinkInfo->codeOrigin));

    RepatchBuffer repatchBuffer(m_codeBlock);

    repatchBuffer.replaceWithJump(
        RepatchBuffer::startOfBranchPtrWithPatchOnRegister(callLinkInfo->hotPathBegin),
        CodeLocationLabel(stubRoutine->code().code()));
    repatchBuffer.relink(callLinkInfo->callReturnLocation, m_vm->getCTIStub(virtualCallThunkGenerator).code());

    callLinkInfo->stub = stubRoutine.release();
}

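// Per-opcode entry points: thin wrappers forwarding to compileOpCall /
// compileOpCallSlowCase. Note that the op_call_eval wrappers pass
// m_callLinkInfoIndex without incrementing it: the eval path returns before a
// CallLinkInfo is allocated, so no index is consumed.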
void JIT::emit_op_call(Instruction* currentInstruction)
{
    compileOpCall(op_call, currentInstruction, m_callLinkInfoIndex++);
}

void JIT::emit_op_call_eval(Instruction* currentInstruction)
{
    compileOpCall(op_call_eval, currentInstruction, m_callLinkInfoIndex);
}

void JIT::emit_op_call_varargs(Instruction* currentInstruction)
{
    compileOpCall(op_call_varargs, currentInstruction, m_callLinkInfoIndex++);
}

void JIT::emit_op_construct_varargs(Instruction* currentInstruction)
{
    compileOpCall(op_construct_varargs, currentInstruction, m_callLinkInfoIndex++);
}

void JIT::emit_op_construct(Instruction* currentInstruction)
{
    compileOpCall(op_construct, currentInstruction, m_callLinkInfoIndex++);
}

void JIT::emitSlow_op_call(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase(op_call, currentInstruction, iter, m_callLinkInfoIndex++);
}

void JIT::emitSlow_op_call_eval(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase(op_call_eval, currentInstruction, iter, m_callLinkInfoIndex);
}

void JIT::emitSlow_op_call_varargs(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase(op_call_varargs, currentInstruction, iter, m_callLinkInfoIndex++);
}

void JIT::emitSlow_op_construct_varargs(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase(op_construct_varargs, currentInstruction, iter, m_callLinkInfoIndex++);
}

void JIT::emitSlow_op_construct(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase(op_construct, currentInstruction, iter, m_callLinkInfoIndex++);
}

} // namespace JSC

#endif // USE(JSVALUE64)
#endif // ENABLE(JIT)