32-bit call code clobbers the function cell tag
Source/JavaScriptCore/jit/JITCall32_64.cpp
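The JSVALUE32_64 call paths check the callee with emitJumpIfNotType(regT0, regT1, JSFunctionType), and that check appears to use regT1 as a scratch register, destroying the CellTag half of the callee value. The slow paths below therefore re-materialize the tag (move(TrustedImm32(JSValue::CellTag), regT1)) before handing the callee to cti_op_call_NotJSFunction / cti_op_construct_NotJSConstruct, so those stubs always receive a well-formed JSValue.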
/*
 * Copyright (C) 2008 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"

#if ENABLE(JIT)
#if USE(JSVALUE32_64)
#include "JIT.h"

#include "CodeBlock.h"
#include "Interpreter.h"
#include "JITInlineMethods.h"
#include "JITStubCall.h"
#include "JSArray.h"
#include "JSFunction.h"
#include "ResultType.h"
#include "SamplingTool.h"

#ifndef NDEBUG
#include <stdio.h>
#endif

using namespace std;

namespace JSC {

void JIT::compileOpCallInitializeCallFrame()
{
    // regT0 holds callee, regT1 holds argCount
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_scopeChain)), regT3); // scopeChain
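    // Assumption (not stated in this file): on JSVALUE32_64, emitPutCellToCallFrameHeader
    // writes the CellTag alongside the payload, while emitPutIntToCallFrameHeader stores the
    // argument count as a raw int, so the Callee and ScopeChain header slots end up as
    // properly tagged cells.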
    emitPutIntToCallFrameHeader(regT1, RegisterFile::ArgumentCount);
    emitPutCellToCallFrameHeader(regT0, RegisterFile::Callee);
    emitPutCellToCallFrameHeader(regT3, RegisterFile::ScopeChain);
}

void JIT::emit_op_call_put_result(Instruction* instruction)
{
    int dst = instruction[1].u.operand;
    emitStore(dst, regT1, regT0);
}

void JIT::compileOpCallVarargs(Instruction* instruction)
{
    int callee = instruction[1].u.operand;
    int argCountRegister = instruction[2].u.operand;
    int registerOffset = instruction[3].u.operand;

    emitLoad(callee, regT1, regT0);
    emitLoadPayload(argCountRegister, regT2); // argCount
    addPtr(Imm32(registerOffset), regT2, regT3); // registerOffset

    emitJumpSlowCaseIfNotJSCell(callee, regT1);
    addSlowCase(emitJumpIfNotType(regT0, regT1, JSFunctionType));
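    // Note: the type check above uses regT1 as a scratch register (presumably to hold the
    // callee's Structure), so the CellTag that emitLoad put in regT1 is gone from here on.
    // That is harmless on this fast path - regT1 is reloaded with argCount below - but the
    // slow case has to restore the tag before it can reuse the callee as a JSValue
    // (see compileOpCallVarargsSlowCase).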

    // Speculatively roll the callframe, assuming argCount will match the arity.
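    // "Rolling" the frame here means: regT3 already holds argCount + registerOffset (in
    // registers), so it is scaled by sizeof(Register) and added to the current frame pointer
    // to form the new CallFrame. Its CallerFrame slot is written as a tagged cell pointing
    // back at the current frame, and then callFrameRegister is switched to the new frame.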
    mul32(TrustedImm32(sizeof(Register)), regT3, regT3);
    addPtr(callFrameRegister, regT3);
    store32(TrustedImm32(JSValue::CellTag), tagFor(RegisterFile::CallerFrame, regT3));
    storePtr(callFrameRegister, payloadFor(RegisterFile::CallerFrame, regT3));
    move(regT3, callFrameRegister);

    move(regT2, regT1); // argCount

    emitNakedCall(m_globalData->jitStubs->ctiVirtualCall());

    sampleCodeBlock(m_codeBlock);
}

void JIT::compileOpCallVarargsSlowCase(Instruction* instruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int callee = instruction[1].u.operand;

    linkSlowCaseIfNotJSCell(iter, callee);
    Jump notCell = jump();
    linkSlowCase(iter);
    move(TrustedImm32(JSValue::CellTag), regT1); // Need to restore cell tag in regT1 because it was clobbered.
    notCell.link(this);

    JITStubCall stubCall(this, cti_op_call_NotJSFunction);
    stubCall.addArgument(regT1, regT0);
    stubCall.addArgument(regT3);
    stubCall.addArgument(regT2);
    stubCall.call();

    sampleCodeBlock(m_codeBlock);
}

void JIT::emit_op_ret(Instruction* currentInstruction)
{
    emitOptimizationCheck(RetOptimizationCheck);

    unsigned dst = currentInstruction[1].u.operand;

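    // The return value travels back to the caller in regT1 (tag) / regT0 (payload); the
    // caller's emit_op_call_put_result above stores that pair into the destination register.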
    emitLoad(dst, regT1, regT0);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT2);
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);

    restoreReturnAddressBeforeReturn(regT2);
    ret();
}

void JIT::emit_op_ret_object_or_this(Instruction* currentInstruction)
{
    emitOptimizationCheck(RetOptimizationCheck);

    unsigned result = currentInstruction[1].u.operand;
    unsigned thisReg = currentInstruction[2].u.operand;

    emitLoad(result, regT1, regT0);
    Jump notJSCell = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));
    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    Jump notObject = emitJumpIfNotObject(regT2);

    emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT2);
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);

    restoreReturnAddressBeforeReturn(regT2);
    ret();

    notJSCell.link(this);
    notObject.link(this);
    emitLoad(thisReg, regT1, regT0);

    emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT2);
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);

    restoreReturnAddressBeforeReturn(regT2);
    ret();
}

void JIT::emitSlow_op_call(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase(currentInstruction, iter, m_callLinkInfoIndex++, op_call);
}

void JIT::emitSlow_op_call_eval(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase(currentInstruction, iter, m_callLinkInfoIndex++, op_call_eval);
}

void JIT::emitSlow_op_call_varargs(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallVarargsSlowCase(currentInstruction, iter);
}

void JIT::emitSlow_op_construct(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase(currentInstruction, iter, m_callLinkInfoIndex++, op_construct);
}

void JIT::emit_op_call(Instruction* currentInstruction)
{
    compileOpCall(op_call, currentInstruction, m_callLinkInfoIndex++);
}

void JIT::emit_op_call_eval(Instruction* currentInstruction)
{
    compileOpCall(op_call_eval, currentInstruction, m_callLinkInfoIndex++);
}

void JIT::emit_op_call_varargs(Instruction* currentInstruction)
{
    compileOpCallVarargs(currentInstruction);
}

void JIT::emit_op_construct(Instruction* currentInstruction)
{
    compileOpCall(op_construct, currentInstruction, m_callLinkInfoIndex++);
}

void JIT::compileOpCall(OpcodeID opcodeID, Instruction* instruction, unsigned callLinkInfoIndex)
{
    int callee = instruction[1].u.operand;
    int argCount = instruction[2].u.operand;
    int registerOffset = instruction[3].u.operand;

    Jump wasEval;
    if (opcodeID == op_call_eval) {
        JITStubCall stubCall(this, cti_op_call_eval);
        stubCall.addArgument(callee);
        stubCall.addArgument(JIT::Imm32(registerOffset));
        stubCall.addArgument(JIT::Imm32(argCount));
        stubCall.call();
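        // If the stub was able to perform the eval itself it leaves the result (a non-empty
        // value) in regT1:regT0 and we skip the call below; an EmptyValueTag in regT1 signals
        // that the stub declined (e.g. the callee was not a genuine eval), so fall through and
        // treat it as an ordinary call.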
        wasEval = branch32(NotEqual, regT1, TrustedImm32(JSValue::EmptyValueTag));
    }

    emitLoad(callee, regT1, regT0);

    DataLabelPtr addressOfLinkedFunctionCheck;

    BEGIN_UNINTERRUPTED_SEQUENCE(sequenceOpCall);

    Jump jumpToSlow = branchPtrWithPatch(NotEqual, regT0, addressOfLinkedFunctionCheck, TrustedImmPtr(0));

    END_UNINTERRUPTED_SEQUENCE(sequenceOpCall);

    addSlowCase(jumpToSlow);
    ASSERT_JIT_OFFSET(differenceBetween(addressOfLinkedFunctionCheck, jumpToSlow), patchOffsetOpCallCompareToJump);
    ASSERT(m_callStructureStubCompilationInfo.size() == callLinkInfoIndex);
    m_callStructureStubCompilationInfo.append(StructureStubCompilationInfo());
    m_callStructureStubCompilationInfo[callLinkInfoIndex].hotPathBegin = addressOfLinkedFunctionCheck;
    m_callStructureStubCompilationInfo[callLinkInfoIndex].isCall = opcodeID != op_construct;

    addSlowCase(branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag)));

    // The following is the fast case, only used when a callee can be linked.

    // Fast version of stack frame initialization, done directly relative to callFrameRegister (edi on x86).
    // Note that this does not set up RegisterFile::CodeBlock; that slot is set up in the callee.
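    // The frame setup below writes ArgumentCount as an Int32, links CallerFrame back to the
    // current frame, stores the callee as a full JSValue (regT1 still holds the CellTag here,
    // since the fast path checks the callee with a patchable pointer compare rather than a
    // type check that would clobber it), and tags the scope chain as a cell before the frame
    // pointer is bumped by registerOffset.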
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_scopeChain)), regT2);

    store32(TrustedImm32(JSValue::Int32Tag), tagFor(registerOffset + RegisterFile::ArgumentCount));
    store32(Imm32(argCount), payloadFor(registerOffset + RegisterFile::ArgumentCount));
    storePtr(callFrameRegister, payloadFor(RegisterFile::CallerFrame + registerOffset, callFrameRegister));
    emitStore(registerOffset + RegisterFile::Callee, regT1, regT0);
    store32(TrustedImm32(JSValue::CellTag), tagFor(registerOffset + RegisterFile::ScopeChain));
    store32(regT2, payloadFor(registerOffset + RegisterFile::ScopeChain));
    addPtr(Imm32(registerOffset * sizeof(Register)), callFrameRegister);

    // Call to the callee
    m_callStructureStubCompilationInfo[callLinkInfoIndex].hotPathOther = emitNakedCall();

    if (opcodeID == op_call_eval)
        wasEval.link(this);

    sampleCodeBlock(m_codeBlock);
}

void JIT::compileOpCallSlowCase(Instruction* instruction, Vector<SlowCaseEntry>::iterator& iter, unsigned callLinkInfoIndex, OpcodeID opcodeID)
{
    int callee = instruction[1].u.operand;
    int argCount = instruction[2].u.operand;
    int registerOffset = instruction[3].u.operand;

    linkSlowCase(iter);
    linkSlowCase(iter);

    // Fast check for JS function.
    Jump callLinkFailNotObject = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));
    Jump callLinkFailNotJSFunction = emitJumpIfNotType(regT0, regT1, JSFunctionType);
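    // As on the varargs path, the type check just above clobbers the CellTag in regT1 (regT1
    // is its scratch register), which is why the host-function path below has to put the tag
    // back before calling out.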

    // Speculatively roll the callframe, assuming argCount will match the arity.
    store32(TrustedImm32(JSValue::CellTag), tagFor(RegisterFile::CallerFrame + registerOffset, callFrameRegister));
    storePtr(callFrameRegister, payloadFor(RegisterFile::CallerFrame + registerOffset, callFrameRegister));
    addPtr(Imm32(registerOffset * static_cast<int>(sizeof(Register))), callFrameRegister);
    move(Imm32(argCount), regT1);

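    // The link thunk (ctiVirtualCallLink / ctiVirtualConstructLink) is expected to check the
    // arity, compile the callee if necessary, and link this call site so that subsequent calls
    // take the patched fast path in compileOpCall.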
    m_callStructureStubCompilationInfo[callLinkInfoIndex].callReturnLocation = emitNakedCall(opcodeID == op_construct ? m_globalData->jitStubs->ctiVirtualConstructLink() : m_globalData->jitStubs->ctiVirtualCallLink());

    // Done! - return back to the hot path.
    ASSERT(OPCODE_LENGTH(op_call) == OPCODE_LENGTH(op_call_eval));
    ASSERT(OPCODE_LENGTH(op_call) == OPCODE_LENGTH(op_construct));
    emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_call));

    // This handles host functions
    callLinkFailNotJSFunction.link(this);
    move(TrustedImm32(JSValue::CellTag), regT1); // Restore cell tag since it was clobbered.
    callLinkFailNotObject.link(this);

    JITStubCall stubCall(this, opcodeID == op_construct ? cti_op_construct_NotJSConstruct : cti_op_call_NotJSFunction);
    stubCall.addArgument(callee);
    stubCall.addArgument(JIT::Imm32(registerOffset));
    stubCall.addArgument(JIT::Imm32(argCount));
    stubCall.call();

    sampleCodeBlock(m_codeBlock);
}

} // namespace JSC

#endif // USE(JSVALUE32_64)
#endif // ENABLE(JIT)