Bug 56270 - The JIT 'friend's many classes in JSC; start unwinding this.
[WebKit-https.git] / Source / JavaScriptCore / jit / JITCall32_64.cpp
1 /*
2  * Copyright (C) 2008 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #include "config.h"
27
28 #if ENABLE(JIT)
29 #if USE(JSVALUE32_64)
30 #include "JIT.h"
31
32 #include "CodeBlock.h"
33 #include "Interpreter.h"
34 #include "JITInlineMethods.h"
35 #include "JITStubCall.h"
36 #include "JSArray.h"
37 #include "JSFunction.h"
38 #include "ResultType.h"
39 #include "SamplingTool.h"
40
41 #ifndef NDEBUG
42 #include <stdio.h>
43 #endif
44
45 using namespace std;
46
47 namespace JSC {
48
void JIT::compileOpCallInitializeCallFrame()
{
    // Fills in the ArgumentCount, Callee and ScopeChain slots of the current
    // call frame. On entry: regT0 holds the callee (a JSFunction*), regT1
    // holds argCount. The CodeBlock slot is not set here.
    store32(regT1, Address(callFrameRegister, RegisterFile::ArgumentCount * static_cast<int>(sizeof(Register))));
    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_scopeChain)), regT3); // callee's scope chain
    storePtr(regT0, Address(callFrameRegister, RegisterFile::Callee * static_cast<int>(sizeof(Register)))); // callee
    storePtr(regT3, Address(callFrameRegister, RegisterFile::ScopeChain * static_cast<int>(sizeof(Register)))); // scopeChain
}
57
58 void JIT::emit_op_call_put_result(Instruction* instruction)
59 {
60     int dst = instruction[1].u.operand;
61     emitStore(dst, regT1, regT0);
62 }
63
void JIT::compileOpCallVarargs(Instruction* instruction)
{
    // Fast path for op_call_varargs: unlike op_call, the argument count is a
    // runtime value loaded from a virtual register, not an instruction operand.
    int callee = instruction[1].u.operand;
    int argCountRegister = instruction[2].u.operand;
    int registerOffset = instruction[3].u.operand;

    emitLoad(callee, regT1, regT0); // callee: tag in regT1, payload in regT0
    emitLoadPayload(argCountRegister, regT2); // argCount
    addPtr(Imm32(registerOffset), regT2, regT3); // registerOffset (plus argCount), in Register units

    // Take the slow case if the callee is not a cell, or is a cell but not a
    // JSFunction (checked via the vptr).
    emitJumpSlowCaseIfNotJSCell(callee, regT1);
    addSlowCase(branchPtr(NotEqual, Address(regT0), ImmPtr(m_globalData->jsFunctionVPtr)));

    // Speculatively roll the callframe, assuming argCount will match the arity.
    mul32(Imm32(sizeof(Register)), regT3, regT3); // scale the offset from Register units to bytes
    addPtr(callFrameRegister, regT3); // regT3 now points at the new call frame
    storePtr(callFrameRegister, Address(regT3, RegisterFile::CallerFrame * static_cast<int>(sizeof(Register))));
    move(regT3, callFrameRegister);

    move(regT2, regT1); // argCount, where the virtual-call thunk expects it

    emitNakedCall(m_globalData->jitStubs->ctiVirtualCall());

    sampleCodeBlock(m_codeBlock);
}
89
void JIT::compileOpCallVarargsSlowCase(Instruction* instruction, Vector<SlowCaseEntry>::iterator& iter)
{
    // Slow path for op_call_varargs: reached when the callee was not a JSCell
    // or not a JSFunction. Registers still hold the values loaded by the fast
    // path (regT1:regT0 = callee, regT3 = register offset, regT2 = argCount).
    int callee = instruction[1].u.operand;

    linkSlowCaseIfNotJSCell(iter, callee);
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_call_NotJSFunction);
    stubCall.addArgument(regT1, regT0); // callee value
    stubCall.addArgument(regT3); // registerOffset
    stubCall.addArgument(regT2); // argCount
    stubCall.call();

    sampleCodeBlock(m_codeBlock);
}
105
void JIT::emit_op_ret(Instruction* currentInstruction)
{
    // Return from a JS function: load the result (it must be loaded before the
    // frame is unrolled), restore the caller's frame and return address, then
    // return. The result is left in regT1 (tag) / regT0 (payload).
    unsigned dst = currentInstruction[1].u.operand;

    emitLoad(dst, regT1, regT0);
    emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT2);
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);

    restoreReturnAddressBeforeReturn(regT2);
    ret();
}
117
void JIT::emit_op_ret_object_or_this(Instruction* currentInstruction)
{
    // Constructor return: if the result value is an object, return it;
    // otherwise return 'this' instead.
    unsigned result = currentInstruction[1].u.operand;
    unsigned thisReg = currentInstruction[2].u.operand;

    // Check the result is a cell, and that the cell is of ObjectType.
    emitLoad(result, regT1, regT0);
    Jump notJSCell = branch32(NotEqual, regT1, Imm32(JSValue::CellTag));
    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    Jump notObject = branch8(NotEqual, Address(regT2, Structure::typeInfoTypeOffset()), Imm32(ObjectType));

    // Epilogue: restore the caller's frame and return address, then return.
    emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT2);
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);

    restoreReturnAddressBeforeReturn(regT2);
    ret();

    // Result was not an object — load 'this' as the return value instead.
    notJSCell.link(this);
    notObject.link(this);
    emitLoad(thisReg, regT1, regT0);

    // Same epilogue sequence as the object-result path above.
    emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT2);
    emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);

    restoreReturnAddressBeforeReturn(regT2);
    ret();
}
144
145 void JIT::emitSlow_op_call(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
146 {
147     compileOpCallSlowCase(currentInstruction, iter, m_callLinkInfoIndex++, op_call);
148 }
149
150 void JIT::emitSlow_op_call_eval(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
151 {
152     compileOpCallSlowCase(currentInstruction, iter, m_callLinkInfoIndex++, op_call_eval);
153 }
154
void JIT::emitSlow_op_call_varargs(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    // Slow path for op_call_varargs; varargs calls are never linked, so no
    // call link info slot is consumed.
    compileOpCallVarargsSlowCase(currentInstruction, iter);
}
159
160 void JIT::emitSlow_op_construct(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
161 {
162     compileOpCallSlowCase(currentInstruction, iter, m_callLinkInfoIndex++, op_construct);
163 }
164
165 void JIT::emit_op_call(Instruction* currentInstruction)
166 {
167     compileOpCall(op_call, currentInstruction, m_callLinkInfoIndex++);
168 }
169
170 void JIT::emit_op_call_eval(Instruction* currentInstruction)
171 {
172     compileOpCall(op_call_eval, currentInstruction, m_callLinkInfoIndex++);
173 }
174
void JIT::emit_op_call_varargs(Instruction* currentInstruction)
{
    // Fast path for op_call_varargs; varargs calls are never linked, so no
    // call link info slot is consumed.
    compileOpCallVarargs(currentInstruction);
}
179
180 void JIT::emit_op_construct(Instruction* currentInstruction)
181 {
182     compileOpCall(op_construct, currentInstruction, m_callLinkInfoIndex++);
183 }
184
185 #if !ENABLE(JIT_OPTIMIZE_CALL)
186
187 /* ------------------------------ BEGIN: !ENABLE(JIT_OPTIMIZE_CALL) ------------------------------ */
188
void JIT::compileOpCall(OpcodeID opcodeID, Instruction* instruction, unsigned)
{
    // Unoptimized call path (JIT_OPTIMIZE_CALL disabled): every call goes
    // through the ctiVirtualCall/ctiVirtualConstruct thunks and no call
    // linking is performed, so the call link info index parameter is unused.
    int callee = instruction[1].u.operand;
    int argCount = instruction[2].u.operand;
    int registerOffset = instruction[3].u.operand;

    Jump wasEval;
    if (opcodeID == op_call_eval) {
        // Attempt the eval via a stub call first; if the stub's result tag is
        // the empty value tag we fall through and perform a regular call,
        // otherwise the eval has already produced the result.
        JITStubCall stubCall(this, cti_op_call_eval);
        stubCall.addArgument(callee);
        stubCall.addArgument(JIT::Imm32(registerOffset));
        stubCall.addArgument(JIT::Imm32(argCount));
        stubCall.call();
        wasEval = branch32(NotEqual, regT1, Imm32(JSValue::EmptyValueTag));
    }

    emitLoad(callee, regT1, regT0); // callee: tag in regT1, payload in regT0

    // Take the slow case if the callee is not a cell, or is a cell but not a
    // JSFunction (checked via the vptr).
    emitJumpSlowCaseIfNotJSCell(callee, regT1);
    addSlowCase(branchPtr(NotEqual, Address(regT0), ImmPtr(m_globalData->jsFunctionVPtr)));

    // Speculatively roll the callframe, assuming argCount will match the arity.
    storePtr(callFrameRegister, Address(callFrameRegister, (RegisterFile::CallerFrame + registerOffset) * static_cast<int>(sizeof(Register))));
    addPtr(Imm32(registerOffset * static_cast<int>(sizeof(Register))), callFrameRegister);
    move(Imm32(argCount), regT1); // argCount, where the virtual-call thunk expects it

    emitNakedCall(opcodeID == op_construct ? m_globalData->jitStubs->ctiVirtualConstruct() : m_globalData->jitStubs->ctiVirtualCall());

    if (opcodeID == op_call_eval)
        wasEval.link(this);

    sampleCodeBlock(m_codeBlock);
}
222
void JIT::compileOpCallSlowCase(Instruction* instruction, Vector<SlowCaseEntry>::iterator& iter, unsigned, OpcodeID opcodeID)
{
    // Unoptimized slow path: the callee was not a JSCell or not a JSFunction.
    // Hand the whole call off to the NotJSFunction/NotJSConstruct stub.
    int callee = instruction[1].u.operand;
    int argCount = instruction[2].u.operand;
    int registerOffset = instruction[3].u.operand;

    linkSlowCaseIfNotJSCell(iter, callee);
    linkSlowCase(iter);

    JITStubCall stubCall(this, opcodeID == op_construct ? cti_op_construct_NotJSConstruct : cti_op_call_NotJSFunction);
    stubCall.addArgument(callee);
    stubCall.addArgument(JIT::Imm32(registerOffset));
    stubCall.addArgument(JIT::Imm32(argCount));
    stubCall.call();

    sampleCodeBlock(m_codeBlock);
}
240
241 #else // !ENABLE(JIT_OPTIMIZE_CALL)
242
243 /* ------------------------------ BEGIN: ENABLE(JIT_OPTIMIZE_CALL) ------------------------------ */
244
245 void JIT::compileOpCall(OpcodeID opcodeID, Instruction* instruction, unsigned callLinkInfoIndex)
246 {
247     int callee = instruction[1].u.operand;
248     int argCount = instruction[2].u.operand;
249     int registerOffset = instruction[3].u.operand;
250
251     Jump wasEval;
252     if (opcodeID == op_call_eval) {
253         JITStubCall stubCall(this, cti_op_call_eval);
254         stubCall.addArgument(callee);
255         stubCall.addArgument(JIT::Imm32(registerOffset));
256         stubCall.addArgument(JIT::Imm32(argCount));
257         stubCall.call();
258         wasEval = branch32(NotEqual, regT1, Imm32(JSValue::EmptyValueTag));
259     }
260
261     emitLoad(callee, regT1, regT0);
262
263     DataLabelPtr addressOfLinkedFunctionCheck;
264
265     BEGIN_UNINTERRUPTED_SEQUENCE(sequenceOpCall);
266
267     Jump jumpToSlow = branchPtrWithPatch(NotEqual, regT0, addressOfLinkedFunctionCheck, ImmPtr(0));
268
269     END_UNINTERRUPTED_SEQUENCE(sequenceOpCall);
270
271     addSlowCase(jumpToSlow);
272     ASSERT_JIT_OFFSET(differenceBetween(addressOfLinkedFunctionCheck, jumpToSlow), patchOffsetOpCallCompareToJump);
273     m_callStructureStubCompilationInfo[callLinkInfoIndex].hotPathBegin = addressOfLinkedFunctionCheck;
274
275     addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::CellTag)));
276
277     // The following is the fast case, only used whan a callee can be linked.
278
279     // Fast version of stack frame initialization, directly relative to edi.
280     // Note that this omits to set up RegisterFile::CodeBlock, which is set in the callee
281     loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_scopeChain)), regT2);
282
283     store32(Imm32(argCount), Address(callFrameRegister, (registerOffset + RegisterFile::ArgumentCount) * static_cast<int>(sizeof(Register))));
284     storePtr(callFrameRegister, Address(callFrameRegister, (registerOffset + RegisterFile::CallerFrame) * static_cast<int>(sizeof(Register))));
285     emitStore(registerOffset + RegisterFile::Callee, regT1, regT0);
286     storePtr(regT2, Address(callFrameRegister, (registerOffset + RegisterFile::ScopeChain) * static_cast<int>(sizeof(Register))));
287     addPtr(Imm32(registerOffset * sizeof(Register)), callFrameRegister);
288
289     // Call to the callee
290     m_callStructureStubCompilationInfo[callLinkInfoIndex].hotPathOther = emitNakedCall();
291     
292     if (opcodeID == op_call_eval)
293         wasEval.link(this);
294
295     sampleCodeBlock(m_codeBlock);
296 }
297
void JIT::compileOpCallSlowCase(Instruction* instruction, Vector<SlowCaseEntry>::iterator& iter, unsigned callLinkInfoIndex, OpcodeID opcodeID)
{
    // Optimized slow path: taken when the patchable callee compare failed or
    // the callee was not a cell. JSFunction callees go through the linking
    // thunk (which may link this call site); anything else falls back to the
    // NotJSFunction/NotJSConstruct stub.
    int callee = instruction[1].u.operand;
    int argCount = instruction[2].u.operand;
    int registerOffset = instruction[3].u.operand;

    linkSlowCase(iter); // patched compare failed
    linkSlowCase(iter); // tag was not CellTag

    // Fast check for JS function.
    Jump callLinkFailNotObject = branch32(NotEqual, regT1, Imm32(JSValue::CellTag));
    Jump callLinkFailNotJSFunction = branchPtr(NotEqual, Address(regT0), ImmPtr(m_globalData->jsFunctionVPtr));

    // Speculatively roll the callframe, assuming argCount will match the arity.
    storePtr(callFrameRegister, Address(callFrameRegister, (RegisterFile::CallerFrame + registerOffset) * static_cast<int>(sizeof(Register))));
    addPtr(Imm32(registerOffset * static_cast<int>(sizeof(Register))), callFrameRegister);
    move(Imm32(argCount), regT1);

    m_callStructureStubCompilationInfo[callLinkInfoIndex].callReturnLocation = emitNakedCall(opcodeID == op_construct ? m_globalData->jitStubs->ctiVirtualConstructLink() : m_globalData->jitStubs->ctiVirtualCallLink());

    // Done! - return back to the hot path.
    ASSERT(OPCODE_LENGTH(op_call) == OPCODE_LENGTH(op_call_eval));
    ASSERT(OPCODE_LENGTH(op_call) == OPCODE_LENGTH(op_construct));
    emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_call));

    // This handles host functions
    callLinkFailNotObject.link(this);
    callLinkFailNotJSFunction.link(this);

    JITStubCall stubCall(this, opcodeID == op_construct ? cti_op_construct_NotJSConstruct : cti_op_call_NotJSFunction);
    stubCall.addArgument(callee);
    stubCall.addArgument(JIT::Imm32(registerOffset));
    stubCall.addArgument(JIT::Imm32(argCount));
    stubCall.call();

    sampleCodeBlock(m_codeBlock);
}
335
336 /* ------------------------------ END: !ENABLE / ENABLE(JIT_OPTIMIZE_CALL) ------------------------------ */
337
338 #endif // !ENABLE(JIT_OPTIMIZE_CALL)
339
340 } // namespace JSC
341
342 #endif // USE(JSVALUE32_64)
343 #endif // ENABLE(JIT)